diff --git a/CLOBBER b/CLOBBER
index 8aca6cea4018..c247f750ad88 100644
--- a/CLOBBER
+++ b/CLOBBER
@@ -23,4 +23,4 @@
 # don't change CLOBBER for WebIDL changes any more.
-Merge day clobber 2023-04-10
\ No newline at end of file
+Modified build files in third_party/libwebrtc - Bug 1822194 - Vendor libwebrtc from f72bc5f1e2
diff --git a/dom/media/webrtc/jsapi/RTCRtpReceiver.h b/dom/media/webrtc/jsapi/RTCRtpReceiver.h
index 6c180215b585..7a7a8634e7e5 100644
--- a/dom/media/webrtc/jsapi/RTCRtpReceiver.h
+++ b/dom/media/webrtc/jsapi/RTCRtpReceiver.h
@@ -11,7 +11,6 @@
 #include "mozilla/StateMirroring.h"
 #include "mozilla/Maybe.h"
 #include "js/RootingAPI.h"
-#include "libwebrtcglue/MediaConduitInterface.h"
 #include "libwebrtcglue/RtpRtcpConfig.h"
 #include "nsTArray.h"
 #include "mozilla/dom/RTCRtpCapabilitiesBinding.h"
diff --git a/dom/media/webrtc/jsapi/RTCRtpSender.h b/dom/media/webrtc/jsapi/RTCRtpSender.h
index 4c74de10e5f1..65342bb29bef 100644
--- a/dom/media/webrtc/jsapi/RTCRtpSender.h
+++ b/dom/media/webrtc/jsapi/RTCRtpSender.h
@@ -11,7 +11,6 @@
 #include "mozilla/StateMirroring.h"
 #include "mozilla/Maybe.h"
 #include "js/RootingAPI.h"
-#include "libwebrtcglue/MediaConduitInterface.h"
 #include "libwebrtcglue/RtpRtcpConfig.h"
 #include "nsTArray.h"
 #include "mozilla/dom/RTCStatsReportBinding.h"
diff --git a/dom/media/webrtc/jsapi/RTCRtpTransceiver.h b/dom/media/webrtc/jsapi/RTCRtpTransceiver.h
index 289b04c2a0c4..895711cff55d 100644
--- a/dom/media/webrtc/jsapi/RTCRtpTransceiver.h
+++ b/dom/media/webrtc/jsapi/RTCRtpTransceiver.h
@@ -5,7 +5,7 @@
 #define _TRANSCEIVERIMPL_H_
 #include <string>
-#include "libwebrtcglue/MediaConduitControl.h"
+#include "mozilla/StateMirroring.h"
 #include "mozilla/RefPtr.h"
 #include "nsCOMPtr.h"
 #include "nsISerialEventTarget.h"
diff --git a/dom/media/webrtc/libwebrtcglue/AudioConduit.cpp b/dom/media/webrtc/libwebrtcglue/AudioConduit.cpp
index edf38bdc55b6..49e3d79652bb 100644
--- a/dom/media/webrtc/libwebrtcglue/AudioConduit.cpp
+++ b/dom/media/webrtc/libwebrtcglue/AudioConduit.cpp
@@ -336,7 +336,9 @@ void WebrtcAudioConduit::OnControlConfigChange() {
   if (mSendStream && sendStreamReconfigureNeeded) {
     MOZ_ASSERT(!sendStreamRecreationNeeded);
-    mSendStream->Reconfigure(mSendStreamConfig);
+    // TODO: Pass a callback here, so we can react to RTCErrors thrown by
+    // libwebrtc.
+    mSendStream->Reconfigure(mSendStreamConfig, nullptr);
   }

   if (!mControl.mReceiving) {
diff --git a/dom/media/webrtc/libwebrtcglue/MediaConduitControl.h b/dom/media/webrtc/libwebrtcglue/MediaConduitControl.h
index 892b9f958e50..a860fab1467a 100644
--- a/dom/media/webrtc/libwebrtcglue/MediaConduitControl.h
+++ b/dom/media/webrtc/libwebrtcglue/MediaConduitControl.h
@@ -7,13 +7,22 @@
 #ifndef DOM_MEDIA_WEBRTC_LIBWEBRTCGLUE_MEDIACONDUITCONTROL_H_
 #define DOM_MEDIA_WEBRTC_LIBWEBRTCGLUE_MEDIACONDUITCONTROL_H_

-#include "jsapi/RTCDTMFSender.h"
-#include "MediaConduitInterface.h"
+#include "jsapi/RTCDTMFSender.h"  // For DtmfEvent
 #include "mozilla/StateMirroring.h"
 #include "RtpRtcpConfig.h"
+#include <vector>
+#include <string>
+#include "mozilla/Maybe.h"
+#include "CodecConfig.h"  // For Audio/VideoCodecConfig
+#include "api/rtp_parameters.h"  // For webrtc::RtpExtension
+#include "api/video_codecs/video_codec.h"  // For webrtc::VideoCodecMode

 namespace mozilla {

+using RtpExtList = std::vector<webrtc::RtpExtension>;
+using Ssrc = uint32_t;
+using Ssrcs = std::vector<uint32_t>;
+
 /**
  * These are the interfaces used to control the async conduits. Some parameters
  * are common, and some are tied to the conduit type.
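Note on the AudioConduit.cpp hunk above: it tracks an upstream API change where webrtc::AudioSendStream::Reconfigure() now also takes a webrtc::SetParametersCallback (a single-use callable invoked with a webrtc::RTCError), and the Gecko caller passes nullptr for now, as the TODO says. A minimal sketch of what wiring up that callback could later look like, assuming the caller only wants to inspect the error; the lambda body and any error propagation are illustrative and not part of this patch:

    // Hypothetical follow-up to the TODO in
    // WebrtcAudioConduit::OnControlConfigChange(); not part of Bug 1822194.
    // Reconfigure() takes a webrtc::SetParametersCallback, and a lambda
    // accepting webrtc::RTCError converts to it.
    mSendStream->Reconfigure(
        mSendStreamConfig, [](webrtc::RTCError aError) {
          if (!aError.ok()) {
            // How the failure would be surfaced (logging, rejecting a
            // pending setParameters promise, etc.) is an open question;
            // this branch is only a placeholder.
          }
        });
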
See diff --git a/dom/media/webrtc/libwebrtcglue/MediaConduitInterface.h b/dom/media/webrtc/libwebrtcglue/MediaConduitInterface.h index 2de439509746..e555a9c9823b 100644 --- a/dom/media/webrtc/libwebrtcglue/MediaConduitInterface.h +++ b/dom/media/webrtc/libwebrtcglue/MediaConduitInterface.h @@ -20,6 +20,7 @@ #include "mozilla/dom/RTCRtpSourcesBinding.h" #include "PerformanceRecorder.h" #include "transport/mediapacket.h" +#include "MediaConduitControl.h" // libwebrtc includes #include "api/audio/audio_frame.h" @@ -50,16 +51,10 @@ struct RTCRtpSourceEntry; enum class MediaSessionConduitLocalDirection : int { kSend, kRecv }; -class VideoConduitControlInterface; -class AudioConduitControlInterface; class VideoSessionConduit; class AudioSessionConduit; class WebrtcCallWrapper; -using RtpExtList = std::vector; -using Ssrc = uint32_t; -using Ssrcs = std::vector; - /** * 1. Abstract renderer for video data * 2. This class acts as abstract interface between the video-engine and diff --git a/dom/media/webrtc/libwebrtcglue/TaskQueueWrapper.h b/dom/media/webrtc/libwebrtcglue/TaskQueueWrapper.h index 8af40ab626a9..da11bfdf8b28 100644 --- a/dom/media/webrtc/libwebrtcglue/TaskQueueWrapper.h +++ b/dom/media/webrtc/libwebrtcglue/TaskQueueWrapper.h @@ -8,6 +8,7 @@ #include "api/task_queue/task_queue_factory.h" #include "mozilla/DataMutex.h" +#include "mozilla/RecursiveMutex.h" #include "mozilla/ProfilerRunnable.h" #include "mozilla/TaskQueue.h" #include "VideoUtils.h" diff --git a/dom/media/webrtc/third_party_build/elm_rebase.sh b/dom/media/webrtc/third_party_build/elm_rebase.sh index a575d74792d1..4d53373d35db 100644 --- a/dom/media/webrtc/third_party_build/elm_rebase.sh +++ b/dom/media/webrtc/third_party_build/elm_rebase.sh @@ -65,35 +65,35 @@ if [ -f $STATE_DIR/rebase_resume_state ]; then source $STATE_DIR/rebase_resume_state else -if [ "x" == "x$MOZ_TOP_FF" ]; then - MOZ_TOP_FF=`hg log -r . -T"{node|short}"` + if [ "x" == "x$MOZ_TOP_FF" ]; then + MOZ_TOP_FF=`hg log -r . -T"{node|short}"` - ERROR_HELP=$" + ERROR_HELP=$" The topmost commit to be rebased is not in the public phase. Should it be pushed to elm first? If this is intentional, please rerun the command and pass it in explicitly: MOZ_TOP_FF=$MOZ_TOP_FF bash $0 " - if [[ $(hg phase -r .) != *public ]]; then - echo "$ERROR_HELP" - exit 1 - fi - ERROR_HELP="" + if [[ $(hg phase -r .) != *public ]]; then + echo "$ERROR_HELP" + exit 1 + fi + ERROR_HELP="" - ERROR_HELP=$" + ERROR_HELP=$" The topmost commit to be rebased is public but has descendants. If those descendants should not be rebased, please rerun the command and pass the commit in explicitly: MOZ_TOP_FF=$MOZ_TOP_FF bash $0 - " - if [ "x" != "x$(hg log -r 'descendants(.) and !.' -T'{node|short}')" ]; then - echo "$ERROR_HELP" - exit 1 +" + if [ "x" != "x$(hg log -r 'descendants(.) and !.' -T'{node|short}')" ]; then + echo "$ERROR_HELP" + exit 1 + fi + ERROR_HELP="" fi - ERROR_HELP="" -fi -ERROR_HELP=$" + ERROR_HELP=$" An error here is likely because no revision for central is found. One possible reason for this is this is your first rebase operation. To 'bootstrap' the first rebase operation, please find the @@ -107,54 +107,54 @@ could be the sha of the .arcconfig commit if it is the bottom commit. 
That command looks like: MOZ_BOTTOM_FF={base-sha} MOZ_CURRENT_CENTRAL={central-sha} bash $0 " -if [ "x" == "x$MOZ_CURRENT_CENTRAL" ]; then - MOZ_CURRENT_CENTRAL=`hg log -r central -T"{node|short}"` -fi -if [ "x" == "x$MOZ_BOTTOM_FF" ]; then - MOZ_BOTTOM_FF=`hg log -r $MOZ_CURRENT_CENTRAL~-1 -T"{node|short}"` -fi -ERROR_HELP="" + if [ "x" == "x$MOZ_CURRENT_CENTRAL" ]; then + MOZ_CURRENT_CENTRAL=`hg log -r central -T"{node|short}"` + fi + if [ "x" == "x$MOZ_BOTTOM_FF" ]; then + MOZ_BOTTOM_FF=`hg log -r $MOZ_CURRENT_CENTRAL~-1 -T"{node|short}"` + fi + ERROR_HELP="" -if [ "x" == "x$MOZ_BOTTOM_FF" ]; then - echo "No value found for the bottom commit of the fast-forward commit stack." - exit 1 -fi + if [ "x" == "x$MOZ_BOTTOM_FF" ]; then + echo "No value found for the bottom commit of the fast-forward commit stack." + exit 1 + fi -# After this point: -# * eE: All commands should succeed. -# * u: All variables should be defined before use. -# * o pipefail: All stages of all pipes should succeed. -set -eEuo pipefail + # After this point: + # * eE: All commands should succeed. + # * u: All variables should be defined before use. + # * o pipefail: All stages of all pipes should succeed. + set -eEuo pipefail -hg pull central -MOZ_NEW_CENTRAL=`hg log -r central -T"{node|short}"` + hg pull central + MOZ_NEW_CENTRAL=`hg log -r central -T"{node|short}"` -echo "moz-central in elm is currently $MOZ_CURRENT_CENTRAL" -echo "bottom of fast-foward tree is $MOZ_BOTTOM_FF" -echo "top of fast-forward tree (webrtc-fast-forward) is $MOZ_TOP_FF" -echo "new target for elm rebase $MOZ_NEW_CENTRAL (tip of moz-central)" + echo "moz-central in elm is currently $MOZ_CURRENT_CENTRAL" + echo "bottom of fast-foward tree is $MOZ_BOTTOM_FF" + echo "top of fast-forward tree (webrtc-fast-forward) is $MOZ_TOP_FF" + echo "new target for elm rebase $MOZ_NEW_CENTRAL (tip of moz-central)" -hg log -T '{rev}:{node|short} {desc|firstline}\n' \ - -r $MOZ_BOTTOM_FF::$MOZ_TOP_FF > $COMMIT_LIST_FILE + hg log -T '{rev}:{node|short} {desc|firstline}\n' \ + -r $MOZ_BOTTOM_FF::$MOZ_TOP_FF > $COMMIT_LIST_FILE -# move all FLOAT lines to end of file, and delete the "empty" tilde line -# line at the beginning -ed -s $COMMIT_LIST_FILE <<< $'g/- FLOAT -/m$\ng/^~$/d\nw\nq' + # move all FLOAT lines to end of file, and delete the "empty" tilde line + # line at the beginning + ed -s $COMMIT_LIST_FILE <<< $'g/- FLOAT -/m$\ng/^~$/d\nw\nq' -MOZ_BOOKMARK=`date "+webrtc-fast-forward-%Y-%m-%d--%H-%M"` -hg bookmark -r $MOZ_TOP_FF $MOZ_BOOKMARK + MOZ_BOOKMARK=`date "+webrtc-fast-forward-%Y-%m-%d--%H-%M"` + hg bookmark -r elm $MOZ_BOOKMARK -hg update $MOZ_NEW_CENTRAL + hg update $MOZ_NEW_CENTRAL -# pre-work is complete, let's write out a temporary config file that allows -# us to resume -echo $"export MOZ_CURRENT_CENTRAL=$MOZ_CURRENT_CENTRAL + # pre-work is complete, let's write out a temporary config file that allows + # us to resume + echo $"export MOZ_CURRENT_CENTRAL=$MOZ_CURRENT_CENTRAL export MOZ_BOTTOM_FF=$MOZ_BOTTOM_FF export MOZ_TOP_FF=$MOZ_TOP_FF export MOZ_NEW_CENTRAL=$MOZ_NEW_CENTRAL export MOZ_BOOKMARK=$MOZ_BOOKMARK " > $STATE_DIR/rebase_resume_state -fi +fi # if [ -f $STATE_DIR/rebase_resume_state ]; then ; else # grab all commits COMMITS=`cat $COMMIT_LIST_FILE | awk '{print $1;}'` diff --git a/dom/media/webrtc/third_party_build/example_config_env b/dom/media/webrtc/third_party_build/example_config_env index 2e2fde348cef..eb2fa832e300 100644 --- a/dom/media/webrtc/third_party_build/example_config_env +++ 
b/dom/media/webrtc/third_party_build/example_config_env @@ -9,43 +9,43 @@ export MOZ_LIBWEBRTC_SRC="{path-to}/moz-libwebrtc" # the commit summary as each upstream commit is vendored into the # mercurial repository. The bug used for the v106 fast-forward was # 1800920. -export MOZ_FASTFORWARD_BUG="1817024" +export MOZ_FASTFORWARD_BUG="1822194" # MOZ_NEXT_LIBWEBRTC_MILESTONE and MOZ_NEXT_FIREFOX_REL_TARGET are # not used during fast-forward processing, but facilitate generating this # example config. To generate an example config for the next update, run # bash dom/media/webrtc/third_party_build/update_example_config_env.sh -export MOZ_NEXT_LIBWEBRTC_MILESTONE=109 -export MOZ_NEXT_FIREFOX_REL_TARGET=113 +export MOZ_NEXT_LIBWEBRTC_MILESTONE=110 +export MOZ_NEXT_FIREFOX_REL_TARGET=114 # The branch name for the most recently completed fast-forward version. # The convention is to include which version of Chromium and the target -# Firefox release in the branch name. We landed the v108 fast-forward in -# Firefox 112. This branch name is used to prep the github repo for the +# Firefox release in the branch name. We landed the v109 fast-forward in +# Firefox 113. This branch name is used to prep the github repo for the # next fast-forward by grabbing all the Mozilla specific commits in the # prior branch and restacking them at the same base commit ready to # rebase onto the next upstream commit. -export MOZ_PRIOR_LIBWEBRTC_BRANCH="moz-mods-chr108-for-rel112" +export MOZ_PRIOR_LIBWEBRTC_BRANCH="moz-mods-chr109-for-rel113" # For Chromium release branches, see: # https://chromiumdash.appspot.com/branches -# Chromium's v108 release branch was 5359. This is used to pre-stack +# Chromium's v109 release branch was 5414. This is used to pre-stack # the previous release branch's commits onto the appropriate base commit # (the first common commit between trunk and the release branch). -export MOZ_PRIOR_UPSTREAM_BRANCH_HEAD_NUM="5359" +export MOZ_PRIOR_UPSTREAM_BRANCH_HEAD_NUM="5414" -# New target release branch for v109 is branch-heads/5414. This is used +# New target release branch for v110 is branch-heads/5481. This is used # to calculate the next upstream commit. -export MOZ_TARGET_UPSTREAM_BRANCH_HEAD="branch-heads/5414" +export MOZ_TARGET_UPSTREAM_BRANCH_HEAD="branch-heads/5481" # For local development 'mozpatches' is fine for a branch name, but when # pushing the patch stack to github, it should be named something like -# 'moz-mods-chr109-for-rel113'. +# 'moz-mods-chr110-for-rel114'. export MOZ_LIBWEBRTC_BRANCH="mozpatches" # After elm has been merged to mozilla-central, the patch stack in # moz-libwebrtc should be pushed to github. The script # push_official_branch.sh uses this branch name when pushing to the # public repo. 
-export MOZ_LIBWEBRTC_OFFICIAL_BRANCH="moz-mods-chr109-for-rel113" +export MOZ_LIBWEBRTC_OFFICIAL_BRANCH="moz-mods-chr110-for-rel114" diff --git a/dom/media/webrtc/third_party_build/gn-configs/webrtc.json b/dom/media/webrtc/third_party_build/gn-configs/webrtc.json index 2a97c8974b9d..bfefefbbd1f6 100644 --- a/dom/media/webrtc/third_party_build/gn-configs/webrtc.json +++ b/dom/media/webrtc/third_party_build/gn-configs/webrtc.json @@ -16,7 +16,8 @@ ], "write_mozbuild_variables": { "INCLUDE_TK_CFLAGS_DIRS": [ - "third_party/libwebrtc/modules/desktop_capture/desktop_capture_gn" + "third_party/libwebrtc/modules/desktop_capture/desktop_capture_gn", + "third_party/libwebrtc/modules/portal/portal_gn" ] }, "non_unified_sources": [ diff --git a/dom/media/webrtc/third_party_build/restore_elm_arcconfig.py b/dom/media/webrtc/third_party_build/restore_elm_arcconfig.py index 174a817ff423..00c0bd7309b0 100644 --- a/dom/media/webrtc/third_party_build/restore_elm_arcconfig.py +++ b/dom/media/webrtc/third_party_build/restore_elm_arcconfig.py @@ -17,7 +17,7 @@ ret = run( "hg", "import", "-m", - "Bug 1729988 - FLOAT REPO-elm - update .arcconfig repo callsign r=bgrins", + "Bug 1729988 - FLOAT - REPO-elm - update .arcconfig repo callsign r=bgrins", "dom/media/webrtc/third_party_build/elm_arcconfig.patch", ] ).returncode diff --git a/dom/media/webrtc/transportbridge/MediaPipeline.cpp b/dom/media/webrtc/transportbridge/MediaPipeline.cpp index e8ffa514717d..4a8c126b87af 100644 --- a/dom/media/webrtc/transportbridge/MediaPipeline.cpp +++ b/dom/media/webrtc/transportbridge/MediaPipeline.cpp @@ -47,6 +47,7 @@ #include "jsapi/PeerConnectionImpl.h" #include "Tracing.h" #include "libwebrtcglue/WebrtcImageBuffer.h" +#include "libwebrtcglue/MediaConduitInterface.h" #include "common_video/include/video_frame_buffer.h" #include "modules/rtp_rtcp/include/rtp_rtcp.h" #include "modules/rtp_rtcp/include/rtp_header_extension_map.h" diff --git a/dom/media/webrtc/transportbridge/MediaPipeline.h b/dom/media/webrtc/transportbridge/MediaPipeline.h index 57131036e2e9..f4053659b227 100644 --- a/dom/media/webrtc/transportbridge/MediaPipeline.h +++ b/dom/media/webrtc/transportbridge/MediaPipeline.h @@ -13,7 +13,7 @@ #include "transport/sigslot.h" #include "transport/transportlayer.h" // For TransportLayer::State -#include "libwebrtcglue/MediaConduitInterface.h" +#include "libwebrtcglue/MediaConduitControl.h" #include "mozilla/ReentrantMonitor.h" #include "mozilla/Atomics.h" #include "mozilla/StateMirroring.h" @@ -25,6 +25,7 @@ #include "MediaSegment.h" #include "PrincipalChangeObserver.h" #include "jsapi/PacketDumper.h" +#include "PerformanceRecorder.h" #include "modules/rtp_rtcp/include/rtp_header_extension_map.h" @@ -43,10 +44,14 @@ class PeerIdentity; class ProcessedMediaTrack; class SourceMediaTrack; class VideoFrameConverter; +class MediaSessionConduit; +class AudioSessionConduit; +class VideoSessionConduit; namespace dom { class MediaStreamTrack; struct RTCRTPContributingSourceStats; +class RTCStatsTimestampMaker; } // namespace dom struct MediaPipelineReceiveControlInterface { diff --git a/media/webrtc/signaling/gtest/MockCall.cpp b/media/webrtc/signaling/gtest/MockCall.cpp index 88439a0d1442..11c4dacb7a65 100644 --- a/media/webrtc/signaling/gtest/MockCall.cpp +++ b/media/webrtc/signaling/gtest/MockCall.cpp @@ -10,7 +10,8 @@ const webrtc::AudioSendStream::Config& MockAudioSendStream::GetConfig() const { return *mCallWrapper->GetMockCall()->mAudioSendConfig; } -void MockAudioSendStream::Reconfigure(const Config& config) { +void 
MockAudioSendStream::Reconfigure(const Config& config, + webrtc::SetParametersCallback callback) { mCallWrapper->GetMockCall()->mAudioSendConfig = mozilla::Some(config); } @@ -45,6 +46,11 @@ void MockVideoSendStream::ReconfigureVideoEncoder( mozilla::Some(config.Copy()); } +void MockVideoSendStream::ReconfigureVideoEncoder( + webrtc::VideoEncoderConfig config, webrtc::SetParametersCallback callback) { + ReconfigureVideoEncoder(std::move(config)); +} + webrtc::RtpHeaderExtensionMap MockVideoReceiveStream::GetRtpExtensionMap() const { return webrtc::RtpHeaderExtensionMap(); diff --git a/media/webrtc/signaling/gtest/MockCall.h b/media/webrtc/signaling/gtest/MockCall.h index c0e3c867e62e..c9a489ee23a6 100644 --- a/media/webrtc/signaling/gtest/MockCall.h +++ b/media/webrtc/signaling/gtest/MockCall.h @@ -26,7 +26,8 @@ class MockAudioSendStream : public webrtc::AudioSendStream { const webrtc::AudioSendStream::Config& GetConfig() const override; - void Reconfigure(const Config& config) override; + void Reconfigure(const Config& config, + webrtc::SetParametersCallback callback) override; void Start() override {} @@ -130,10 +131,12 @@ class MockVideoSendStream : public webrtc::VideoSendStream { void ReconfigureVideoEncoder(webrtc::VideoEncoderConfig config) override; + void ReconfigureVideoEncoder(webrtc::VideoEncoderConfig config, + webrtc::SetParametersCallback callback) override; + Stats GetStats() override { return mStats; } - void UpdateActiveSimulcastLayers( - const std::vector active_layers) override {} + void StartPerRtpStream(const std::vector active_layers) override {} void AddAdaptationResource( rtc::scoped_refptr resource) override {} diff --git a/media/webrtc/signaling/gtest/mediapipeline_unittest.cpp b/media/webrtc/signaling/gtest/mediapipeline_unittest.cpp index 8cdba8d8f299..77e09dfd40f1 100644 --- a/media/webrtc/signaling/gtest/mediapipeline_unittest.cpp +++ b/media/webrtc/signaling/gtest/mediapipeline_unittest.cpp @@ -18,6 +18,7 @@ #include "mozilla/Mutex.h" #include "mozilla/RefPtr.h" #include "mozilla/SpinEventLoopUntil.h" +#include "MediaConduitInterface.h" #include "MediaPipeline.h" #include "MediaPipelineFilter.h" #include "MediaTrackGraph.h" diff --git a/third_party/libwebrtc/AUTHORS b/third_party/libwebrtc/AUTHORS index 7506cba09e37..bd7ab248c283 100644 --- a/third_party/libwebrtc/AUTHORS +++ b/third_party/libwebrtc/AUTHORS @@ -68,6 +68,7 @@ Jose Antonio Olivera Ortega Keiichi Enomoto Kiran Thind Korniltsev Anatoly +Kyutae Lee Lennart Grahl Luke Weber Maksim Khobat @@ -79,6 +80,7 @@ Maksim Sisov Maxim Pavlov Maxim Potapov Michael Iedema +MichaƂ Zarach Michel Promonet Miguel Paris Mike Gilbert @@ -106,6 +108,7 @@ Sarah Thompson Satender Saroha Saul Kravitz Sergio Garcia Murillo +Shaofan Qi Shuhai Peng Silviu Caragea Stefan Gula @@ -137,6 +140,7 @@ Pengfei Han Agora IO <*@agora.io> ARM Holdings <*@arm.com> BroadSoft Inc. <*@broadsoft.com> +Canonical Ltd <*@canonical.com> CoSMo Software Consulting, Pte Ltd <*@cosmosoftware.io> Facebook Inc. <*@fb.com> Google Inc. 
<*@google.com> diff --git a/third_party/libwebrtc/BUILD.gn b/third_party/libwebrtc/BUILD.gn index 52cb61211ad2..ad65320c7123 100644 --- a/third_party/libwebrtc/BUILD.gn +++ b/third_party/libwebrtc/BUILD.gn @@ -47,6 +47,7 @@ if (!build_with_chromium && !build_with_mozilla) { } if (rtc_include_tests) { deps += [ + ":fuchsia_perf_tests", ":rtc_unittests", ":video_engine_tests", ":voip_unittests", @@ -762,6 +763,22 @@ if (rtc_include_tests && !build_with_chromium) { } } + rtc_test("fuchsia_perf_tests") { + testonly = true + deps = [ + #TODO(fxbug.dev/115601) - Enable when fixed + #"call:call_perf_tests", + #"video:video_pc_full_stack_tests", + "modules/audio_coding:audio_coding_perf_tests", + "modules/audio_processing:audio_processing_perf_tests", + "pc:peerconnection_perf_tests", + "test:test_main", + "video:video_full_stack_tests", + ] + + data = webrtc_perf_tests_resources + } + rtc_test("webrtc_nonparallel_tests") { testonly = true deps = [ "rtc_base:rtc_base_nonparallel_tests" ] diff --git a/third_party/libwebrtc/DEPS b/third_party/libwebrtc/DEPS index 2621bf5de683..e751e9dd6339 100644 --- a/third_party/libwebrtc/DEPS +++ b/third_party/libwebrtc/DEPS @@ -10,24 +10,28 @@ vars = { # chromium waterfalls. More info at: crbug.com/570091. 'checkout_configuration': 'default', 'checkout_instrumented_libraries': 'checkout_linux and checkout_configuration == "default"', - 'chromium_revision': '927e2f6dae198e3a9d0d3e40d6e4d76edb70884d', + 'chromium_revision': 'd4870f767ea66ffff0f83f8267d2b61dfff0bf5d', # Keep the Chromium default of generating location tags. 'generate_location_tags': True, # ResultDB version - 'resultdb_version': 'git_revision:6cc18e2763e180929d70c786b419c1f8e6bcc66c', + 'resultdb_version': 'git_revision:39e20ee396fe4a84eaa7f7d389e5659198c12e87', # By default, download the fuchsia sdk from the public sdk directory. 'fuchsia_sdk_cipd_prefix': 'fuchsia/sdk/gn/', - 'fuchsia_version': 'version:9.20220919.2.1', + 'fuchsia_version': 'version:10.20221201.3.1', # By default, download the fuchsia images from the fuchsia GCS bucket. 'fuchsia_images_bucket': 'fuchsia', - 'checkout_fuchsia_boot_images': "qemu.x64", 'checkout_fuchsia': False, + # Since the images are hundreds of MB, default to only downloading the image + # most commonly useful for developers. Bots and developers that need to use + # other images can override this with additional images. + 'checkout_fuchsia_boot_images': "terminal.qemu-x64", + 'checkout_fuchsia_product_bundles': '"{checkout_fuchsia_boot_images}" != ""', # reclient CIPD package version - 'reclient_version': 're_client_version:0.81.1.0853992-gomaip', + 'reclient_version': 're_client_version:0.87.0.b6908b3-gomaip', # ninja CIPD package version # https://chrome-infra-packages.appspot.com/p/infra/3pp/tools/ninja @@ -37,30 +41,30 @@ vars = { deps = { # TODO(kjellander): Move this to be Android-only. 'src/base': - 'https://chromium.googlesource.com/chromium/src/base@7ee725c53f7e5a822cf4c571ce4e8ff787bfc0c7', + 'https://chromium.googlesource.com/chromium/src/base@4a17a70520935f05e354de004dcb44c7b1df534f', 'src/build': - 'https://chromium.googlesource.com/chromium/src/build@18e9d3c3adadf2489507e4e62afffafa46717d26', + 'https://chromium.googlesource.com/chromium/src/build@c91a4dbdb666e9bd82b187109ad311c58a552ce6', 'src/buildtools': - 'https://chromium.googlesource.com/chromium/src/buildtools@33b52eafd539278600d34cd9ba23550d28c933d2', + 'https://chromium.googlesource.com/chromium/src/buildtools@dcbf73cdcbcd0a2948b9e40bf500de166f622261', # Gradle 6.6.1. 
Used for testing Android Studio project generation for WebRTC. 'src/examples/androidtests/third_party/gradle': { 'url': 'https://chromium.googlesource.com/external/github.com/gradle/gradle.git@f2d1fb54a951d8b11d25748e4711bec8d128d7e3', 'condition': 'checkout_android', }, 'src/ios': { - 'url': 'https://chromium.googlesource.com/chromium/src/ios@dc273d7b2d91ad6c39d99604682a4e82c0278383', + 'url': 'https://chromium.googlesource.com/chromium/src/ios@36316fedfa1873be15dcaf681bf1295696abafbc', 'condition': 'checkout_ios', }, 'src/testing': - 'https://chromium.googlesource.com/chromium/src/testing@e3bca95dbc08d3879eadc39a66141210e85ee76f', + 'https://chromium.googlesource.com/chromium/src/testing@9adab94016c5e0840a235b4c0a7dd85173d3f370', 'src/third_party': - 'https://chromium.googlesource.com/chromium/src/third_party@38080027ded6b80f8f121e748ae8ecc60d9314b4', + 'https://chromium.googlesource.com/chromium/src/third_party@fc733299410a7104a0848539baab0131b8a616b8', 'src/buildtools/linux64': { 'packages': [ { 'package': 'gn/gn/linux-${{arch}}', - 'version': 'git_revision:a4d67be044b42963de801001e7146f9657c7fad4', + 'version': 'git_revision:5e19d2fb166fbd4f6f32147fbb2f497091a54ad8', } ], 'dep_type': 'cipd', @@ -70,7 +74,7 @@ deps = { 'packages': [ { 'package': 'gn/gn/mac-${{arch}}', - 'version': 'git_revision:a4d67be044b42963de801001e7146f9657c7fad4', + 'version': 'git_revision:5e19d2fb166fbd4f6f32147fbb2f497091a54ad8', } ], 'dep_type': 'cipd', @@ -80,7 +84,7 @@ deps = { 'packages': [ { 'package': 'gn/gn/windows-amd64', - 'version': 'git_revision:a4d67be044b42963de801001e7146f9657c7fad4', + 'version': 'git_revision:5e19d2fb166fbd4f6f32147fbb2f497091a54ad8', } ], 'dep_type': 'cipd', @@ -89,21 +93,24 @@ deps = { 'src/buildtools/reclient': { 'packages': [ { + # https://chrome-infra-packages.appspot.com/p/infra/rbe/client/ 'package': 'infra/rbe/client/${{platform}}', 'version': Var('reclient_version'), } ], 'dep_type': 'cipd', + # Reclient doesn't have linux-arm64 package. 
+ 'condition': 'not (host_os == "linux" and host_cpu == "arm64")', }, 'src/buildtools/clang_format/script': 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/clang/tools/clang-format.git@8b525d2747f2584fc35d8c7e612e66f377858df7', 'src/buildtools/third_party/libc++/trunk': - 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libcxx.git@fc6bbc5eb039769b5ed2de84444a3c6f9b45a598', + 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libcxx.git@2fc3d704672fbd3e85fad8492d39e02d49412891', 'src/buildtools/third_party/libc++abi/trunk': - 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libcxxabi.git@8dd405113a4f3694e910b79785dd7fb7535a888a', + 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libcxxabi.git@123239cdb67b3d69c5af933e364a84019a33575c', 'src/buildtools/third_party/libunwind/trunk': - 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libunwind.git@aabcd8753678f1536e15eb6385a948470debdae4', + 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libunwind.git@5e22a7fe2335161ab267867c8e1be481bf6c8300', 'src/third_party/ninja': { 'packages': [ @@ -139,7 +146,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_build_tools/aapt2', - 'version': '-QrdsGmvVhHeoRc5wKCnU2LXEjk1s0ocheitXWf5dhYC', + 'version': 'cbNG7g8Sinh-lsT8hWsU-RyXqLT_uh4jIb1fjCdhrzIC', }, ], 'condition': 'checkout_android', @@ -150,7 +157,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_build_tools/bundletool', - 'version': 'JUxLsQLBkNG0ylmbHz6FGBtYyK1PNDZ04pMCii90Bd4C', + 'version': 'eYz83zbG33sGLyNdc-a64qo1K6LRcS9GwW7GmSvyWisC', }, ], 'condition': 'checkout_android', @@ -158,11 +165,11 @@ deps = { }, 'src/third_party/boringssl/src': - 'https://boringssl.googlesource.com/boringssl.git@1ee71185a2322dc354bee5e5a0abfb1810a27dc6', + 'https://boringssl.googlesource.com/boringssl.git@28f96c2686459add7acedcd97cb841030bdda019', 'src/third_party/breakpad/breakpad': - 'https://chromium.googlesource.com/breakpad/breakpad.git@e085b3b50bde862d0cf3ce4594e3f391bcf5faec', + 'https://chromium.googlesource.com/breakpad/breakpad.git@cc7abac08b0c52e6581b9c9c4226816b17a4c26d', 'src/third_party/catapult': - 'https://chromium.googlesource.com/catapult.git@3ffa6b222803f54188a7b249383b2f092a24d19a', + 'https://chromium.googlesource.com/catapult.git@bf0782db65682f3918886ba69807c03fe515c2e8', 'src/third_party/ced/src': { 'url': 'https://chromium.googlesource.com/external/github.com/google/compact_enc_det.git@ba412eaaacd3186085babcd901679a48863c7dd5', }, @@ -171,9 +178,9 @@ deps = { 'src/third_party/crc32c/src': 'https://chromium.googlesource.com/external/github.com/google/crc32c.git@fa5ade41ee480003d9c5af6f43567ba22e4e17e6', 'src/third_party/depot_tools': - 'https://chromium.googlesource.com/chromium/tools/depot_tools.git@b52683fa2e74087464d32a1a9c76bf1b5275e4fe', + 'https://chromium.googlesource.com/chromium/tools/depot_tools.git@41a2d0f1a0173723f63ca2994e17c81eaf302b65', 'src/third_party/ffmpeg': - 'https://chromium.googlesource.com/chromium/third_party/ffmpeg.git@b9f01c3c54576330b2cf8918c54d5ee5be8faefe', + 'https://chromium.googlesource.com/chromium/third_party/ffmpeg.git@a249b21db6516234e5456716ae074fbb00176b3f', 'src/third_party/flatbuffers/src': 'https://chromium.googlesource.com/external/github.com/google/flatbuffers.git@e3017029647a88eb6f509ee9744012fffeb0d371', 'src/third_party/grpc/src': { @@ -185,9 +192,9 @@ deps = { 'condition': 'checkout_linux', 
}, 'src/third_party/freetype/src': - 'https://chromium.googlesource.com/chromium/src/third_party/freetype2.git@dea2e6358b2f963008d447d27564dd79890b61f0', + 'https://chromium.googlesource.com/chromium/src/third_party/freetype2.git@ace97a02a4461bbdae29da4019c105eead95e277', 'src/third_party/harfbuzz-ng/src': - 'https://chromium.googlesource.com/external/github.com/harfbuzz/harfbuzz.git@56c467093598ec559a7148b61e112e9de52b7076', + 'https://chromium.googlesource.com/external/github.com/harfbuzz/harfbuzz.git@2822b589bc837fae6f66233e2cf2eef0f6ce8470', 'src/third_party/google_benchmark/src': { 'url': 'https://chromium.googlesource.com/external/github.com/google/benchmark.git@f730846b0a3c0dc0699978846fb14ffb2fad0bdc', }, @@ -207,7 +214,7 @@ deps = { 'src/third_party/googletest/src': 'https://chromium.googlesource.com/external/github.com/google/googletest.git@af29db7ec28d6df1c7f0f745186884091e602e07', 'src/third_party/icu': { - 'url': 'https://chromium.googlesource.com/chromium/deps/icu.git@da07448619763d1cde255b361324242646f5b268', + 'url': 'https://chromium.googlesource.com/chromium/deps/icu.git@1b7d391f0528fb3a4976b7541b387ee04f915f83', }, 'src/third_party/jdk': { 'packages': [ @@ -222,7 +229,7 @@ deps = { 'src/third_party/jsoncpp/source': 'https://chromium.googlesource.com/external/github.com/open-source-parsers/jsoncpp.git@42e892d96e47b1f6e29844cc705e148ec4856448', # from svn 248 'src/third_party/junit/src': { - 'url': 'https://chromium.googlesource.com/external/junit.git@64155f8a9babcfcf4263cf4d08253a1556e75481', + 'url': 'https://chromium.googlesource.com/external/junit.git@05fe2a64f59127c02135be22f416e91260d6ede6', 'condition': 'checkout_android', }, # Used for building libFuzzers (only supports Linux). @@ -235,17 +242,17 @@ deps = { 'src/third_party/dav1d/libdav1d': 'https://chromium.googlesource.com/external/github.com/videolan/dav1d.git@87f9a81cd770e49394a45deca7a3df41243de00b', 'src/third_party/libaom/source/libaom': - 'https://aomedia.googlesource.com/aom.git@7f32eb35ff2589369f095388701e3dfc4d6a9381', + 'https://aomedia.googlesource.com/aom.git@a84503456d4276348da3e80de7569adb1b389a60', 'src/third_party/libunwindstack': { 'url': 'https://chromium.googlesource.com/chromium/src/third_party/libunwindstack.git@4dbfa0e8c844c8e243b297bc185e54a99ff94f9e', 'condition': 'checkout_android', }, 'src/third_party/perfetto': - 'https://android.googlesource.com/platform/external/perfetto.git@326fb7f15672187f767d79ee1fabf1331ebcc0dd', + 'https://android.googlesource.com/platform/external/perfetto.git@61ba4b9b606100828e425eb9a245dd45c5591f28', 'src/third_party/libvpx/source/libvpx': - 'https://chromium.googlesource.com/webm/libvpx.git@5245f6e9cb7e6bb68ab45fe4d8b00bc9b16857e1', + 'https://chromium.googlesource.com/webm/libvpx.git@605350bd5b68ac47f595d60cc8ef346588e773c0', 'src/third_party/libyuv': - 'https://chromium.googlesource.com/libyuv/libyuv.git@fe9ced6e3c8ae6c69bcc3ebb8505a650d2df30e0', + 'https://chromium.googlesource.com/libyuv/libyuv.git@4a3c79cb31aee310443039c37d64377ed06f1d14', 'src/third_party/lss': { 'url': 'https://chromium.googlesource.com/linux-syscall-support.git@ce877209e11aa69dcfffbd53ef90ea1d07136521', 'condition': 'checkout_android or checkout_linux', @@ -266,7 +273,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/r8', - 'version': 'szXK3tCGU7smsNs4r2mGqxme7d9KWLaOk0_ghbCJxUQC', + 'version': 'pv_BIbpK8sxEFp63muv1gKsbyWJoyv4PDw342wc9H6AC', }, ], 'condition': 'checkout_android', @@ -290,7 +297,7 @@ deps = { 'condition': 'checkout_android', }, 'src/tools': - 
'https://chromium.googlesource.com/chromium/src/tools@a398922738180b9301ebb4f2bf0896763ee921ed', + 'https://chromium.googlesource.com/chromium/src/tools@0c34fd995e2cfdb007209c44bb0d28e894b1d2ea', 'src/third_party/accessibility_test_framework': { 'packages': [ @@ -367,7 +374,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/androidx', - 'version': 'DRqe-W5-XlO2ZySLCwsYKy7iqIaQ77O-Y91txXGY_hMC', + 'version': '3ADwB26rDMIdmScjo6j4e98VQl6amFOyrvsvrVRthBMC', }, ], 'condition': 'checkout_android', @@ -378,7 +385,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_build_tools/manifest_merger', - 'version': '10z1KegIoj_7T--lXulnk4MUKHMHEo_onhwh_4FvyMQC', + 'version': 'X4l8RIBEAF108FpSEWRF7UHqq-kY8T3ibSsObGU5u3UC', }, ], 'condition': 'checkout_android', @@ -413,7 +420,7 @@ deps = { }, { 'package': 'chromium/third_party/android_sdk/public/cmdline-tools', - 'version': 'IPzAG-uU5zVMxohpg9-7-N0tQC1TCSW1VbrBFw7Ld04C', + 'version': 'oWlET2yQhaPKQ66tYNuSPaueU78Z9VlxpyxOoUjwRuIC', }, ], 'condition': 'checkout_android', @@ -468,7 +475,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/turbine', - 'version': 'HqLybI_r3dCgRJywsqJ3xkp2D6vQAI4-8D7zdqNiyxcC', + 'version': 'R-Qp1tMBqIuETMfXNqQU9GB00ij6dsPjVmjDuvH_194C', }, ], 'condition': 'checkout_android', @@ -479,11 +486,11 @@ deps = { 'packages': [ { 'package': 'infra/tools/luci/isolate/${{platform}}', - 'version': 'git_revision:765f51c332c38e9b8d7981f23640b9df59371cd5', + 'version': 'git_revision:bac571b5399502fa16ac48a1d3820e1117505085', }, { 'package': 'infra/tools/luci/swarming/${{platform}}', - 'version': 'git_revision:765f51c332c38e9b8d7981f23640b9df59371cd5', + 'version': 'git_revision:bac571b5399502fa16ac48a1d3820e1117505085', }, ], 'dep_type': 'cipd', @@ -936,28 +943,6 @@ deps = { 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_android_tools_desugar_jdk_libs': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_android_tools_desugar_jdk_libs', - 'version': 'version:2@1.1.5.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_android_tools_desugar_jdk_libs_configuration': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_android_tools_desugar_jdk_libs_configuration', - 'version': 'version:2@1.1.5.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - 'src/third_party/android_deps/libs/com_android_tools_layoutlib_layoutlib_api': { 'packages': [ { @@ -2418,16 +2403,14 @@ hooks = [ '--version={fuchsia_version}', ], }, - { 'name': 'Download Fuchsia system images', 'pattern': '.', - 'condition': 'checkout_fuchsia', + 'condition': 'checkout_fuchsia and checkout_fuchsia_product_bundles', 'action': [ 'python3', - 'src/build/fuchsia/update_images.py', - '--boot-images={checkout_fuchsia_boot_images}', - '--default-bucket={fuchsia_images_bucket}', + 'src/build/fuchsia/update_product_bundles.py', + '{checkout_fuchsia_boot_images}', ], }, { @@ -2573,27 +2556,51 @@ hooks = [ ], }, { - 'name': 'msan_chained_origins', + 'name': 'msan_chained_origins_focal', 'pattern': '.', 'condition': 'checkout_instrumented_libraries', 'action': [ 'python3', 'src/third_party/depot_tools/download_from_google_storage.py', - "--no_resume", - "--no_auth", - "--bucket", "chromium-instrumented-libraries", - "-s", "src/third_party/instrumented_libraries/binaries/msan-chained-origins.tgz.sha1", + '--no_resume', + '--no_auth', + '--bucket', 'chromium-instrumented-libraries', + '-s', 
'src/third_party/instrumented_libraries/binaries/msan-chained-origins-focal.tgz.sha1', ], }, { - 'name': 'msan_no_origins', + 'name': 'msan_no_origins_focal', 'pattern': '.', 'condition': 'checkout_instrumented_libraries', 'action': [ 'python3', 'src/third_party/depot_tools/download_from_google_storage.py', - "--no_resume", - "--no_auth", - "--bucket", "chromium-instrumented-libraries", - "-s", "src/third_party/instrumented_libraries/binaries/msan-no-origins.tgz.sha1", + '--no_resume', + '--no_auth', + '--bucket', 'chromium-instrumented-libraries', + '-s', 'src/third_party/instrumented_libraries/binaries/msan-no-origins-focal.tgz.sha1', + ], + }, + { + 'name': 'msan_chained_origins_xenial', + 'pattern': '.', + 'condition': 'checkout_instrumented_libraries', + 'action': [ 'python3', + 'src/third_party/depot_tools/download_from_google_storage.py', + '--no_resume', + '--no_auth', + '--bucket', 'chromium-instrumented-libraries', + '-s', 'src/third_party/instrumented_libraries/binaries/msan-chained-origins-xenial.tgz.sha1', + ], + }, + { + 'name': 'msan_no_origins_xenial', + 'pattern': '.', + 'condition': 'checkout_instrumented_libraries', + 'action': [ 'python3', + 'src/third_party/depot_tools/download_from_google_storage.py', + '--no_resume', + '--no_auth', + '--bucket', 'chromium-instrumented-libraries', + '-s', 'src/third_party/instrumented_libraries/binaries/msan-no-origins-xenial.tgz.sha1', ], }, { diff --git a/third_party/libwebrtc/OWNERS b/third_party/libwebrtc/OWNERS index 6ae4b59a95f3..bfcca980eb89 100644 --- a/third_party/libwebrtc/OWNERS +++ b/third_party/libwebrtc/OWNERS @@ -3,20 +3,4 @@ hta@webrtc.org mflodman@webrtc.org stefan@webrtc.org tommi@webrtc.org -per-file .gitignore=* -per-file .gn=mbonadei@webrtc.org -per-file BUILD.gn=mbonadei@webrtc.org -per-file .../BUILD.gn=mbonadei@webrtc.org -per-file *.gni=mbonadei@webrtc.org -per-file .../*.gni=mbonadei@webrtc.org -per-file .vpython=mbonadei@webrtc.org -per-file .vpython3=mbonadei@webrtc.org -per-file AUTHORS=* -per-file DEPS=* -per-file pylintrc=mbonadei@webrtc.org -per-file WATCHLISTS=* -per-file native-api.md=mbonadei@webrtc.org -per-file ....lua=titovartem@webrtc.org -per-file .style.yapf=jleconte@webrtc.org -per-file *.py=jansson@webrtc.org -per-file *.py=jleconte@webrtc.org +include OWNERS_INFRA #{Owners for infra and repo related files} diff --git a/third_party/libwebrtc/OWNERS_INFRA b/third_party/libwebrtc/OWNERS_INFRA new file mode 100644 index 000000000000..71725701527f --- /dev/null +++ b/third_party/libwebrtc/OWNERS_INFRA @@ -0,0 +1,17 @@ +#Owners for infra and repo related files +per-file .gitignore=* +per-file .gn=mbonadei@webrtc.org,jansson@webrtc.org,jleconte@webrtc.org +per-file BUILD.gn=mbonadei@webrtc.org,jansson@webrtc.org,jleconte@webrtc.org +per-file .../BUILD.gn=mbonadei@webrtc.org,jansson@webrtc.org,jleconte@webrtc.org +per-file *.gni=mbonadei@webrtc.org,jansson@webrtc.org,jleconte@webrtc.org +per-file .../*.gni=mbonadei@webrtc.org,jansson@webrtc.org,jleconte@webrtc.org +per-file .vpython=mbonadei@webrtc.org,jansson@webrtc.org,jleconte@webrtc.org +per-file .vpython3=mbonadei@webrtc.org,jansson@webrtc.org,jleconte@webrtc.org +per-file AUTHORS=* +per-file DEPS=* +per-file pylintrc=mbonadei@webrtc.org,jansson@webrtc.org,jleconte@webrtc.org +per-file WATCHLISTS=* +per-file native-api.md=mbonadei@webrtc.org +per-file ....lua=titovartem@webrtc.org +per-file .style.yapf=jleconte@webrtc.org +per-file *.py=mbonadei@webrtc.org,jansson@webrtc.org,jleconte@webrtc.org diff --git 
a/third_party/libwebrtc/README.moz-ff-commit b/third_party/libwebrtc/README.moz-ff-commit index 329c9ef465b5..9db351e70f63 100644 --- a/third_party/libwebrtc/README.moz-ff-commit +++ b/third_party/libwebrtc/README.moz-ff-commit @@ -19428,3 +19428,897 @@ c71b34235e # MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh # base of lastest vendoring 4e8a5ac68e +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +95c950af03 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +3e6931b183 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +816e26da55 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +6b0aea07ab +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +b41568b6fd +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +d53578e0f5 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +1a00ebcbda +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +1bef09708a +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +cdc769dd76 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +31364615d7 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +5f42cdcb31 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +b21c979691 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +cf2856b01c +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +d2a48e8226 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +c48a265346 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash 
dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +12046bf8c4 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +9b68e35baa +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +7d8d64323c +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +acabb3641b +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +76bd5a80ea +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +954cf1f853 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +2803ca27fb +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +1b8f2d59c4 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +c40cf325b7 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +82c8e4af7c +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +4185a91592 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +34cdb1f53c +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +d2811761e3 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +e085366aca +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +da4c102cbd +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +dd4c4068d9 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +3e0658beec +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +7ccd88f3e5 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc 
MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +e844aad41a +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +4db5b979b7 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +45c882e4be +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +11be12118b +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +05b58ad77e +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +91e6987f66 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +52b0ef7926 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +99543ae75f +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +b301b58b3f +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +6aa755c201 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +b46c4bf27b +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +cd4456e336 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +cb2b133bf0 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +fbeb76ab51 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +d3d1dfd8f2 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +ef005bc924 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +2405298a28 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +c30835c712 +# 
MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +9eb1ff3ac0 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +408f0be5c2 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +116c0a53d4 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +5dd548261f +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +e158b77427 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +17887eb04a +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +1d17f73471 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +4f6642e366 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +170316d229 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +1ce8e73b1c +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +5214c2e7ff +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +41a0702886 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +76793c300f +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +7404f07ad9 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +3d9b5590c2 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +892e61cd1e +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +6eb1e709da +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring 
+34f4ec26e3 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +63dda507f5 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +858864dcb4 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +7dc590e0b7 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +5c297eb7a9 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +1571258ca6 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +dd18f9f8c2 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +f45f823541 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +bf28277774 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +4440426792 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +78b466a0d1 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +c61ffddc8b +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +e0bb181371 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +77ca50c3e4 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +cb683099e1 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +bbdb768989 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +6d91a718c8 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +538fa81328 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest 
vendoring +987ebe6b49 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +79beaa7f38 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +d742382eb0 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +d8c4de7172 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +d168353cb8 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +440df4bfa5 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +b2556d7716 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +918eb19303 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +768cb4464e +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +ca0481751d +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +dd35e244ce +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +a3a3b6d798 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +c6ae33fb07 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +0c56aef5d5 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +310e0624aa +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +27fed4513f +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +2c2d2c75b5 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +8a8c455cce +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base 
of lastest vendoring +447b9f3fde +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +19d96365b2 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +bbc8fc165c +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +6a8776a108 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +46e2d103b4 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +4bee365c82 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +514dff834b +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +fc5d709e41 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +7216b27406 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +6e55319b5d +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +d409621f28 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +b05968e5ec +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +75170be4ac +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +a7013ee650 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +d6b330ea77 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +893c0e449d +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +5a65f9e0aa +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +b5e5b8a6c4 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash 
dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +e40bb38faa +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +4a44e0ef40 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +5c4509a604 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +e862da376f +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +158d5e3078 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +bc43fe3a50 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +c7fc01269e +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +802ccfef21 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +53eb544c10 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +f71e87a71d +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +95b556f022 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +840ea0f703 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +0eea00c77b +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +569af3e80f +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +7daa6787fa +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +06cba44d7a +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +6358cbf7bb +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +370ca9c52c +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc 
MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +f25076751b +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +29a8d525be +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +7184016a6a +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +e03862bcbf +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +fd9a1e1d98 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +ef7618a8f9 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +c5aac4ec1f +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +bf2f605e03 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +2076af4673 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +c06dc4df80 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +f0ea56a0a2 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +b4753d038e +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +693306cf7a +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +b7a3d59813 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +a5e7941a57 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +13730e9742 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +b4f87e5048 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +b889a7aee4 +# 
MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +41a8357170 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +8b47ea459e +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +03bccbe62d +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +3f2a3b19e3 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +539757b50e +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +9665d01e69 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +f0c33c4d68 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +7eea667228 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +256d3ee2bf +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +d4dbe4527d +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +b00f88179e +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +77bb688982 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +c19ec96bd7 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +a6574909e9 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +21a9bbcf39 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +adf35a359e +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +e0b4cab69c +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring 
+cf7077693c +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +f889217015 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +a445e6a489 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +d8d86bd332 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +8754a3c945 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +504bd59422 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +cb885923d8 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +b1bdadcabd +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +9e099b62a1 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +3fcd49e972 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +59ade0172f +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +e001474407 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +2d7a3e7ca8 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +c8157c33b4 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +a639528a43 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +8b4a81fb55 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +2e3069bf07 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +dd236a94f8 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest 
vendoring +d0eaa54104 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +5b42b93010 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +737dc4455c +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +9f3114dec9 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +1b80be352b +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +b6e8c2e393 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +c0c65387ae +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +3c85787ef3 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +a422e93d7b +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +def85594ea +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +cf78b19a6f +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +4366c5469f +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +e093c481bf +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +d8ed0c1f17 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +bed6401c23 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +e149d4d100 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +583fd2ba99 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +79c21b1bf5 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base 
of lastest vendoring +898403b0c9 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +279b4b7d4f +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +f6777a4997 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +b1b2840171 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +8e21784b03 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +38a6002548 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +fcbf3724eb +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +ce79f873e7 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +0524319a9d +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +1985b5a927 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +e4caacbfc3 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +7970f87a37 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +f1da1d5e53 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +53f3049588 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +36f668c8bb +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +100de33983 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +e2652e168a +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +f1aa9fbb09 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash 
dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +a8c300e36f +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +352f38c7a8 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +3c529893e0 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +17e14fdf34 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +9629ca2c98 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +01cac31d58 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +36fafc8827 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +a55a54d1eb +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +d34c4ee141 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +9b235cd93b +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +ec4961ac54 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +6419537b3b +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +4b5dececfd +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +e04726281c +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +da964d7559 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +248b9105fd +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +6ebf5e3379 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +ca6535593f +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc 
MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +c1080dc884 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +f72bc5f1e2 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +fecbec261b +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +26c2dee621 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +7ade9b2fa6 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +2cda27c0b9 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +dfba28e30e +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +2bfa767245 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +b493db9b4d +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +3e4f5a4760 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +72f500227e +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +74e6f5b10c +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +35f769c69a +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +e9dc70b220 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +46ad25119c +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +29464b06c5 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +7ff599b753 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +a106095333 +# 
MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +d71ca4dfc9 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +73a4bcbeff +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +073601feeb +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +8d74b28518 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +cdee165646 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +b02a8f5a7c +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +1b11b58b56 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +5a0763564b +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +28b7b2458c +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +f89122c1fb +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +4ff782685c +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +6f5b89acf4 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +c61312a17b +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +1c1ff7293b +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +c0d44d9d63 +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +597a2ba41a +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +3b51cd328e +# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring 
+06941ca1a6
+# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh
+# base of lastest vendoring
+919b79b7ef
+# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh
+# base of lastest vendoring
+6c2827d83f
+# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh
+# base of lastest vendoring
+35962284b3
+# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh
+# base of lastest vendoring
+ec609b1cdd
+# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh
+# base of lastest vendoring
+18fccfc477
+# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh
+# base of lastest vendoring
+afe956699d
+# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh
+# base of lastest vendoring
+f00483206d
+# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh
+# base of lastest vendoring
+4a680f11ae
+# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh
+# base of lastest vendoring
+aa5897dcc5
+# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh
+# base of lastest vendoring
+2e1a9a4ae0
+# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh
+# base of lastest vendoring
+dc7333f9d6
+# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh
+# base of lastest vendoring
+e0efbd45ea
+# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh
+# base of lastest vendoring
+36b2ad31c8
+# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh
+# base of lastest vendoring
+1e675c7835
+# MOZ_LIBWEBRTC_SRC=/home/bcampen/checkouts/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh
+# base of lastest vendoring
+218b56e516
diff --git a/third_party/libwebrtc/README.mozilla b/third_party/libwebrtc/README.mozilla
index f15b1a1270d9..a9519682fdd1 100644
--- a/third_party/libwebrtc/README.mozilla
+++ b/third_party/libwebrtc/README.mozilla
@@ -12974,3 +12974,599 @@ libwebrtc updated from /home/pehrsons/dev/moz-libwebrtc commit mozpatches on 202
 libwebrtc updated from /home/pehrsons/dev/moz-libwebrtc commit mozpatches on 2023-02-24T16:01:16.026055.
 # ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/moz-libwebrtc --commit mozpatches libwebrtc
 libwebrtc updated from /home/pehrsons/dev/moz-libwebrtc commit mozpatches on 2023-02-24T16:02:53.773364.
+# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc
+libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T14:56:24.853282.
+# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc
+libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T14:57:55.610838.
+# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc
+libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T14:59:54.580446.
+# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc
+libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T15:01:19.817890.
+# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc
+libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T15:02:51.558928.
+# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc
+libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T15:04:17.239362.
+# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc
+libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T15:06:12.302720.
+# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc
+libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T15:07:42.435398.
+# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc
+libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T15:09:42.166388.
+# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc
+libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T15:11:09.093234.
+# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc
+libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T15:12:41.561153.
+# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc
+libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T15:19:41.726150.
+# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T16:02:11.191193. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T16:03:38.185364. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T16:05:33.993409. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T16:07:02.799505. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T16:08:29.063041. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T16:09:57.406210. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T16:24:40.344711. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T21:31:43.042428. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T21:39:18.806581. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T21:40:35.909852. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T21:41:57.970080. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T21:43:15.296353. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T21:44:38.020406. 
+# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T21:45:52.948129. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T21:47:08.093521. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T21:48:25.822803. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T21:49:45.926228. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T21:51:01.129170. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T21:52:25.370552. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T21:53:43.821716. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T21:55:06.054343. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T21:56:22.948745. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T21:57:45.005845. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T21:59:04.082755. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T22:00:26.188511. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T22:01:48.006942. 
+# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T22:03:11.804079. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T22:04:28.045422. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T22:05:48.114305. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T22:07:03.223049. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T22:08:20.183206. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T22:09:47.079846. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T22:11:07.180679. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T22:12:23.301234. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T22:13:39.169195. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T22:14:54.057023. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T22:16:11.207380. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T22:17:54.085171. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/bcampen/checkouts/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/bcampen/checkouts/moz-libwebrtc commit mozpatches on 2023-03-31T22:19:10.979921. 
diff --git a/third_party/libwebrtc/api/BUILD.gn b/third_party/libwebrtc/api/BUILD.gn index eb0c8b07e89e..7c6e6144bc58 100644 --- a/third_party/libwebrtc/api/BUILD.gn +++ b/third_party/libwebrtc/api/BUILD.gn @@ -193,6 +193,52 @@ rtc_library("dtls_transport_interface") { absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } +rtc_library("dtmf_sender_interface") { + visibility = [ "*" ] + + sources = [ "dtmf_sender_interface.h" ] + deps = [ + ":media_stream_interface", + "../rtc_base:refcount", + ] +} + +rtc_library("rtp_sender_interface") { + visibility = [ "*" ] + + sources = [ + "rtp_sender_interface.cc", + "rtp_sender_interface.h", + ] + deps = [ + ":dtls_transport_interface", + ":dtmf_sender_interface", + ":frame_transformer_interface", + ":media_stream_interface", + ":rtp_parameters", + ":rtp_sender_setparameters_callback", + ":scoped_refptr", + "../rtc_base:checks", + "../rtc_base:refcount", + "../rtc_base/system:rtc_export", + "crypto:frame_encryptor_interface", + "video_codecs:video_codecs_api", + ] +} + +rtc_library("rtp_sender_setparameters_callback") { + visibility = [ "*" ] + + sources = [ + "rtp_sender_setparameters_callback.cc", + "rtp_sender_setparameters_callback.h", + ] + deps = [ + ":rtc_error", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/functional:any_invocable" ] +} + rtc_library("libjingle_peerconnection_api") { if (!build_with_mozilla) { visibility = [ "*" ] @@ -201,27 +247,34 @@ if (!build_with_mozilla) { "crypto_params.h", "data_channel_interface.cc", "data_channel_interface.h", - "dtmf_sender_interface.h", "jsep.cc", "jsep.h", "jsep_ice_candidate.cc", "jsep_ice_candidate.h", "jsep_session_description.h", + "legacy_stats_types.cc", + "legacy_stats_types.h", "peer_connection_interface.cc", "peer_connection_interface.h", "rtp_receiver_interface.cc", "rtp_receiver_interface.h", - "rtp_sender_interface.h", "rtp_transceiver_interface.cc", "rtp_transceiver_interface.h", "sctp_transport_interface.cc", "sctp_transport_interface.h", "set_local_description_observer_interface.h", "set_remote_description_observer_interface.h", - "stats_types.cc", - "stats_types.h", "uma_metrics.h", "video_track_source_proxy_factory.h", + + # Remove when downstream has been updated + "dtmf_sender_interface.h", + "rtp_sender_interface.h", + ] + public_deps = [ # no-presubmit-check TODO(webrtc:8603) + # Remove when downstream has been updated + ":dtmf_sender_interface", + ":rtp_sender_interface", ] deps = [ ":array_view", @@ -245,6 +298,7 @@ if (!build_with_mozilla) { ":rtc_stats_api", ":rtp_packet_info", ":rtp_parameters", + ":rtp_sender_interface", ":rtp_transceiver_direction", ":scoped_refptr", ":sequence_checker", @@ -295,6 +349,7 @@ if (!build_with_mozilla) { absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/base:core_headers", + "//third_party/abseil-cpp/absl/functional:any_invocable", "//third_party/abseil-cpp/absl/memory", "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", @@ -536,7 +591,6 @@ if (!build_with_mozilla) { "../rtc_base:stringutils", "../rtc_base:threading", "../test:fileutils", - "../test/pc/e2e:video_dumping", "audio:audio_mixer_api", "rtc_event_log", "task_queue", @@ -951,22 +1005,50 @@ if (rtc_include_tests) { ] } - rtc_library("videocodec_test_fixture_api") { + rtc_library("videocodec_test_stats_api") { visibility = [ "*" ] testonly = true sources = [ - "test/videocodec_test_fixture.h", "test/videocodec_test_stats.cc", "test/videocodec_test_stats.h", ] deps = [ - 
"../modules/video_coding:video_codec_interface", + "../api/units:data_rate", + "../api/units:frequency", "../rtc_base:stringutils", "video:video_frame_type", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] + } + + rtc_library("videocodec_test_fixture_api") { + visibility = [ "*" ] + testonly = true + sources = [ "test/videocodec_test_fixture.h" ] + deps = [ + ":videocodec_test_stats_api", + "../modules/video_coding:video_codec_interface", "video_codecs:video_codecs_api", ] } + rtc_library("video_codec_tester_api") { + visibility = [ "*" ] + testonly = true + sources = [ "test/video_codec_tester.h" ] + deps = [ + ":videocodec_test_stats_api", + "../modules/video_coding/svc:scalability_mode_util", + "video:encoded_image", + "video:resolution", + "video:video_frame", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/functional:any_invocable", + "//third_party/abseil-cpp/absl/types:optional", + ] + } + rtc_library("create_videocodec_test_fixture_api") { visibility = [ "*" ] testonly = true @@ -982,6 +1064,19 @@ if (rtc_include_tests) { ] } + rtc_library("create_video_codec_tester_api") { + visibility = [ "*" ] + testonly = true + sources = [ + "test/create_video_codec_tester.cc", + "test/create_video_codec_tester.h", + ] + deps = [ + ":video_codec_tester_api", + "../modules/video_coding:videocodec_test_impl", + ] + } + rtc_source_set("mock_audio_mixer") { visibility = [ "*" ] testonly = true @@ -1022,6 +1117,7 @@ if (rtc_include_tests) { sources = [ "test/mock_dtmf_sender.h" ] deps = [ + ":dtmf_sender_interface", ":libjingle_peerconnection_api", "../test:test_support", ] @@ -1183,6 +1279,8 @@ if (rtc_include_tests) { deps = [ ":libjingle_peerconnection_api", + ":rtp_sender_interface", + "../api/crypto:frame_decryptor_interface", "../test:test_support", ] } @@ -1330,6 +1428,7 @@ if (rtc_include_tests) { "../rtc_base/containers:flat_set", "../rtc_base/task_utils:repeating_task", "../system_wrappers:field_trial", + "../test:field_trial", "../test:fileutils", "../test:rtc_expect_death", "../test:test_support", @@ -1430,3 +1529,19 @@ rtc_library("field_trials") { ] absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } + +rtc_library("frame_transformer_factory") { + visibility = [ "*" ] + sources = [ + "frame_transformer_factory.cc", + "frame_transformer_factory.h", + ] + deps = [ + ":frame_transformer_interface", + ":scoped_refptr", + "../modules/rtp_rtcp", + "../rtc_base:refcount", + "video:encoded_frame", + "video:video_frame_metadata", + ] +} diff --git a/third_party/libwebrtc/api/DEPS b/third_party/libwebrtc/api/DEPS index 5f012040683d..bcfd70574182 100644 --- a/third_party/libwebrtc/api/DEPS +++ b/third_party/libwebrtc/api/DEPS @@ -182,7 +182,7 @@ specific_include_rules = { "+rtc_base/ref_count.h", ], - "stats_types\.h": [ + "legacy_stats_types\.h": [ "+rtc_base/ref_count.h", "+rtc_base/thread_checker.h", ], diff --git a/third_party/libwebrtc/api/audio_codecs/BUILD.gn b/third_party/libwebrtc/api/audio_codecs/BUILD.gn index b4b06fb32b1f..82ed31a5dac0 100644 --- a/third_party/libwebrtc/api/audio_codecs/BUILD.gn +++ b/third_party/libwebrtc/api/audio_codecs/BUILD.gn @@ -62,7 +62,6 @@ rtc_library("builtin_audio_decoder_factory") { "L16:audio_decoder_L16", "g711:audio_decoder_g711", "g722:audio_decoder_g722", - "isac:audio_decoder_isac", ] defines = [] if (rtc_include_ilbc) { @@ -95,7 +94,6 @@ rtc_library("builtin_audio_encoder_factory") { "L16:audio_encoder_L16", "g711:audio_encoder_g711", "g722:audio_encoder_g722", - "isac:audio_encoder_isac", ] defines = [] if 
(rtc_include_ilbc) { diff --git a/third_party/libwebrtc/api/audio_codecs/OWNERS b/third_party/libwebrtc/api/audio_codecs/OWNERS index 77e9d0022a6c..77b414abc30d 100644 --- a/third_party/libwebrtc/api/audio_codecs/OWNERS +++ b/third_party/libwebrtc/api/audio_codecs/OWNERS @@ -1,2 +1,3 @@ -minyue@webrtc.org +alessiob@webrtc.org henrik.lundin@webrtc.org +jakobi@webrtc.org diff --git a/third_party/libwebrtc/api/audio_codecs/builtin_audio_decoder_factory.cc b/third_party/libwebrtc/api/audio_codecs/builtin_audio_decoder_factory.cc index 963cfe5cb9bc..881113d985e9 100644 --- a/third_party/libwebrtc/api/audio_codecs/builtin_audio_decoder_factory.cc +++ b/third_party/libwebrtc/api/audio_codecs/builtin_audio_decoder_factory.cc @@ -20,7 +20,6 @@ #if WEBRTC_USE_BUILTIN_ILBC #include "api/audio_codecs/ilbc/audio_decoder_ilbc.h" // nogncheck #endif -#include "api/audio_codecs/isac/audio_decoder_isac.h" #if WEBRTC_USE_BUILTIN_OPUS #include "api/audio_codecs/opus/audio_decoder_multi_channel_opus.h" #include "api/audio_codecs/opus/audio_decoder_opus.h" // nogncheck @@ -57,7 +56,7 @@ rtc::scoped_refptr CreateBuiltinAudioDecoderFactory() { AudioDecoderOpus, NotAdvertised, #endif - AudioDecoderIsac, AudioDecoderG722, + AudioDecoderG722, #if WEBRTC_USE_BUILTIN_ILBC AudioDecoderIlbc, diff --git a/third_party/libwebrtc/api/audio_codecs/builtin_audio_decoder_factory_gn/moz.build b/third_party/libwebrtc/api/audio_codecs/builtin_audio_decoder_factory_gn/moz.build index d5e4bb813c5d..742c17dbd15b 100644 --- a/third_party/libwebrtc/api/audio_codecs/builtin_audio_decoder_factory_gn/moz.build +++ b/third_party/libwebrtc/api/audio_codecs/builtin_audio_decoder_factory_gn/moz.build @@ -69,8 +69,6 @@ if CONFIG["OS_TARGET"] == "Darwin": DEFINES["WEBRTC_ENABLE_AVX2"] = True DEFINES["WEBRTC_MAC"] = True DEFINES["WEBRTC_POSIX"] = True - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FIX"] = "0" - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FLOAT"] = "1" DEFINES["_LIBCPP_HAS_NO_ALIGNED_ALLOCATION"] = True DEFINES["__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES"] = "0" DEFINES["__STDC_CONSTANT_MACROS"] = True @@ -103,8 +101,6 @@ if CONFIG["OS_TARGET"] == "OpenBSD": DEFINES["WEBRTC_BSD"] = True DEFINES["WEBRTC_ENABLE_AVX2"] = True DEFINES["WEBRTC_POSIX"] = True - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FIX"] = "0" - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FLOAT"] = "1" DEFINES["_FILE_OFFSET_BITS"] = "64" DEFINES["_LARGEFILE64_SOURCE"] = True DEFINES["_LARGEFILE_SOURCE"] = True @@ -120,8 +116,6 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["UNICODE"] = True DEFINES["USE_AURA"] = "1" DEFINES["WEBRTC_ENABLE_AVX2"] = True - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FIX"] = "0" - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FLOAT"] = "1" DEFINES["WEBRTC_WIN"] = True DEFINES["WIN32"] = True DEFINES["WIN32_LEAN_AND_MEAN"] = True @@ -161,17 +155,6 @@ if CONFIG["CPU_ARCH"] == "arm": DEFINES["WEBRTC_ARCH_ARM"] = True DEFINES["WEBRTC_ARCH_ARM_V7"] = True DEFINES["WEBRTC_HAS_NEON"] = True - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FIX"] = "1" - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FLOAT"] = "0" - -if CONFIG["CPU_ARCH"] == "ppc64": - - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FIX"] = "0" - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FLOAT"] = "1" - - OS_LIBS += [ - "m" - ] if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Android": @@ -197,11 +180,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux": DEFINES["USE_X11"] = "1" -if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Android": - - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FIX"] = "0" - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FLOAT"] = "1" - if 
CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Android": OS_LIBS += [ @@ -215,29 +193,15 @@ if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Android": "-msse2" ] - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FIX"] = "0" - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FLOAT"] = "1" - OS_LIBS += [ "android_support" ] -if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Android": - - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FIX"] = "0" - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FLOAT"] = "1" - if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Linux": DEFINES["WEBRTC_ENABLE_AVX2"] = True - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FIX"] = "0" - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FLOAT"] = "1" DEFINES["_GNU_SOURCE"] = True - OS_LIBS += [ - "m" - ] - if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Linux": DEFINES["WEBRTC_ENABLE_AVX2"] = True @@ -250,23 +214,11 @@ if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux": ] DEFINES["WEBRTC_ENABLE_AVX2"] = True - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FIX"] = "0" - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FLOAT"] = "1" DEFINES["_GNU_SOURCE"] = True - OS_LIBS += [ - "m" - ] - if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Linux": DEFINES["WEBRTC_ENABLE_AVX2"] = True - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FIX"] = "0" - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FLOAT"] = "1" DEFINES["_GNU_SOURCE"] = True - OS_LIBS += [ - "m" - ] - Library("builtin_audio_decoder_factory_gn") diff --git a/third_party/libwebrtc/api/audio_codecs/builtin_audio_encoder_factory.cc b/third_party/libwebrtc/api/audio_codecs/builtin_audio_encoder_factory.cc index 530d64b2bacc..4546a2eaee37 100644 --- a/third_party/libwebrtc/api/audio_codecs/builtin_audio_encoder_factory.cc +++ b/third_party/libwebrtc/api/audio_codecs/builtin_audio_encoder_factory.cc @@ -20,7 +20,6 @@ #if WEBRTC_USE_BUILTIN_ILBC #include "api/audio_codecs/ilbc/audio_encoder_ilbc.h" // nogncheck #endif -#include "api/audio_codecs/isac/audio_encoder_isac.h" #if WEBRTC_USE_BUILTIN_OPUS #include "api/audio_codecs/opus/audio_encoder_multi_channel_opus.h" #include "api/audio_codecs/opus/audio_encoder_opus.h" // nogncheck @@ -63,7 +62,7 @@ rtc::scoped_refptr CreateBuiltinAudioEncoderFactory() { AudioEncoderOpus, NotAdvertised, #endif - AudioEncoderIsac, AudioEncoderG722, + AudioEncoderG722, #if WEBRTC_USE_BUILTIN_ILBC AudioEncoderIlbc, diff --git a/third_party/libwebrtc/api/audio_codecs/builtin_audio_encoder_factory_gn/moz.build b/third_party/libwebrtc/api/audio_codecs/builtin_audio_encoder_factory_gn/moz.build index a2c0e3eb322b..dc8555f2f55e 100644 --- a/third_party/libwebrtc/api/audio_codecs/builtin_audio_encoder_factory_gn/moz.build +++ b/third_party/libwebrtc/api/audio_codecs/builtin_audio_encoder_factory_gn/moz.build @@ -69,8 +69,6 @@ if CONFIG["OS_TARGET"] == "Darwin": DEFINES["WEBRTC_ENABLE_AVX2"] = True DEFINES["WEBRTC_MAC"] = True DEFINES["WEBRTC_POSIX"] = True - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FIX"] = "0" - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FLOAT"] = "1" DEFINES["_LIBCPP_HAS_NO_ALIGNED_ALLOCATION"] = True DEFINES["__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES"] = "0" DEFINES["__STDC_CONSTANT_MACROS"] = True @@ -103,8 +101,6 @@ if CONFIG["OS_TARGET"] == "OpenBSD": DEFINES["WEBRTC_BSD"] = True DEFINES["WEBRTC_ENABLE_AVX2"] = True DEFINES["WEBRTC_POSIX"] = True - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FIX"] = "0" - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FLOAT"] = "1" DEFINES["_FILE_OFFSET_BITS"] = "64" DEFINES["_LARGEFILE64_SOURCE"] = True DEFINES["_LARGEFILE_SOURCE"] = True @@ -120,8 +116,6 @@ if CONFIG["OS_TARGET"] 
== "WINNT": DEFINES["UNICODE"] = True DEFINES["USE_AURA"] = "1" DEFINES["WEBRTC_ENABLE_AVX2"] = True - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FIX"] = "0" - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FLOAT"] = "1" DEFINES["WEBRTC_WIN"] = True DEFINES["WIN32"] = True DEFINES["WIN32_LEAN_AND_MEAN"] = True @@ -161,17 +155,6 @@ if CONFIG["CPU_ARCH"] == "arm": DEFINES["WEBRTC_ARCH_ARM"] = True DEFINES["WEBRTC_ARCH_ARM_V7"] = True DEFINES["WEBRTC_HAS_NEON"] = True - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FIX"] = "1" - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FLOAT"] = "0" - -if CONFIG["CPU_ARCH"] == "ppc64": - - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FIX"] = "0" - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FLOAT"] = "1" - - OS_LIBS += [ - "m" - ] if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Android": @@ -197,11 +180,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux": DEFINES["USE_X11"] = "1" -if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Android": - - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FIX"] = "0" - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FLOAT"] = "1" - if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Android": OS_LIBS += [ @@ -215,29 +193,15 @@ if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Android": "-msse2" ] - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FIX"] = "0" - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FLOAT"] = "1" - OS_LIBS += [ "android_support" ] -if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Android": - - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FIX"] = "0" - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FLOAT"] = "1" - if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Linux": DEFINES["WEBRTC_ENABLE_AVX2"] = True - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FIX"] = "0" - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FLOAT"] = "1" DEFINES["_GNU_SOURCE"] = True - OS_LIBS += [ - "m" - ] - if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Linux": DEFINES["WEBRTC_ENABLE_AVX2"] = True @@ -250,23 +214,11 @@ if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux": ] DEFINES["WEBRTC_ENABLE_AVX2"] = True - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FIX"] = "0" - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FLOAT"] = "1" DEFINES["_GNU_SOURCE"] = True - OS_LIBS += [ - "m" - ] - if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Linux": DEFINES["WEBRTC_ENABLE_AVX2"] = True - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FIX"] = "0" - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FLOAT"] = "1" DEFINES["_GNU_SOURCE"] = True - OS_LIBS += [ - "m" - ] - Library("builtin_audio_encoder_factory_gn") diff --git a/third_party/libwebrtc/api/audio_codecs/isac/BUILD.gn b/third_party/libwebrtc/api/audio_codecs/isac/BUILD.gn deleted file mode 100644 index 95f38284389c..000000000000 --- a/third_party/libwebrtc/api/audio_codecs/isac/BUILD.gn +++ /dev/null @@ -1,135 +0,0 @@ -# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. -# -# Use of this source code is governed by a BSD-style license -# that can be found in the LICENSE file in the root of the source -# tree. An additional intellectual property rights grant can be found -# in the file PATENTS. All contributing project authors may -# be found in the AUTHORS file in the root of the source tree. - -import("../../../webrtc.gni") -if (is_android) { - import("//build/config/android/config.gni") - import("//build/config/android/rules.gni") -} - -# The targets with _fix and _float suffixes unconditionally use the -# fixed-point and floating-point iSAC implementations, respectively. -# The targets without suffixes pick one of the implementations based -# on cleverly chosen criteria. 
- -rtc_source_set("audio_encoder_isac") { - visibility = [ "*" ] - poisonous = [ "audio_codecs" ] - public = [ "audio_encoder_isac.h" ] - public_configs = [ ":isac_config" ] - if (target_cpu == "arm") { - deps = [ ":audio_encoder_isac_fix" ] - } else { - deps = [ ":audio_encoder_isac_float" ] - } -} - -rtc_source_set("audio_decoder_isac") { - visibility = [ "*" ] - poisonous = [ "audio_codecs" ] - public = [ "audio_decoder_isac.h" ] - public_configs = [ ":isac_config" ] - if (target_cpu == "arm") { - deps = [ ":audio_decoder_isac_fix" ] - } else { - deps = [ ":audio_decoder_isac_float" ] - } -} - -config("isac_config") { - visibility = [ ":*" ] - if (target_cpu == "arm") { - defines = [ - "WEBRTC_USE_BUILTIN_ISAC_FIX=1", - "WEBRTC_USE_BUILTIN_ISAC_FLOAT=0", - ] - } else { - defines = [ - "WEBRTC_USE_BUILTIN_ISAC_FIX=0", - "WEBRTC_USE_BUILTIN_ISAC_FLOAT=1", - ] - } -} - -rtc_library("audio_encoder_isac_fix") { - visibility = [ "*" ] - poisonous = [ "audio_codecs" ] - sources = [ - "audio_encoder_isac_fix.cc", - "audio_encoder_isac_fix.h", - ] - deps = [ - "..:audio_codecs_api", - "../../../api:field_trials_view", - "../../../modules/audio_coding:isac_fix", - "../../../rtc_base:stringutils", - "../../../rtc_base/system:rtc_export", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] -} - -rtc_library("audio_decoder_isac_fix") { - visibility = [ "*" ] - poisonous = [ "audio_codecs" ] - sources = [ - "audio_decoder_isac_fix.cc", - "audio_decoder_isac_fix.h", - ] - deps = [ - "..:audio_codecs_api", - "../../../api:field_trials_view", - "../../../modules/audio_coding:isac_fix", - "../../../rtc_base/system:rtc_export", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] -} - -rtc_library("audio_encoder_isac_float") { - visibility = [ "*" ] - poisonous = [ "audio_codecs" ] - sources = [ - "audio_encoder_isac_float.cc", - "audio_encoder_isac_float.h", - ] - deps = [ - "..:audio_codecs_api", - "../../../api:field_trials_view", - "../../../modules/audio_coding:isac", - "../../../rtc_base:stringutils", - "../../../rtc_base/system:rtc_export", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] -} - -rtc_library("audio_decoder_isac_float") { - visibility = [ "*" ] - poisonous = [ "audio_codecs" ] - sources = [ - "audio_decoder_isac_float.cc", - "audio_decoder_isac_float.h", - ] - deps = [ - "..:audio_codecs_api", - "../../../api:field_trials_view", - "../../../modules/audio_coding:isac", - "../../../rtc_base/system:rtc_export", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] -} diff --git a/third_party/libwebrtc/api/audio_codecs/isac/audio_decoder_isac.h b/third_party/libwebrtc/api/audio_codecs/isac/audio_decoder_isac.h deleted file mode 100644 index f4e9331282be..000000000000 --- a/third_party/libwebrtc/api/audio_codecs/isac/audio_decoder_isac.h +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef API_AUDIO_CODECS_ISAC_AUDIO_DECODER_ISAC_H_ -#define API_AUDIO_CODECS_ISAC_AUDIO_DECODER_ISAC_H_ - -#if WEBRTC_USE_BUILTIN_ISAC_FIX && !WEBRTC_USE_BUILTIN_ISAC_FLOAT -#include "api/audio_codecs/isac/audio_decoder_isac_fix.h" // nogncheck -#elif WEBRTC_USE_BUILTIN_ISAC_FLOAT && !WEBRTC_USE_BUILTIN_ISAC_FIX -#include "api/audio_codecs/isac/audio_decoder_isac_float.h" // nogncheck -#else -#error "Must choose either fix or float" -#endif - -namespace webrtc { - -#if WEBRTC_USE_BUILTIN_ISAC_FIX -using AudioDecoderIsac = AudioDecoderIsacFix; -#elif WEBRTC_USE_BUILTIN_ISAC_FLOAT -using AudioDecoderIsac = AudioDecoderIsacFloat; -#endif - -} // namespace webrtc - -#endif // API_AUDIO_CODECS_ISAC_AUDIO_DECODER_ISAC_H_ diff --git a/third_party/libwebrtc/api/audio_codecs/isac/audio_decoder_isac_fix.cc b/third_party/libwebrtc/api/audio_codecs/isac/audio_decoder_isac_fix.cc deleted file mode 100644 index b3ab91da4717..000000000000 --- a/third_party/libwebrtc/api/audio_codecs/isac/audio_decoder_isac_fix.cc +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "api/audio_codecs/isac/audio_decoder_isac_fix.h" - -#include - -#include "absl/strings/match.h" -#include "modules/audio_coding/codecs/isac/fix/include/audio_decoder_isacfix.h" - -namespace webrtc { - -absl::optional AudioDecoderIsacFix::SdpToConfig( - const SdpAudioFormat& format) { - if (absl::EqualsIgnoreCase(format.name, "ISAC") && - format.clockrate_hz == 16000 && format.num_channels == 1) { - return Config(); - } - return absl::nullopt; -} - -void AudioDecoderIsacFix::AppendSupportedDecoders( - std::vector* specs) { - specs->push_back({{"ISAC", 16000, 1}, {16000, 1, 32000, 10000, 32000}}); -} - -std::unique_ptr AudioDecoderIsacFix::MakeAudioDecoder( - Config config, - absl::optional /*codec_pair_id*/, - const FieldTrialsView* field_trials) { - AudioDecoderIsacFixImpl::Config c; - c.sample_rate_hz = 16000; - return std::make_unique(c); -} - -} // namespace webrtc diff --git a/third_party/libwebrtc/api/audio_codecs/isac/audio_decoder_isac_fix.h b/third_party/libwebrtc/api/audio_codecs/isac/audio_decoder_isac_fix.h deleted file mode 100644 index 8f61d9ab0ee2..000000000000 --- a/third_party/libwebrtc/api/audio_codecs/isac/audio_decoder_isac_fix.h +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef API_AUDIO_CODECS_ISAC_AUDIO_DECODER_ISAC_FIX_H_ -#define API_AUDIO_CODECS_ISAC_AUDIO_DECODER_ISAC_FIX_H_ - -#include -#include - -#include "absl/types/optional.h" -#include "api/audio_codecs/audio_codec_pair_id.h" -#include "api/audio_codecs/audio_decoder.h" -#include "api/audio_codecs/audio_format.h" -#include "api/field_trials_view.h" -#include "rtc_base/system/rtc_export.h" - -namespace webrtc { - -// iSAC decoder API (fixed-point implementation) for use as a template -// parameter to CreateAudioDecoderFactory<...>(). -struct RTC_EXPORT AudioDecoderIsacFix { - struct Config {}; // Empty---no config values needed! - static absl::optional SdpToConfig(const SdpAudioFormat& audio_format); - static void AppendSupportedDecoders(std::vector* specs); - static std::unique_ptr MakeAudioDecoder( - Config config, - absl::optional codec_pair_id = absl::nullopt, - const FieldTrialsView* field_trials = nullptr); -}; - -} // namespace webrtc - -#endif // API_AUDIO_CODECS_ISAC_AUDIO_DECODER_ISAC_FIX_H_ diff --git a/third_party/libwebrtc/api/audio_codecs/isac/audio_decoder_isac_fix_gn/moz.build b/third_party/libwebrtc/api/audio_codecs/isac/audio_decoder_isac_fix_gn/moz.build deleted file mode 100644 index 7b4fc60b35e7..000000000000 --- a/third_party/libwebrtc/api/audio_codecs/isac/audio_decoder_isac_fix_gn/moz.build +++ /dev/null @@ -1,94 +0,0 @@ -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - - - ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ### - ### DO NOT edit it by hand. ### - -COMPILE_FLAGS["OS_INCLUDES"] = [] -AllowCompilerWarnings() - -CXXFLAGS += [ - "-mfpu=neon" -] - -DEFINES["ABSL_ALLOCATOR_NOTHROW"] = "1" -DEFINES["RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY"] = True -DEFINES["RTC_ENABLE_VP9"] = True -DEFINES["WEBRTC_ARCH_ARM"] = True -DEFINES["WEBRTC_ARCH_ARM_V7"] = True -DEFINES["WEBRTC_ENABLE_AVX2"] = True -DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0" -DEFINES["WEBRTC_HAS_NEON"] = True -DEFINES["WEBRTC_LIBRARY_IMPL"] = True -DEFINES["WEBRTC_LINUX"] = True -DEFINES["WEBRTC_MOZILLA_BUILD"] = True -DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0" -DEFINES["WEBRTC_POSIX"] = True -DEFINES["WEBRTC_STRICT_FIELD_TRIALS"] = "0" -DEFINES["_GNU_SOURCE"] = True -DEFINES["__STDC_CONSTANT_MACROS"] = True -DEFINES["__STDC_FORMAT_MACROS"] = True - -FINAL_LIBRARY = "webrtc" - - -LOCAL_INCLUDES += [ - "!/ipc/ipdl/_ipdlheaders", - "!/third_party/libwebrtc/gen", - "/ipc/chromium/src", - "/third_party/libwebrtc/", - "/third_party/libwebrtc/third_party/abseil-cpp/", - "/tools/profiler/public" -] - -UNIFIED_SOURCES += [ - "/third_party/libwebrtc/api/audio_codecs/isac/audio_decoder_isac_fix.cc" -] - -if not CONFIG["MOZ_DEBUG"]: - - DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0" - DEFINES["NDEBUG"] = True - DEFINES["NVALGRIND"] = True - -if CONFIG["MOZ_DEBUG"] == "1": - - DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1" - DEFINES["_DEBUG"] = True - -if CONFIG["OS_TARGET"] == "Android": - - DEFINES["ANDROID"] = True - DEFINES["ANDROID_NDK_VERSION_ROLL"] = "r22_1" - DEFINES["HAVE_SYS_UIO_H"] = True - DEFINES["WEBRTC_ANDROID"] = True - DEFINES["WEBRTC_ANDROID_OPENSLES"] = True - - OS_LIBS += [ - "android_support", - "log", - "unwind" - ] - -if CONFIG["OS_TARGET"] == "Linux": - - DEFINES["USE_AURA"] = "1" - DEFINES["USE_GLIB"] = "1" - DEFINES["USE_NSS_CERTS"] = "1" - DEFINES["USE_OZONE"] = "1" - DEFINES["USE_UDEV"] = True - 
DEFINES["_FILE_OFFSET_BITS"] = "64" - DEFINES["_LARGEFILE64_SOURCE"] = True - DEFINES["_LARGEFILE_SOURCE"] = True - - OS_LIBS += [ - "rt" - ] - -if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux": - - DEFINES["USE_X11"] = "1" - -Library("audio_decoder_isac_fix_gn") diff --git a/third_party/libwebrtc/api/audio_codecs/isac/audio_decoder_isac_float.cc b/third_party/libwebrtc/api/audio_codecs/isac/audio_decoder_isac_float.cc deleted file mode 100644 index 98f672b46858..000000000000 --- a/third_party/libwebrtc/api/audio_codecs/isac/audio_decoder_isac_float.cc +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "api/audio_codecs/isac/audio_decoder_isac_float.h" - -#include - -#include "absl/strings/match.h" -#include "modules/audio_coding/codecs/isac/main/include/audio_decoder_isac.h" - -namespace webrtc { - -absl::optional -AudioDecoderIsacFloat::SdpToConfig(const SdpAudioFormat& format) { - if (absl::EqualsIgnoreCase(format.name, "ISAC") && - (format.clockrate_hz == 16000 || format.clockrate_hz == 32000) && - format.num_channels == 1) { - Config config; - config.sample_rate_hz = format.clockrate_hz; - if (!config.IsOk()) { - RTC_DCHECK_NOTREACHED(); - return absl::nullopt; - } - return config; - } else { - return absl::nullopt; - } -} - -void AudioDecoderIsacFloat::AppendSupportedDecoders( - std::vector* specs) { - specs->push_back({{"ISAC", 16000, 1}, {16000, 1, 32000, 10000, 32000}}); - specs->push_back({{"ISAC", 32000, 1}, {32000, 1, 56000, 10000, 56000}}); -} - -std::unique_ptr AudioDecoderIsacFloat::MakeAudioDecoder( - Config config, - absl::optional /*codec_pair_id*/, - const FieldTrialsView* field_trials) { - AudioDecoderIsacFloatImpl::Config c; - c.sample_rate_hz = config.sample_rate_hz; - if (!config.IsOk()) { - RTC_DCHECK_NOTREACHED(); - return nullptr; - } - return std::make_unique(c); -} - -} // namespace webrtc diff --git a/third_party/libwebrtc/api/audio_codecs/isac/audio_decoder_isac_float.h b/third_party/libwebrtc/api/audio_codecs/isac/audio_decoder_isac_float.h deleted file mode 100644 index 864c6b999f68..000000000000 --- a/third_party/libwebrtc/api/audio_codecs/isac/audio_decoder_isac_float.h +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef API_AUDIO_CODECS_ISAC_AUDIO_DECODER_ISAC_FLOAT_H_ -#define API_AUDIO_CODECS_ISAC_AUDIO_DECODER_ISAC_FLOAT_H_ - -#include -#include - -#include "absl/types/optional.h" -#include "api/audio_codecs/audio_codec_pair_id.h" -#include "api/audio_codecs/audio_decoder.h" -#include "api/audio_codecs/audio_format.h" -#include "api/field_trials_view.h" -#include "rtc_base/system/rtc_export.h" - -namespace webrtc { - -// iSAC decoder API (floating-point implementation) for use as a template -// parameter to CreateAudioDecoderFactory<...>(). 
-struct RTC_EXPORT AudioDecoderIsacFloat { - struct Config { - bool IsOk() const { - return sample_rate_hz == 16000 || sample_rate_hz == 32000; - } - int sample_rate_hz = 16000; - }; - static absl::optional SdpToConfig(const SdpAudioFormat& audio_format); - static void AppendSupportedDecoders(std::vector* specs); - static std::unique_ptr MakeAudioDecoder( - Config config, - absl::optional codec_pair_id = absl::nullopt, - const FieldTrialsView* field_trials = nullptr); -}; - -} // namespace webrtc - -#endif // API_AUDIO_CODECS_ISAC_AUDIO_DECODER_ISAC_FLOAT_H_ diff --git a/third_party/libwebrtc/api/audio_codecs/isac/audio_decoder_isac_gn/moz.build b/third_party/libwebrtc/api/audio_codecs/isac/audio_decoder_isac_gn/moz.build deleted file mode 100644 index e5ec655bdb46..000000000000 --- a/third_party/libwebrtc/api/audio_codecs/isac/audio_decoder_isac_gn/moz.build +++ /dev/null @@ -1,254 +0,0 @@ -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - - - ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ### - ### DO NOT edit it by hand. ### - -COMPILE_FLAGS["OS_INCLUDES"] = [] -AllowCompilerWarnings() - -DEFINES["ABSL_ALLOCATOR_NOTHROW"] = "1" -DEFINES["RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY"] = True -DEFINES["RTC_ENABLE_VP9"] = True -DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0" -DEFINES["WEBRTC_LIBRARY_IMPL"] = True -DEFINES["WEBRTC_MOZILLA_BUILD"] = True -DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0" -DEFINES["WEBRTC_STRICT_FIELD_TRIALS"] = "0" - -FINAL_LIBRARY = "webrtc" - - -LOCAL_INCLUDES += [ - "!/ipc/ipdl/_ipdlheaders", - "!/third_party/libwebrtc/gen", - "/ipc/chromium/src", - "/third_party/libwebrtc/", - "/third_party/libwebrtc/third_party/abseil-cpp/", - "/tools/profiler/public" -] - -if not CONFIG["MOZ_DEBUG"]: - - DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0" - DEFINES["NDEBUG"] = True - DEFINES["NVALGRIND"] = True - -if CONFIG["MOZ_DEBUG"] == "1": - - DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1" - -if CONFIG["OS_TARGET"] == "Android": - - DEFINES["ANDROID"] = True - DEFINES["ANDROID_NDK_VERSION_ROLL"] = "r22_1" - DEFINES["HAVE_SYS_UIO_H"] = True - DEFINES["WEBRTC_ANDROID"] = True - DEFINES["WEBRTC_ANDROID_OPENSLES"] = True - DEFINES["WEBRTC_ENABLE_AVX2"] = True - DEFINES["WEBRTC_LINUX"] = True - DEFINES["WEBRTC_POSIX"] = True - DEFINES["_GNU_SOURCE"] = True - DEFINES["__STDC_CONSTANT_MACROS"] = True - DEFINES["__STDC_FORMAT_MACROS"] = True - - OS_LIBS += [ - "log" - ] - -if CONFIG["OS_TARGET"] == "Darwin": - - DEFINES["WEBRTC_ENABLE_AVX2"] = True - DEFINES["WEBRTC_MAC"] = True - DEFINES["WEBRTC_POSIX"] = True - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FIX"] = "0" - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FLOAT"] = "1" - DEFINES["_LIBCPP_HAS_NO_ALIGNED_ALLOCATION"] = True - DEFINES["__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES"] = "0" - DEFINES["__STDC_CONSTANT_MACROS"] = True - DEFINES["__STDC_FORMAT_MACROS"] = True - -if CONFIG["OS_TARGET"] == "Linux": - - DEFINES["USE_AURA"] = "1" - DEFINES["USE_GLIB"] = "1" - DEFINES["USE_NSS_CERTS"] = "1" - DEFINES["USE_OZONE"] = "1" - DEFINES["USE_UDEV"] = True - DEFINES["WEBRTC_LINUX"] = True - DEFINES["WEBRTC_POSIX"] = True - DEFINES["_FILE_OFFSET_BITS"] = "64" - DEFINES["_LARGEFILE64_SOURCE"] = True - DEFINES["_LARGEFILE_SOURCE"] = True - DEFINES["__STDC_CONSTANT_MACROS"] = True - DEFINES["__STDC_FORMAT_MACROS"] = True - - OS_LIBS += [ - "rt" - ] - -if CONFIG["OS_TARGET"] == "OpenBSD": 
- - DEFINES["USE_GLIB"] = "1" - DEFINES["USE_OZONE"] = "1" - DEFINES["USE_X11"] = "1" - DEFINES["WEBRTC_BSD"] = True - DEFINES["WEBRTC_ENABLE_AVX2"] = True - DEFINES["WEBRTC_POSIX"] = True - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FIX"] = "0" - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FLOAT"] = "1" - DEFINES["_FILE_OFFSET_BITS"] = "64" - DEFINES["_LARGEFILE64_SOURCE"] = True - DEFINES["_LARGEFILE_SOURCE"] = True - DEFINES["__STDC_CONSTANT_MACROS"] = True - DEFINES["__STDC_FORMAT_MACROS"] = True - -if CONFIG["OS_TARGET"] == "WINNT": - - DEFINES["CERT_CHAIN_PARA_HAS_EXTRA_FIELDS"] = True - DEFINES["NOMINMAX"] = True - DEFINES["NTDDI_VERSION"] = "0x0A000000" - DEFINES["PSAPI_VERSION"] = "2" - DEFINES["UNICODE"] = True - DEFINES["USE_AURA"] = "1" - DEFINES["WEBRTC_ENABLE_AVX2"] = True - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FIX"] = "0" - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FLOAT"] = "1" - DEFINES["WEBRTC_WIN"] = True - DEFINES["WIN32"] = True - DEFINES["WIN32_LEAN_AND_MEAN"] = True - DEFINES["WINAPI_FAMILY"] = "WINAPI_FAMILY_DESKTOP_APP" - DEFINES["WINVER"] = "0x0A00" - DEFINES["_ATL_NO_OPENGL"] = True - DEFINES["_CRT_RAND_S"] = True - DEFINES["_CRT_SECURE_NO_DEPRECATE"] = True - DEFINES["_ENABLE_EXTENDED_ALIGNED_STORAGE"] = True - DEFINES["_HAS_EXCEPTIONS"] = "0" - DEFINES["_HAS_NODISCARD"] = True - DEFINES["_SCL_SECURE_NO_DEPRECATE"] = True - DEFINES["_SECURE_ATL"] = True - DEFINES["_UNICODE"] = True - DEFINES["_WIN32_WINNT"] = "0x0A00" - DEFINES["_WINDOWS"] = True - DEFINES["__STD_C"] = True - - OS_LIBS += [ - "crypt32", - "iphlpapi", - "secur32", - "winmm" - ] - -if CONFIG["CPU_ARCH"] == "aarch64": - - DEFINES["WEBRTC_ARCH_ARM64"] = True - DEFINES["WEBRTC_HAS_NEON"] = True - -if CONFIG["CPU_ARCH"] == "arm": - - DEFINES["WEBRTC_ARCH_ARM"] = True - DEFINES["WEBRTC_ARCH_ARM_V7"] = True - DEFINES["WEBRTC_HAS_NEON"] = True - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FIX"] = "1" - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FLOAT"] = "0" - -if CONFIG["CPU_ARCH"] == "ppc64": - - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FIX"] = "0" - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FLOAT"] = "1" - - OS_LIBS += [ - "m" - ] - -if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Android": - - DEFINES["_DEBUG"] = True - -if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Darwin": - - DEFINES["_DEBUG"] = True - -if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Linux": - - DEFINES["_DEBUG"] = True - -if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "OpenBSD": - - DEFINES["_DEBUG"] = True - -if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "WINNT": - - DEFINES["_HAS_ITERATOR_DEBUGGING"] = "0" - -if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux": - - DEFINES["USE_X11"] = "1" - -if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Android": - - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FIX"] = "0" - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FLOAT"] = "1" - -if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Android": - - OS_LIBS += [ - "android_support", - "unwind" - ] - -if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Android": - - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FIX"] = "0" - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FLOAT"] = "1" - - OS_LIBS += [ - "android_support" - ] - -if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Android": - - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FIX"] = "0" - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FLOAT"] = "1" - -if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Linux": - - DEFINES["WEBRTC_ENABLE_AVX2"] = True - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FIX"] = "0" - 
DEFINES["WEBRTC_USE_BUILTIN_ISAC_FLOAT"] = "1" - DEFINES["_GNU_SOURCE"] = True - - OS_LIBS += [ - "m" - ] - -if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Linux": - - DEFINES["WEBRTC_ENABLE_AVX2"] = True - DEFINES["_GNU_SOURCE"] = True - -if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux": - - DEFINES["WEBRTC_ENABLE_AVX2"] = True - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FIX"] = "0" - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FLOAT"] = "1" - DEFINES["_GNU_SOURCE"] = True - - OS_LIBS += [ - "m" - ] - -if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Linux": - - DEFINES["WEBRTC_ENABLE_AVX2"] = True - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FIX"] = "0" - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FLOAT"] = "1" - DEFINES["_GNU_SOURCE"] = True - - OS_LIBS += [ - "m" - ] - -Library("audio_decoder_isac_gn") diff --git a/third_party/libwebrtc/api/audio_codecs/isac/audio_encoder_isac.h b/third_party/libwebrtc/api/audio_codecs/isac/audio_encoder_isac.h deleted file mode 100644 index 3cb0a1f053d5..000000000000 --- a/third_party/libwebrtc/api/audio_codecs/isac/audio_encoder_isac.h +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef API_AUDIO_CODECS_ISAC_AUDIO_ENCODER_ISAC_H_ -#define API_AUDIO_CODECS_ISAC_AUDIO_ENCODER_ISAC_H_ - -#if WEBRTC_USE_BUILTIN_ISAC_FIX && !WEBRTC_USE_BUILTIN_ISAC_FLOAT -#include "api/audio_codecs/isac/audio_encoder_isac_fix.h" // nogncheck -#elif WEBRTC_USE_BUILTIN_ISAC_FLOAT && !WEBRTC_USE_BUILTIN_ISAC_FIX -#include "api/audio_codecs/isac/audio_encoder_isac_float.h" // nogncheck -#else -#error "Must choose either fix or float" -#endif - -namespace webrtc { - -#if WEBRTC_USE_BUILTIN_ISAC_FIX -using AudioEncoderIsac = AudioEncoderIsacFix; -#elif WEBRTC_USE_BUILTIN_ISAC_FLOAT -using AudioEncoderIsac = AudioEncoderIsacFloat; -#endif - -} // namespace webrtc - -#endif // API_AUDIO_CODECS_ISAC_AUDIO_ENCODER_ISAC_H_ diff --git a/third_party/libwebrtc/api/audio_codecs/isac/audio_encoder_isac_fix.cc b/third_party/libwebrtc/api/audio_codecs/isac/audio_encoder_isac_fix.cc deleted file mode 100644 index 39603775a490..000000000000 --- a/third_party/libwebrtc/api/audio_codecs/isac/audio_encoder_isac_fix.cc +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "api/audio_codecs/isac/audio_encoder_isac_fix.h" - -#include - -#include "absl/strings/match.h" -#include "modules/audio_coding/codecs/isac/fix/include/audio_encoder_isacfix.h" -#include "rtc_base/string_to_number.h" - -namespace webrtc { - -absl::optional AudioEncoderIsacFix::SdpToConfig( - const SdpAudioFormat& format) { - if (absl::EqualsIgnoreCase(format.name, "ISAC") && - format.clockrate_hz == 16000 && format.num_channels == 1) { - Config config; - const auto ptime_iter = format.parameters.find("ptime"); - if (ptime_iter != format.parameters.end()) { - const auto ptime = rtc::StringToNumber(ptime_iter->second); - if (ptime && *ptime >= 60) { - config.frame_size_ms = 60; - } - } - if (!config.IsOk()) { - RTC_DCHECK_NOTREACHED(); - return absl::nullopt; - } - return config; - } else { - return absl::nullopt; - } -} - -void AudioEncoderIsacFix::AppendSupportedEncoders( - std::vector* specs) { - const SdpAudioFormat fmt = {"ISAC", 16000, 1}; - const AudioCodecInfo info = QueryAudioEncoder(*SdpToConfig(fmt)); - specs->push_back({fmt, info}); -} - -AudioCodecInfo AudioEncoderIsacFix::QueryAudioEncoder( - AudioEncoderIsacFix::Config config) { - RTC_DCHECK(config.IsOk()); - return {16000, 1, 32000, 10000, 32000}; -} - -std::unique_ptr AudioEncoderIsacFix::MakeAudioEncoder( - AudioEncoderIsacFix::Config config, - int payload_type, - absl::optional /*codec_pair_id*/, - const FieldTrialsView* field_trials) { - AudioEncoderIsacFixImpl::Config c; - c.frame_size_ms = config.frame_size_ms; - c.bit_rate = config.bit_rate; - c.payload_type = payload_type; - if (!config.IsOk()) { - RTC_DCHECK_NOTREACHED(); - return nullptr; - } - return std::make_unique(c); -} - -} // namespace webrtc diff --git a/third_party/libwebrtc/api/audio_codecs/isac/audio_encoder_isac_fix.h b/third_party/libwebrtc/api/audio_codecs/isac/audio_encoder_isac_fix.h deleted file mode 100644 index de0f1d130876..000000000000 --- a/third_party/libwebrtc/api/audio_codecs/isac/audio_encoder_isac_fix.h +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef API_AUDIO_CODECS_ISAC_AUDIO_ENCODER_ISAC_FIX_H_ -#define API_AUDIO_CODECS_ISAC_AUDIO_ENCODER_ISAC_FIX_H_ - -#include -#include - -#include "absl/types/optional.h" -#include "api/audio_codecs/audio_codec_pair_id.h" -#include "api/audio_codecs/audio_encoder.h" -#include "api/audio_codecs/audio_format.h" -#include "api/field_trials_view.h" -#include "rtc_base/system/rtc_export.h" - -namespace webrtc { - -// iSAC encoder API (fixed-point implementation) for use as a template -// parameter to CreateAudioEncoderFactory<...>(). -struct RTC_EXPORT AudioEncoderIsacFix { - struct Config { - bool IsOk() const { - if (frame_size_ms != 30 && frame_size_ms != 60) { - return false; - } - if (bit_rate < 10000 || bit_rate > 32000) { - return false; - } - return true; - } - int frame_size_ms = 30; - int bit_rate = 32000; // Limit on short-term average bit rate, in bits/s. 
- }; - static absl::optional SdpToConfig(const SdpAudioFormat& audio_format); - static void AppendSupportedEncoders(std::vector* specs); - static AudioCodecInfo QueryAudioEncoder(Config config); - static std::unique_ptr MakeAudioEncoder( - Config config, - int payload_type, - absl::optional codec_pair_id = absl::nullopt, - const FieldTrialsView* field_trials = nullptr); -}; - -} // namespace webrtc - -#endif // API_AUDIO_CODECS_ISAC_AUDIO_ENCODER_ISAC_FIX_H_ diff --git a/third_party/libwebrtc/api/audio_codecs/isac/audio_encoder_isac_fix_gn/moz.build b/third_party/libwebrtc/api/audio_codecs/isac/audio_encoder_isac_fix_gn/moz.build deleted file mode 100644 index 4c917d45692c..000000000000 --- a/third_party/libwebrtc/api/audio_codecs/isac/audio_encoder_isac_fix_gn/moz.build +++ /dev/null @@ -1,94 +0,0 @@ -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - - - ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ### - ### DO NOT edit it by hand. ### - -COMPILE_FLAGS["OS_INCLUDES"] = [] -AllowCompilerWarnings() - -CXXFLAGS += [ - "-mfpu=neon" -] - -DEFINES["ABSL_ALLOCATOR_NOTHROW"] = "1" -DEFINES["RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY"] = True -DEFINES["RTC_ENABLE_VP9"] = True -DEFINES["WEBRTC_ARCH_ARM"] = True -DEFINES["WEBRTC_ARCH_ARM_V7"] = True -DEFINES["WEBRTC_ENABLE_AVX2"] = True -DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0" -DEFINES["WEBRTC_HAS_NEON"] = True -DEFINES["WEBRTC_LIBRARY_IMPL"] = True -DEFINES["WEBRTC_LINUX"] = True -DEFINES["WEBRTC_MOZILLA_BUILD"] = True -DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0" -DEFINES["WEBRTC_POSIX"] = True -DEFINES["WEBRTC_STRICT_FIELD_TRIALS"] = "0" -DEFINES["_GNU_SOURCE"] = True -DEFINES["__STDC_CONSTANT_MACROS"] = True -DEFINES["__STDC_FORMAT_MACROS"] = True - -FINAL_LIBRARY = "webrtc" - - -LOCAL_INCLUDES += [ - "!/ipc/ipdl/_ipdlheaders", - "!/third_party/libwebrtc/gen", - "/ipc/chromium/src", - "/third_party/libwebrtc/", - "/third_party/libwebrtc/third_party/abseil-cpp/", - "/tools/profiler/public" -] - -UNIFIED_SOURCES += [ - "/third_party/libwebrtc/api/audio_codecs/isac/audio_encoder_isac_fix.cc" -] - -if not CONFIG["MOZ_DEBUG"]: - - DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0" - DEFINES["NDEBUG"] = True - DEFINES["NVALGRIND"] = True - -if CONFIG["MOZ_DEBUG"] == "1": - - DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1" - DEFINES["_DEBUG"] = True - -if CONFIG["OS_TARGET"] == "Android": - - DEFINES["ANDROID"] = True - DEFINES["ANDROID_NDK_VERSION_ROLL"] = "r22_1" - DEFINES["HAVE_SYS_UIO_H"] = True - DEFINES["WEBRTC_ANDROID"] = True - DEFINES["WEBRTC_ANDROID_OPENSLES"] = True - - OS_LIBS += [ - "android_support", - "log", - "unwind" - ] - -if CONFIG["OS_TARGET"] == "Linux": - - DEFINES["USE_AURA"] = "1" - DEFINES["USE_GLIB"] = "1" - DEFINES["USE_NSS_CERTS"] = "1" - DEFINES["USE_OZONE"] = "1" - DEFINES["USE_UDEV"] = True - DEFINES["_FILE_OFFSET_BITS"] = "64" - DEFINES["_LARGEFILE64_SOURCE"] = True - DEFINES["_LARGEFILE_SOURCE"] = True - - OS_LIBS += [ - "rt" - ] - -if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux": - - DEFINES["USE_X11"] = "1" - -Library("audio_encoder_isac_fix_gn") diff --git a/third_party/libwebrtc/api/audio_codecs/isac/audio_encoder_isac_float.cc b/third_party/libwebrtc/api/audio_codecs/isac/audio_encoder_isac_float.cc deleted file mode 100644 index e3e50080fa9d..000000000000 --- a/third_party/libwebrtc/api/audio_codecs/isac/audio_encoder_isac_float.cc 
+++ /dev/null @@ -1,85 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "api/audio_codecs/isac/audio_encoder_isac_float.h" - -#include - -#include "absl/strings/match.h" -#include "modules/audio_coding/codecs/isac/main/include/audio_encoder_isac.h" -#include "rtc_base/string_to_number.h" - -namespace webrtc { - -absl::optional -AudioEncoderIsacFloat::SdpToConfig(const SdpAudioFormat& format) { - if (absl::EqualsIgnoreCase(format.name, "ISAC") && - (format.clockrate_hz == 16000 || format.clockrate_hz == 32000) && - format.num_channels == 1) { - Config config; - config.sample_rate_hz = format.clockrate_hz; - config.bit_rate = format.clockrate_hz == 16000 ? 32000 : 56000; - if (config.sample_rate_hz == 16000) { - // For sample rate 16 kHz, optionally use 60 ms frames, instead of the - // default 30 ms. - const auto ptime_iter = format.parameters.find("ptime"); - if (ptime_iter != format.parameters.end()) { - const auto ptime = rtc::StringToNumber(ptime_iter->second); - if (ptime && *ptime >= 60) { - config.frame_size_ms = 60; - } - } - } - if (!config.IsOk()) { - RTC_DCHECK_NOTREACHED(); - return absl::nullopt; - } - return config; - } else { - return absl::nullopt; - } -} - -void AudioEncoderIsacFloat::AppendSupportedEncoders( - std::vector* specs) { - for (int sample_rate_hz : {16000, 32000}) { - const SdpAudioFormat fmt = {"ISAC", sample_rate_hz, 1}; - const AudioCodecInfo info = QueryAudioEncoder(*SdpToConfig(fmt)); - specs->push_back({fmt, info}); - } -} - -AudioCodecInfo AudioEncoderIsacFloat::QueryAudioEncoder( - const AudioEncoderIsacFloat::Config& config) { - RTC_DCHECK(config.IsOk()); - constexpr int min_bitrate = 10000; - const int max_bitrate = config.sample_rate_hz == 16000 ? 32000 : 56000; - const int default_bitrate = max_bitrate; - return {config.sample_rate_hz, 1, default_bitrate, min_bitrate, max_bitrate}; -} - -std::unique_ptr AudioEncoderIsacFloat::MakeAudioEncoder( - const AudioEncoderIsacFloat::Config& config, - int payload_type, - absl::optional /*codec_pair_id*/, - const FieldTrialsView* field_trials) { - AudioEncoderIsacFloatImpl::Config c; - c.payload_type = payload_type; - c.sample_rate_hz = config.sample_rate_hz; - c.frame_size_ms = config.frame_size_ms; - c.bit_rate = config.bit_rate; - if (!config.IsOk()) { - RTC_DCHECK_NOTREACHED(); - return nullptr; - } - return std::make_unique(c); -} - -} // namespace webrtc diff --git a/third_party/libwebrtc/api/audio_codecs/isac/audio_encoder_isac_float.h b/third_party/libwebrtc/api/audio_codecs/isac/audio_encoder_isac_float.h deleted file mode 100644 index d031d76db13d..000000000000 --- a/third_party/libwebrtc/api/audio_codecs/isac/audio_encoder_isac_float.h +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef API_AUDIO_CODECS_ISAC_AUDIO_ENCODER_ISAC_FLOAT_H_ -#define API_AUDIO_CODECS_ISAC_AUDIO_ENCODER_ISAC_FLOAT_H_ - -#include -#include - -#include "absl/types/optional.h" -#include "api/audio_codecs/audio_codec_pair_id.h" -#include "api/audio_codecs/audio_encoder.h" -#include "api/audio_codecs/audio_format.h" -#include "api/field_trials_view.h" -#include "rtc_base/system/rtc_export.h" - -namespace webrtc { - -// iSAC encoder API (floating-point implementation) for use as a template -// parameter to CreateAudioEncoderFactory<...>(). -struct RTC_EXPORT AudioEncoderIsacFloat { - struct Config { - bool IsOk() const { - switch (sample_rate_hz) { - case 16000: - if (frame_size_ms != 30 && frame_size_ms != 60) { - return false; - } - if (bit_rate < 10000 || bit_rate > 32000) { - return false; - } - return true; - case 32000: - if (frame_size_ms != 30) { - return false; - } - if (bit_rate < 10000 || bit_rate > 56000) { - return false; - } - return true; - default: - return false; - } - } - int sample_rate_hz = 16000; - int frame_size_ms = 30; - int bit_rate = 32000; // Limit on short-term average bit rate, in bits/s. - }; - static absl::optional SdpToConfig(const SdpAudioFormat& audio_format); - static void AppendSupportedEncoders(std::vector* specs); - static AudioCodecInfo QueryAudioEncoder(const Config& config); - static std::unique_ptr MakeAudioEncoder( - const Config& config, - int payload_type, - absl::optional codec_pair_id = absl::nullopt, - const FieldTrialsView* field_trials = nullptr); -}; - -} // namespace webrtc - -#endif // API_AUDIO_CODECS_ISAC_AUDIO_ENCODER_ISAC_FLOAT_H_ diff --git a/third_party/libwebrtc/api/audio_codecs/isac/audio_encoder_isac_float_gn/moz.build b/third_party/libwebrtc/api/audio_codecs/isac/audio_encoder_isac_float_gn/moz.build deleted file mode 100644 index 4084e894dba6..000000000000 --- a/third_party/libwebrtc/api/audio_codecs/isac/audio_encoder_isac_float_gn/moz.build +++ /dev/null @@ -1,201 +0,0 @@ -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - - - ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ### - ### DO NOT edit it by hand. 
### - -COMPILE_FLAGS["OS_INCLUDES"] = [] -AllowCompilerWarnings() - -DEFINES["ABSL_ALLOCATOR_NOTHROW"] = "1" -DEFINES["RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY"] = True -DEFINES["RTC_ENABLE_VP9"] = True -DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0" -DEFINES["WEBRTC_LIBRARY_IMPL"] = True -DEFINES["WEBRTC_MOZILLA_BUILD"] = True -DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0" -DEFINES["WEBRTC_STRICT_FIELD_TRIALS"] = "0" - -FINAL_LIBRARY = "webrtc" - - -LOCAL_INCLUDES += [ - "!/ipc/ipdl/_ipdlheaders", - "!/third_party/libwebrtc/gen", - "/ipc/chromium/src", - "/third_party/libwebrtc/", - "/third_party/libwebrtc/third_party/abseil-cpp/", - "/tools/profiler/public" -] - -UNIFIED_SOURCES += [ - "/third_party/libwebrtc/api/audio_codecs/isac/audio_encoder_isac_float.cc" -] - -if not CONFIG["MOZ_DEBUG"]: - - DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0" - DEFINES["NDEBUG"] = True - DEFINES["NVALGRIND"] = True - -if CONFIG["MOZ_DEBUG"] == "1": - - DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1" - -if CONFIG["OS_TARGET"] == "Android": - - DEFINES["ANDROID"] = True - DEFINES["ANDROID_NDK_VERSION_ROLL"] = "r22_1" - DEFINES["HAVE_SYS_UIO_H"] = True - DEFINES["WEBRTC_ANDROID"] = True - DEFINES["WEBRTC_ANDROID_OPENSLES"] = True - DEFINES["WEBRTC_ENABLE_AVX2"] = True - DEFINES["WEBRTC_LINUX"] = True - DEFINES["WEBRTC_POSIX"] = True - DEFINES["_GNU_SOURCE"] = True - DEFINES["__STDC_CONSTANT_MACROS"] = True - DEFINES["__STDC_FORMAT_MACROS"] = True - - OS_LIBS += [ - "log" - ] - -if CONFIG["OS_TARGET"] == "Darwin": - - DEFINES["WEBRTC_ENABLE_AVX2"] = True - DEFINES["WEBRTC_MAC"] = True - DEFINES["WEBRTC_POSIX"] = True - DEFINES["_LIBCPP_HAS_NO_ALIGNED_ALLOCATION"] = True - DEFINES["__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES"] = "0" - DEFINES["__STDC_CONSTANT_MACROS"] = True - DEFINES["__STDC_FORMAT_MACROS"] = True - -if CONFIG["OS_TARGET"] == "Linux": - - DEFINES["USE_AURA"] = "1" - DEFINES["USE_GLIB"] = "1" - DEFINES["USE_NSS_CERTS"] = "1" - DEFINES["USE_OZONE"] = "1" - DEFINES["USE_UDEV"] = True - DEFINES["WEBRTC_LINUX"] = True - DEFINES["WEBRTC_POSIX"] = True - DEFINES["_FILE_OFFSET_BITS"] = "64" - DEFINES["_LARGEFILE64_SOURCE"] = True - DEFINES["_LARGEFILE_SOURCE"] = True - DEFINES["__STDC_CONSTANT_MACROS"] = True - DEFINES["__STDC_FORMAT_MACROS"] = True - - OS_LIBS += [ - "m", - "rt" - ] - -if CONFIG["OS_TARGET"] == "OpenBSD": - - DEFINES["USE_GLIB"] = "1" - DEFINES["USE_OZONE"] = "1" - DEFINES["USE_X11"] = "1" - DEFINES["WEBRTC_BSD"] = True - DEFINES["WEBRTC_ENABLE_AVX2"] = True - DEFINES["WEBRTC_POSIX"] = True - DEFINES["_FILE_OFFSET_BITS"] = "64" - DEFINES["_LARGEFILE64_SOURCE"] = True - DEFINES["_LARGEFILE_SOURCE"] = True - DEFINES["__STDC_CONSTANT_MACROS"] = True - DEFINES["__STDC_FORMAT_MACROS"] = True - -if CONFIG["OS_TARGET"] == "WINNT": - - DEFINES["CERT_CHAIN_PARA_HAS_EXTRA_FIELDS"] = True - DEFINES["NOMINMAX"] = True - DEFINES["NTDDI_VERSION"] = "0x0A000000" - DEFINES["PSAPI_VERSION"] = "2" - DEFINES["UNICODE"] = True - DEFINES["USE_AURA"] = "1" - DEFINES["WEBRTC_ENABLE_AVX2"] = True - DEFINES["WEBRTC_WIN"] = True - DEFINES["WIN32"] = True - DEFINES["WIN32_LEAN_AND_MEAN"] = True - DEFINES["WINAPI_FAMILY"] = "WINAPI_FAMILY_DESKTOP_APP" - DEFINES["WINVER"] = "0x0A00" - DEFINES["_ATL_NO_OPENGL"] = True - DEFINES["_CRT_RAND_S"] = True - DEFINES["_CRT_SECURE_NO_DEPRECATE"] = True - DEFINES["_ENABLE_EXTENDED_ALIGNED_STORAGE"] = True - DEFINES["_HAS_EXCEPTIONS"] = "0" - DEFINES["_HAS_NODISCARD"] = True - DEFINES["_SCL_SECURE_NO_DEPRECATE"] = True - DEFINES["_SECURE_ATL"] = True - 
DEFINES["_UNICODE"] = True - DEFINES["_WIN32_WINNT"] = "0x0A00" - DEFINES["_WINDOWS"] = True - DEFINES["__STD_C"] = True - - OS_LIBS += [ - "crypt32", - "iphlpapi", - "secur32", - "winmm" - ] - -if CONFIG["CPU_ARCH"] == "aarch64": - - DEFINES["WEBRTC_ARCH_ARM64"] = True - DEFINES["WEBRTC_HAS_NEON"] = True - -if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Android": - - DEFINES["_DEBUG"] = True - -if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Darwin": - - DEFINES["_DEBUG"] = True - -if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Linux": - - DEFINES["_DEBUG"] = True - -if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "OpenBSD": - - DEFINES["_DEBUG"] = True - -if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "WINNT": - - DEFINES["_HAS_ITERATOR_DEBUGGING"] = "0" - -if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux": - - DEFINES["USE_X11"] = "1" - -if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Android": - - CXXFLAGS += [ - "-msse2" - ] - - OS_LIBS += [ - "android_support" - ] - -if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Linux": - - DEFINES["WEBRTC_ENABLE_AVX2"] = True - DEFINES["_GNU_SOURCE"] = True - -if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux": - - CXXFLAGS += [ - "-msse2" - ] - - DEFINES["WEBRTC_ENABLE_AVX2"] = True - DEFINES["_GNU_SOURCE"] = True - -if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Linux": - - DEFINES["WEBRTC_ENABLE_AVX2"] = True - DEFINES["_GNU_SOURCE"] = True - -Library("audio_encoder_isac_float_gn") diff --git a/third_party/libwebrtc/api/audio_codecs/isac/audio_encoder_isac_gn/moz.build b/third_party/libwebrtc/api/audio_codecs/isac/audio_encoder_isac_gn/moz.build deleted file mode 100644 index dc7555b1c92f..000000000000 --- a/third_party/libwebrtc/api/audio_codecs/isac/audio_encoder_isac_gn/moz.build +++ /dev/null @@ -1,254 +0,0 @@ -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - - - ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ### - ### DO NOT edit it by hand. 
### - -COMPILE_FLAGS["OS_INCLUDES"] = [] -AllowCompilerWarnings() - -DEFINES["ABSL_ALLOCATOR_NOTHROW"] = "1" -DEFINES["RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY"] = True -DEFINES["RTC_ENABLE_VP9"] = True -DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0" -DEFINES["WEBRTC_LIBRARY_IMPL"] = True -DEFINES["WEBRTC_MOZILLA_BUILD"] = True -DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0" -DEFINES["WEBRTC_STRICT_FIELD_TRIALS"] = "0" - -FINAL_LIBRARY = "webrtc" - - -LOCAL_INCLUDES += [ - "!/ipc/ipdl/_ipdlheaders", - "!/third_party/libwebrtc/gen", - "/ipc/chromium/src", - "/third_party/libwebrtc/", - "/third_party/libwebrtc/third_party/abseil-cpp/", - "/tools/profiler/public" -] - -if not CONFIG["MOZ_DEBUG"]: - - DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0" - DEFINES["NDEBUG"] = True - DEFINES["NVALGRIND"] = True - -if CONFIG["MOZ_DEBUG"] == "1": - - DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1" - -if CONFIG["OS_TARGET"] == "Android": - - DEFINES["ANDROID"] = True - DEFINES["ANDROID_NDK_VERSION_ROLL"] = "r22_1" - DEFINES["HAVE_SYS_UIO_H"] = True - DEFINES["WEBRTC_ANDROID"] = True - DEFINES["WEBRTC_ANDROID_OPENSLES"] = True - DEFINES["WEBRTC_ENABLE_AVX2"] = True - DEFINES["WEBRTC_LINUX"] = True - DEFINES["WEBRTC_POSIX"] = True - DEFINES["_GNU_SOURCE"] = True - DEFINES["__STDC_CONSTANT_MACROS"] = True - DEFINES["__STDC_FORMAT_MACROS"] = True - - OS_LIBS += [ - "log" - ] - -if CONFIG["OS_TARGET"] == "Darwin": - - DEFINES["WEBRTC_ENABLE_AVX2"] = True - DEFINES["WEBRTC_MAC"] = True - DEFINES["WEBRTC_POSIX"] = True - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FIX"] = "0" - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FLOAT"] = "1" - DEFINES["_LIBCPP_HAS_NO_ALIGNED_ALLOCATION"] = True - DEFINES["__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES"] = "0" - DEFINES["__STDC_CONSTANT_MACROS"] = True - DEFINES["__STDC_FORMAT_MACROS"] = True - -if CONFIG["OS_TARGET"] == "Linux": - - DEFINES["USE_AURA"] = "1" - DEFINES["USE_GLIB"] = "1" - DEFINES["USE_NSS_CERTS"] = "1" - DEFINES["USE_OZONE"] = "1" - DEFINES["USE_UDEV"] = True - DEFINES["WEBRTC_LINUX"] = True - DEFINES["WEBRTC_POSIX"] = True - DEFINES["_FILE_OFFSET_BITS"] = "64" - DEFINES["_LARGEFILE64_SOURCE"] = True - DEFINES["_LARGEFILE_SOURCE"] = True - DEFINES["__STDC_CONSTANT_MACROS"] = True - DEFINES["__STDC_FORMAT_MACROS"] = True - - OS_LIBS += [ - "rt" - ] - -if CONFIG["OS_TARGET"] == "OpenBSD": - - DEFINES["USE_GLIB"] = "1" - DEFINES["USE_OZONE"] = "1" - DEFINES["USE_X11"] = "1" - DEFINES["WEBRTC_BSD"] = True - DEFINES["WEBRTC_ENABLE_AVX2"] = True - DEFINES["WEBRTC_POSIX"] = True - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FIX"] = "0" - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FLOAT"] = "1" - DEFINES["_FILE_OFFSET_BITS"] = "64" - DEFINES["_LARGEFILE64_SOURCE"] = True - DEFINES["_LARGEFILE_SOURCE"] = True - DEFINES["__STDC_CONSTANT_MACROS"] = True - DEFINES["__STDC_FORMAT_MACROS"] = True - -if CONFIG["OS_TARGET"] == "WINNT": - - DEFINES["CERT_CHAIN_PARA_HAS_EXTRA_FIELDS"] = True - DEFINES["NOMINMAX"] = True - DEFINES["NTDDI_VERSION"] = "0x0A000000" - DEFINES["PSAPI_VERSION"] = "2" - DEFINES["UNICODE"] = True - DEFINES["USE_AURA"] = "1" - DEFINES["WEBRTC_ENABLE_AVX2"] = True - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FIX"] = "0" - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FLOAT"] = "1" - DEFINES["WEBRTC_WIN"] = True - DEFINES["WIN32"] = True - DEFINES["WIN32_LEAN_AND_MEAN"] = True - DEFINES["WINAPI_FAMILY"] = "WINAPI_FAMILY_DESKTOP_APP" - DEFINES["WINVER"] = "0x0A00" - DEFINES["_ATL_NO_OPENGL"] = True - DEFINES["_CRT_RAND_S"] = True - DEFINES["_CRT_SECURE_NO_DEPRECATE"] = True - 
DEFINES["_ENABLE_EXTENDED_ALIGNED_STORAGE"] = True - DEFINES["_HAS_EXCEPTIONS"] = "0" - DEFINES["_HAS_NODISCARD"] = True - DEFINES["_SCL_SECURE_NO_DEPRECATE"] = True - DEFINES["_SECURE_ATL"] = True - DEFINES["_UNICODE"] = True - DEFINES["_WIN32_WINNT"] = "0x0A00" - DEFINES["_WINDOWS"] = True - DEFINES["__STD_C"] = True - - OS_LIBS += [ - "crypt32", - "iphlpapi", - "secur32", - "winmm" - ] - -if CONFIG["CPU_ARCH"] == "aarch64": - - DEFINES["WEBRTC_ARCH_ARM64"] = True - DEFINES["WEBRTC_HAS_NEON"] = True - -if CONFIG["CPU_ARCH"] == "arm": - - DEFINES["WEBRTC_ARCH_ARM"] = True - DEFINES["WEBRTC_ARCH_ARM_V7"] = True - DEFINES["WEBRTC_HAS_NEON"] = True - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FIX"] = "1" - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FLOAT"] = "0" - -if CONFIG["CPU_ARCH"] == "ppc64": - - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FIX"] = "0" - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FLOAT"] = "1" - - OS_LIBS += [ - "m" - ] - -if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Android": - - DEFINES["_DEBUG"] = True - -if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Darwin": - - DEFINES["_DEBUG"] = True - -if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Linux": - - DEFINES["_DEBUG"] = True - -if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "OpenBSD": - - DEFINES["_DEBUG"] = True - -if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "WINNT": - - DEFINES["_HAS_ITERATOR_DEBUGGING"] = "0" - -if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux": - - DEFINES["USE_X11"] = "1" - -if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Android": - - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FIX"] = "0" - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FLOAT"] = "1" - -if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Android": - - OS_LIBS += [ - "android_support", - "unwind" - ] - -if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Android": - - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FIX"] = "0" - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FLOAT"] = "1" - - OS_LIBS += [ - "android_support" - ] - -if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Android": - - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FIX"] = "0" - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FLOAT"] = "1" - -if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Linux": - - DEFINES["WEBRTC_ENABLE_AVX2"] = True - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FIX"] = "0" - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FLOAT"] = "1" - DEFINES["_GNU_SOURCE"] = True - - OS_LIBS += [ - "m" - ] - -if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Linux": - - DEFINES["WEBRTC_ENABLE_AVX2"] = True - DEFINES["_GNU_SOURCE"] = True - -if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux": - - DEFINES["WEBRTC_ENABLE_AVX2"] = True - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FIX"] = "0" - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FLOAT"] = "1" - DEFINES["_GNU_SOURCE"] = True - - OS_LIBS += [ - "m" - ] - -if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Linux": - - DEFINES["WEBRTC_ENABLE_AVX2"] = True - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FIX"] = "0" - DEFINES["WEBRTC_USE_BUILTIN_ISAC_FLOAT"] = "1" - DEFINES["_GNU_SOURCE"] = True - - OS_LIBS += [ - "m" - ] - -Library("audio_encoder_isac_gn") diff --git a/third_party/libwebrtc/api/audio_codecs/test/BUILD.gn b/third_party/libwebrtc/api/audio_codecs/test/BUILD.gn index 12df649febd7..89f5fef1eafe 100644 --- a/third_party/libwebrtc/api/audio_codecs/test/BUILD.gn +++ b/third_party/libwebrtc/api/audio_codecs/test/BUILD.gn @@ -32,10 +32,6 @@ if (rtc_include_tests) { "../g722:audio_encoder_g722", 
"../ilbc:audio_decoder_ilbc", "../ilbc:audio_encoder_ilbc", - "../isac:audio_decoder_isac_fix", - "../isac:audio_decoder_isac_float", - "../isac:audio_encoder_isac_fix", - "../isac:audio_encoder_isac_float", "../opus:audio_decoder_opus", "../opus:audio_encoder_opus", ] diff --git a/third_party/libwebrtc/api/audio_codecs/test/audio_decoder_factory_template_unittest.cc b/third_party/libwebrtc/api/audio_codecs/test/audio_decoder_factory_template_unittest.cc index 3662f3b76dde..0b18cf934a99 100644 --- a/third_party/libwebrtc/api/audio_codecs/test/audio_decoder_factory_template_unittest.cc +++ b/third_party/libwebrtc/api/audio_codecs/test/audio_decoder_factory_template_unittest.cc @@ -16,8 +16,6 @@ #include "api/audio_codecs/g711/audio_decoder_g711.h" #include "api/audio_codecs/g722/audio_decoder_g722.h" #include "api/audio_codecs/ilbc/audio_decoder_ilbc.h" -#include "api/audio_codecs/isac/audio_decoder_isac_fix.h" -#include "api/audio_codecs/isac/audio_decoder_isac_float.h" #include "api/audio_codecs/opus/audio_decoder_opus.h" #include "test/gmock.h" #include "test/gtest.h" @@ -182,41 +180,6 @@ TEST(AudioDecoderFactoryTemplateTest, Ilbc) { EXPECT_EQ(8000, dec->SampleRateHz()); } -TEST(AudioDecoderFactoryTemplateTest, IsacFix) { - auto factory = CreateAudioDecoderFactory(); - EXPECT_THAT(factory->GetSupportedDecoders(), - ::testing::ElementsAre(AudioCodecSpec{ - {"ISAC", 16000, 1}, {16000, 1, 32000, 10000, 32000}})); - EXPECT_FALSE(factory->IsSupportedDecoder({"isac", 16000, 2})); - EXPECT_TRUE(factory->IsSupportedDecoder({"isac", 16000, 1})); - EXPECT_FALSE(factory->IsSupportedDecoder({"isac", 32000, 1})); - EXPECT_EQ(nullptr, - factory->MakeAudioDecoder({"isac", 8000, 1}, absl::nullopt)); - auto dec = factory->MakeAudioDecoder({"isac", 16000, 1}, absl::nullopt); - ASSERT_NE(nullptr, dec); - EXPECT_EQ(16000, dec->SampleRateHz()); -} - -TEST(AudioDecoderFactoryTemplateTest, IsacFloat) { - auto factory = CreateAudioDecoderFactory(); - EXPECT_THAT( - factory->GetSupportedDecoders(), - ::testing::ElementsAre( - AudioCodecSpec{{"ISAC", 16000, 1}, {16000, 1, 32000, 10000, 32000}}, - AudioCodecSpec{{"ISAC", 32000, 1}, {32000, 1, 56000, 10000, 56000}})); - EXPECT_FALSE(factory->IsSupportedDecoder({"isac", 16000, 2})); - EXPECT_TRUE(factory->IsSupportedDecoder({"isac", 16000, 1})); - EXPECT_TRUE(factory->IsSupportedDecoder({"isac", 32000, 1})); - EXPECT_EQ(nullptr, - factory->MakeAudioDecoder({"isac", 8000, 1}, absl::nullopt)); - auto dec1 = factory->MakeAudioDecoder({"isac", 16000, 1}, absl::nullopt); - ASSERT_NE(nullptr, dec1); - EXPECT_EQ(16000, dec1->SampleRateHz()); - auto dec2 = factory->MakeAudioDecoder({"isac", 32000, 1}, absl::nullopt); - ASSERT_NE(nullptr, dec2); - EXPECT_EQ(32000, dec2->SampleRateHz()); -} - TEST(AudioDecoderFactoryTemplateTest, L16) { auto factory = CreateAudioDecoderFactory(); EXPECT_THAT( diff --git a/third_party/libwebrtc/api/audio_codecs/test/audio_encoder_factory_template_unittest.cc b/third_party/libwebrtc/api/audio_codecs/test/audio_encoder_factory_template_unittest.cc index 67b688358337..dbba38772448 100644 --- a/third_party/libwebrtc/api/audio_codecs/test/audio_encoder_factory_template_unittest.cc +++ b/third_party/libwebrtc/api/audio_codecs/test/audio_encoder_factory_template_unittest.cc @@ -16,8 +16,6 @@ #include "api/audio_codecs/g711/audio_encoder_g711.h" #include "api/audio_codecs/g722/audio_encoder_g722.h" #include "api/audio_codecs/ilbc/audio_encoder_ilbc.h" -#include "api/audio_codecs/isac/audio_encoder_isac_fix.h" -#include 
"api/audio_codecs/isac/audio_encoder_isac_float.h" #include "api/audio_codecs/opus/audio_encoder_opus.h" #include "test/gmock.h" #include "test/gtest.h" @@ -180,49 +178,6 @@ TEST(AudioEncoderFactoryTemplateTest, Ilbc) { EXPECT_EQ(8000, enc->SampleRateHz()); } -TEST(AudioEncoderFactoryTemplateTest, IsacFix) { - auto factory = CreateAudioEncoderFactory(); - EXPECT_THAT(factory->GetSupportedEncoders(), - ::testing::ElementsAre(AudioCodecSpec{ - {"ISAC", 16000, 1}, {16000, 1, 32000, 10000, 32000}})); - EXPECT_EQ(absl::nullopt, factory->QueryAudioEncoder({"isac", 16000, 2})); - EXPECT_EQ(AudioCodecInfo(16000, 1, 32000, 10000, 32000), - factory->QueryAudioEncoder({"isac", 16000, 1})); - EXPECT_EQ(absl::nullopt, factory->QueryAudioEncoder({"isac", 32000, 1})); - EXPECT_EQ(nullptr, - factory->MakeAudioEncoder(17, {"isac", 8000, 1}, absl::nullopt)); - auto enc1 = factory->MakeAudioEncoder(17, {"isac", 16000, 1}, absl::nullopt); - ASSERT_NE(nullptr, enc1); - EXPECT_EQ(16000, enc1->SampleRateHz()); - EXPECT_EQ(3u, enc1->Num10MsFramesInNextPacket()); - auto enc2 = factory->MakeAudioEncoder( - 17, {"isac", 16000, 1, {{"ptime", "60"}}}, absl::nullopt); - ASSERT_NE(nullptr, enc2); - EXPECT_EQ(6u, enc2->Num10MsFramesInNextPacket()); -} - -TEST(AudioEncoderFactoryTemplateTest, IsacFloat) { - auto factory = CreateAudioEncoderFactory(); - EXPECT_THAT( - factory->GetSupportedEncoders(), - ::testing::ElementsAre( - AudioCodecSpec{{"ISAC", 16000, 1}, {16000, 1, 32000, 10000, 32000}}, - AudioCodecSpec{{"ISAC", 32000, 1}, {32000, 1, 56000, 10000, 56000}})); - EXPECT_EQ(absl::nullopt, factory->QueryAudioEncoder({"isac", 16000, 2})); - EXPECT_EQ(AudioCodecInfo(16000, 1, 32000, 10000, 32000), - factory->QueryAudioEncoder({"isac", 16000, 1})); - EXPECT_EQ(AudioCodecInfo(32000, 1, 56000, 10000, 56000), - factory->QueryAudioEncoder({"isac", 32000, 1})); - EXPECT_EQ(nullptr, - factory->MakeAudioEncoder(17, {"isac", 8000, 1}, absl::nullopt)); - auto enc1 = factory->MakeAudioEncoder(17, {"isac", 16000, 1}, absl::nullopt); - ASSERT_NE(nullptr, enc1); - EXPECT_EQ(16000, enc1->SampleRateHz()); - auto enc2 = factory->MakeAudioEncoder(17, {"isac", 32000, 1}, absl::nullopt); - ASSERT_NE(nullptr, enc2); - EXPECT_EQ(32000, enc2->SampleRateHz()); -} - TEST(AudioEncoderFactoryTemplateTest, L16) { auto factory = CreateAudioEncoderFactory(); EXPECT_THAT( diff --git a/third_party/libwebrtc/api/candidate.h b/third_party/libwebrtc/api/candidate.h index b8aaebc14a47..281f2f01a537 100644 --- a/third_party/libwebrtc/api/candidate.h +++ b/third_party/libwebrtc/api/candidate.h @@ -25,6 +25,10 @@ namespace cricket { +// TURN servers are limited to 32 in accordance with +// https://w3c.github.io/webrtc-pc/#dom-rtcconfiguration-iceservers +static constexpr size_t kMaxTurnServers = 32; + // Candidate for ICE based connection discovery. // TODO(phoglund): remove things in here that are not needed in the public API. 
diff --git a/third_party/libwebrtc/api/field_trials_unittest.cc b/third_party/libwebrtc/api/field_trials_unittest.cc index 2616a2e30c02..804b52a81861 100644 --- a/third_party/libwebrtc/api/field_trials_unittest.cc +++ b/third_party/libwebrtc/api/field_trials_unittest.cc @@ -17,6 +17,7 @@ #include "api/transport/field_trial_based_config.h" #include "rtc_base/containers/flat_set.h" #include "system_wrappers/include/field_trial.h" +#include "test/field_trial.h" #include "test/gmock.h" #include "test/gtest.h" @@ -28,11 +29,11 @@ namespace webrtc { namespace { using ::testing::NotNull; -using ::webrtc::field_trial::InitFieldTrialsFromString; -using ::webrtc::field_trial::ScopedGlobalFieldTrialsForTesting; +using ::webrtc::field_trial::FieldTrialsAllowedInScopeForTesting; +using ::webrtc::test::ScopedFieldTrials; TEST(FieldTrialsTest, EmptyStringHasNoEffect) { - ScopedGlobalFieldTrialsForTesting g({"MyCoolTrial"}); + FieldTrialsAllowedInScopeForTesting k({"MyCoolTrial"}); FieldTrials f(""); f.RegisterKeysForTesting({"MyCoolTrial"}); @@ -53,9 +54,8 @@ TEST(FieldTrialsTest, EnabledDisabledMustBeFirstInValue) { } TEST(FieldTrialsTest, FieldTrialsDoesNotReadGlobalString) { - ScopedGlobalFieldTrialsForTesting g({"MyCoolTrial", "MyUncoolTrial"}); - static constexpr char s[] = "MyCoolTrial/Enabled/MyUncoolTrial/Disabled/"; - InitFieldTrialsFromString(s); + FieldTrialsAllowedInScopeForTesting k({"MyCoolTrial", "MyUncoolTrial"}); + ScopedFieldTrials g("MyCoolTrial/Enabled/MyUncoolTrial/Disabled/"); FieldTrials f(""); f.RegisterKeysForTesting({"MyCoolTrial", "MyUncoolTrial"}); @@ -64,7 +64,7 @@ TEST(FieldTrialsTest, FieldTrialsDoesNotReadGlobalString) { } TEST(FieldTrialsTest, FieldTrialsWritesGlobalString) { - ScopedGlobalFieldTrialsForTesting g({"MyCoolTrial", "MyUncoolTrial"}); + FieldTrialsAllowedInScopeForTesting k({"MyCoolTrial", "MyUncoolTrial"}); FieldTrials f("MyCoolTrial/Enabled/MyUncoolTrial/Disabled/"); EXPECT_TRUE(webrtc::field_trial::IsEnabled("MyCoolTrial")); EXPECT_TRUE(webrtc::field_trial::IsDisabled("MyUncoolTrial")); @@ -72,7 +72,7 @@ TEST(FieldTrialsTest, FieldTrialsWritesGlobalString) { TEST(FieldTrialsTest, FieldTrialsRestoresGlobalStringAfterDestruction) { static constexpr char s[] = "SomeString/Enabled/"; - InitFieldTrialsFromString(s); + ScopedFieldTrials g(s); { FieldTrials f("SomeOtherString/Enabled/"); EXPECT_STREQ(webrtc::field_trial::GetFieldTrialString(), @@ -95,7 +95,7 @@ TEST(FieldTrialsTest, FieldTrialsSupportsSeparateInstances) { } TEST(FieldTrialsTest, NonGlobalFieldTrialsInstanceDoesNotModifyGlobalString) { - ScopedGlobalFieldTrialsForTesting g({"SomeString"}); + FieldTrialsAllowedInScopeForTesting k({"SomeString"}); std::unique_ptr f = FieldTrials::CreateNoGlobal("SomeString/Enabled/"); ASSERT_THAT(f, NotNull()); @@ -123,7 +123,7 @@ TEST(FieldTrialsTest, NonGlobalFieldTrialsSupportSimultaneousInstances) { } TEST(FieldTrialsTest, GlobalAndNonGlobalFieldTrialsAreDisjoint) { - ScopedGlobalFieldTrialsForTesting g({"SomeString", "SomeOtherString"}); + FieldTrialsAllowedInScopeForTesting k({"SomeString", "SomeOtherString"}); FieldTrials f1("SomeString/Enabled/"); std::unique_ptr f2 = FieldTrials::CreateNoGlobal("SomeOtherString/Enabled/"); @@ -139,9 +139,8 @@ TEST(FieldTrialsTest, GlobalAndNonGlobalFieldTrialsAreDisjoint) { } TEST(FieldTrialsTest, FieldTrialBasedConfigReadsGlobalString) { - ScopedGlobalFieldTrialsForTesting g({"MyCoolTrial", "MyUncoolTrial"}); - static constexpr char s[] = "MyCoolTrial/Enabled/MyUncoolTrial/Disabled/"; - InitFieldTrialsFromString(s); + 
FieldTrialsAllowedInScopeForTesting k({"MyCoolTrial", "MyUncoolTrial"}); + ScopedFieldTrials g("MyCoolTrial/Enabled/MyUncoolTrial/Disabled/"); FieldTrialBasedConfig f; f.RegisterKeysForTesting({"MyCoolTrial", "MyUncoolTrial"}); diff --git a/third_party/libwebrtc/api/frame_transformer_factory.cc b/third_party/libwebrtc/api/frame_transformer_factory.cc new file mode 100644 index 000000000000..af08372e37ff --- /dev/null +++ b/third_party/libwebrtc/api/frame_transformer_factory.cc @@ -0,0 +1,33 @@ +/* + * Copyright 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/frame_transformer_factory.h" + +#include "modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h" + +namespace webrtc { + +std::unique_ptr CreateVideoSenderFrame() { + RTC_CHECK_NOTREACHED(); + return nullptr; +} + +std::unique_ptr CreateVideoReceiverFrame() { + RTC_CHECK_NOTREACHED(); + return nullptr; +} + +std::unique_ptr CloneVideoFrame( + TransformableVideoFrameInterface* original) { + // At the moment, only making sender frames from receiver frames is supported. + return CloneSenderVideoFrame(original); +} + +} // namespace webrtc diff --git a/third_party/libwebrtc/api/frame_transformer_factory.h b/third_party/libwebrtc/api/frame_transformer_factory.h new file mode 100644 index 000000000000..8ba9c292d5c5 --- /dev/null +++ b/third_party/libwebrtc/api/frame_transformer_factory.h @@ -0,0 +1,39 @@ +/* + * Copyright 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_FRAME_TRANSFORMER_FACTORY_H_ +#define API_FRAME_TRANSFORMER_FACTORY_H_ + +#include +#include + +#include "api/frame_transformer_interface.h" +#include "api/scoped_refptr.h" +#include "api/video/encoded_frame.h" +#include "api/video/video_frame_metadata.h" + +// This file contains EXPERIMENTAL functions to create video frames from +// either an old video frame or directly from parameters. +// These functions will be used in Chrome functionality to manipulate +// encoded frames from Javascript. +namespace webrtc { + +// TODO(bugs.webrtc.org/14708): Add the required parameters to these APIs. +std::unique_ptr CreateVideoSenderFrame(); +// TODO(bugs.webrtc.org/14708): Consider whether Receiver frames ever make sense +// to create. +std::unique_ptr CreateVideoReceiverFrame(); +// Creates a new frame with the same metadata as the original. +// The original can be a sender or receiver frame. 
+RTC_EXPORT std::unique_ptr CloneVideoFrame( + TransformableVideoFrameInterface* original); +} // namespace webrtc + +#endif // API_FRAME_TRANSFORMER_FACTORY_H_ diff --git a/third_party/libwebrtc/api/stats_types.cc b/third_party/libwebrtc/api/legacy_stats_types.cc similarity index 99% rename from third_party/libwebrtc/api/stats_types.cc rename to third_party/libwebrtc/api/legacy_stats_types.cc index 61a0b8499dfd..e3b2144eddba 100644 --- a/third_party/libwebrtc/api/stats_types.cc +++ b/third_party/libwebrtc/api/legacy_stats_types.cc @@ -8,7 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "api/stats_types.h" +#include "api/legacy_stats_types.h" #include diff --git a/third_party/libwebrtc/api/stats_types.h b/third_party/libwebrtc/api/legacy_stats_types.h similarity index 99% rename from third_party/libwebrtc/api/stats_types.h rename to third_party/libwebrtc/api/legacy_stats_types.h index d75da46439e4..a62e014834f9 100644 --- a/third_party/libwebrtc/api/stats_types.h +++ b/third_party/libwebrtc/api/legacy_stats_types.h @@ -11,8 +11,8 @@ // This file contains structures used for retrieving statistics from an ongoing // libjingle session. -#ifndef API_STATS_TYPES_H_ -#define API_STATS_TYPES_H_ +#ifndef API_LEGACY_STATS_TYPES_H_ +#define API_LEGACY_STATS_TYPES_H_ #include #include @@ -452,4 +452,4 @@ class StatsCollection { } // namespace webrtc -#endif // API_STATS_TYPES_H_ +#endif // API_LEGACY_STATS_TYPES_H_ diff --git a/third_party/libwebrtc/api/peer_connection_interface.h b/third_party/libwebrtc/api/peer_connection_interface.h index f8689ebc1b0c..1097a1639c97 100644 --- a/third_party/libwebrtc/api/peer_connection_interface.h +++ b/third_party/libwebrtc/api/peer_connection_interface.h @@ -94,6 +94,7 @@ #include "api/field_trials_view.h" #include "api/ice_transport_interface.h" #include "api/jsep.h" +#include "api/legacy_stats_types.h" #include "api/media_stream_interface.h" #include "api/media_types.h" #include "api/metronome/metronome.h" @@ -112,7 +113,6 @@ #include "api/set_local_description_observer_interface.h" #include "api/set_remote_description_observer_interface.h" #include "api/stats/rtc_stats_collector_callback.h" -#include "api/stats_types.h" #include "api/task_queue/task_queue_factory.h" #include "api/transport/bitrate_settings.h" #include "api/transport/enums.h" @@ -426,13 +426,6 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { // default will be used. ////////////////////////////////////////////////////////////////////////// - // If set to true, don't gather IPv6 ICE candidates. - // TODO(https://crbug.com/webrtc/14608): Delete this flag. - union { - bool DEPRECATED_disable_ipv6 = false; - bool ABSL_DEPRECATED("https://crbug.com/webrtc/14608") disable_ipv6; - }; - // If set to true, don't gather IPv6 ICE candidates on Wi-Fi. // Only intended to be used on specific devices. Certain phones disable IPv6 // when the screen is turned off and it would be better to just disable the @@ -695,6 +688,9 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { PortAllocatorConfig port_allocator_config; + // The burst interval of the pacer, see TaskQueuePacedSender constructor. + absl::optional pacer_burst_interval; + // // Don't forget to update operator== if adding something. 
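// Illustrative sketch, not part of the patch: setting the new
// pacer_burst_interval field on an RTCConfiguration. The member is assumed to
// be an absl::optional<webrtc::TimeDelta>, matching the TaskQueuePacedSender
// constructor parameter it feeds; the 40 ms value is an arbitrary example.
#include "api/peer_connection_interface.h"
#include "api/units/time_delta.h"

void ConfigurePacerBurst(
    webrtc::PeerConnectionInterface::RTCConfiguration& config) {
  config.pacer_burst_interval = webrtc::TimeDelta::Millis(40);
}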
// diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/audio_decoder_isacfix.cc b/third_party/libwebrtc/api/rtp_sender_interface.cc similarity index 52% rename from third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/audio_decoder_isacfix.cc rename to third_party/libwebrtc/api/rtp_sender_interface.cc index 21259ee2e2dd..f1ca5c22031d 100644 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/audio_decoder_isacfix.cc +++ b/third_party/libwebrtc/api/rtp_sender_interface.cc @@ -1,5 +1,5 @@ /* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. + * Copyright 2022 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -8,13 +8,15 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "modules/audio_coding/codecs/isac/fix/include/audio_decoder_isacfix.h" +#include "api/rtp_sender_interface.h" -#include "modules/audio_coding/codecs/isac/audio_decoder_isac_t_impl.h" +#include "rtc_base/checks.h" namespace webrtc { -// Explicit instantiation: -template class AudioDecoderIsacT; +void RtpSenderInterface::SetParametersAsync(const RtpParameters& parameters, + SetParametersCallback callback) { + RTC_DCHECK_NOTREACHED() << "Default implementation called"; +} } // namespace webrtc diff --git a/third_party/libwebrtc/api/rtp_sender_interface.h b/third_party/libwebrtc/api/rtp_sender_interface.h index 7e84cd420aac..98ee91b1ccd0 100644 --- a/third_party/libwebrtc/api/rtp_sender_interface.h +++ b/third_party/libwebrtc/api/rtp_sender_interface.h @@ -18,6 +18,7 @@ #include #include +#include "absl/functional/any_invocable.h" #include "api/crypto/frame_encryptor_interface.h" #include "api/dtls_transport_interface.h" #include "api/dtmf_sender_interface.h" @@ -31,6 +32,8 @@ #include "rtc_base/ref_count.h" #include "rtc_base/system/rtc_export.h" +#include "api/rtp_sender_setparameters_callback.h" + namespace webrtc { class RTC_EXPORT RtpSenderInterface : public rtc::RefCountInterface { @@ -79,6 +82,8 @@ class RTC_EXPORT RtpSenderInterface : public rtc::RefCountInterface { // rtpparameters.h // The encodings are in increasing quality order for simulcast. virtual RTCError SetParameters(const RtpParameters& parameters) = 0; + virtual void SetParametersAsync(const RtpParameters& parameters, + SetParametersCallback callback); // Returns null for a video sender. virtual rtc::scoped_refptr GetDtmfSender() const = 0; diff --git a/third_party/libwebrtc/api/rtp_sender_setparameters_callback.cc b/third_party/libwebrtc/api/rtp_sender_setparameters_callback.cc new file mode 100644 index 000000000000..99728ef95ea1 --- /dev/null +++ b/third_party/libwebrtc/api/rtp_sender_setparameters_callback.cc @@ -0,0 +1,27 @@ +/* + * Copyright 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// File added by mozilla, to decouple this from libwebrtc's implementation of +// RTCRtpSender. 
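// Illustrative sketch, not part of the patch: completing the asynchronous
// SetParameters path with the InvokeSetParametersCallback helper added in this
// new file. ApplyParameters and SetParametersAsyncSketch are hypothetical
// names standing in for whatever synchronous work a sender implementation does.
#include "api/rtc_error.h"
#include "api/rtp_parameters.h"
#include "api/rtp_sender_setparameters_callback.h"

webrtc::RTCError ApplyParameters(const webrtc::RtpParameters& parameters);  // hypothetical

void SetParametersAsyncSketch(const webrtc::RtpParameters& parameters,
                              webrtc::SetParametersCallback callback) {
  webrtc::RTCError result = ApplyParameters(parameters);
  // Runs `callback` (if set) with `result` and returns the same error, so
  // synchronous and asynchronous callers can share a single code path.
  webrtc::InvokeSetParametersCallback(callback, result);
}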
+ +#include "api/rtp_sender_setparameters_callback.h" + +namespace webrtc { + +webrtc::RTCError InvokeSetParametersCallback(SetParametersCallback& callback, + RTCError error) { + if (callback) { + std::move(callback)(error); + callback = nullptr; + } + return error; +} + +} // namespace webrtc diff --git a/third_party/libwebrtc/api/rtp_sender_setparameters_callback.h b/third_party/libwebrtc/api/rtp_sender_setparameters_callback.h new file mode 100644 index 000000000000..45194f5acec2 --- /dev/null +++ b/third_party/libwebrtc/api/rtp_sender_setparameters_callback.h @@ -0,0 +1,28 @@ +/* + * Copyright 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// File added by mozilla, to decouple this from libwebrtc's implementation of +// RTCRtpSender. + +#ifndef API_RTP_SENDER_SETPARAMETERS_CALLBACK_H_ +#define API_RTP_SENDER_SETPARAMETERS_CALLBACK_H_ + +#include "api/rtc_error.h" +#include "absl/functional/any_invocable.h" + +namespace webrtc { + +using SetParametersCallback = absl::AnyInvocable; + +webrtc::RTCError InvokeSetParametersCallback(SetParametersCallback& callback, + RTCError error); +} // namespace webrtc + +#endif // API_RTP_SENDER_SETPARAMETERS_CALLBACK_H_ diff --git a/third_party/libwebrtc/modules/audio_coding/isac_common_gn/moz.build b/third_party/libwebrtc/api/rtp_sender_setparameters_callback_gn/moz.build similarity index 95% rename from third_party/libwebrtc/modules/audio_coding/isac_common_gn/moz.build rename to third_party/libwebrtc/api/rtp_sender_setparameters_callback_gn/moz.build index 75fd011e1a4a..2c59bcc25159 100644 --- a/third_party/libwebrtc/modules/audio_coding/isac_common_gn/moz.build +++ b/third_party/libwebrtc/api/rtp_sender_setparameters_callback_gn/moz.build @@ -30,6 +30,10 @@ LOCAL_INCLUDES += [ "/tools/profiler/public" ] +UNIFIED_SOURCES += [ + "/third_party/libwebrtc/api/rtp_sender_setparameters_callback.cc" +] + if not CONFIG["MOZ_DEBUG"]: DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0" @@ -135,6 +139,10 @@ if CONFIG["CPU_ARCH"] == "aarch64": if CONFIG["CPU_ARCH"] == "arm": + CXXFLAGS += [ + "-mfpu=neon" + ] + DEFINES["WEBRTC_ARCH_ARM"] = True DEFINES["WEBRTC_ARCH_ARM_V7"] = True DEFINES["WEBRTC_HAS_NEON"] = True @@ -172,6 +180,10 @@ if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Android": if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Android": + CXXFLAGS += [ + "-msse2" + ] + OS_LIBS += [ "android_support" ] @@ -188,6 +200,10 @@ if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Linux": if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux": + CXXFLAGS += [ + "-msse2" + ] + DEFINES["WEBRTC_ENABLE_AVX2"] = True DEFINES["_GNU_SOURCE"] = True @@ -196,4 +212,4 @@ if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Linux": DEFINES["WEBRTC_ENABLE_AVX2"] = True DEFINES["_GNU_SOURCE"] = True -Library("isac_common_gn") +Library("rtp_sender_setparameters_callback_gn") diff --git a/third_party/libwebrtc/api/stats/rtcstats_objects.h b/third_party/libwebrtc/api/stats/rtcstats_objects.h index 6f8178407fa6..532af53de3d1 100644 --- a/third_party/libwebrtc/api/stats/rtcstats_objects.h +++ b/third_party/libwebrtc/api/stats/rtcstats_objects.h @@ -561,6 +561,7 @@ class RTC_EXPORT 
RTCOutboundRTPStreamStats final : public RTCRTPStreamStats { RTCStatsMember active; RTCRestrictedStatsMember power_efficient_encoder; + RTCStatsMember scalability_mode; }; // https://w3c.github.io/webrtc-stats/#remoteinboundrtpstats-dict* diff --git a/third_party/libwebrtc/api/test/create_network_emulation_manager.cc b/third_party/libwebrtc/api/test/create_network_emulation_manager.cc index 089a2f8a860a..f5d5a1bc88f9 100644 --- a/third_party/libwebrtc/api/test/create_network_emulation_manager.cc +++ b/third_party/libwebrtc/api/test/create_network_emulation_manager.cc @@ -18,8 +18,10 @@ namespace webrtc { std::unique_ptr CreateNetworkEmulationManager( - TimeMode mode) { - return std::make_unique(mode); + TimeMode time_mode, + EmulatedNetworkStatsGatheringMode stats_gathering_mode) { + return std::make_unique( + time_mode, stats_gathering_mode); } } // namespace webrtc diff --git a/third_party/libwebrtc/api/test/create_network_emulation_manager.h b/third_party/libwebrtc/api/test/create_network_emulation_manager.h index f4447437865f..941b2b1c52c3 100644 --- a/third_party/libwebrtc/api/test/create_network_emulation_manager.h +++ b/third_party/libwebrtc/api/test/create_network_emulation_manager.h @@ -19,7 +19,9 @@ namespace webrtc { // Returns a non-null NetworkEmulationManager instance. std::unique_ptr CreateNetworkEmulationManager( - TimeMode mode = TimeMode::kRealTime); + TimeMode time_mode = TimeMode::kRealTime, + EmulatedNetworkStatsGatheringMode stats_gathering_mode = + EmulatedNetworkStatsGatheringMode::kDefault); } // namespace webrtc diff --git a/third_party/libwebrtc/api/test/create_video_codec_tester.cc b/third_party/libwebrtc/api/test/create_video_codec_tester.cc new file mode 100644 index 000000000000..a1efefdb489e --- /dev/null +++ b/third_party/libwebrtc/api/test/create_video_codec_tester.cc @@ -0,0 +1,27 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/test/create_video_codec_tester.h" + +#include +#include + +#include "api/test/video_codec_tester.h" +#include "modules/video_coding/codecs/test/video_codec_tester_impl.h" + +namespace webrtc { +namespace test { + +std::unique_ptr CreateVideoCodecTester() { + return std::make_unique(); +} + +} // namespace test +} // namespace webrtc diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/audio_encoder_isacfix.cc b/third_party/libwebrtc/api/test/create_video_codec_tester.h similarity index 52% rename from third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/audio_encoder_isacfix.cc rename to third_party/libwebrtc/api/test/create_video_codec_tester.h index 0190ab91b60c..c68864ce85a3 100644 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/audio_encoder_isacfix.cc +++ b/third_party/libwebrtc/api/test/create_video_codec_tester.h @@ -1,5 +1,5 @@ /* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -8,13 +8,19 @@ * be found in the AUTHORS file in the root of the source tree. 
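// Illustrative sketch, not part of the patch: reading the new scalability_mode
// member off a stats report. `report` is assumed to come from an
// RTCStatsCollectorCallback delivered by PeerConnectionInterface::GetStats.
#include "api/scoped_refptr.h"
#include "api/stats/rtc_stats_report.h"
#include "api/stats/rtcstats_objects.h"
#include "rtc_base/logging.h"

void LogScalabilityModes(
    const rtc::scoped_refptr<const webrtc::RTCStatsReport>& report) {
  for (const webrtc::RTCOutboundRTPStreamStats* stats :
       report->GetStatsOfType<webrtc::RTCOutboundRTPStreamStats>()) {
    if (stats->scalability_mode.is_defined()) {
      RTC_LOG(LS_INFO) << "scalabilityMode: " << *stats->scalability_mode;
    }
  }
}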
*/ -#include "modules/audio_coding/codecs/isac/fix/include/audio_encoder_isacfix.h" +#ifndef API_TEST_CREATE_VIDEO_CODEC_TESTER_H_ +#define API_TEST_CREATE_VIDEO_CODEC_TESTER_H_ -#include "modules/audio_coding/codecs/isac/audio_encoder_isac_t_impl.h" +#include + +#include "api/test/video_codec_tester.h" namespace webrtc { +namespace test { -// Explicit instantiation: -template class AudioEncoderIsacT; +std::unique_ptr CreateVideoCodecTester(); +} // namespace test } // namespace webrtc + +#endif // API_TEST_CREATE_VIDEO_CODEC_TESTER_H_ diff --git a/third_party/libwebrtc/api/test/mock_audio_sink.h b/third_party/libwebrtc/api/test/mock_audio_sink.h index 0c17dc45ca5d..88f38a3c5750 100644 --- a/third_party/libwebrtc/api/test/mock_audio_sink.h +++ b/third_party/libwebrtc/api/test/mock_audio_sink.h @@ -17,7 +17,7 @@ namespace webrtc { -class MockAudioSink final : public webrtc::AudioTrackSinkInterface { +class MockAudioSink : public webrtc::AudioTrackSinkInterface { public: MOCK_METHOD(void, OnData, diff --git a/third_party/libwebrtc/api/test/mock_data_channel.h b/third_party/libwebrtc/api/test/mock_data_channel.h index 40f7edb08a9a..38730eaa5162 100644 --- a/third_party/libwebrtc/api/test/mock_data_channel.h +++ b/third_party/libwebrtc/api/test/mock_data_channel.h @@ -18,7 +18,7 @@ namespace webrtc { -class MockDataChannelInterface final +class MockDataChannelInterface : public rtc::RefCountedObject { public: static rtc::scoped_refptr Create() { diff --git a/third_party/libwebrtc/api/test/mock_media_stream_interface.h b/third_party/libwebrtc/api/test/mock_media_stream_interface.h index 209962358d1f..dfdbab35e92e 100644 --- a/third_party/libwebrtc/api/test/mock_media_stream_interface.h +++ b/third_party/libwebrtc/api/test/mock_media_stream_interface.h @@ -18,8 +18,7 @@ namespace webrtc { -class MockAudioSource final - : public rtc::RefCountedObject { +class MockAudioSource : public rtc::RefCountedObject { public: static rtc::scoped_refptr Create() { return rtc::scoped_refptr(new MockAudioSource()); @@ -52,7 +51,7 @@ class MockAudioSource final MockAudioSource() = default; }; -class MockAudioTrack final : public rtc::RefCountedObject { +class MockAudioTrack : public rtc::RefCountedObject { public: static rtc::scoped_refptr Create() { return rtc::scoped_refptr(new MockAudioTrack()); diff --git a/third_party/libwebrtc/api/test/mock_peer_connection_factory_interface.h b/third_party/libwebrtc/api/test/mock_peer_connection_factory_interface.h index 6bab595b5a08..ae1fbfbbb7e2 100644 --- a/third_party/libwebrtc/api/test/mock_peer_connection_factory_interface.h +++ b/third_party/libwebrtc/api/test/mock_peer_connection_factory_interface.h @@ -19,7 +19,7 @@ namespace webrtc { -class MockPeerConnectionFactoryInterface final +class MockPeerConnectionFactoryInterface : public rtc::RefCountedObject { public: static rtc::scoped_refptr Create() { diff --git a/third_party/libwebrtc/api/test/mock_rtpreceiver.h b/third_party/libwebrtc/api/test/mock_rtpreceiver.h index 4bcf064b2a2e..63318dc32d10 100644 --- a/third_party/libwebrtc/api/test/mock_rtpreceiver.h +++ b/third_party/libwebrtc/api/test/mock_rtpreceiver.h @@ -14,6 +14,7 @@ #include #include +#include "api/crypto/frame_decryptor_interface.h" #include "api/rtp_receiver_interface.h" #include "test/gmock.h" @@ -32,12 +33,24 @@ class MockRtpReceiver : public rtc::RefCountedObject { MOCK_METHOD(cricket::MediaType, media_type, (), (const, override)); MOCK_METHOD(std::string, id, (), (const, override)); MOCK_METHOD(RtpParameters, GetParameters, (), (const, 
override)); + MOCK_METHOD(bool, + SetParameters, + (const webrtc::RtpParameters& parameters), + (override)); MOCK_METHOD(void, SetObserver, (RtpReceiverObserverInterface*), (override)); MOCK_METHOD(void, SetJitterBufferMinimumDelay, (absl::optional), (override)); MOCK_METHOD(std::vector, GetSources, (), (const, override)); + MOCK_METHOD(void, + SetFrameDecryptor, + (rtc::scoped_refptr), + (override)); + MOCK_METHOD(rtc::scoped_refptr, + GetFrameDecryptor, + (), + (const, override)); }; } // namespace webrtc diff --git a/third_party/libwebrtc/api/test/mock_rtpsender.h b/third_party/libwebrtc/api/test/mock_rtpsender.h index e2351f87fe76..22113678b985 100644 --- a/third_party/libwebrtc/api/test/mock_rtpsender.h +++ b/third_party/libwebrtc/api/test/mock_rtpsender.h @@ -46,6 +46,10 @@ class MockRtpSender : public RtpSenderInterface { (const, override)); MOCK_METHOD(RtpParameters, GetParameters, (), (const, override)); MOCK_METHOD(RTCError, SetParameters, (const RtpParameters&), (override)); + MOCK_METHOD(void, + SetParametersAsync, + (const RtpParameters&, SetParametersCallback), + (override)); MOCK_METHOD(rtc::scoped_refptr, GetDtmfSender, (), diff --git a/third_party/libwebrtc/api/test/mock_video_track.h b/third_party/libwebrtc/api/test/mock_video_track.h index 705d13509b57..1212a3252707 100644 --- a/third_party/libwebrtc/api/test/mock_video_track.h +++ b/third_party/libwebrtc/api/test/mock_video_track.h @@ -20,7 +20,7 @@ namespace webrtc { -class MockVideoTrack final +class MockVideoTrack : public rtc::RefCountedObject { public: static rtc::scoped_refptr Create() { diff --git a/third_party/libwebrtc/api/test/network_emulation/network_emulation_interfaces.h b/third_party/libwebrtc/api/test/network_emulation/network_emulation_interfaces.h index f0a12df52a65..7cab07b75dc8 100644 --- a/third_party/libwebrtc/api/test/network_emulation/network_emulation_interfaces.h +++ b/third_party/libwebrtc/api/test/network_emulation/network_emulation_interfaces.h @@ -67,8 +67,8 @@ struct EmulatedNetworkOutgoingStats { DataSize bytes_sent = DataSize::Zero(); - // Sizes of all sent packets if EmulatedEndpointConfig::stats_gatherming_mode - // was set to StatsGatheringMode::kDebug; empty otherwise. + // Sizes of all sent packets. + // Collected iff EmulatedNetworkStatsGatheringMode::kDebug is enabled. SamplesStatsCounter sent_packets_size; DataSize first_sent_packet_size = DataSize::Zero(); @@ -90,9 +90,8 @@ struct EmulatedNetworkIncomingStats { // Total amount of bytes in received packets. DataSize bytes_received = DataSize::Zero(); - // Sizes of all received packets if - // EmulatedEndpointConfig::stats_gatherming_mode was set to - // StatsGatheringMode::kDebug; empty otherwise. + // Sizes of all received packets. + // Collected iff EmulatedNetworkStatsGatheringMode::kDebug is enabled. SamplesStatsCounter received_packets_size; // Total amount of packets that were received, but no destination was found. @@ -101,9 +100,8 @@ struct EmulatedNetworkIncomingStats { // Total amount of bytes in discarded packets. DataSize bytes_discarded_no_receiver = DataSize::Zero(); - // Sizes of all packets that were received, but no destination was found if - // EmulatedEndpointConfig::stats_gatherming_mode was set to - // StatsGatheringMode::kDebug; empty otherwise. + // Sizes of all packets that were received, but no destination was found. + // Collected iff EmulatedNetworkStatsGatheringMode::kDebug is enabled. 
SamplesStatsCounter packets_discarded_no_receiver_size; DataSize first_received_packet_size = DataSize::Zero(); @@ -124,10 +122,9 @@ struct EmulatedNetworkStats { DataSize BytesSent() const { return overall_outgoing_stats.bytes_sent; } - // Returns the timestamped sizes of all sent packets if - // EmulatedEndpointConfig::stats_gatherming_mode was set to - // StatsGatheringMode::kDebug; otherwise, the returned value will be empty. + // Returns the timestamped sizes of all sent packets. // Returned reference is valid until the next call to a non-const method. + // Collected iff EmulatedNetworkStatsGatheringMode::kDebug is enabled. const SamplesStatsCounter& SentPacketsSizeCounter() const { return overall_outgoing_stats.sent_packets_size; } @@ -162,10 +159,9 @@ struct EmulatedNetworkStats { return overall_incoming_stats.bytes_received; } - // Returns the timestamped sizes of all received packets if - // EmulatedEndpointConfig::stats_gatherming_mode was set to - // StatsGatheringMode::kDebug; otherwise, the returned value will be empty. + // Returns the timestamped sizes of all received packets. // Returned reference is valid until the next call to a non-const method. + // Collected iff EmulatedNetworkStatsGatheringMode::kDebug is enabled. const SamplesStatsCounter& ReceivedPacketsSizeCounter() const { return overall_incoming_stats.received_packets_size; } @@ -181,10 +177,9 @@ struct EmulatedNetworkStats { } // Returns counter with timestamped sizes of all packets that were received, - // but no destination was found if - // EmulatedEndpointConfig::stats_gatherming_mode was set to - // StatsGatheringMode::kDebug; otherwise, the returned value will be empty. + // but no destination was found. // Returned reference is valid until the next call to a non-const method. + // Collected iff EmulatedNetworkStatsGatheringMode::kDebug is enabled. const SamplesStatsCounter& PacketsDiscardedNoReceiverSizeCounter() const { return overall_incoming_stats.packets_discarded_no_receiver_size; } @@ -226,12 +221,25 @@ struct EmulatedNetworkStats { incoming_stats_per_source; // Duration between packet was received on network interface and was - // dispatched to the network in microseconds if - // EmulatedEndpointConfig::stats_gatherming_mode was set to - // StatsGatheringMode::kDebug; empty otherwise. + // dispatched to the network in microseconds. + // Collected iff EmulatedNetworkStatsGatheringMode::kDebug is enabled. SamplesStatsCounter sent_packets_queue_wait_time_us; }; +struct EmulatedNetworkNodeStats { + // Amount of time each packet spent in the emulated network node for which + // stats were collected. + // + // Collected iff EmulatedNetworkStatsGatheringMode::kDebug is enabled. + SamplesStatsCounter packet_transport_time; + + // For each packet contains its size divided on the amount of time which it + // spent in the emulated network node for which stats were collected. + // + // Collected iff EmulatedNetworkStatsGatheringMode::kDebug is enabled. + SamplesStatsCounter size_to_packet_transport_time; +}; + // EmulatedEndpoint is an abstraction for network interface on device. Instances // of this are created by NetworkEmulationManager::CreateEndpoint and // thread safe. 
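// Illustrative sketch, not part of the patch: consuming the new per-node
// EmulatedNetworkNodeStats counters. They are only populated when the
// emulation was created with EmulatedNetworkStatsGatheringMode::kDebug.
#include "api/test/network_emulation/network_emulation_interfaces.h"
#include "rtc_base/logging.h"

void LogNodeStats(const webrtc::EmulatedNetworkNodeStats& stats) {
  if (!stats.packet_transport_time.IsEmpty()) {
    RTC_LOG(LS_INFO) << "Average packet transport time (counter units): "
                     << stats.packet_transport_time.GetAverage();
  }
}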
diff --git a/third_party/libwebrtc/api/test/network_emulation_manager.h b/third_party/libwebrtc/api/test/network_emulation_manager.h index b2a6ed3ba2ef..bc9279d3067a 100644 --- a/third_party/libwebrtc/api/test/network_emulation_manager.h +++ b/third_party/libwebrtc/api/test/network_emulation_manager.h @@ -49,15 +49,18 @@ class EmulatedNetworkNode; // peer device to another network interface on another peer device. class EmulatedRoute; +enum class EmulatedNetworkStatsGatheringMode { + // Gather main network stats counters. See more details on which particular + // metrics are collected in the `EmulatedNetworkStats` and + // `EmulatedNetworkNodeStats` documentation. + kDefault, + // kDefault + also gather per packet statistics. In this mode more memory + // will be used. + kDebug +}; + struct EmulatedEndpointConfig { enum class IpAddressFamily { kIpv4, kIpv6 }; - enum class StatsGatheringMode { - // Gather main network stats counters. - kDefault, - // kDefault + also gather per packet statistics. In this mode more memory - // will be used. - kDebug - }; // If specified will be used to name endpoint for logging purposes. absl::optional name = absl::nullopt; @@ -70,7 +73,6 @@ struct EmulatedEndpointConfig { bool start_as_enabled = true; // Network type which will be used to represent endpoint to WebRTC. rtc::AdapterType type = rtc::AdapterType::ADAPTER_TYPE_UNKNOWN; - StatsGatheringMode stats_gathering_mode = StatsGatheringMode::kDefault; // Allow endpoint to send packets specifying source IP address different to // the current endpoint IP address. If false endpoint will crash if attempt // to send such packet will be done. @@ -142,10 +144,6 @@ class EmulatedNetworkManagerInterface { // Passes summarized network stats for endpoints for this manager into // specified `stats_callback`. Callback will be executed on network emulation // internal task queue. - // Deprecated. - virtual void GetStats( - std::function)> stats_callback) - const = 0; virtual void GetStats( std::function stats_callback) const = 0; }; @@ -327,18 +325,20 @@ class NetworkEmulationManager { CreateEmulatedNetworkManagerInterface( const std::vector& endpoints) = 0; - // Passes summarized network stats for specified `endpoints` into specified + // Passes combined network stats for all specified `endpoints` into specified // `stats_callback`. Callback will be executed on network emulation // internal task queue. - // Deprecated. - virtual void GetStats( - rtc::ArrayView endpoints, - std::function)> - stats_callback) = 0; virtual void GetStats( rtc::ArrayView endpoints, std::function stats_callback) = 0; + // Passes combined network stats for all specified `nodes` into specified + // `stats_callback`. Callback will be executed on network emulation + // internal task queue. + virtual void GetStats( + rtc::ArrayView nodes, + std::function stats_callback) = 0; + // Create a EmulatedTURNServer. // The TURN server has 2 endpoints that need to be connected with routes, // - GetClientEndpoint() - the endpoint that accepts TURN allocations. 
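// Illustrative sketch, not part of the patch: the stats-gathering mode now
// lives on the NetworkEmulationManager rather than on EmulatedEndpointConfig,
// and the remaining GetStats overload takes a callback receiving the combined
// EmulatedNetworkStats. Route/traffic setup is elided.
#include <memory>
#include <vector>

#include "api/test/create_network_emulation_manager.h"
#include "api/test/network_emulation_manager.h"
#include "rtc_base/logging.h"

void GatherDebugStats() {
  std::unique_ptr<webrtc::NetworkEmulationManager> manager =
      webrtc::CreateNetworkEmulationManager(
          webrtc::TimeMode::kRealTime,
          webrtc::EmulatedNetworkStatsGatheringMode::kDebug);
  webrtc::EmulatedEndpoint* endpoint =
      manager->CreateEndpoint(webrtc::EmulatedEndpointConfig());
  // ... build nodes and routes, run traffic ...
  std::vector<webrtc::EmulatedEndpoint*> endpoints = {endpoint};
  // The callback runs on the emulation's internal task queue.
  manager->GetStats(endpoints, [](webrtc::EmulatedNetworkStats stats) {
    RTC_LOG(LS_INFO) << "Bytes sent: " << stats.BytesSent().bytes();
  });
}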
diff --git a/third_party/libwebrtc/api/test/pclf/BUILD.gn b/third_party/libwebrtc/api/test/pclf/BUILD.gn index 17cd758ea417..a50744e92bfb 100644 --- a/third_party/libwebrtc/api/test/pclf/BUILD.gn +++ b/third_party/libwebrtc/api/test/pclf/BUILD.gn @@ -40,7 +40,7 @@ rtc_source_set("media_configuration") { "../../../rtc_base:threading", "../../../test:fileutils", "../../../test:video_test_support", - "../../../test/pc/e2e:video_dumping", + "../../../test/pc/e2e/analyzer/video:video_dumping", "../../audio:audio_mixer_api", "../../rtc_event_log", "../../task_queue", diff --git a/third_party/libwebrtc/api/test/pclf/peer_configurer.h b/third_party/libwebrtc/api/test/pclf/peer_configurer.h index 54f9402246c5..7841a261b3f3 100644 --- a/third_party/libwebrtc/api/test/pclf/peer_configurer.h +++ b/third_party/libwebrtc/api/test/pclf/peer_configurer.h @@ -47,16 +47,6 @@ class PeerConfigurer { explicit PeerConfigurer(const PeerNetworkDependencies& network_dependencies); - PeerConfigurer(rtc::Thread* network_thread, - rtc::NetworkManager* network_manager, - rtc::PacketSocketFactory* packet_socket_factory) - : components_( - std::make_unique(network_thread, - network_manager, - packet_socket_factory)), - params_(std::make_unique()), - configurable_params_(std::make_unique()) {} - // Sets peer name that will be used to report metrics related to this peer. // If not set, some default name will be assigned. All names have to be // unique. diff --git a/third_party/libwebrtc/api/test/peerconnection_quality_test_fixture.h b/third_party/libwebrtc/api/test/peerconnection_quality_test_fixture.h index 9db209301a1c..74470cdf8634 100644 --- a/third_party/libwebrtc/api/test/peerconnection_quality_test_fixture.h +++ b/third_party/libwebrtc/api/test/peerconnection_quality_test_fixture.h @@ -65,21 +65,6 @@ namespace webrtc_pc_e2e { // API is in development. Can be changed/removed without notice. class PeerConnectionE2EQualityTestFixture { public: - using CapturingDeviceIndex = ::webrtc::webrtc_pc_e2e::CapturingDeviceIndex; - using ScrollingParams = ::webrtc::webrtc_pc_e2e::ScrollingParams; - using ScreenShareConfig = ::webrtc::webrtc_pc_e2e::ScreenShareConfig; - using VideoSimulcastConfig = ::webrtc::webrtc_pc_e2e::VideoSimulcastConfig; - using EmulatedSFUConfig = ::webrtc::webrtc_pc_e2e::EmulatedSFUConfig; - using VideoResolution = ::webrtc::webrtc_pc_e2e::VideoResolution; - using VideoDumpOptions = ::webrtc::webrtc_pc_e2e::VideoDumpOptions; - using VideoConfig = ::webrtc::webrtc_pc_e2e::VideoConfig; - using AudioConfig = ::webrtc::webrtc_pc_e2e::AudioConfig; - using VideoCodecConfig = ::webrtc::webrtc_pc_e2e::VideoCodecConfig; - using VideoSubscription = ::webrtc::webrtc_pc_e2e::VideoSubscription; - using EchoEmulationConfig = ::webrtc::webrtc_pc_e2e::EchoEmulationConfig; - using RunParams = ::webrtc::webrtc_pc_e2e::RunParams; - using PeerConfigurer = ::webrtc::webrtc_pc_e2e::PeerConfigurer; - // Represent an entity that will report quality metrics after test. class QualityMetricsReporter : public StatsObserverInterface { public: @@ -133,18 +118,7 @@ class PeerConnectionE2EQualityTestFixture { // `network_dependencies` are used to provide networking for peer's peer // connection. Members must be non-null. // `configurer` function will be used to configure peer in the call. 
- [[deprecated("bugs.webrtc.org/14627")]] virtual PeerHandle* AddPeer( - const PeerNetworkDependencies& network_dependencies, - rtc::FunctionView configurer) { - RTC_CHECK_NOTREACHED(); - return nullptr; - } - // TODO(bugs.webrtc.org/14627): make pure virtual once all subclasses - // implement it. - virtual PeerHandle* AddPeer(std::unique_ptr configurer) { - RTC_CHECK_NOTREACHED(); - return nullptr; - } + virtual PeerHandle* AddPeer(std::unique_ptr configurer) = 0; // Runs the media quality test, which includes setting up the call with // configured participants, running it according to provided `run_params` and diff --git a/third_party/libwebrtc/api/test/simulcast_test_fixture.h b/third_party/libwebrtc/api/test/simulcast_test_fixture.h index cd470703c38f..c7130d290902 100644 --- a/third_party/libwebrtc/api/test/simulcast_test_fixture.h +++ b/third_party/libwebrtc/api/test/simulcast_test_fixture.h @@ -19,6 +19,7 @@ class SimulcastTestFixture { virtual ~SimulcastTestFixture() = default; virtual void TestKeyFrameRequestsOnAllStreams() = 0; + virtual void TestKeyFrameRequestsOnSpecificStreams() = 0; virtual void TestPaddingAllStreams() = 0; virtual void TestPaddingTwoStreams() = 0; virtual void TestPaddingTwoStreamsOneMaxedOut() = 0; diff --git a/third_party/libwebrtc/api/test/video_codec_tester.h b/third_party/libwebrtc/api/test/video_codec_tester.h new file mode 100644 index 000000000000..0eaaa1b89555 --- /dev/null +++ b/third_party/libwebrtc/api/test/video_codec_tester.h @@ -0,0 +1,134 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_VIDEO_CODEC_TESTER_H_ +#define API_TEST_VIDEO_CODEC_TESTER_H_ + +#include + +#include "absl/functional/any_invocable.h" +#include "api/test/videocodec_test_stats.h" +#include "api/video/encoded_image.h" +#include "api/video/resolution.h" +#include "api/video/video_frame.h" + +namespace webrtc { +namespace test { + +// Interface for a video codec tester. The interface provides minimalistic set +// of data structures that enables implementation of decode-only, encode-only +// and encode-decode tests. +class VideoCodecTester { + public: + // Pacing settings for codec input. + struct PacingSettings { + enum PacingMode { + // Pacing is not used. Frames are sent to codec back-to-back. + kNoPacing, + // Pace with the rate equal to the target video frame rate. Pacing time is + // derived from RTP timestamp. + kRealTime, + // Pace with the explicitly provided rate. + kConstantRate, + }; + PacingMode mode = PacingMode::kNoPacing; + // Pacing rate for `kConstantRate` mode. + Frequency constant_rate = Frequency::Zero(); + }; + + struct DecoderSettings { + PacingSettings pacing; + }; + + struct EncoderSettings { + PacingSettings pacing; + }; + + virtual ~VideoCodecTester() = default; + + // Interface for a raw video frames source. + class RawVideoSource { + public: + virtual ~RawVideoSource() = default; + + // Returns next frame. If no more frames to pull, returns `absl::nullopt`. + // For analysis and pacing purposes, frame must have RTP timestamp set. The + // timestamp must represent the target video frame rate and be unique. 
+ virtual absl::optional PullFrame() = 0; + + // Returns early pulled frame with RTP timestamp equal to `timestamp_rtp`. + virtual VideoFrame GetFrame(uint32_t timestamp_rtp, + Resolution resolution) = 0; + }; + + // Interface for a coded video frames source. + class CodedVideoSource { + public: + virtual ~CodedVideoSource() = default; + + // Returns next frame. If no more frames to pull, returns `absl::nullopt`. + // For analysis and pacing purposes, frame must have RTP timestamp set. The + // timestamp must represent the target video frame rate and be unique. + virtual absl::optional PullFrame() = 0; + }; + + // Interface for a video encoder. + class Encoder { + public: + using EncodeCallback = + absl::AnyInvocable; + + virtual ~Encoder() = default; + + virtual void Encode(const VideoFrame& frame, EncodeCallback callback) = 0; + }; + + // Interface for a video decoder. + class Decoder { + public: + using DecodeCallback = + absl::AnyInvocable; + + virtual ~Decoder() = default; + + virtual void Decode(const EncodedImage& frame, DecodeCallback callback) = 0; + }; + + // Pulls coded video frames from `video_source` and passes them to `decoder`. + // Returns `VideoCodecTestStats` object that contains collected per-frame + // metrics. + virtual std::unique_ptr RunDecodeTest( + std::unique_ptr video_source, + std::unique_ptr decoder, + const DecoderSettings& decoder_settings) = 0; + + // Pulls raw video frames from `video_source` and passes them to `encoder`. + // Returns `VideoCodecTestStats` object that contains collected per-frame + // metrics. + virtual std::unique_ptr RunEncodeTest( + std::unique_ptr video_source, + std::unique_ptr encoder, + const EncoderSettings& encoder_settings) = 0; + + // Pulls raw video frames from `video_source`, passes them to `encoder` and + // then passes encoded frames to `decoder`. Returns `VideoCodecTestStats` + // object that contains collected per-frame metrics. + virtual std::unique_ptr RunEncodeDecodeTest( + std::unique_ptr video_source, + std::unique_ptr encoder, + std::unique_ptr decoder, + const EncoderSettings& encoder_settings, + const DecoderSettings& decoder_settings) = 0; +}; + +} // namespace test +} // namespace webrtc + +#endif // API_TEST_VIDEO_CODEC_TESTER_H_ diff --git a/third_party/libwebrtc/api/test/video_quality_analyzer_interface.h b/third_party/libwebrtc/api/test/video_quality_analyzer_interface.h index dc58b049671c..d35be8ca1a85 100644 --- a/third_party/libwebrtc/api/test/video_quality_analyzer_interface.h +++ b/third_party/libwebrtc/api/test/video_quality_analyzer_interface.h @@ -62,6 +62,8 @@ class VideoQualityAnalyzerInterface // https://crbug.com/webrtc/11443: improve stats API to make available // there. uint32_t target_encode_bitrate = 0; + // Encoder quantizer value. + int qp = -1; }; // Contains extra statistic provided by video decoder. 
struct DecoderStats { diff --git a/third_party/libwebrtc/api/test/videocodec_test_stats.h b/third_party/libwebrtc/api/test/videocodec_test_stats.h index a05985a6650d..12c60638dbc9 100644 --- a/third_party/libwebrtc/api/test/videocodec_test_stats.h +++ b/third_party/libwebrtc/api/test/videocodec_test_stats.h @@ -18,6 +18,9 @@ #include #include +#include "absl/types/optional.h" +#include "api/units/data_rate.h" +#include "api/units/frequency.h" #include "api/video/video_frame_type.h" namespace webrtc { @@ -135,11 +138,16 @@ class VideoCodecTestStats { virtual ~VideoCodecTestStats() = default; - virtual std::vector GetFrameStatistics() = 0; + virtual std::vector GetFrameStatistics() const = 0; virtual std::vector SliceAndCalcLayerVideoStatistic( size_t first_frame_num, size_t last_frame_num) = 0; + + virtual VideoStatistics CalcVideoStatistic(size_t first_frame, + size_t last_frame, + DataRate target_bitrate, + Frequency target_framerate) = 0; }; } // namespace test diff --git a/third_party/libwebrtc/api/video/BUILD.gn b/third_party/libwebrtc/api/video/BUILD.gn index d1f7878c00ca..d65f6412b458 100644 --- a/third_party/libwebrtc/api/video/BUILD.gn +++ b/third_party/libwebrtc/api/video/BUILD.gn @@ -332,8 +332,11 @@ rtc_source_set("video_frame_metadata") { "video_frame_metadata.h", ] deps = [ + ":video_frame", + ":video_frame_type", + ":video_rtp_headers", "..:array_view", - "../../modules/rtp_rtcp:rtp_video_header", + "../../rtc_base/system:rtc_export", "../transport/rtp:dependency_descriptor", ] absl_deps = [ @@ -400,12 +403,10 @@ rtc_library("frame_buffer_unittest") { if (rtc_include_tests) { rtc_library("video_unittests") { testonly = true - sources = [ - "video_frame_metadata_unittest.cc", - "video_stream_decoder_create_unittest.cc", - ] + sources = [ "video_stream_decoder_create_unittest.cc" ] deps = [ ":video_frame_metadata", + ":video_frame_type", ":video_stream_decoder_create", "../../modules/rtp_rtcp:rtp_video_header", "../../test:test_support", diff --git a/third_party/libwebrtc/api/video/video_frame_metadata.cc b/third_party/libwebrtc/api/video/video_frame_metadata.cc index df82875eb959..842aeb0524a0 100644 --- a/third_party/libwebrtc/api/video/video_frame_metadata.cc +++ b/third_party/libwebrtc/api/video/video_frame_metadata.cc @@ -10,19 +10,118 @@ #include "api/video/video_frame_metadata.h" -#include "modules/rtp_rtcp/source/rtp_video_header.h" - namespace webrtc { -VideoFrameMetadata::VideoFrameMetadata(const RTPVideoHeader& header) - : width_(header.width), height_(header.height) { - if (header.generic) { - frame_id_ = header.generic->frame_id; - spatial_index_ = header.generic->spatial_index; - temporal_index_ = header.generic->temporal_index; - frame_dependencies_ = header.generic->dependencies; - decode_target_indications_ = header.generic->decode_target_indications; - } +VideoFrameMetadata::VideoFrameMetadata() = default; + +VideoFrameType VideoFrameMetadata::GetFrameType() const { + return frame_type_; +} + +void VideoFrameMetadata::SetFrameType(VideoFrameType frame_type) { + frame_type_ = frame_type; +} + +uint16_t VideoFrameMetadata::GetWidth() const { + return width_; +} + +void VideoFrameMetadata::SetWidth(uint16_t width) { + width_ = width; +} + +uint16_t VideoFrameMetadata::GetHeight() const { + return height_; +} + +void VideoFrameMetadata::SetHeight(uint16_t height) { + height_ = height; +} + +VideoRotation VideoFrameMetadata::GetRotation() const { + return rotation_; +} + +void VideoFrameMetadata::SetRotation(VideoRotation rotation) { + rotation_ = rotation; +} + 
+VideoContentType VideoFrameMetadata::GetContentType() const { + return content_type_; +} + +void VideoFrameMetadata::SetContentType(VideoContentType content_type) { + content_type_ = content_type; +} + +absl::optional VideoFrameMetadata::GetFrameId() const { + return frame_id_; +} + +void VideoFrameMetadata::SetFrameId(absl::optional frame_id) { + frame_id_ = frame_id; +} + +int VideoFrameMetadata::GetSpatialIndex() const { + return spatial_index_; +} + +void VideoFrameMetadata::SetSpatialIndex(int spatial_index) { + spatial_index_ = spatial_index; +} + +int VideoFrameMetadata::GetTemporalIndex() const { + return temporal_index_; +} + +void VideoFrameMetadata::SetTemporalIndex(int temporal_index) { + temporal_index_ = temporal_index; +} + +rtc::ArrayView VideoFrameMetadata::GetFrameDependencies() const { + return frame_dependencies_; +} + +void VideoFrameMetadata::SetFrameDependencies( + rtc::ArrayView frame_dependencies) { + frame_dependencies_.assign(frame_dependencies.begin(), + frame_dependencies.end()); +} + +rtc::ArrayView +VideoFrameMetadata::GetDecodeTargetIndications() const { + return decode_target_indications_; +} + +void VideoFrameMetadata::SetDecodeTargetIndications( + rtc::ArrayView decode_target_indications) { + decode_target_indications_.assign(decode_target_indications.begin(), + decode_target_indications.end()); +} + +bool VideoFrameMetadata::GetIsLastFrameInPicture() const { + return is_last_frame_in_picture_; +} + +void VideoFrameMetadata::SetIsLastFrameInPicture( + bool is_last_frame_in_picture) { + is_last_frame_in_picture_ = is_last_frame_in_picture; +} + +uint8_t VideoFrameMetadata::GetSimulcastIdx() const { + return simulcast_idx_; +} + +void VideoFrameMetadata::SetSimulcastIdx(uint8_t simulcast_idx) { + simulcast_idx_ = simulcast_idx; +} + +VideoCodecType VideoFrameMetadata::GetCodec() const { + return codec_; +} + +void VideoFrameMetadata::SetCodec(VideoCodecType codec) { + codec_ = codec; } } // namespace webrtc diff --git a/third_party/libwebrtc/api/video/video_frame_metadata.h b/third_party/libwebrtc/api/video/video_frame_metadata.h index 2e9309841b00..6e3f32fdbf9a 100644 --- a/third_party/libwebrtc/api/video/video_frame_metadata.h +++ b/third_party/libwebrtc/api/video/video_frame_metadata.h @@ -17,42 +17,80 @@ #include "absl/types/optional.h" #include "api/array_view.h" #include "api/transport/rtp/dependency_descriptor.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_content_type.h" +#include "api/video/video_frame_type.h" +#include "api/video/video_rotation.h" +#include "rtc_base/system/rtc_export.h" namespace webrtc { -struct RTPVideoHeader; - // A subset of metadata from the RTP video header, exposed in insertable streams // API. 
-class VideoFrameMetadata { +class RTC_EXPORT VideoFrameMetadata { public: - explicit VideoFrameMetadata(const RTPVideoHeader& header); + VideoFrameMetadata(); VideoFrameMetadata(const VideoFrameMetadata&) = default; VideoFrameMetadata& operator=(const VideoFrameMetadata&) = default; - uint16_t GetWidth() const { return width_; } - uint16_t GetHeight() const { return height_; } - absl::optional GetFrameId() const { return frame_id_; } - int GetSpatialIndex() const { return spatial_index_; } - int GetTemporalIndex() const { return temporal_index_; } + VideoFrameType GetFrameType() const; + void SetFrameType(VideoFrameType frame_type); - rtc::ArrayView GetFrameDependencies() const { - return frame_dependencies_; - } + uint16_t GetWidth() const; + void SetWidth(uint16_t width); + + uint16_t GetHeight() const; + void SetHeight(uint16_t height); + + VideoRotation GetRotation() const; + void SetRotation(VideoRotation rotation); + + VideoContentType GetContentType() const; + void SetContentType(VideoContentType content_type); + + absl::optional GetFrameId() const; + void SetFrameId(absl::optional frame_id); + + int GetSpatialIndex() const; + void SetSpatialIndex(int spatial_index); + + int GetTemporalIndex() const; + void SetTemporalIndex(int temporal_index); + + rtc::ArrayView GetFrameDependencies() const; + void SetFrameDependencies(rtc::ArrayView frame_dependencies); rtc::ArrayView GetDecodeTargetIndications() - const { - return decode_target_indications_; - } + const; + void SetDecodeTargetIndications( + rtc::ArrayView decode_target_indications); + + bool GetIsLastFrameInPicture() const; + void SetIsLastFrameInPicture(bool is_last_frame_in_picture); + + uint8_t GetSimulcastIdx() const; + void SetSimulcastIdx(uint8_t simulcast_idx); + + VideoCodecType GetCodec() const; + void SetCodec(VideoCodecType codec); private: - int16_t width_; - int16_t height_; + VideoFrameType frame_type_ = VideoFrameType::kEmptyFrame; + int16_t width_ = 0; + int16_t height_ = 0; + VideoRotation rotation_ = VideoRotation::kVideoRotation_0; + VideoContentType content_type_ = VideoContentType::UNSPECIFIED; + + // Corresponding to GenericDescriptorInfo. absl::optional frame_id_; int spatial_index_ = 0; int temporal_index_ = 0; absl::InlinedVector frame_dependencies_; absl::InlinedVector decode_target_indications_; + + bool is_last_frame_in_picture_ = true; + uint8_t simulcast_idx_ = 0; + VideoCodecType codec_ = VideoCodecType::kVideoCodecGeneric; }; } // namespace webrtc diff --git a/third_party/libwebrtc/api/video/video_frame_metadata_unittest.cc b/third_party/libwebrtc/api/video/video_frame_metadata_unittest.cc deleted file mode 100644 index 7a808e1ea9bc..000000000000 --- a/third_party/libwebrtc/api/video/video_frame_metadata_unittest.cc +++ /dev/null @@ -1,120 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "api/video/video_frame_metadata.h" - -#include "modules/rtp_rtcp/source/rtp_video_header.h" -#include "test/gmock.h" -#include "test/gtest.h" - -namespace webrtc { -namespace { - -using ::testing::ElementsAre; -using ::testing::IsEmpty; - -TEST(VideoFrameMetadata, GetWidthReturnsCorrectValue) { - RTPVideoHeader video_header; - video_header.width = 1280u; - VideoFrameMetadata metadata(video_header); - EXPECT_EQ(metadata.GetWidth(), video_header.width); -} - -TEST(VideoFrameMetadata, GetHeightReturnsCorrectValue) { - RTPVideoHeader video_header; - video_header.height = 720u; - VideoFrameMetadata metadata(video_header); - EXPECT_EQ(metadata.GetHeight(), video_header.height); -} - -TEST(VideoFrameMetadata, GetFrameIdReturnsCorrectValue) { - RTPVideoHeader video_header; - RTPVideoHeader::GenericDescriptorInfo& generic = - video_header.generic.emplace(); - generic.frame_id = 10; - VideoFrameMetadata metadata(video_header); - EXPECT_EQ(metadata.GetFrameId().value(), 10); -} - -TEST(VideoFrameMetadata, HasNoFrameIdForHeaderWithoutGeneric) { - RTPVideoHeader video_header; - VideoFrameMetadata metadata(video_header); - ASSERT_FALSE(video_header.generic); - EXPECT_EQ(metadata.GetFrameId(), absl::nullopt); -} - -TEST(VideoFrameMetadata, GetSpatialIndexReturnsCorrectValue) { - RTPVideoHeader video_header; - RTPVideoHeader::GenericDescriptorInfo& generic = - video_header.generic.emplace(); - generic.spatial_index = 2; - VideoFrameMetadata metadata(video_header); - EXPECT_EQ(metadata.GetSpatialIndex(), 2); -} - -TEST(VideoFrameMetadata, SpatialIndexIsZeroForHeaderWithoutGeneric) { - RTPVideoHeader video_header; - VideoFrameMetadata metadata(video_header); - ASSERT_FALSE(video_header.generic); - EXPECT_EQ(metadata.GetSpatialIndex(), 0); -} - -TEST(VideoFrameMetadata, GetTemporalIndexReturnsCorrectValue) { - RTPVideoHeader video_header; - RTPVideoHeader::GenericDescriptorInfo& generic = - video_header.generic.emplace(); - generic.temporal_index = 3; - VideoFrameMetadata metadata(video_header); - EXPECT_EQ(metadata.GetTemporalIndex(), 3); -} - -TEST(VideoFrameMetadata, TemporalIndexIsZeroForHeaderWithoutGeneric) { - RTPVideoHeader video_header; - VideoFrameMetadata metadata(video_header); - ASSERT_FALSE(video_header.generic); - EXPECT_EQ(metadata.GetTemporalIndex(), 0); -} - -TEST(VideoFrameMetadata, GetFrameDependenciesReturnsCorrectValue) { - RTPVideoHeader video_header; - RTPVideoHeader::GenericDescriptorInfo& generic = - video_header.generic.emplace(); - generic.dependencies = {5, 6, 7}; - VideoFrameMetadata metadata(video_header); - EXPECT_THAT(metadata.GetFrameDependencies(), ElementsAre(5, 6, 7)); -} - -TEST(VideoFrameMetadata, FrameDependencyVectorIsEmptyForHeaderWithoutGeneric) { - RTPVideoHeader video_header; - VideoFrameMetadata metadata(video_header); - ASSERT_FALSE(video_header.generic); - EXPECT_THAT(metadata.GetFrameDependencies(), IsEmpty()); -} - -TEST(VideoFrameMetadata, GetDecodeTargetIndicationsReturnsCorrectValue) { - RTPVideoHeader video_header; - RTPVideoHeader::GenericDescriptorInfo& generic = - video_header.generic.emplace(); - generic.decode_target_indications = {DecodeTargetIndication::kSwitch}; - VideoFrameMetadata metadata(video_header); - EXPECT_THAT(metadata.GetDecodeTargetIndications(), - ElementsAre(DecodeTargetIndication::kSwitch)); -} - -TEST(VideoFrameMetadata, - DecodeTargetIndicationsVectorIsEmptyForHeaderWithoutGeneric) { - RTPVideoHeader video_header; - VideoFrameMetadata metadata(video_header); - ASSERT_FALSE(video_header.generic); - 
EXPECT_THAT(metadata.GetDecodeTargetIndications(), IsEmpty()); -} - -} // namespace -} // namespace webrtc diff --git a/third_party/libwebrtc/api/video_codecs/video_encoder.h b/third_party/libwebrtc/api/video_codecs/video_encoder.h index 30ec58e807c3..395a87e0893a 100644 --- a/third_party/libwebrtc/api/video_codecs/video_encoder.h +++ b/third_party/libwebrtc/api/video_codecs/video_encoder.h @@ -174,7 +174,7 @@ class RTC_EXPORT VideoEncoder { // For example: With I420, this value would be a multiple of 2. // Note that this field is unrelated to any horizontal or vertical stride // requirements the encoder has on the incoming video frame buffers. - int requested_resolution_alignment; + uint32_t requested_resolution_alignment; // Same as above but if true, each simulcast layer should also be divisible // by `requested_resolution_alignment`. diff --git a/third_party/libwebrtc/audio/BUILD.gn b/third_party/libwebrtc/audio/BUILD.gn index 91d66d4f8bf2..d2ba68459d63 100644 --- a/third_party/libwebrtc/audio/BUILD.gn +++ b/third_party/libwebrtc/audio/BUILD.gn @@ -70,6 +70,7 @@ rtc_library("audio") { "../common_audio:common_audio_c", "../logging:rtc_event_audio", "../logging:rtc_stream_config", + "../media:rtc_media_base", "../modules/async_audio_processing", "../modules/audio_coding", "../modules/audio_coding:audio_coding_module_typedefs", @@ -290,7 +291,7 @@ if (rtc_include_tests) { data += [ "${root_out_dir}/low_bandwidth_audio_test" ] } - if (is_linux || is_chromeos || is_android) { + if (is_linux || is_chromeos || is_android || is_fuchsia) { data += [ "../tools_webrtc/audio_quality/linux/PolqaOem64", "../tools_webrtc/audio_quality/linux/pesq", diff --git a/third_party/libwebrtc/audio/DEPS b/third_party/libwebrtc/audio/DEPS index 9b89dc39abdd..7a0c7e7ce62e 100644 --- a/third_party/libwebrtc/audio/DEPS +++ b/third_party/libwebrtc/audio/DEPS @@ -2,6 +2,7 @@ include_rules = [ "+call", "+common_audio", "+logging/rtc_event_log", + "+media/base", "+modules/async_audio_processing", "+modules/audio_coding", "+modules/audio_device", diff --git a/third_party/libwebrtc/audio/audio_send_stream.cc b/third_party/libwebrtc/audio/audio_send_stream.cc index 4de7a4819b7c..20af3f772269 100644 --- a/third_party/libwebrtc/audio/audio_send_stream.cc +++ b/third_party/libwebrtc/audio/audio_send_stream.cc @@ -31,6 +31,7 @@ #include "common_audio/vad/include/vad.h" #include "logging/rtc_event_log/events/rtc_event_audio_send_stream_config.h" #include "logging/rtc_event_log/rtc_stream_config.h" +#include "media/base/media_channel.h" #include "modules/audio_coding/codecs/cng/audio_encoder_cng.h" #include "modules/audio_coding/codecs/red/audio_encoder_copy_red.h" #include "modules/audio_processing/include/audio_processing.h" @@ -151,8 +152,6 @@ AudioSendStream::AudioSendStream( field_trials_.IsEnabled("WebRTC-Audio-ABWENoTWCC")), enable_audio_alr_probing_( !field_trials_.IsDisabled("WebRTC-Audio-AlrProbing")), - send_side_bwe_with_overhead_( - !field_trials_.IsDisabled("WebRTC-SendSideBwe-WithOverhead")), allocation_settings_(field_trials_), config_(Config(/*send_transport=*/nullptr)), audio_state_(audio_state), @@ -174,7 +173,7 @@ AudioSendStream::AudioSendStream( RTC_DCHECK(rtp_rtcp_module_); RTC_DCHECK_RUN_ON(&worker_thread_checker_); - ConfigureStream(config, true); + ConfigureStream(config, true, nullptr); UpdateCachedTargetAudioBitrateConstraints(); } @@ -195,9 +194,10 @@ const webrtc::AudioSendStream::Config& AudioSendStream::GetConfig() const { } void AudioSendStream::Reconfigure( - const 
webrtc::AudioSendStream::Config& new_config) { + const webrtc::AudioSendStream::Config& new_config, + SetParametersCallback callback) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); - ConfigureStream(new_config, false); + ConfigureStream(new_config, false, std::move(callback)); } AudioSendStream::ExtensionIds AudioSendStream::FindExtensionIds( @@ -229,7 +229,8 @@ int AudioSendStream::TransportSeqNumId(const AudioSendStream::Config& config) { void AudioSendStream::ConfigureStream( const webrtc::AudioSendStream::Config& new_config, - bool first_time) { + bool first_time, + SetParametersCallback callback) { RTC_LOG(LS_INFO) << "AudioSendStream::ConfigureStream: " << new_config.ToString(); UpdateEventLogStreamConfig(event_log_, new_config, @@ -327,6 +328,10 @@ void AudioSendStream::ConfigureStream( if (!ReconfigureSendCodec(new_config)) { RTC_LOG(LS_ERROR) << "Failed to set up send codec state."; + + webrtc::InvokeSetParametersCallback( + callback, webrtc::RTCError(webrtc::RTCErrorType::INTERNAL_ERROR, + "Failed to set up send codec state.")); } // Set currently known overhead (used in ANA, opus only). @@ -352,6 +357,8 @@ void AudioSendStream::ConfigureStream( if (!first_time) { UpdateCachedTargetAudioBitrateConstraints(); } + + webrtc::InvokeSetParametersCallback(callback, webrtc::RTCError::OK()); } void AudioSendStream::Start() { @@ -363,8 +370,7 @@ void AudioSendStream::Start() { config_.max_bitrate_bps != -1 && (allocate_audio_without_feedback_ || TransportSeqNumId(config_) != 0)) { rtp_transport_->AccountForAudioPacketsInPacedSender(true); - if (send_side_bwe_with_overhead_) - rtp_transport_->IncludeOverheadInPacedSender(); + rtp_transport_->IncludeOverheadInPacedSender(); rtp_rtcp_module_->SetAsPartOfAllocation(true); ConfigureBitrateObserver(); } else { @@ -803,8 +809,7 @@ void AudioSendStream::ReconfigureBitrateObserver( if (!new_config.has_dscp && new_config.min_bitrate_bps != -1 && new_config.max_bitrate_bps != -1 && TransportSeqNumId(new_config) != 0) { rtp_transport_->AccountForAudioPacketsInPacedSender(true); - if (send_side_bwe_with_overhead_) - rtp_transport_->IncludeOverheadInPacedSender(); + rtp_transport_->IncludeOverheadInPacedSender(); // We may get a callback immediately as the observer is registered, so // make sure the bitrate limits in config_ are up-to-date. 
config_.min_bitrate_bps = new_config.min_bitrate_bps; @@ -827,22 +832,21 @@ void AudioSendStream::ConfigureBitrateObserver() { RTC_DCHECK(constraints.has_value()); DataRate priority_bitrate = allocation_settings_.priority_bitrate; - if (send_side_bwe_with_overhead_) { - if (use_legacy_overhead_calculation_) { - // OverheadPerPacket = Ipv4(20B) + UDP(8B) + SRTP(10B) + RTP(12) - constexpr int kOverheadPerPacket = 20 + 8 + 10 + 12; - const TimeDelta kMinPacketDuration = TimeDelta::Millis(20); - DataRate max_overhead = - DataSize::Bytes(kOverheadPerPacket) / kMinPacketDuration; - priority_bitrate += max_overhead; - } else { - RTC_DCHECK(frame_length_range_); - const DataSize overhead_per_packet = - DataSize::Bytes(total_packet_overhead_bytes_); - DataRate min_overhead = overhead_per_packet / frame_length_range_->second; - priority_bitrate += min_overhead; - } + if (use_legacy_overhead_calculation_) { + // OverheadPerPacket = Ipv4(20B) + UDP(8B) + SRTP(10B) + RTP(12) + constexpr int kOverheadPerPacket = 20 + 8 + 10 + 12; + const TimeDelta kMinPacketDuration = TimeDelta::Millis(20); + DataRate max_overhead = + DataSize::Bytes(kOverheadPerPacket) / kMinPacketDuration; + priority_bitrate += max_overhead; + } else { + RTC_DCHECK(frame_length_range_); + const DataSize overhead_per_packet = + DataSize::Bytes(total_packet_overhead_bytes_); + DataRate min_overhead = overhead_per_packet / frame_length_range_->second; + priority_bitrate += min_overhead; } + if (allocation_settings_.priority_bitrate_raw) priority_bitrate = *allocation_settings_.priority_bitrate_raw; @@ -895,25 +899,23 @@ AudioSendStream::GetMinMaxBitrateConstraints() const { << "TargetAudioBitrateConstraints::min"; return absl::nullopt; } - if (send_side_bwe_with_overhead_) { - if (use_legacy_overhead_calculation_) { - // OverheadPerPacket = Ipv4(20B) + UDP(8B) + SRTP(10B) + RTP(12) - const DataSize kOverheadPerPacket = DataSize::Bytes(20 + 8 + 10 + 12); - const TimeDelta kMaxFrameLength = - TimeDelta::Millis(60); // Based on Opus spec - const DataRate kMinOverhead = kOverheadPerPacket / kMaxFrameLength; - constraints.min += kMinOverhead; - constraints.max += kMinOverhead; - } else { - if (!frame_length_range_.has_value()) { - RTC_LOG(LS_WARNING) << "frame_length_range_ is not set"; - return absl::nullopt; - } - const DataSize kOverheadPerPacket = - DataSize::Bytes(total_packet_overhead_bytes_); - constraints.min += kOverheadPerPacket / frame_length_range_->second; - constraints.max += kOverheadPerPacket / frame_length_range_->first; + if (use_legacy_overhead_calculation_) { + // OverheadPerPacket = Ipv4(20B) + UDP(8B) + SRTP(10B) + RTP(12) + const DataSize kOverheadPerPacket = DataSize::Bytes(20 + 8 + 10 + 12); + const TimeDelta kMaxFrameLength = + TimeDelta::Millis(60); // Based on Opus spec + const DataRate kMinOverhead = kOverheadPerPacket / kMaxFrameLength; + constraints.min += kMinOverhead; + constraints.max += kMinOverhead; + } else { + if (!frame_length_range_.has_value()) { + RTC_LOG(LS_WARNING) << "frame_length_range_ is not set"; + return absl::nullopt; } + const DataSize kOverheadPerPacket = + DataSize::Bytes(total_packet_overhead_bytes_); + constraints.min += kOverheadPerPacket / frame_length_range_->second; + constraints.max += kOverheadPerPacket / frame_length_range_->first; } return constraints; } diff --git a/third_party/libwebrtc/audio/audio_send_stream.h b/third_party/libwebrtc/audio/audio_send_stream.h index 4962ccd7a3ac..42be43afb930 100644 --- a/third_party/libwebrtc/audio/audio_send_stream.h +++ 
b/third_party/libwebrtc/audio/audio_send_stream.h @@ -88,7 +88,8 @@ class AudioSendStream final : public webrtc::AudioSendStream, // webrtc::AudioSendStream implementation. const webrtc::AudioSendStream::Config& GetConfig() const override; - void Reconfigure(const webrtc::AudioSendStream::Config& config) override; + void Reconfigure(const webrtc::AudioSendStream::Config& config, + SetParametersCallback callback) override; void Start() override; void Stop() override; void SendAudioData(std::unique_ptr audio_frame) override; @@ -129,7 +130,9 @@ class AudioSendStream final : public webrtc::AudioSendStream, void StoreEncoderProperties(int sample_rate_hz, size_t num_channels) RTC_RUN_ON(worker_thread_checker_); - void ConfigureStream(const Config& new_config, bool first_time) + void ConfigureStream(const Config& new_config, + bool first_time, + SetParametersCallback callback) RTC_RUN_ON(worker_thread_checker_); bool SetupSendCodec(const Config& new_config) RTC_RUN_ON(worker_thread_checker_); @@ -175,7 +178,6 @@ class AudioSendStream final : public webrtc::AudioSendStream, const bool allocate_audio_without_feedback_; const bool force_no_audio_feedback_ = allocate_audio_without_feedback_; const bool enable_audio_alr_probing_; - const bool send_side_bwe_with_overhead_; const AudioAllocationConfig allocation_settings_; webrtc::AudioSendStream::Config config_ diff --git a/third_party/libwebrtc/audio/audio_send_stream_unittest.cc b/third_party/libwebrtc/audio/audio_send_stream_unittest.cc index cbf24b5e72fb..a81b40cbe7d3 100644 --- a/third_party/libwebrtc/audio/audio_send_stream_unittest.cc +++ b/third_party/libwebrtc/audio/audio_send_stream_unittest.cc @@ -550,7 +550,7 @@ TEST(AudioSendStreamTest, SendCodecAppliesAudioNetworkAdaptor) { auto stream_config = helper.config(); stream_config.audio_network_adaptor_config = kAnaReconfigString; - send_stream->Reconfigure(stream_config); + send_stream->Reconfigure(stream_config, nullptr); } } @@ -590,7 +590,7 @@ TEST(AudioSendStreamTest, AudioNetworkAdaptorReceivesOverhead) { auto stream_config = helper.config(); stream_config.audio_network_adaptor_config = kAnaConfigString; - send_stream->Reconfigure(stream_config); + send_stream->Reconfigure(stream_config, nullptr); } } @@ -791,7 +791,7 @@ TEST(AudioSendStreamTest, DontRecreateEncoder) { AudioSendStream::Config::SendCodecSpec(9, kG722Format); helper.config().send_codec_spec->cng_payload_type = 105; auto send_stream = helper.CreateAudioSendStream(); - send_stream->Reconfigure(helper.config()); + send_stream->Reconfigure(helper.config(), nullptr); } } @@ -816,7 +816,7 @@ TEST(AudioSendStreamTest, ReconfigureTransportCcResetsFirst) { .Times(1); } - send_stream->Reconfigure(new_config); + send_stream->Reconfigure(new_config, nullptr); } } @@ -928,11 +928,11 @@ TEST(AudioSendStreamTest, ReconfigureWithFrameEncryptor) { new_config.frame_encryptor = mock_frame_encryptor_0; EXPECT_CALL(*helper.channel_send(), SetFrameEncryptor(Ne(nullptr))) .Times(1); - send_stream->Reconfigure(new_config); + send_stream->Reconfigure(new_config, nullptr); // Not updating the frame encryptor shouldn't force it to reconfigure. EXPECT_CALL(*helper.channel_send(), SetFrameEncryptor(_)).Times(0); - send_stream->Reconfigure(new_config); + send_stream->Reconfigure(new_config, nullptr); // Updating frame encryptor to a new object should force a call to the // proxy. 
@@ -942,7 +942,7 @@ TEST(AudioSendStreamTest, ReconfigureWithFrameEncryptor) { new_config.crypto_options.sframe.require_frame_encryption = true; EXPECT_CALL(*helper.channel_send(), SetFrameEncryptor(Ne(nullptr))) .Times(1); - send_stream->Reconfigure(new_config); + send_stream->Reconfigure(new_config, nullptr); } } } // namespace test diff --git a/third_party/libwebrtc/audio/channel_send.cc b/third_party/libwebrtc/audio/channel_send.cc index 3dde54513cdd..8cda62b0fa62 100644 --- a/third_party/libwebrtc/audio/channel_send.cc +++ b/third_party/libwebrtc/audio/channel_send.cc @@ -289,6 +289,11 @@ class RtpPacketSenderProxy : public RtpPacketSender { rtp_packet_pacer_->EnqueuePackets(std::move(packets)); } + void RemovePacketsForSsrc(uint32_t ssrc) override { + MutexLock lock(&mutex_); + rtp_packet_pacer_->RemovePacketsForSsrc(ssrc); + } + private: SequenceChecker thread_checker_; Mutex mutex_; @@ -595,6 +600,7 @@ void ChannelSend::StopSend() { RTC_DCHECK(packet_router_); packet_router_->RemoveSendRtpModule(rtp_rtcp_.get()); + rtp_packet_pacer_proxy_->RemovePacketsForSsrc(rtp_rtcp_->SSRC()); } void ChannelSend::SetEncoder(int payload_type, diff --git a/third_party/libwebrtc/build_overrides/partition_alloc.gni b/third_party/libwebrtc/build_overrides/partition_alloc.gni index 417392851513..044036879a0b 100644 --- a/third_party/libwebrtc/build_overrides/partition_alloc.gni +++ b/third_party/libwebrtc/build_overrides/partition_alloc.gni @@ -8,10 +8,5 @@ # Use default values for PartitionAlloc as standalone library from # base/allocator/partition_allocator/build_overrides/partition_alloc.gni -use_partition_alloc_as_malloc_default = false -use_allocator_shim_default = false -enable_backup_ref_ptr_support_default = false -enable_mte_checked_ptr_support_default = false -put_ref_count_in_previous_slot_default = false -enable_backup_ref_ptr_slow_checks_default = false -enable_dangling_raw_ptr_checks_default = false +import( + "//base/allocator/partition_allocator/build_overrides/partition_alloc.gni") diff --git a/third_party/libwebrtc/call/BUILD.gn b/third_party/libwebrtc/call/BUILD.gn index d26d58951ebf..0c22f5d6d70e 100644 --- a/third_party/libwebrtc/call/BUILD.gn +++ b/third_party/libwebrtc/call/BUILD.gn @@ -52,6 +52,7 @@ rtc_library("call_interfaces") { "../api:rtc_error", "../api:rtp_headers", "../api:rtp_parameters", + "../api:rtp_sender_setparameters_callback", "../api:scoped_refptr", "../api:transport_api", "../api/adaptation:resource_adaptation_api", @@ -80,6 +81,7 @@ rtc_library("call_interfaces") { "../rtc_base/network:sent_packet", ] absl_deps = [ + "//third_party/abseil-cpp/absl/functional:any_invocable", "//third_party/abseil-cpp/absl/functional:bind_front", "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", @@ -387,6 +389,7 @@ rtc_library("video_stream_api") { "../api:frame_transformer_interface", "../api:rtp_headers", "../api:rtp_parameters", + "../api:rtp_sender_setparameters_callback", "../api:scoped_refptr", "../api:transport_api", "../api/adaptation:resource_adaptation_api", @@ -396,6 +399,7 @@ rtc_library("video_stream_api") { "../api/video:video_frame", "../api/video:video_rtp_headers", "../api/video:video_stream_encoder", + "../api/video_codecs:scalability_mode", "../api/video_codecs:video_codecs_api", "../common_video", "../common_video:frame_counts", @@ -404,7 +408,10 @@ rtc_library("video_stream_api") { "../rtc_base:stringutils", "../video/config:encoder_config", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] + absl_deps 
= [ + "//third_party/abseil-cpp/absl/functional:any_invocable", + "//third_party/abseil-cpp/absl/types:optional", + ] } rtc_library("simulated_network") { diff --git a/third_party/libwebrtc/call/audio_send_stream.h b/third_party/libwebrtc/call/audio_send_stream.h index 07ad53b2fda9..187ec65ed8d3 100644 --- a/third_party/libwebrtc/call/audio_send_stream.h +++ b/third_party/libwebrtc/call/audio_send_stream.h @@ -25,6 +25,7 @@ #include "api/crypto/frame_encryptor_interface.h" #include "api/frame_transformer_interface.h" #include "api/rtp_parameters.h" +#include "api/rtp_sender_setparameters_callback.h" #include "api/scoped_refptr.h" #include "call/audio_sender.h" #include "call/rtp_config.h" @@ -175,7 +176,8 @@ class AudioSendStream : public AudioSender { virtual const webrtc::AudioSendStream::Config& GetConfig() const = 0; // Reconfigure the stream according to the Configuration. - virtual void Reconfigure(const Config& config) = 0; + virtual void Reconfigure(const Config& config, + SetParametersCallback callback) = 0; // Starts stream activity. // When a stream is active, it can receive, process and deliver packets. diff --git a/third_party/libwebrtc/call/call_config.cc b/third_party/libwebrtc/call/call_config.cc index 23b60ce436a2..93f6b1aec440 100644 --- a/third_party/libwebrtc/call/call_config.cc +++ b/third_party/libwebrtc/call/call_config.cc @@ -31,6 +31,7 @@ RtpTransportConfig CallConfig::ExtractTransportConfig() const { network_state_predictor_factory; transportConfig.task_queue_factory = task_queue_factory; transportConfig.trials = trials; + transportConfig.pacer_burst_interval = pacer_burst_interval; return transportConfig; } diff --git a/third_party/libwebrtc/call/call_config.h b/third_party/libwebrtc/call/call_config.h index 3072fa452f01..6df4ab7ed4a9 100644 --- a/third_party/libwebrtc/call/call_config.h +++ b/third_party/libwebrtc/call/call_config.h @@ -78,6 +78,9 @@ struct CallConfig { rtp_transport_controller_send_factory = nullptr; Metronome* metronome = nullptr; + + // The burst interval of the pacer, see TaskQueuePacedSender constructor. + absl::optional pacer_burst_interval; }; } // namespace webrtc diff --git a/third_party/libwebrtc/call/call_perf_tests.cc b/third_party/libwebrtc/call/call_perf_tests.cc index 9379dce83307..d59b70418f76 100644 --- a/third_party/libwebrtc/call/call_perf_tests.cc +++ b/third_party/libwebrtc/call/call_perf_tests.cc @@ -267,8 +267,11 @@ void CallPerfTest::TestAudioVideoSync(FecMode fec, AudioSendStream::Config audio_send_config(audio_send_transport.get()); audio_send_config.rtp.ssrc = kAudioSendSsrc; + // TODO(bugs.webrtc.org/14683): Let the tests fail with invalid config. 
audio_send_config.send_codec_spec = AudioSendStream::Config::SendCodecSpec( - kAudioSendPayloadType, {"ISAC", 16000, 1}); + kAudioSendPayloadType, {"OPUS", 48000, 2}); + audio_send_config.min_bitrate_bps = 6000; + audio_send_config.max_bitrate_bps = 510000; audio_send_config.encoder_factory = CreateBuiltinAudioEncoderFactory(); audio_send_stream = sender_call_->CreateAudioSendStream(audio_send_config); @@ -290,7 +293,7 @@ void CallPerfTest::TestAudioVideoSync(FecMode fec, audio_recv_config.sync_group = kSyncGroup; audio_recv_config.decoder_factory = audio_decoder_factory_; audio_recv_config.decoder_map = { - {kAudioSendPayloadType, {"ISAC", 16000, 1}}}; + {kAudioSendPayloadType, {"OPUS", 48000, 2}}}; if (create_first == CreateOrder::kAudioFirst) { audio_receive_stream = @@ -775,13 +778,9 @@ TEST_F(CallPerfTest, Bitrate_Kbps_NoPadWithoutMinTransmitBitrate) { #endif TEST_F(CallPerfTest, MAYBE_KeepsHighBitrateWhenReconfiguringSender) { static const uint32_t kInitialBitrateKbps = 400; + static const uint32_t kInitialBitrateOverheadKpbs = 6; static const uint32_t kReconfigureThresholdKbps = 600; - // We get lower bitrate than expected by this test if the following field - // trial is enabled. - test::ScopedKeyValueConfig field_trials( - field_trials_, "WebRTC-SendSideBwe-WithOverhead/Disabled/"); - class VideoStreamFactory : public VideoEncoderConfig::VideoStreamFactoryInterface { public: @@ -821,9 +820,10 @@ TEST_F(CallPerfTest, MAYBE_KeepsHighBitrateWhenReconfiguringSender) { // First time initialization. Frame size is known. // `expected_bitrate` is affected by bandwidth estimation before the // first frame arrives to the encoder. - uint32_t expected_bitrate = last_set_bitrate_kbps_ > 0 - ? last_set_bitrate_kbps_ - : kInitialBitrateKbps; + uint32_t expected_bitrate = + last_set_bitrate_kbps_ > 0 + ? last_set_bitrate_kbps_ + : kInitialBitrateKbps - kInitialBitrateOverheadKpbs; EXPECT_EQ(expected_bitrate, config->startBitrate) << "Encoder not initialized at expected bitrate."; EXPECT_EQ(kDefaultWidth, config->width); diff --git a/third_party/libwebrtc/call/rtp_transport_config.h b/third_party/libwebrtc/call/rtp_transport_config.h index f2030b367275..6c94f7d91111 100644 --- a/third_party/libwebrtc/call/rtp_transport_config.h +++ b/third_party/libwebrtc/call/rtp_transport_config.h @@ -44,6 +44,9 @@ struct RtpTransportConfig { // Key-value mapping of internal configurations to apply, // e.g. field trials. const FieldTrialsView* trials = nullptr; + + // The burst interval of the pacer, see TaskQueuePacedSender constructor. 
+ absl::optional pacer_burst_interval; }; } // namespace webrtc diff --git a/third_party/libwebrtc/call/rtp_transport_controller_send.cc b/third_party/libwebrtc/call/rtp_transport_controller_send.cc index 3ecec98b800b..940dff7894ea 100644 --- a/third_party/libwebrtc/call/rtp_transport_controller_send.cc +++ b/third_party/libwebrtc/call/rtp_transport_controller_send.cc @@ -65,10 +65,6 @@ bool IsEnabled(const FieldTrialsView& trials, absl::string_view key) { return absl::StartsWith(trials.Lookup(key), "Enabled"); } -bool IsDisabled(const FieldTrialsView& trials, absl::string_view key) { - return absl::StartsWith(trials.Lookup(key), "Disabled"); -} - bool IsRelayed(const rtc::NetworkRoute& route) { return route.local.uses_turn() || route.remote.uses_turn(); } @@ -84,53 +80,53 @@ RtpTransportControllerSend::PacerSettings::PacerSettings( RtpTransportControllerSend::RtpTransportControllerSend( Clock* clock, - webrtc::RtcEventLog* event_log, - NetworkStatePredictorFactoryInterface* predictor_factory, - NetworkControllerFactoryInterface* controller_factory, - const BitrateConstraints& bitrate_config, - TaskQueueFactory* task_queue_factory, - const FieldTrialsView& trials) + const RtpTransportConfig& config) : clock_(clock), - event_log_(event_log), - task_queue_factory_(task_queue_factory), - bitrate_configurator_(bitrate_config), + event_log_(config.event_log), + task_queue_factory_(config.task_queue_factory), + bitrate_configurator_(config.bitrate_config), pacer_started_(false), - pacer_settings_(trials), + pacer_settings_(*config.trials), pacer_(clock, &packet_router_, - trials, - task_queue_factory, + *config.trials, + config.task_queue_factory, pacer_settings_.holdback_window.Get(), - pacer_settings_.holdback_packets.Get()), + pacer_settings_.holdback_packets.Get(), + config.pacer_burst_interval), observer_(nullptr), - controller_factory_override_(controller_factory), + controller_factory_override_(config.network_controller_factory), controller_factory_fallback_( - std::make_unique(predictor_factory)), + std::make_unique( + config.network_state_predictor_factory)), process_interval_(controller_factory_fallback_->GetProcessInterval()), last_report_block_time_(Timestamp::Millis(clock_->TimeInMilliseconds())), reset_feedback_on_route_change_( - !IsEnabled(trials, "WebRTC-Bwe-NoFeedbackReset")), - send_side_bwe_with_overhead_( - !IsDisabled(trials, "WebRTC-SendSideBwe-WithOverhead")), + !IsEnabled(*config.trials, "WebRTC-Bwe-NoFeedbackReset")), add_pacing_to_cwin_( - IsEnabled(trials, "WebRTC-AddPacingToCongestionWindowPushback")), + IsEnabled(*config.trials, + "WebRTC-AddPacingToCongestionWindowPushback")), relay_bandwidth_cap_("relay_cap", DataRate::PlusInfinity()), transport_overhead_bytes_per_packet_(0), network_available_(false), congestion_window_size_(DataSize::PlusInfinity()), is_congested_(false), retransmission_rate_limiter_(clock, kRetransmitWindowSizeMs), - task_queue_(trials, "rtp_send_controller", task_queue_factory), - field_trials_(trials) { + task_queue_(*config.trials, + "rtp_send_controller", + config.task_queue_factory), + field_trials_(*config.trials) { ParseFieldTrial({&relay_bandwidth_cap_}, - trials.Lookup("WebRTC-Bwe-NetworkRouteConstraints")); - initial_config_.constraints = ConvertConstraints(bitrate_config, clock_); - initial_config_.event_log = event_log; - initial_config_.key_value_config = &trials; - RTC_DCHECK(bitrate_config.start_bitrate_bps > 0); + config.trials->Lookup("WebRTC-Bwe-NetworkRouteConstraints")); + initial_config_.constraints = + 
ConvertConstraints(config.bitrate_config, clock_); + initial_config_.event_log = config.event_log; + initial_config_.key_value_config = config.trials; + RTC_DCHECK(config.bitrate_config.start_bitrate_bps > 0); - pacer_.SetPacingRates(DataRate::BitsPerSec(bitrate_config.start_bitrate_bps), - DataRate::Zero()); + pacer_.SetPacingRates( + DataRate::BitsPerSec(config.bitrate_config.start_bitrate_bps), + DataRate::Zero()); } RtpTransportControllerSend::~RtpTransportControllerSend() { @@ -552,9 +548,7 @@ void RtpTransportControllerSend::OnAddPacket( RTC_DCHECK_RUN_ON(&task_queue_); feedback_demuxer_.AddPacket(packet_info); transport_feedback_adapter_.AddPacket( - packet_info, - send_side_bwe_with_overhead_ ? transport_overhead_bytes_per_packet_ : 0, - creation_time); + packet_info, transport_overhead_bytes_per_packet_, creation_time); }); } diff --git a/third_party/libwebrtc/call/rtp_transport_controller_send.h b/third_party/libwebrtc/call/rtp_transport_controller_send.h index 88f5b2bae496..51bda7344503 100644 --- a/third_party/libwebrtc/call/rtp_transport_controller_send.h +++ b/third_party/libwebrtc/call/rtp_transport_controller_send.h @@ -25,6 +25,7 @@ #include "api/transport/network_control.h" #include "api/units/data_rate.h" #include "call/rtp_bitrate_configurator.h" +#include "call/rtp_transport_config.h" #include "call/rtp_transport_controller_send_interface.h" #include "call/rtp_video_sender.h" #include "modules/congestion_controller/rtp/control_handler.h" @@ -50,14 +51,7 @@ class RtpTransportControllerSend final public TransportFeedbackObserver, public NetworkStateEstimateObserver { public: - RtpTransportControllerSend( - Clock* clock, - RtcEventLog* event_log, - NetworkStatePredictorFactoryInterface* predictor_factory, - NetworkControllerFactoryInterface* controller_factory, - const BitrateConstraints& bitrate_config, - TaskQueueFactory* task_queue_factory, - const FieldTrialsView& trials); + RtpTransportControllerSend(Clock* clock, const RtpTransportConfig& config); ~RtpTransportControllerSend() override; RtpTransportControllerSend(const RtpTransportControllerSend&) = delete; @@ -195,7 +189,6 @@ class RtpTransportControllerSend final StreamsConfig streams_config_ RTC_GUARDED_BY(task_queue_); const bool reset_feedback_on_route_change_; - const bool send_side_bwe_with_overhead_; const bool add_pacing_to_cwin_; FieldTrialParameter relay_bandwidth_cap_; diff --git a/third_party/libwebrtc/call/rtp_transport_controller_send_factory.h b/third_party/libwebrtc/call/rtp_transport_controller_send_factory.h index 8cdae8cfbe65..6349302e45e4 100644 --- a/third_party/libwebrtc/call/rtp_transport_controller_send_factory.h +++ b/third_party/libwebrtc/call/rtp_transport_controller_send_factory.h @@ -25,10 +25,7 @@ class RtpTransportControllerSendFactory const RtpTransportConfig& config, Clock* clock) override { RTC_CHECK(config.trials); - return std::make_unique( - clock, config.event_log, config.network_state_predictor_factory, - config.network_controller_factory, config.bitrate_config, - config.task_queue_factory, *config.trials); + return std::make_unique(clock, config); } virtual ~RtpTransportControllerSendFactory() {} diff --git a/third_party/libwebrtc/call/rtp_video_sender.cc b/third_party/libwebrtc/call/rtp_video_sender.cc index 5d2d1f128823..de19b97c66f3 100644 --- a/third_party/libwebrtc/call/rtp_video_sender.cc +++ b/third_party/libwebrtc/call/rtp_video_sender.cc @@ -375,9 +375,6 @@ RtpVideoSender::RtpVideoSender( const FieldTrialsView& field_trials, TaskQueueFactory* task_queue_factory) : 
field_trials_(field_trials), - send_side_bwe_with_overhead_(!absl::StartsWith( - field_trials_.Lookup("WebRTC-SendSideBwe-WithOverhead"), - "Disabled")), use_frame_rate_for_overhead_(absl::StartsWith( field_trials_.Lookup("WebRTC-Video-UseFrameRateForOverhead"), "Enabled")), @@ -409,7 +406,7 @@ RtpVideoSender::RtpVideoSender( frame_count_observer_(observers.frame_count_observer) { transport_checker_.Detach(); RTC_DCHECK_EQ(rtp_config_.ssrcs.size(), rtp_streams_.size()); - if (send_side_bwe_with_overhead_ && has_packet_feedback_) + if (has_packet_feedback_) transport_->IncludeOverheadInPacedSender(); // SSRCs are assumed to be sorted in the same order as `rtp_modules`. for (uint32_t ssrc : rtp_config_.ssrcs) { @@ -480,33 +477,24 @@ RtpVideoSender::~RtpVideoSender() { RTC_DCHECK(!registered_for_feedback_); } -void RtpVideoSender::SetActive(bool active) { +void RtpVideoSender::Stop() { RTC_DCHECK_RUN_ON(&transport_checker_); MutexLock lock(&mutex_); - if (active_ == active) + if (!active_) return; - const std::vector active_modules(rtp_streams_.size(), active); + const std::vector active_modules(rtp_streams_.size(), false); SetActiveModulesLocked(active_modules); - - auto* feedback_provider = transport_->GetStreamFeedbackProvider(); - if (active && !registered_for_feedback_) { - feedback_provider->RegisterStreamFeedbackObserver(rtp_config_.ssrcs, this); - registered_for_feedback_ = true; - } else if (!active && registered_for_feedback_) { - feedback_provider->DeRegisterStreamFeedbackObserver(this); - registered_for_feedback_ = false; - } } -void RtpVideoSender::SetActiveModules(const std::vector active_modules) { +void RtpVideoSender::SetActiveModules(const std::vector& active_modules) { RTC_DCHECK_RUN_ON(&transport_checker_); MutexLock lock(&mutex_); return SetActiveModulesLocked(active_modules); } void RtpVideoSender::SetActiveModulesLocked( - const std::vector active_modules) { + const std::vector& active_modules) { RTC_DCHECK_RUN_ON(&transport_checker_); RTC_DCHECK_EQ(rtp_streams_.size(), active_modules.size()); active_ = false; @@ -527,6 +515,17 @@ void RtpVideoSender::SetActiveModulesLocked( // prevent any stray packets in the pacer from asynchronously arriving // to a disabled module. transport_->packet_router()->RemoveSendRtpModule(&rtp_module); + + // Clear the pacer queue of any packets pertaining to this module. + transport_->packet_sender()->RemovePacketsForSsrc(rtp_module.SSRC()); + if (rtp_module.RtxSsrc().has_value()) { + transport_->packet_sender()->RemovePacketsForSsrc( + *rtp_module.RtxSsrc()); + } + if (rtp_module.FlexfecSsrc().has_value()) { + transport_->packet_sender()->RemovePacketsForSsrc( + *rtp_module.FlexfecSsrc()); + } } // If set to false this module won't send media. 
@@ -538,6 +537,17 @@ void RtpVideoSender::SetActiveModulesLocked( /*remb_candidate=*/true); } } + if (!active_) { + auto* feedback_provider = transport_->GetStreamFeedbackProvider(); + if (registered_for_feedback_) { + feedback_provider->DeRegisterStreamFeedbackObserver(this); + registered_for_feedback_ = false; + } + } else if (!registered_for_feedback_) { + auto* feedback_provider = transport_->GetStreamFeedbackProvider(); + feedback_provider->RegisterStreamFeedbackObserver(rtp_config_.ssrcs, this); + registered_for_feedback_ = true; + } } bool RtpVideoSender::IsActive() { @@ -835,7 +845,7 @@ void RtpVideoSender::OnBitrateUpdated(BitrateAllocationUpdate update, DataSize max_total_packet_size = DataSize::Bytes( rtp_config_.max_packet_size + transport_overhead_bytes_per_packet_); uint32_t payload_bitrate_bps = update.target_bitrate.bps(); - if (send_side_bwe_with_overhead_ && has_packet_feedback_) { + if (has_packet_feedback_) { DataRate overhead_rate = CalculateOverheadRate(update.target_bitrate, max_total_packet_size, packet_overhead, Frequency::Hertz(framerate)); @@ -869,7 +879,7 @@ void RtpVideoSender::OnBitrateUpdated(BitrateAllocationUpdate update, loss_mask_vector_.clear(); uint32_t encoder_overhead_rate_bps = 0; - if (send_side_bwe_with_overhead_ && has_packet_feedback_) { + if (has_packet_feedback_) { // TODO(srte): The packet size should probably be the same as in the // CalculateOverheadRate call above (just max_total_packet_size), it doesn't // make sense to use different packet rates for different overhead @@ -882,12 +892,11 @@ void RtpVideoSender::OnBitrateUpdated(BitrateAllocationUpdate update, encoder_overhead_rate.bps(), update.target_bitrate.bps() - encoder_target_rate_bps_); } - // When the field trial "WebRTC-SendSideBwe-WithOverhead" is enabled - // protection_bitrate includes overhead. const uint32_t media_rate = encoder_target_rate_bps_ + encoder_overhead_rate_bps + packetization_rate_bps; RTC_DCHECK_GE(update.target_bitrate, DataRate::BitsPerSec(media_rate)); + // `protection_bitrate_bps_` includes overhead. protection_bitrate_bps_ = update.target_bitrate.bps() - media_rate; } diff --git a/third_party/libwebrtc/call/rtp_video_sender.h b/third_party/libwebrtc/call/rtp_video_sender.h index 9804bd8630fe..9666b8991669 100644 --- a/third_party/libwebrtc/call/rtp_video_sender.h +++ b/third_party/libwebrtc/call/rtp_video_sender.h @@ -95,13 +95,11 @@ class RtpVideoSender : public RtpVideoSenderInterface, RtpVideoSender(const RtpVideoSender&) = delete; RtpVideoSender& operator=(const RtpVideoSender&) = delete; - // RtpVideoSender will only route packets if being active, all packets will be - // dropped otherwise. - void SetActive(bool active) RTC_LOCKS_EXCLUDED(mutex_) override; // Sets the sending status of the rtp modules and appropriately sets the // payload router to active if any rtp modules are active. 
- void SetActiveModules(std::vector active_modules) + void SetActiveModules(const std::vector& active_modules) RTC_LOCKS_EXCLUDED(mutex_) override; + void Stop() RTC_LOCKS_EXCLUDED(mutex_) override; bool IsActive() RTC_LOCKS_EXCLUDED(mutex_) override; void OnNetworkAvailability(bool network_available) @@ -157,7 +155,7 @@ class RtpVideoSender : public RtpVideoSenderInterface, private: bool IsActiveLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - void SetActiveModulesLocked(std::vector active_modules) + void SetActiveModulesLocked(const std::vector& active_modules) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); void UpdateModuleSendingState() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); void ConfigureProtection(); @@ -170,7 +168,6 @@ class RtpVideoSender : public RtpVideoSenderInterface, Frequency framerate) const; const FieldTrialsView& field_trials_; - const bool send_side_bwe_with_overhead_; const bool use_frame_rate_for_overhead_; const bool has_packet_feedback_; diff --git a/third_party/libwebrtc/call/rtp_video_sender_interface.h b/third_party/libwebrtc/call/rtp_video_sender_interface.h index acb68e3ae23c..3f2877155a4c 100644 --- a/third_party/libwebrtc/call/rtp_video_sender_interface.h +++ b/third_party/libwebrtc/call/rtp_video_sender_interface.h @@ -31,12 +31,12 @@ struct FecProtectionParams; class RtpVideoSenderInterface : public EncodedImageCallback, public FecControllerOverride { public: - // RtpVideoSender will only route packets if being active, all - // packets will be dropped otherwise. - virtual void SetActive(bool active) = 0; // Sets the sending status of the rtp modules and appropriately sets the // RtpVideoSender to active if any rtp modules are active. + // A module will only send packets if it is active. + virtual void SetActiveModules(const std::vector& active_modules) = 0; + // Set the sending status of all rtp modules to inactive. + virtual void Stop() = 0; virtual bool IsActive() = 0; virtual void OnNetworkAvailability(bool network_available) = 0; diff --git a/third_party/libwebrtc/call/rtp_video_sender_unittest.cc b/third_party/libwebrtc/call/rtp_video_sender_unittest.cc index 196e29b801a5..da2bed649bff 100644 --- a/third_party/libwebrtc/call/rtp_video_sender_unittest.cc +++ b/third_party/libwebrtc/call/rtp_video_sender_unittest.cc @@ -129,13 +129,14 @@ class RtpVideoSenderTestFixture { payload_type)), send_delay_stats_(time_controller_.GetClock()), bitrate_config_(GetBitrateConfig()), - transport_controller_(time_controller_.GetClock(), - &event_log_, - nullptr, - nullptr, - bitrate_config_, - time_controller_.GetTaskQueueFactory(), - field_trials ? *field_trials : field_trials_), + transport_controller_( + time_controller_.GetClock(), + RtpTransportConfig{ + .bitrate_config = bitrate_config_, + .event_log = &event_log_, + .task_queue_factory = time_controller_.GetTaskQueueFactory(), + .trials = field_trials ?
field_trials : &field_trials_, + }), stats_proxy_(time_controller_.GetClock(), config_, VideoEncoderConfig::ContentType::kRealtimeVideo, @@ -186,14 +187,14 @@ class RtpVideoSenderTestFixture { /*frame_transformer=*/nullptr, field_trials) {} - ~RtpVideoSenderTestFixture() { SetActive(false); } + ~RtpVideoSenderTestFixture() { Stop(); } RtpVideoSender* router() { return router_.get(); } MockTransport& transport() { return transport_; } void AdvanceTime(TimeDelta delta) { time_controller_.AdvanceTime(delta); } - void SetActive(bool active) { - RunOnTransportQueue([&]() { router_->SetActive(active); }); + void Stop() { + RunOnTransportQueue([&]() { router_->Stop(); }); } void SetActiveModules(const std::vector& active_modules) { @@ -248,15 +249,15 @@ TEST(RtpVideoSenderTest, SendOnOneModule) { EXPECT_NE(EncodedImageCallback::Result::OK, test.router()->OnEncodedImage(encoded_image, nullptr).error); - test.SetActive(true); + test.SetActiveModules({true}); EXPECT_EQ(EncodedImageCallback::Result::OK, test.router()->OnEncodedImage(encoded_image, nullptr).error); - test.SetActive(false); + test.SetActiveModules({false}); EXPECT_NE(EncodedImageCallback::Result::OK, test.router()->OnEncodedImage(encoded_image, nullptr).error); - test.SetActive(true); + test.SetActiveModules({true}); EXPECT_EQ(EncodedImageCallback::Result::OK, test.router()->OnEncodedImage(encoded_image, nullptr).error); } @@ -275,7 +276,7 @@ TEST(RtpVideoSenderTest, SendSimulcastSetActive) { CodecSpecificInfo codec_info; codec_info.codecType = kVideoCodecVP8; - test.SetActive(true); + test.SetActiveModules({true, true}); EXPECT_EQ(EncodedImageCallback::Result::OK, test.router()->OnEncodedImage(encoded_image_1, &codec_info).error); @@ -285,7 +286,7 @@ TEST(RtpVideoSenderTest, SendSimulcastSetActive) { test.router()->OnEncodedImage(encoded_image_2, &codec_info).error); // Inactive. 
- test.SetActive(false); + test.Stop(); EXPECT_NE(EncodedImageCallback::Result::OK, test.router()->OnEncodedImage(encoded_image_1, &codec_info).error); EXPECT_NE(EncodedImageCallback::Result::OK, @@ -369,7 +370,7 @@ TEST( TEST(RtpVideoSenderTest, CreateWithNoPreviousStates) { RtpVideoSenderTestFixture test({kSsrc1, kSsrc2}, {kRtxSsrc1, kRtxSsrc2}, kPayloadType, {}); - test.SetActive(true); + test.SetActiveModules({true, true}); std::map initial_states = test.router()->GetRtpPayloadStates(); @@ -394,7 +395,7 @@ TEST(RtpVideoSenderTest, CreateWithPreviousStates) { RtpVideoSenderTestFixture test({kSsrc1, kSsrc2}, {kRtxSsrc1, kRtxSsrc2}, kPayloadType, states); - test.SetActive(true); + test.SetActiveModules({true, true}); std::map initial_states = test.router()->GetRtpPayloadStates(); @@ -434,7 +435,7 @@ TEST(RtpVideoSenderTest, FrameCountCallbacks) { test.router()->OnEncodedImage(encoded_image, nullptr).error); ::testing::Mock::VerifyAndClearExpectations(&callback); - test.SetActive(true); + test.SetActiveModules({true}); FrameCounts frame_counts; EXPECT_CALL(callback, FrameCountUpdated(_, kSsrc1)) @@ -463,7 +464,7 @@ TEST(RtpVideoSenderTest, FrameCountCallbacks) { TEST(RtpVideoSenderTest, DoesNotRetrasmitAckedPackets) { RtpVideoSenderTestFixture test({kSsrc1, kSsrc2}, {kRtxSsrc1, kRtxSsrc2}, kPayloadType, {}); - test.SetActive(true); + test.SetActiveModules({true, true}); constexpr uint8_t kPayload = 'a'; EncodedImage encoded_image; @@ -628,7 +629,7 @@ TEST(RtpVideoSenderTest, RetransmitsOnTransportWideLossInfo) { TEST(RtpVideoSenderTest, EarlyRetransmits) { RtpVideoSenderTestFixture test({kSsrc1, kSsrc2}, {kRtxSsrc1, kRtxSsrc2}, kPayloadType, {}); - test.SetActive(true); + test.SetActiveModules({true, true}); const uint8_t kPayload[1] = {'a'}; EncodedImage encoded_image; @@ -723,7 +724,7 @@ TEST(RtpVideoSenderTest, EarlyRetransmits) { TEST(RtpVideoSenderTest, SupportsDependencyDescriptor) { RtpVideoSenderTestFixture test({kSsrc1}, {}, kPayloadType, {}); - test.SetActive(true); + test.SetActiveModules({true}); RtpHeaderExtensionMap extensions; extensions.Register( @@ -796,7 +797,7 @@ TEST(RtpVideoSenderTest, sent_packets.emplace_back(&extensions).Parse(packet, length)); return true; }); - test.SetActive(true); + test.SetActiveModules({true}); EncodedImage key_frame_image; key_frame_image._frameType = VideoFrameType::kVideoFrameKey; @@ -830,7 +831,7 @@ TEST(RtpVideoSenderTest, TEST(RtpVideoSenderTest, SupportsDependencyDescriptorForVp9) { RtpVideoSenderTestFixture test({kSsrc1}, {}, kPayloadType, {}); - test.SetActive(true); + test.SetActiveModules({true}); RtpHeaderExtensionMap extensions; extensions.Register( @@ -886,7 +887,7 @@ TEST(RtpVideoSenderTest, SupportsDependencyDescriptorForVp9) { TEST(RtpVideoSenderTest, SupportsDependencyDescriptorForVp9NotProvidedByEncoder) { RtpVideoSenderTestFixture test({kSsrc1}, {}, kPayloadType, {}); - test.SetActive(true); + test.SetActiveModules({true}); RtpHeaderExtensionMap extensions; extensions.Register( @@ -941,7 +942,7 @@ TEST(RtpVideoSenderTest, GenerateDependecyDescriptorForGenericCodecs) { test::ScopedKeyValueConfig field_trials( "WebRTC-GenericCodecDependencyDescriptor/Enabled/"); RtpVideoSenderTestFixture test({kSsrc1}, {}, kPayloadType, {}, &field_trials); - test.SetActive(true); + test.SetActiveModules({true}); RtpHeaderExtensionMap extensions; extensions.Register( @@ -987,7 +988,7 @@ TEST(RtpVideoSenderTest, GenerateDependecyDescriptorForGenericCodecs) { TEST(RtpVideoSenderTest, SupportsStoppingUsingDependencyDescriptor) { 
RtpVideoSenderTestFixture test({kSsrc1}, {}, kPayloadType, {}); - test.SetActive(true); + test.SetActiveModules({true}); RtpHeaderExtensionMap extensions; extensions.Register( @@ -1072,7 +1073,7 @@ TEST(RtpVideoSenderTest, OverheadIsSubtractedFromTargetBitrate) { kRtpHeaderSizeBytes + kTransportPacketOverheadBytes; RtpVideoSenderTestFixture test({kSsrc1}, {}, kPayloadType, {}, &field_trials); test.router()->OnTransportOverheadChanged(kTransportPacketOverheadBytes); - test.SetActive(true); + test.SetActiveModules({true}); { test.router()->OnBitrateUpdated(CreateBitrateAllocationUpdate(300000), @@ -1097,4 +1098,81 @@ TEST(RtpVideoSenderTest, OverheadIsSubtractedFromTargetBitrate) { } } +TEST(RtpVideoSenderTest, ClearsPendingPacketsOnInactivation) { + RtpVideoSenderTestFixture test({kSsrc1}, {kRtxSsrc1}, kPayloadType, {}); + test.SetActiveModules({true}); + + RtpHeaderExtensionMap extensions; + extensions.Register( + kDependencyDescriptorExtensionId); + std::vector sent_packets; + ON_CALL(test.transport(), SendRtp) + .WillByDefault([&](const uint8_t* packet, size_t length, + const PacketOptions& options) { + sent_packets.emplace_back(&extensions); + EXPECT_TRUE(sent_packets.back().Parse(packet, length)); + return true; + }); + + // Set a very low bitrate. + test.router()->OnBitrateUpdated( + CreateBitrateAllocationUpdate(/*rate_bps=*/30'000), + /*framerate=*/30); + + // Create and send a large keyframe. + const size_t kImageSizeBytes = 10000; + constexpr uint8_t kPayload[kImageSizeBytes] = {'a'}; + EncodedImage encoded_image; + encoded_image.SetTimestamp(1); + encoded_image.capture_time_ms_ = 2; + encoded_image._frameType = VideoFrameType::kVideoFrameKey; + encoded_image.SetEncodedData( + EncodedImageBuffer::Create(kPayload, sizeof(kPayload))); + EXPECT_EQ(test.router() + ->OnEncodedImage(encoded_image, /*codec_specific=*/nullptr) + .error, + EncodedImageCallback::Result::OK); + + // Advance time a small amount, check that sent data is only part of the + // image. + test.AdvanceTime(TimeDelta::Millis(5)); + DataSize transmittedPayload = DataSize::Zero(); + for (const RtpPacket& packet : sent_packets) { + transmittedPayload += DataSize::Bytes(packet.payload_size()); + // Make sure we don't see the end of the frame. + EXPECT_FALSE(packet.Marker()); + } + EXPECT_GT(transmittedPayload, DataSize::Zero()); + EXPECT_LT(transmittedPayload, DataSize::Bytes(kImageSizeBytes / 4)); + + // Record the RTP timestamp of the first frame. + const uint32_t first_frame_timestamp = sent_packets[0].Timestamp(); + sent_packets.clear(); + + // Disable the sending module and advance time slightly. No packets should be + // sent. + test.SetActiveModules({false}); + test.AdvanceTime(TimeDelta::Millis(20)); + EXPECT_TRUE(sent_packets.empty()); + + // Reactive the send module - any packets should have been removed, so nothing + // should be transmitted. + test.SetActiveModules({true}); + test.AdvanceTime(TimeDelta::Millis(33)); + EXPECT_TRUE(sent_packets.empty()); + + // Send a new frame. + encoded_image.SetTimestamp(3); + encoded_image.capture_time_ms_ = 4; + EXPECT_EQ(test.router() + ->OnEncodedImage(encoded_image, /*codec_specific=*/nullptr) + .error, + EncodedImageCallback::Result::OK); + test.AdvanceTime(TimeDelta::Millis(33)); + + // Advance time, check we get new packets - but only for the second frame. 
+ EXPECT_FALSE(sent_packets.empty()); + EXPECT_NE(sent_packets[0].Timestamp(), first_frame_timestamp); +} + } // namespace webrtc diff --git a/third_party/libwebrtc/call/test/mock_audio_send_stream.h b/third_party/libwebrtc/call/test/mock_audio_send_stream.h index 4164dd550e11..1993de8de04f 100644 --- a/third_party/libwebrtc/call/test/mock_audio_send_stream.h +++ b/third_party/libwebrtc/call/test/mock_audio_send_stream.h @@ -25,7 +25,10 @@ class MockAudioSendStream : public AudioSendStream { GetConfig, (), (const, override)); - MOCK_METHOD(void, Reconfigure, (const Config& config), (override)); + MOCK_METHOD(void, + Reconfigure, + (const Config& config, SetParametersCallback callback), + (override)); MOCK_METHOD(void, Start, (), (override)); MOCK_METHOD(void, Stop, (), (override)); // GMock doesn't like move-only types, such as std::unique_ptr. diff --git a/third_party/libwebrtc/call/version.cc b/third_party/libwebrtc/call/version.cc index 804dd35f0843..4b2b9cc22e7c 100644 --- a/third_party/libwebrtc/call/version.cc +++ b/third_party/libwebrtc/call/version.cc @@ -13,7 +13,7 @@ namespace webrtc { // The timestamp is always in UTC. -const char* const kSourceTimestamp = "WebRTC source stamp 2022-11-07T04:08:20"; +const char* const kSourceTimestamp = "WebRTC source stamp 2022-12-14T04:03:07"; void LoadWebRTCVersionInRegister() { // Using volatile to instruct the compiler to not optimize `p` away even diff --git a/third_party/libwebrtc/call/video_send_stream.h b/third_party/libwebrtc/call/video_send_stream.h index 5fd0bebe205a..de18fc7b92e9 100644 --- a/third_party/libwebrtc/call/video_send_stream.h +++ b/third_party/libwebrtc/call/video_send_stream.h @@ -23,12 +23,14 @@ #include "api/crypto/crypto_options.h" #include "api/frame_transformer_interface.h" #include "api/rtp_parameters.h" +#include "api/rtp_sender_setparameters_callback.h" #include "api/scoped_refptr.h" #include "api/video/video_content_type.h" #include "api/video/video_frame.h" #include "api/video/video_sink_interface.h" #include "api/video/video_source_interface.h" #include "api/video/video_stream_encoder_settings.h" +#include "api/video_codecs/scalability_mode.h" #include "call/rtp_config.h" #include "common_video/frame_counts.h" #include "common_video/include/quality_limitation_reason.h" @@ -92,6 +94,7 @@ class VideoSendStream { uint64_t total_encode_time_ms = 0; uint64_t total_encoded_bytes_target = 0; uint32_t huge_frames_sent = 0; + absl::optional scalability_mode; }; struct Stats { @@ -215,11 +218,15 @@ class VideoSendStream { // Note: This starts stream activity if it is inactive and one of the layers // is active. This stops stream activity if it is active and all layers are // inactive. - virtual void UpdateActiveSimulcastLayers(std::vector active_layers) = 0; + // `active_layers` should have the same size as the number of configured + // simulcast layers or one if only one rtp stream is used. + virtual void StartPerRtpStream(std::vector active_layers) = 0; // Starts stream activity. // When a stream is active, it can receive, process and deliver packets. + // Prefer to use StartPerRtpStream. virtual void Start() = 0; + // Stops stream activity. // When a stream is stopped, it can't receive, process or deliver packets. virtual void Stop() = 0; @@ -227,9 +234,9 @@ class VideoSendStream { // Accessor for determining if the stream is active. 
This is an inexpensive // call that must be made on the same thread as `Start()` and `Stop()` methods // are called on and will return `true` iff activity has been started either - // via `Start()` or `UpdateActiveSimulcastLayers()`. If activity is either + // via `Start()` or `StartPerRtpStream()`. If activity is either // stopped or is in the process of being stopped as a result of a call to - // either `Stop()` or `UpdateActiveSimulcastLayers()` where all layers were + // either `Stop()` or `StartPerRtpStream()` where all layers were // deactivated, the return value will be `false`. virtual bool started() = 0; @@ -251,6 +258,9 @@ class VideoSendStream { // with the VideoStream settings. virtual void ReconfigureVideoEncoder(VideoEncoderConfig config) = 0; + virtual void ReconfigureVideoEncoder(VideoEncoderConfig config, + SetParametersCallback callback) = 0; + virtual Stats GetStats() = 0; virtual void GenerateKeyFrame(const std::vector& rids) = 0; diff --git a/third_party/libwebrtc/docs/native-code/development/index.md b/third_party/libwebrtc/docs/native-code/development/index.md index f3cfd556bc5b..f8c65b276bb5 100644 --- a/third_party/libwebrtc/docs/native-code/development/index.md +++ b/third_party/libwebrtc/docs/native-code/development/index.md @@ -116,15 +116,17 @@ When you have Ninja project files generated (see previous section), compile For [Ninja][ninja] project files generated in `out/Default`: ``` -$ ninja -C out/Default +$ autoninja -C out/Default ``` To build everything in the generated folder (`out/Default`): ``` -$ ninja all -C out/Default +$ autoninja all -C out/Default ``` +`autoninja` is a wrapper that automatically provides optimal values for the arguments passed to `ninja`. + See [Ninja build rules][ninja-build-rules] to read more about difference between `ninja` and `ninja all`. 
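Note (not part of the patch): the `video_send_stream.h` hunk above renames `UpdateActiveSimulcastLayers()` to `StartPerRtpStream()` and adds a `ReconfigureVideoEncoder()` overload that takes a `SetParametersCallback`. Below is a minimal, hedged C++ sketch of how a caller might drive the new interface; it assumes `stream` is an already-created `webrtc::VideoSendStream` and that `SetParametersCallback` accepts a callable taking `webrtc::RTCError` (as suggested by the new `api/rtp_sender_setparameters_callback.h` include). The helper names are illustrative only.

```cpp
#include <utility>
#include <vector>

#include "api/rtc_error.h"
#include "call/video_send_stream.h"

// Illustrative helpers only; not part of the vendored change.
void StartMiddleLayerOnly(webrtc::VideoSendStream* stream) {
  // One entry per configured simulcast layer (or a single entry when only one
  // RTP stream is used). Replaces the old UpdateActiveSimulcastLayers() call.
  stream->StartPerRtpStream({false, true, false});
}

void ReconfigureAndReport(webrtc::VideoSendStream* stream,
                          webrtc::VideoEncoderConfig config) {
  // The new overload lets callers observe RTCError results from libwebrtc
  // instead of reconfiguring blindly.
  stream->ReconfigureVideoEncoder(std::move(config),
                                  [](webrtc::RTCError error) {
                                    // Inspect error.ok() / error.message() here
                                    // (sketch only).
                                  });
}
```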
diff --git a/third_party/libwebrtc/examples/BUILD.gn b/third_party/libwebrtc/examples/BUILD.gn index e683c192dc8e..7d87a01c77d0 100644 --- a/third_party/libwebrtc/examples/BUILD.gn +++ b/third_party/libwebrtc/examples/BUILD.gn @@ -690,6 +690,7 @@ if (is_linux || is_chromeos || is_win) { "../api:create_peerconnection_factory", "../api:libjingle_peerconnection_api", "../api:media_stream_interface", + "../api:rtp_sender_interface", "../api:scoped_refptr", "../api/audio:audio_mixer_api", "../api/audio_codecs:audio_codecs_api", diff --git a/third_party/libwebrtc/infra/config/commit-queue.cfg b/third_party/libwebrtc/infra/config/commit-queue.cfg index 1c703c7022f4..e6911694250a 100644 --- a/third_party/libwebrtc/infra/config/commit-queue.cfg +++ b/third_party/libwebrtc/infra/config/commit-queue.cfg @@ -46,12 +46,20 @@ config_groups { builders { name: "webrtc/try/android_arm_rel" } + builders { + name: "webrtc/try/android_arm_rel_reclient" + experiment_percentage: 100 + } builders { name: "webrtc/try/android_chromium_compile" } builders { name: "webrtc/try/android_compile_arm64_rel" } + builders { + name: "webrtc/try/android_compile_arm_dbg" + experiment_percentage: 100 + } builders { name: "webrtc/try/android_compile_arm_rel" } @@ -73,6 +81,10 @@ config_groups { builders { name: "webrtc/try/ios_compile_arm64_rel" } + builders { + name: "webrtc/try/ios_compile_arm64_rel_reclient" + experiment_percentage: 100 + } builders { name: "webrtc/try/ios_sim_x64_dbg_ios12" } @@ -121,6 +133,10 @@ config_groups { builders { name: "webrtc/try/linux_rel" } + builders { + name: "webrtc/try/linux_rel_reclient" + experiment_percentage: 100 + } builders { name: "webrtc/try/linux_tsan2" } @@ -154,6 +170,10 @@ config_groups { builders { name: "webrtc/try/mac_rel_m1" } + builders { + name: "webrtc/try/mac_rel_reclient" + experiment_percentage: 100 + } builders { name: "webrtc/try/presubmit" disable_reuse: true @@ -173,6 +193,10 @@ config_groups { builders { name: "webrtc/try/win_compile_x64_clang_rel" } + builders { + name: "webrtc/try/win_compile_x64_clang_rel_reclient" + experiment_percentage: 100 + } builders { name: "webrtc/try/win_compile_x86_clang_dbg" } @@ -219,9 +243,17 @@ config_groups { builders { name: "webrtc/try/android_arm_rel" } + builders { + name: "webrtc/try/android_arm_rel_reclient" + experiment_percentage: 100 + } builders { name: "webrtc/try/android_compile_arm64_rel" } + builders { + name: "webrtc/try/android_compile_arm_dbg" + experiment_percentage: 100 + } builders { name: "webrtc/try/android_compile_arm_rel" } @@ -243,6 +275,10 @@ config_groups { builders { name: "webrtc/try/ios_compile_arm64_rel" } + builders { + name: "webrtc/try/ios_compile_arm64_rel_reclient" + experiment_percentage: 100 + } builders { name: "webrtc/try/ios_sim_x64_dbg_ios12" } @@ -285,6 +321,10 @@ config_groups { builders { name: "webrtc/try/linux_rel" } + builders { + name: "webrtc/try/linux_rel_reclient" + experiment_percentage: 100 + } builders { name: "webrtc/try/linux_tsan2" } @@ -315,6 +355,10 @@ config_groups { builders { name: "webrtc/try/mac_rel_m1" } + builders { + name: "webrtc/try/mac_rel_reclient" + experiment_percentage: 100 + } builders { name: "webrtc/try/presubmit" disable_reuse: true @@ -328,6 +372,10 @@ config_groups { builders { name: "webrtc/try/win_compile_x64_clang_rel" } + builders { + name: "webrtc/try/win_compile_x64_clang_rel_reclient" + experiment_percentage: 100 + } builders { name: "webrtc/try/win_compile_x86_clang_dbg" } diff --git a/third_party/libwebrtc/infra/config/config.star 
b/third_party/libwebrtc/infra/config/config.star index c66d23b779bb..589f9e144010 100755 --- a/third_party/libwebrtc/infra/config/config.star +++ b/third_party/libwebrtc/infra/config/config.star @@ -597,6 +597,7 @@ def try_builder( properties = properties or {} properties["builder_group"] = "tryserver.webrtc" properties.update(make_goma_properties(enable_ats = goma_enable_ats, jobs = goma_jobs)) + properties.update(make_reclient_properties("rbe-webrtc-untrusted")) if cq != None: luci.cq_tryjob_verifier(name, cq_group = "cq", **cq) if branch_cq: @@ -628,9 +629,13 @@ def perf_builder(name, perf_cat, **kwargs): properties = make_goma_properties() properties.update(make_reclient_properties("rbe-webrtc-trusted")) properties["builder_group"] = "client.webrtc.perf" + dimensions = {"pool": "luci.webrtc.perf", "os": "Linux", "cores": "2"} + if "Android" in name: + # Android perf testers require more performant bots to finish under 3 hours. + dimensions["cores"] = "8" return webrtc_builder( name = name, - dimensions = {"pool": "luci.webrtc.perf", "os": "Linux"}, + dimensions = dimensions, properties = properties, bucket = "perf", service_account = "webrtc-ci-builder@chops-service-accounts.iam.gserviceaccount.com", @@ -703,11 +708,12 @@ ios_builder, ios_try_job = normal_builder_factory( # Actual builder configuration: -android_builder("Android32 (M Nexus5X)(dbg)", "Android|arm|dbg") -android_try_job("android_compile_arm_dbg", cq = None) +android_builder("Android32 (dbg)", "Android|arm|dbg") +android_try_job("android_compile_arm_dbg", cq = {"experiment_percentage": 100}) android_try_job("android_arm_dbg") -android_builder("Android32 (M Nexus5X)", "Android|arm|rel") +android_builder("Android32", "Android|arm|rel") android_try_job("android_arm_rel") +android_try_job("android_arm_rel_reclient", cq = {"experiment_percentage": 100}) android_builder("Android32 Builder arm", "Android|arm|size", perf_cat = "Android|arm|Builder|", prioritized = True) android_try_job("android_compile_arm_rel") perf_builder("Perf Android32 (M Nexus5)", "Android|arm|Tester|M Nexus5", triggered_by = ["Android32 Builder arm"]) @@ -716,7 +722,7 @@ perf_builder("Perf Android32 (O Pixel2)", "Android|arm|Tester|O Pixel2", trigger perf_builder("Perf Android32 (R Pixel5)", "Android|arm|Tester|R Pixel5", triggered_by = ["Android32 Builder arm"]) android_try_job("android_compile_arm64_dbg", cq = None) android_try_job("android_arm64_dbg", cq = None) -android_builder("Android64 (M Nexus5X)", "Android|arm64|rel") +android_builder("Android64", "Android|arm64|rel") android_try_job("android_arm64_rel") android_builder("Android64 Builder arm64", "Android|arm64|size", perf_cat = "Android|arm64|Builder|", prioritized = True) perf_builder("Perf Android64 (M Nexus5X)", "Android|arm64|Tester|M Nexus5X", triggered_by = ["Android64 Builder arm64"]) @@ -738,6 +744,7 @@ ios_builder("iOS64 Debug", "iOS|arm64|dbg") ios_try_job("ios_compile_arm64_dbg") ios_builder("iOS64 Release", "iOS|arm64|rel") ios_try_job("ios_compile_arm64_rel") +ios_try_job("ios_compile_arm64_rel_reclient", cq = {"experiment_percentage": 100}) ios_builder("iOS64 Sim Debug (iOS 14)", "iOS|x64|14") ios_try_job("ios_sim_x64_dbg_ios14") ios_builder("iOS64 Sim Debug (iOS 13)", "iOS|x64|13") @@ -756,6 +763,7 @@ linux_try_job("linux_dbg", cq = None) linux_try_job("linux_compile_dbg") linux_builder("Linux64 Release", "Linux|x64|rel") linux_try_job("linux_rel") +linux_try_job("linux_rel_reclient", cq = {"experiment_percentage": 100}) linux_builder("Linux64 Builder", "Linux|x64|size", perf_cat = 
"Linux|x64|Builder|", prioritized = True) linux_try_job("linux_compile_rel") perf_builder("Perf Linux Bionic", "Linux|x64|Tester|Bionic", triggered_by = ["Linux64 Builder"]) @@ -783,8 +791,12 @@ linux_builder("Linux (more configs)", "Linux|x64|more") linux_try_job("linux_more_configs") linux_try_job("linux_chromium_compile", recipe = "chromium_trybot", branch_cq = False) linux_try_job("linux_chromium_compile_dbg", recipe = "chromium_trybot", branch_cq = False) +linux_try_job("linux_coverage", cq = None) + +linux_builder("Fuchsia Builder", ci_cat = None, perf_cat = "Fuchsia|x64|Builder|", prioritized = True) linux_builder("Fuchsia Release", "Fuchsia|x64|rel") linux_try_job("fuchsia_rel", cq = None) +perf_builder("Perf Fuchsia", "Fuchsia|x64|Tester|", triggered_by = ["Fuchsia Builder"]) mac_builder("Mac64 Debug", "Mac|x64|dbg") mac_try_job("mac_dbg", cq = None) @@ -792,9 +804,10 @@ mac_try_job("mac_compile_dbg") mac_builder("Mac64 Release", "Mac|x64|rel") mac_try_job("mac_rel") +mac_try_job("mac_rel_reclient", cq = {"experiment_percentage": 100}) mac_try_job("mac_compile_rel", cq = None) mac_builder("Mac64 Builder", ci_cat = None, perf_cat = "Mac|x64|Builder|") -mac_builder("MacArm64 Builder", ci_cat = None, perf_cat = "Mac|arm64|Builder") +mac_builder("MacArm64 Builder", ci_cat = None, perf_cat = "Mac|arm64|Builder|") perf_builder("Perf Mac 11", "Mac|x64|Tester|11", triggered_by = ["Mac64 Builder"]) perf_builder("Perf Mac M1 Arm64 12", "Mac|arm64|Tester|12", triggered_by = ["MacArm64 Builder"]) @@ -811,15 +824,15 @@ win_try_job("win_compile_x86_clang_dbg") win_builder("Win32 Release (Clang)", "Win Clang|x86|rel") win_try_job("win_x86_clang_rel") win_try_job("win_compile_x86_clang_rel", cq = None) -win_builder("Win32 Builder (Clang)", ci_cat = None, perf_cat = "Win|x86|Builder|") -perf_builder("Perf Win7", "Win|x86|Tester|7", triggered_by = ["Win32 Builder (Clang)"]) +win_builder("Win64 Builder (Clang)", ci_cat = None, perf_cat = "Win|x64|Builder|") +perf_builder("Perf Win 10", "Win|x64|Tester|10", triggered_by = ["Win64 Builder (Clang)"]) win_builder("Win64 Debug (Clang)", "Win Clang|x64|dbg") win_try_job("win_x64_clang_dbg", cq = None) -win_try_job("win_x64_clang_dbg_win10", cq = None) win_try_job("win_compile_x64_clang_dbg") win_builder("Win64 Release (Clang)", "Win Clang|x64|rel") win_try_job("win_x64_clang_rel", cq = None) win_try_job("win_compile_x64_clang_rel") +win_try_job("win_compile_x64_clang_rel_reclient", cq = {"experiment_percentage": 100}) win_builder("Win64 ASan", "Win Clang|x64|asan") win_try_job("win_asan") win_builder("Win (more configs)", "Win Clang|x86|more") @@ -870,6 +883,8 @@ lkgr_config = { "WebRTC Chromium FYI Android Builder (dbg)", "WebRTC Chromium FYI Android Builder ARM64 (dbg)", "WebRTC Chromium FYI Android Builder", + "WebRTC Chromium FYI Android Tests (dbg)", + "WebRTC Chromium FYI Android Tests ARM64 (dbg)", "WebRTC Chromium FYI Linux Builder (dbg)", "WebRTC Chromium FYI Linux Builder", "WebRTC Chromium FYI Linux Tester", diff --git a/third_party/libwebrtc/infra/config/cr-buildbucket.cfg b/third_party/libwebrtc/infra/config/cr-buildbucket.cfg index 761b2a49d29e..6d8852c5e7b0 100644 --- a/third_party/libwebrtc/infra/config/cr-buildbucket.cfg +++ b/third_party/libwebrtc/infra/config/cr-buildbucket.cfg @@ -15,7 +15,7 @@ buckets { } swarming { builders { - name: "Android32 (M Nexus5X)" + name: "Android32" swarming_host: "chromium-swarm.appspot.com" swarming_tags: "vpython:native-python-wrapper" dimensions: "cpu:x86-64" @@ -65,7 +65,7 @@ buckets { } } builders { 
- name: "Android32 (M Nexus5X)(dbg)" + name: "Android32 (dbg)" swarming_host: "chromium-swarm.appspot.com" swarming_tags: "vpython:native-python-wrapper" dimensions: "cpu:x86-64" @@ -265,7 +265,7 @@ buckets { } } builders { - name: "Android64 (M Nexus5X)" + name: "Android64" swarming_host: "chromium-swarm.appspot.com" swarming_tags: "vpython:native-python-wrapper" dimensions: "cpu:x86-64" @@ -2089,6 +2089,8 @@ buckets { ' "WebRTC Chromium FYI Android Builder (dbg)",' ' "WebRTC Chromium FYI Android Builder ARM64 (dbg)",' ' "WebRTC Chromium FYI Android Builder",' + ' "WebRTC Chromium FYI Android Tests (dbg)",' + ' "WebRTC Chromium FYI Android Tests ARM64 (dbg)",' ' "WebRTC Chromium FYI Linux Builder (dbg)",' ' "WebRTC Chromium FYI Linux Builder",' ' "WebRTC Chromium FYI Linux Tester",' @@ -2104,12 +2106,12 @@ buckets { ' },' ' "webrtc/ci": {' ' "builders": [' - ' "Android32 (M Nexus5X)",' - ' "Android32 (M Nexus5X)(dbg)",' + ' "Android32",' + ' "Android32 (dbg)",' ' "Android32 (more configs)",' ' "Android32 Builder x86",' ' "Android32 Builder x86 (dbg)",' - ' "Android64 (M Nexus5X)",' + ' "Android64",' ' "Android64 Builder x64 (dbg)",' ' "Linux (more configs)",' ' "Linux Asan",' @@ -2332,6 +2334,57 @@ buckets { } } } + builders { + name: "Fuchsia Builder" + swarming_host: "chromium-swarm.appspot.com" + swarming_tags: "vpython:native-python-wrapper" + dimensions: "cpu:x86-64" + dimensions: "inside_docker:0" + dimensions: "os:Linux" + dimensions: "pool:luci.webrtc.ci" + exe { + cipd_package: "infra/recipe_bundles/chromium.googlesource.com/chromium/tools/build" + cipd_version: "refs/heads/main" + cmd: "luciexe" + } + properties: + '{' + ' "$build/goma": {' + ' "server_host": "goma.chromium.org",' + ' "use_luci_auth": true' + ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-trusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' + ' "$recipe_engine/resultdb/test_presentation": {' + ' "column_keys": [],' + ' "grouping_keys": [' + ' "status",' + ' "v.test_suite"' + ' ]' + ' },' + ' "builder_group": "client.webrtc",' + ' "recipe": "webrtc/standalone"' + '}' + priority: 29 + execution_timeout_secs: 7200 + build_numbers: YES + service_account: "webrtc-ci-builder@chops-service-accounts.iam.gserviceaccount.com" + experiments { + key: "luci.recipes.use_python3" + value: 100 + } + resultdb { + enable: true + bq_exports { + project: "webrtc-ci" + dataset: "resultdb" + table: "perf_test_results" + test_results {} + } + } + } builders { name: "Linux64 Builder" swarming_host: "chromium-swarm.appspot.com" @@ -2487,6 +2540,7 @@ buckets { name: "Perf Android32 (M AOSP Nexus6)" swarming_host: "chromium-swarm.appspot.com" swarming_tags: "vpython:native-python-wrapper" + dimensions: "cores:8" dimensions: "os:Linux" dimensions: "pool:luci.webrtc.perf" exe { @@ -2536,6 +2590,7 @@ buckets { name: "Perf Android32 (M Nexus5)" swarming_host: "chromium-swarm.appspot.com" swarming_tags: "vpython:native-python-wrapper" + dimensions: "cores:8" dimensions: "os:Linux" dimensions: "pool:luci.webrtc.perf" exe { @@ -2585,6 +2640,7 @@ buckets { name: "Perf Android32 (O Pixel2)" swarming_host: "chromium-swarm.appspot.com" swarming_tags: "vpython:native-python-wrapper" + dimensions: "cores:8" dimensions: "os:Linux" dimensions: "pool:luci.webrtc.perf" exe { @@ -2634,6 +2690,7 @@ buckets { name: "Perf Android32 (R Pixel5)" swarming_host: "chromium-swarm.appspot.com" swarming_tags: "vpython:native-python-wrapper" + dimensions: "cores:8" dimensions: "os:Linux" dimensions: "pool:luci.webrtc.perf" exe { @@ 
-2683,6 +2740,7 @@ buckets { name: "Perf Android64 (M Nexus5X)" swarming_host: "chromium-swarm.appspot.com" swarming_tags: "vpython:native-python-wrapper" + dimensions: "cores:8" dimensions: "os:Linux" dimensions: "pool:luci.webrtc.perf" exe { @@ -2732,6 +2790,7 @@ buckets { name: "Perf Android64 (O Pixel2)" swarming_host: "chromium-swarm.appspot.com" swarming_tags: "vpython:native-python-wrapper" + dimensions: "cores:8" dimensions: "os:Linux" dimensions: "pool:luci.webrtc.perf" exe { @@ -2781,6 +2840,57 @@ buckets { name: "Perf Android64 (R Pixel5)" swarming_host: "chromium-swarm.appspot.com" swarming_tags: "vpython:native-python-wrapper" + dimensions: "cores:8" + dimensions: "os:Linux" + dimensions: "pool:luci.webrtc.perf" + exe { + cipd_package: "infra/recipe_bundles/chromium.googlesource.com/chromium/tools/build" + cipd_version: "refs/heads/main" + cmd: "luciexe" + } + properties: + '{' + ' "$build/goma": {' + ' "server_host": "goma.chromium.org",' + ' "use_luci_auth": true' + ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-trusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' + ' "$recipe_engine/resultdb/test_presentation": {' + ' "column_keys": [],' + ' "grouping_keys": [' + ' "status",' + ' "v.test_suite"' + ' ]' + ' },' + ' "builder_group": "client.webrtc.perf",' + ' "recipe": "webrtc/standalone"' + '}' + priority: 30 + execution_timeout_secs: 10800 + build_numbers: YES + service_account: "webrtc-ci-builder@chops-service-accounts.iam.gserviceaccount.com" + experiments { + key: "luci.recipes.use_python3" + value: 100 + } + resultdb { + enable: true + bq_exports { + project: "webrtc-ci" + dataset: "resultdb" + table: "perf_test_results" + test_results {} + } + } + } + builders { + name: "Perf Fuchsia" + swarming_host: "chromium-swarm.appspot.com" + swarming_tags: "vpython:native-python-wrapper" + dimensions: "cores:2" dimensions: "os:Linux" dimensions: "pool:luci.webrtc.perf" exe { @@ -2830,6 +2940,7 @@ buckets { name: "Perf Linux Bionic" swarming_host: "chromium-swarm.appspot.com" swarming_tags: "vpython:native-python-wrapper" + dimensions: "cores:2" dimensions: "os:Linux" dimensions: "pool:luci.webrtc.perf" exe { @@ -2879,6 +2990,7 @@ buckets { name: "Perf Mac 11" swarming_host: "chromium-swarm.appspot.com" swarming_tags: "vpython:native-python-wrapper" + dimensions: "cores:2" dimensions: "os:Linux" dimensions: "pool:luci.webrtc.perf" exe { @@ -2928,6 +3040,7 @@ buckets { name: "Perf Mac M1 Arm64 12" swarming_host: "chromium-swarm.appspot.com" swarming_tags: "vpython:native-python-wrapper" + dimensions: "cores:2" dimensions: "os:Linux" dimensions: "pool:luci.webrtc.perf" exe { @@ -2974,9 +3087,10 @@ buckets { } } builders { - name: "Perf Win7" + name: "Perf Win 10" swarming_host: "chromium-swarm.appspot.com" swarming_tags: "vpython:native-python-wrapper" + dimensions: "cores:2" dimensions: "os:Linux" dimensions: "pool:luci.webrtc.perf" exe { @@ -3023,7 +3137,7 @@ buckets { } } builders { - name: "Win32 Builder (Clang)" + name: "Win64 Builder (Clang)" swarming_host: "chromium-swarm.appspot.com" swarming_tags: "vpython:native-python-wrapper" dimensions: "cpu:x86-64" @@ -3106,6 +3220,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -3152,6 +3270,10 @@ buckets { ' "server_host": "goma.chromium.org",' 
' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -3198,6 +3320,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -3244,6 +3370,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -3290,6 +3420,60 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' + ' "$recipe_engine/resultdb/test_presentation": {' + ' "column_keys": [],' + ' "grouping_keys": [' + ' "status",' + ' "v.test_suite"' + ' ]' + ' },' + ' "builder_group": "tryserver.webrtc",' + ' "recipe": "webrtc/standalone"' + '}' + priority: 30 + execution_timeout_secs: 7200 + build_numbers: YES + service_account: "webrtc-try-builder@chops-service-accounts.iam.gserviceaccount.com" + experiments { + key: "luci.recipes.use_python3" + value: 100 + } + resultdb { + enable: true + bq_exports { + project: "webrtc-ci" + dataset: "resultdb" + table: "try_test_results" + test_results {} + } + } + } + builders { + name: "android_arm_rel_reclient" + swarming_host: "chromium-swarm.appspot.com" + swarming_tags: "vpython:native-python-wrapper" + dimensions: "cpu:x86-64" + dimensions: "os:Linux" + dimensions: "pool:luci.webrtc.try" + exe { + cipd_package: "infra/recipe_bundles/chromium.googlesource.com/chromium/tools/build" + cipd_version: "refs/heads/main" + cmd: "luciexe" + } + properties: + '{' + ' "$build/goma": {' + ' "server_host": "goma.chromium.org",' + ' "use_luci_auth": true' + ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -3336,6 +3520,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -3382,6 +3570,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -3428,6 +3620,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -3474,6 +3670,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' 
"instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -3520,6 +3720,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -3566,6 +3770,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -3612,6 +3820,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -3658,6 +3870,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -3704,6 +3920,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -3751,6 +3971,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -3797,6 +4021,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -3848,6 +4076,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -3899,6 +4131,65 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' + ' "$recipe_engine/resultdb/test_presentation": {' + ' "column_keys": [],' + ' "grouping_keys": [' + ' "status",' + ' "v.test_suite"' + ' ]' + ' },' + ' "builder_group": "tryserver.webrtc",' + ' "recipe": "webrtc/standalone",' + ' "xcode_build_version": "13c100"' + '}' + priority: 30 + execution_timeout_secs: 7200 + caches { + name: "xcode_ios_13c100" + path: "xcode_ios_13c100.app" + } + build_numbers: YES + service_account: "webrtc-try-builder@chops-service-accounts.iam.gserviceaccount.com" + experiments { + key: 
"luci.recipes.use_python3" + value: 100 + } + resultdb { + enable: true + bq_exports { + project: "webrtc-ci" + dataset: "resultdb" + table: "try_test_results" + test_results {} + } + } + } + builders { + name: "ios_compile_arm64_rel_reclient" + swarming_host: "chromium-swarm.appspot.com" + swarming_tags: "vpython:native-python-wrapper" + dimensions: "cpu:x86-64" + dimensions: "os:Mac" + dimensions: "pool:luci.webrtc.try" + exe { + cipd_package: "infra/recipe_bundles/chromium.googlesource.com/chromium/tools/build" + cipd_version: "refs/heads/main" + cmd: "luciexe" + } + properties: + '{' + ' "$build/goma": {' + ' "server_host": "goma.chromium.org",' + ' "use_luci_auth": true' + ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -3950,6 +4241,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -4001,6 +4296,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -4052,6 +4351,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -4104,6 +4407,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -4151,6 +4458,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -4198,6 +4509,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -4245,6 +4560,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -4292,6 +4611,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -4339,6 +4662,10 @@ buckets { ' 
"server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -4386,6 +4713,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -4433,6 +4764,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -4480,6 +4815,61 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' + ' "$recipe_engine/resultdb/test_presentation": {' + ' "column_keys": [],' + ' "grouping_keys": [' + ' "status",' + ' "v.test_suite"' + ' ]' + ' },' + ' "builder_group": "tryserver.webrtc",' + ' "recipe": "webrtc/standalone"' + '}' + priority: 30 + execution_timeout_secs: 7200 + build_numbers: YES + service_account: "webrtc-try-builder@chops-service-accounts.iam.gserviceaccount.com" + experiments { + key: "luci.recipes.use_python3" + value: 100 + } + resultdb { + enable: true + bq_exports { + project: "webrtc-ci" + dataset: "resultdb" + table: "try_test_results" + test_results {} + } + } + } + builders { + name: "linux_coverage" + swarming_host: "chromium-swarm.appspot.com" + swarming_tags: "vpython:native-python-wrapper" + dimensions: "cpu:x86-64" + dimensions: "inside_docker:0" + dimensions: "os:Linux" + dimensions: "pool:luci.webrtc.try" + exe { + cipd_package: "infra/recipe_bundles/chromium.googlesource.com/chromium/tools/build" + cipd_version: "refs/heads/main" + cmd: "luciexe" + } + properties: + '{' + ' "$build/goma": {' + ' "server_host": "goma.chromium.org",' + ' "use_luci_auth": true' + ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -4527,6 +4917,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -4574,6 +4968,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -4621,6 +5019,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -4668,6 +5070,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' 
"use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -4715,6 +5121,61 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' + ' "$recipe_engine/resultdb/test_presentation": {' + ' "column_keys": [],' + ' "grouping_keys": [' + ' "status",' + ' "v.test_suite"' + ' ]' + ' },' + ' "builder_group": "tryserver.webrtc",' + ' "recipe": "webrtc/standalone"' + '}' + priority: 30 + execution_timeout_secs: 7200 + build_numbers: YES + service_account: "webrtc-try-builder@chops-service-accounts.iam.gserviceaccount.com" + experiments { + key: "luci.recipes.use_python3" + value: 100 + } + resultdb { + enable: true + bq_exports { + project: "webrtc-ci" + dataset: "resultdb" + table: "try_test_results" + test_results {} + } + } + } + builders { + name: "linux_rel_reclient" + swarming_host: "chromium-swarm.appspot.com" + swarming_tags: "vpython:native-python-wrapper" + dimensions: "cpu:x86-64" + dimensions: "inside_docker:0" + dimensions: "os:Linux" + dimensions: "pool:luci.webrtc.try" + exe { + cipd_package: "infra/recipe_bundles/chromium.googlesource.com/chromium/tools/build" + cipd_version: "refs/heads/main" + cmd: "luciexe" + } + properties: + '{' + ' "$build/goma": {' + ' "server_host": "goma.chromium.org",' + ' "use_luci_auth": true' + ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -4762,6 +5223,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -4809,6 +5274,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -4856,6 +5325,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -4903,6 +5376,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -4950,6 +5427,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -4996,6 +5477,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' 
"$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -5042,6 +5527,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -5088,6 +5577,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -5134,6 +5627,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -5180,6 +5677,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -5226,6 +5727,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -5272,6 +5777,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -5318,6 +5827,60 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' + ' "$recipe_engine/resultdb/test_presentation": {' + ' "column_keys": [],' + ' "grouping_keys": [' + ' "status",' + ' "v.test_suite"' + ' ]' + ' },' + ' "builder_group": "tryserver.webrtc",' + ' "recipe": "webrtc/standalone"' + '}' + priority: 30 + execution_timeout_secs: 7200 + build_numbers: YES + service_account: "webrtc-try-builder@chops-service-accounts.iam.gserviceaccount.com" + experiments { + key: "luci.recipes.use_python3" + value: 100 + } + resultdb { + enable: true + bq_exports { + project: "webrtc-ci" + dataset: "resultdb" + table: "try_test_results" + test_results {} + } + } + } + builders { + name: "mac_rel_reclient" + swarming_host: "chromium-swarm.appspot.com" + swarming_tags: "vpython:native-python-wrapper" + dimensions: "cpu:x86-64" + dimensions: "os:Mac" + dimensions: "pool:luci.webrtc.try" + exe { + cipd_package: "infra/recipe_bundles/chromium.googlesource.com/chromium/tools/build" + cipd_version: "refs/heads/main" + cmd: "luciexe" + } + properties: + '{' + ' "$build/goma": {' + ' "server_host": "goma.chromium.org",' + ' "use_luci_auth": true' + ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' 
"metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -5365,6 +5928,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -5414,6 +5981,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -5462,6 +6033,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -5510,6 +6085,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -5557,6 +6136,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -5604,6 +6187,61 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' + ' "$recipe_engine/resultdb/test_presentation": {' + ' "column_keys": [],' + ' "grouping_keys": [' + ' "status",' + ' "v.test_suite"' + ' ]' + ' },' + ' "builder_group": "tryserver.webrtc",' + ' "recipe": "webrtc/standalone"' + '}' + priority: 30 + execution_timeout_secs: 7200 + build_numbers: YES + service_account: "webrtc-try-builder@chops-service-accounts.iam.gserviceaccount.com" + experiments { + key: "luci.recipes.use_python3" + value: 100 + } + resultdb { + enable: true + bq_exports { + project: "webrtc-ci" + dataset: "resultdb" + table: "try_test_results" + test_results {} + } + } + } + builders { + name: "win_compile_x64_clang_rel_reclient" + swarming_host: "chromium-swarm.appspot.com" + swarming_tags: "vpython:native-python-wrapper" + dimensions: "cpu:x86-64" + dimensions: "os:Windows" + dimensions: "pool:luci.webrtc.try" + exe { + cipd_package: "infra/recipe_bundles/chromium.googlesource.com/chromium/tools/build" + cipd_version: "refs/heads/main" + cmd: "luciexe" + } + properties: + '{' + ' "$build/goma": {' + ' "enable_ats": false,' + ' "server_host": "goma.chromium.org",' + ' "use_luci_auth": true' + ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -5651,6 +6289,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": 
"chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -5698,6 +6340,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -5745,52 +6391,9 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' - ' "$recipe_engine/resultdb/test_presentation": {' - ' "column_keys": [],' - ' "grouping_keys": [' - ' "status",' - ' "v.test_suite"' - ' ]' - ' },' - ' "builder_group": "tryserver.webrtc",' - ' "recipe": "webrtc/standalone"' - '}' - priority: 30 - execution_timeout_secs: 7200 - build_numbers: YES - service_account: "webrtc-try-builder@chops-service-accounts.iam.gserviceaccount.com" - experiments { - key: "luci.recipes.use_python3" - value: 100 - } - resultdb { - enable: true - bq_exports { - project: "webrtc-ci" - dataset: "resultdb" - table: "try_test_results" - test_results {} - } - } - } - builders { - name: "win_x64_clang_dbg_win10" - swarming_host: "chromium-swarm.appspot.com" - swarming_tags: "vpython:native-python-wrapper" - dimensions: "cpu:x86-64" - dimensions: "os:Windows" - dimensions: "pool:luci.webrtc.try" - exe { - cipd_package: "infra/recipe_bundles/chromium.googlesource.com/chromium/tools/build" - cipd_version: "refs/heads/main" - cmd: "luciexe" - } - properties: - '{' - ' "$build/goma": {' - ' "enable_ats": false,' - ' "server_host": "goma.chromium.org",' - ' "use_luci_auth": true' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' @@ -5839,6 +6442,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -5886,6 +6493,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -5933,6 +6544,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -5980,6 +6595,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' diff --git a/third_party/libwebrtc/infra/config/luci-milo.cfg b/third_party/libwebrtc/infra/config/luci-milo.cfg index 6f2b8bdac135..d8cb1074a0cf 100644 --- a/third_party/libwebrtc/infra/config/luci-milo.cfg +++ b/third_party/libwebrtc/infra/config/luci-milo.cfg @@ -12,12 +12,12 @@ consoles { refs: "regexp:refs/heads/main" manifest_name: "REVISION" builders { - name: 
"buildbucket/luci.webrtc.ci/Android32 (M Nexus5X)(dbg)" + name: "buildbucket/luci.webrtc.ci/Android32 (dbg)" category: "Android|arm" short_name: "dbg" } builders { - name: "buildbucket/luci.webrtc.ci/Android32 (M Nexus5X)" + name: "buildbucket/luci.webrtc.ci/Android32" category: "Android|arm" short_name: "rel" } @@ -27,7 +27,7 @@ consoles { short_name: "size" } builders { - name: "buildbucket/luci.webrtc.ci/Android64 (M Nexus5X)" + name: "buildbucket/luci.webrtc.ci/Android64" category: "Android|arm64" short_name: "rel" } @@ -342,14 +342,21 @@ consoles { category: "Linux|x64|Tester" short_name: "Bionic" } + builders { + name: "buildbucket/luci.webrtc.perf/Fuchsia Builder" + category: "Fuchsia|x64|Builder" + } + builders { + name: "buildbucket/luci.webrtc.perf/Perf Fuchsia" + category: "Fuchsia|x64|Tester" + } builders { name: "buildbucket/luci.webrtc.perf/Mac64 Builder" category: "Mac|x64|Builder" } builders { name: "buildbucket/luci.webrtc.perf/MacArm64 Builder" - category: "Mac|arm64" - short_name: "Builder" + category: "Mac|arm64|Builder" } builders { name: "buildbucket/luci.webrtc.perf/Perf Mac 11" @@ -362,13 +369,13 @@ consoles { short_name: "12" } builders { - name: "buildbucket/luci.webrtc.perf/Win32 Builder (Clang)" - category: "Win|x86|Builder" + name: "buildbucket/luci.webrtc.perf/Win64 Builder (Clang)" + category: "Win|x64|Builder" } builders { - name: "buildbucket/luci.webrtc.perf/Perf Win7" - category: "Win|x86|Tester" - short_name: "7" + name: "buildbucket/luci.webrtc.perf/Perf Win 10" + category: "Win|x64|Tester" + short_name: "10" } header { links { @@ -458,6 +465,9 @@ consoles { builders { name: "buildbucket/luci.webrtc.try/android_arm_rel" } + builders { + name: "buildbucket/luci.webrtc.try/android_arm_rel_reclient" + } builders { name: "buildbucket/luci.webrtc.try/android_compile_arm_rel" } @@ -497,6 +507,9 @@ consoles { builders { name: "buildbucket/luci.webrtc.try/ios_compile_arm64_rel" } + builders { + name: "buildbucket/luci.webrtc.try/ios_compile_arm64_rel_reclient" + } builders { name: "buildbucket/luci.webrtc.try/ios_sim_x64_dbg_ios14" } @@ -524,6 +537,9 @@ consoles { builders { name: "buildbucket/luci.webrtc.try/linux_rel" } + builders { + name: "buildbucket/luci.webrtc.try/linux_rel_reclient" + } builders { name: "buildbucket/luci.webrtc.try/linux_compile_rel" } @@ -566,6 +582,9 @@ consoles { builders { name: "buildbucket/luci.webrtc.try/linux_chromium_compile_dbg" } + builders { + name: "buildbucket/luci.webrtc.try/linux_coverage" + } builders { name: "buildbucket/luci.webrtc.try/fuchsia_rel" } @@ -578,6 +597,9 @@ consoles { builders { name: "buildbucket/luci.webrtc.try/mac_rel" } + builders { + name: "buildbucket/luci.webrtc.try/mac_rel_reclient" + } builders { name: "buildbucket/luci.webrtc.try/mac_compile_rel" } @@ -608,9 +630,6 @@ consoles { builders { name: "buildbucket/luci.webrtc.try/win_x64_clang_dbg" } - builders { - name: "buildbucket/luci.webrtc.try/win_x64_clang_dbg_win10" - } builders { name: "buildbucket/luci.webrtc.try/win_compile_x64_clang_dbg" } @@ -620,6 +639,9 @@ consoles { builders { name: "buildbucket/luci.webrtc.try/win_compile_x64_clang_rel" } + builders { + name: "buildbucket/luci.webrtc.try/win_compile_x64_clang_rel_reclient" + } builders { name: "buildbucket/luci.webrtc.try/win_asan" } diff --git a/third_party/libwebrtc/infra/config/luci-notify.cfg b/third_party/libwebrtc/infra/config/luci-notify.cfg index 3a33b239ccb8..c9038914b201 100644 --- a/third_party/libwebrtc/infra/config/luci-notify.cfg +++ 
b/third_party/libwebrtc/infra/config/luci-notify.cfg @@ -22,7 +22,7 @@ notifiers { } builders { bucket: "ci" - name: "Android32 (M Nexus5X)" + name: "Android32" repository: "https://webrtc.googlesource.com/src" } tree_closers { @@ -49,7 +49,7 @@ notifiers { } builders { bucket: "ci" - name: "Android32 (M Nexus5X)(dbg)" + name: "Android32 (dbg)" repository: "https://webrtc.googlesource.com/src" } tree_closers { @@ -157,7 +157,7 @@ notifiers { } builders { bucket: "ci" - name: "Android64 (M Nexus5X)" + name: "Android64" repository: "https://webrtc.googlesource.com/src" } tree_closers { @@ -1148,6 +1148,33 @@ notifiers { failed_step_regexp_exclude: ".*\\(experimental\\).*" } } +notifiers { + notifications { + on_new_status: INFRA_FAILURE + email { + recipients: "webrtc-troopers-robots@google.com" + } + template: "infra_failure" + } + notifications { + on_new_status: FAILURE + email { + recipients: "webrtc-troopers-robots@google.com" + } + template: "build_failure" + notify_blamelist {} + } + builders { + bucket: "perf" + name: "Fuchsia Builder" + repository: "https://webrtc.googlesource.com/src" + } + tree_closers { + tree_status_host: "webrtc-status.appspot.com" + failed_step_regexp: "bot_update|compile|gclient runhooks|runhooks|update|extract build|cleanup_temp|taskkill|compile|gn" + failed_step_regexp_exclude: ".*\\(experimental\\).*" + } +} notifiers { notifications { on_new_status: INFRA_FAILURE @@ -1383,6 +1410,28 @@ notifiers { repository: "https://webrtc.googlesource.com/src" } } +notifiers { + notifications { + on_new_status: INFRA_FAILURE + email { + recipients: "webrtc-troopers-robots@google.com" + } + template: "infra_failure" + } + notifications { + on_new_status: FAILURE + email { + recipients: "webrtc-troopers-robots@google.com" + } + template: "build_failure" + notify_blamelist {} + } + builders { + bucket: "perf" + name: "Perf Fuchsia" + repository: "https://webrtc.googlesource.com/src" + } +} notifiers { notifications { on_new_status: INFRA_FAILURE @@ -1467,7 +1516,7 @@ notifiers { } builders { bucket: "perf" - name: "Perf Win7" + name: "Perf Win 10" repository: "https://webrtc.googlesource.com/src" } } @@ -1489,7 +1538,7 @@ notifiers { } builders { bucket: "perf" - name: "Win32 Builder (Clang)" + name: "Win64 Builder (Clang)" repository: "https://webrtc.googlesource.com/src" } tree_closers { @@ -1563,6 +1612,19 @@ notifiers { name: "android_arm_rel" } } +notifiers { + notifications { + on_new_status: INFRA_FAILURE + email { + recipients: "webrtc-troopers-robots@google.com" + } + template: "infra_failure" + } + builders { + bucket: "try" + name: "android_arm_rel_reclient" + } +} notifiers { notifications { on_new_status: INFRA_FAILURE @@ -1732,6 +1794,19 @@ notifiers { name: "ios_compile_arm64_rel" } } +notifiers { + notifications { + on_new_status: INFRA_FAILURE + email { + recipients: "webrtc-troopers-robots@google.com" + } + template: "infra_failure" + } + builders { + bucket: "try" + name: "ios_compile_arm64_rel_reclient" + } +} notifiers { notifications { on_new_status: INFRA_FAILURE @@ -1888,6 +1963,19 @@ notifiers { name: "linux_compile_rel" } } +notifiers { + notifications { + on_new_status: INFRA_FAILURE + email { + recipients: "webrtc-troopers-robots@google.com" + } + template: "infra_failure" + } + builders { + bucket: "try" + name: "linux_coverage" + } +} notifiers { notifications { on_new_status: INFRA_FAILURE @@ -1953,6 +2041,19 @@ notifiers { name: "linux_rel" } } +notifiers { + notifications { + on_new_status: INFRA_FAILURE + email { + recipients: 
"webrtc-troopers-robots@google.com" + } + template: "infra_failure" + } + builders { + bucket: "try" + name: "linux_rel_reclient" + } +} notifiers { notifications { on_new_status: INFRA_FAILURE @@ -2122,6 +2223,19 @@ notifiers { name: "mac_rel_m1" } } +notifiers { + notifications { + on_new_status: INFRA_FAILURE + email { + recipients: "webrtc-troopers-robots@google.com" + } + template: "infra_failure" + } + builders { + bucket: "try" + name: "mac_rel_reclient" + } +} notifiers { notifications { on_new_status: INFRA_FAILURE @@ -2200,6 +2314,19 @@ notifiers { name: "win_compile_x64_clang_rel" } } +notifiers { + notifications { + on_new_status: INFRA_FAILURE + email { + recipients: "webrtc-troopers-robots@google.com" + } + template: "infra_failure" + } + builders { + bucket: "try" + name: "win_compile_x64_clang_rel_reclient" + } +} notifiers { notifications { on_new_status: INFRA_FAILURE @@ -2239,19 +2366,6 @@ notifiers { name: "win_x64_clang_dbg" } } -notifiers { - notifications { - on_new_status: INFRA_FAILURE - email { - recipients: "webrtc-troopers-robots@google.com" - } - template: "infra_failure" - } - builders { - bucket: "try" - name: "win_x64_clang_dbg_win10" - } -} notifiers { notifications { on_new_status: INFRA_FAILURE diff --git a/third_party/libwebrtc/infra/config/luci-scheduler.cfg b/third_party/libwebrtc/infra/config/luci-scheduler.cfg index 274b7c001c5d..b1158747b337 100644 --- a/third_party/libwebrtc/infra/config/luci-scheduler.cfg +++ b/third_party/libwebrtc/infra/config/luci-scheduler.cfg @@ -5,23 +5,23 @@ # https://luci-config.appspot.com/schemas/projects:luci-scheduler.cfg job { - id: "Android32 (M Nexus5X)" + id: "Android32" realm: "ci" acl_sets: "ci" buildbucket { server: "cr-buildbucket.appspot.com" bucket: "ci" - builder: "Android32 (M Nexus5X)" + builder: "Android32" } } job { - id: "Android32 (M Nexus5X)(dbg)" + id: "Android32 (dbg)" realm: "ci" acl_sets: "ci" buildbucket { server: "cr-buildbucket.appspot.com" bucket: "ci" - builder: "Android32 (M Nexus5X)(dbg)" + builder: "Android32 (dbg)" } } job { @@ -70,13 +70,13 @@ job { } } job { - id: "Android64 (M Nexus5X)" + id: "Android64" realm: "ci" acl_sets: "ci" buildbucket { server: "cr-buildbucket.appspot.com" bucket: "ci" - builder: "Android64 (M Nexus5X)" + builder: "Android64" } } job { @@ -115,6 +115,21 @@ job { builder: "Auto-roll - WebRTC DEPS" } } +job { + id: "Fuchsia Builder" + realm: "perf" + acl_sets: "perf" + triggering_policy { + kind: GREEDY_BATCHING + max_concurrent_invocations: 3 + max_batch_size: 1 + } + buildbucket { + server: "cr-buildbucket.appspot.com" + bucket: "perf" + builder: "Fuchsia Builder" + } +} job { id: "Fuchsia Release" realm: "ci" @@ -476,6 +491,24 @@ job { builder: "Perf Android64 (R Pixel5)" } } +job { + id: "Perf Fuchsia" + realm: "perf" + acls { + role: TRIGGERER + granted_to: "webrtc-ci-builder@chops-service-accounts.iam.gserviceaccount.com" + } + acl_sets: "perf" + triggering_policy { + kind: LOGARITHMIC_BATCHING + log_base: 1.7 + } + buildbucket { + server: "cr-buildbucket.appspot.com" + bucket: "perf" + builder: "Perf Fuchsia" + } +} job { id: "Perf Linux Bionic" realm: "perf" @@ -531,7 +564,7 @@ job { } } job { - id: "Perf Win7" + id: "Perf Win 10" realm: "perf" acls { role: TRIGGERER @@ -545,7 +578,7 @@ job { buildbucket { server: "cr-buildbucket.appspot.com" bucket: "perf" - builder: "Perf Win7" + builder: "Perf Win 10" } } job { @@ -580,16 +613,6 @@ job { builder: "Win (more configs)" } } -job { - id: "Win32 Builder (Clang)" - realm: "perf" - acl_sets: "perf" - 
buildbucket { - server: "cr-buildbucket.appspot.com" - bucket: "perf" - builder: "Win32 Builder (Clang)" - } -} job { id: "Win32 Debug (Clang)" realm: "ci" @@ -620,6 +643,16 @@ job { builder: "Win64 ASan" } } +job { + id: "Win64 Builder (Clang)" + realm: "perf" + acl_sets: "perf" + buildbucket { + server: "cr-buildbucket.appspot.com" + bucket: "perf" + builder: "Win64 Builder (Clang)" + } +} job { id: "Win64 Debug (Clang)" realm: "ci" @@ -709,12 +742,12 @@ trigger { id: "webrtc-gitiles-trigger-main" realm: "ci" acl_sets: "ci" - triggers: "Android32 (M Nexus5X)" - triggers: "Android32 (M Nexus5X)(dbg)" + triggers: "Android32" + triggers: "Android32 (dbg)" triggers: "Android32 (more configs)" triggers: "Android32 Builder x86" triggers: "Android32 Builder x86 (dbg)" - triggers: "Android64 (M Nexus5X)" + triggers: "Android64" triggers: "Android64 Builder x64 (dbg)" triggers: "Fuchsia Release" triggers: "Linux (more configs)" @@ -750,10 +783,11 @@ trigger { triggers: "iOS64 Sim Debug (iOS 14)" triggers: "Android32 Builder arm" triggers: "Android64 Builder arm64" + triggers: "Fuchsia Builder" triggers: "Linux64 Builder" triggers: "Mac64 Builder" triggers: "MacArm64 Builder" - triggers: "Win32 Builder (Clang)" + triggers: "Win64 Builder (Clang)" gitiles { repo: "https://webrtc.googlesource.com/src" refs: "regexp:refs/heads/main" diff --git a/third_party/libwebrtc/infra/config/project.cfg b/third_party/libwebrtc/infra/config/project.cfg index 60fd23d1c63e..d35a697fd0aa 100644 --- a/third_party/libwebrtc/infra/config/project.cfg +++ b/third_party/libwebrtc/infra/config/project.cfg @@ -7,7 +7,7 @@ name: "webrtc" access: "group:all" lucicfg { - version: "1.33.6" + version: "1.35.2" package_dir: "." config_dir: "." entry_point: "config.star" diff --git a/third_party/libwebrtc/infra/config/realms.cfg b/third_party/libwebrtc/infra/config/realms.cfg index 4fd868546769..c39fbdff612c 100644 --- a/third_party/libwebrtc/infra/config/realms.cfg +++ b/third_party/libwebrtc/infra/config/realms.cfg @@ -118,10 +118,11 @@ realms { values: "Perf Android64 (M Nexus5X)" values: "Perf Android64 (O Pixel2)" values: "Perf Android64 (R Pixel5)" + values: "Perf Fuchsia" values: "Perf Linux Bionic" values: "Perf Mac 11" values: "Perf Mac M1 Arm64 12" - values: "Perf Win7" + values: "Perf Win 10" } } } diff --git a/third_party/libwebrtc/infra/specs/PRESUBMIT.py b/third_party/libwebrtc/infra/specs/PRESUBMIT.py index 306a3f19521d..f064cacaf8a0 100644 --- a/third_party/libwebrtc/infra/specs/PRESUBMIT.py +++ b/third_party/libwebrtc/infra/specs/PRESUBMIT.py @@ -9,6 +9,7 @@ # be found in the AUTHORS file in the root of the source tree. import os +import shlex # Runs PRESUBMIT.py in py3 mode by git cl presubmit. 
USE_PYTHON3 = True @@ -28,7 +29,8 @@ def CheckPatchFormatted(input_api, output_api): for f in affected_files: cmd = ['yapf', '-i', f.AbsoluteLocalPath()] if input_api.subprocess.call(cmd): - results.append(output_api.PresubmitError('Error calling "' + cmd + '"')) + results.append( + output_api.PresubmitError('Error calling "' + shlex.join(cmd) + '"')) if _HasLocalChanges(input_api): msg = ('Diff found after running "yapf -i" on modified .pyl files.\n' diff --git a/third_party/libwebrtc/infra/specs/client.webrtc.json b/third_party/libwebrtc/infra/specs/client.webrtc.json index e9e9b5622bb2..d5608072b2fb 100644 --- a/third_party/libwebrtc/infra/specs/client.webrtc.json +++ b/third_party/libwebrtc/infra/specs/client.webrtc.json @@ -1,7 +1,7 @@ { "AAAAA1 AUTOGENERATED FILE DO NOT EDIT": {}, "AAAAA2 See generate_buildbot_json.py to make changes": {}, - "Android32 (M Nexus5X)": { + "Android32": { "gtest_tests": [ { "merge": { @@ -14,21 +14,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "AppRTCMobile_test_apk", "test_id_prefix": "ninja://examples:AppRTCMobile_test_apk/" @@ -44,21 +37,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "android_instrumentation_test_apk", "test_id_prefix": "ninja://sdk/android:android_instrumentation_test_apk/" @@ -74,21 +60,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" @@ -104,21 +83,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" @@ -134,21 +106,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - 
"location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" @@ -164,21 +129,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" @@ -194,21 +152,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 2 }, "test": "modules_tests", @@ -225,21 +176,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 6 }, "test": "modules_unittests", @@ -256,21 +200,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "peerconnection_unittests", @@ -287,21 +224,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" @@ -317,21 +247,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], 
"dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "rtc_pc_unittests", "test_id_prefix": "ninja://pc:rtc_pc_unittests/" @@ -347,21 +270,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "rtc_stats_unittests", "test_id_prefix": "ninja://stats:rtc_stats_unittests/" @@ -377,21 +293,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 6 }, "test": "rtc_unittests", @@ -408,21 +317,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" @@ -438,21 +340,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "svc_tests", @@ -469,21 +364,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" @@ -499,21 +387,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - 
"device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" @@ -529,21 +410,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" @@ -559,21 +433,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "video_engine_tests", @@ -590,21 +457,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" @@ -620,21 +480,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" @@ -663,7 +516,7 @@ } ] }, - "Android32 (M Nexus5X)(dbg)": { + "Android32 (dbg)": { "gtest_tests": [ { "merge": { @@ -676,21 +529,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "AppRTCMobile_test_apk", "test_id_prefix": "ninja://examples:AppRTCMobile_test_apk/" @@ -706,21 +552,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": 
"git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "android_instrumentation_test_apk", "test_id_prefix": "ninja://sdk/android:android_instrumentation_test_apk/" @@ -736,21 +575,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" @@ -766,21 +598,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" @@ -796,21 +621,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" @@ -826,21 +644,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" @@ -856,21 +667,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 2 }, "test": "modules_tests", @@ -887,21 +691,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": 
"infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 6 }, "test": "modules_unittests", @@ -918,21 +715,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "peerconnection_unittests", @@ -949,21 +739,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" @@ -979,21 +762,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "rtc_pc_unittests", "test_id_prefix": "ninja://pc:rtc_pc_unittests/" @@ -1009,21 +785,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "rtc_stats_unittests", "test_id_prefix": "ninja://stats:rtc_stats_unittests/" @@ -1039,21 +808,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 6 }, "test": "rtc_unittests", @@ -1070,21 +832,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": 
"git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" @@ -1100,21 +855,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "svc_tests", @@ -1131,21 +879,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" @@ -1161,21 +902,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" @@ -1191,21 +925,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" @@ -1221,21 +948,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "video_engine_tests", @@ -1252,21 +972,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": 
"git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" @@ -1282,21 +995,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" @@ -1338,21 +1044,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "peerconnection_unittests", @@ -1363,7 +1062,7 @@ "Android32 Builder arm": {}, "Android32 Builder x86": {}, "Android32 Builder x86 (dbg)": {}, - "Android64 (M Nexus5X)": { + "Android64": { "gtest_tests": [ { "merge": { @@ -1376,21 +1075,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "AppRTCMobile_test_apk", "test_id_prefix": "ninja://examples:AppRTCMobile_test_apk/" @@ -1406,21 +1098,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "android_instrumentation_test_apk", "test_id_prefix": "ninja://sdk/android:android_instrumentation_test_apk/" @@ -1436,21 +1121,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "audio_decoder_unittests", "test_id_prefix": 
"ninja://modules/audio_coding:audio_decoder_unittests/" @@ -1466,21 +1144,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" @@ -1496,21 +1167,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" @@ -1526,21 +1190,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" @@ -1556,21 +1213,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 2 }, "test": "modules_tests", @@ -1587,21 +1237,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 6 }, "test": "modules_unittests", @@ -1618,21 +1261,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "peerconnection_unittests", @@ -1649,21 +1285,14 @@ }, 
"swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" @@ -1679,21 +1308,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "rtc_pc_unittests", "test_id_prefix": "ninja://pc:rtc_pc_unittests/" @@ -1709,21 +1331,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "rtc_stats_unittests", "test_id_prefix": "ninja://stats:rtc_stats_unittests/" @@ -1739,21 +1354,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 6 }, "test": "rtc_unittests", @@ -1770,21 +1378,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" @@ -1800,21 +1401,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "svc_tests", @@ -1831,21 +1425,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - 
"cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" @@ -1861,21 +1448,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" @@ -1891,21 +1471,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" @@ -1921,21 +1494,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "video_engine_tests", @@ -1952,21 +1518,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" @@ -1982,21 +1541,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" @@ -2025,7 +1577,7 @@ } ] }, - "Android64 (M Nexus5X)(dbg)": { + "Android64 (dbg)": 
{ "gtest_tests": [ { "merge": { @@ -2038,21 +1590,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "AppRTCMobile_test_apk", "test_id_prefix": "ninja://examples:AppRTCMobile_test_apk/" @@ -2068,21 +1613,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "android_instrumentation_test_apk", "test_id_prefix": "ninja://sdk/android:android_instrumentation_test_apk/" @@ -2098,21 +1636,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" @@ -2128,21 +1659,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" @@ -2158,21 +1682,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" @@ -2188,21 +1705,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": 
"chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" @@ -2218,21 +1728,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 2 }, "test": "modules_tests", @@ -2249,21 +1752,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 6 }, "test": "modules_unittests", @@ -2280,21 +1776,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "peerconnection_unittests", @@ -2311,21 +1800,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" @@ -2341,21 +1823,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "rtc_pc_unittests", "test_id_prefix": "ninja://pc:rtc_pc_unittests/" @@ -2371,21 +1846,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "rtc_stats_unittests", "test_id_prefix": 
"ninja://stats:rtc_stats_unittests/" @@ -2401,21 +1869,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 6 }, "test": "rtc_unittests", @@ -2432,21 +1893,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" @@ -2462,21 +1916,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "svc_tests", @@ -2493,21 +1940,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" @@ -2523,21 +1963,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" @@ -2553,21 +1986,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" 
@@ -2583,21 +2009,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "video_engine_tests", @@ -2614,21 +2033,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" @@ -2644,21 +2056,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" @@ -2689,13 +2094,10 @@ }, "Android64 Builder arm64": {}, "Android64 Builder x64 (dbg)": {}, + "Fuchsia Builder": {}, "Fuchsia Release": { "isolated_scripts": [ { - "args": [ - "--", - "--undefok=use-vulkan" - ], "isolate_name": "audio_decoder_unittests", "merge": { "args": [], @@ -2703,7 +2105,8 @@ }, "name": "audio_decoder_unittests", "resultdb": { - "result_format": "json" + "enable": true, + "has_native_resultdb_integration": true }, "swarming": { "can_use_on_swarming_builders": true, @@ -2717,10 +2120,6 @@ "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" }, { - "args": [ - "--", - "--undefok=use-vulkan" - ], "isolate_name": "common_audio_unittests", "merge": { "args": [], @@ -2728,7 +2127,8 @@ }, "name": "common_audio_unittests", "resultdb": { - "result_format": "json" + "enable": true, + "has_native_resultdb_integration": true }, "swarming": { "can_use_on_swarming_builders": true, @@ -2742,10 +2142,6 @@ "test_id_prefix": "ninja://common_audio:common_audio_unittests/" }, { - "args": [ - "--", - "--undefok=use-vulkan" - ], "isolate_name": "common_video_unittests", "merge": { "args": [], @@ -2753,7 +2149,8 @@ }, "name": "common_video_unittests", "resultdb": { - "result_format": "json" + "enable": true, + "has_native_resultdb_integration": true }, "swarming": { "can_use_on_swarming_builders": true, @@ -2767,10 +2164,6 @@ "test_id_prefix": "ninja://common_video:common_video_unittests/" }, { - "args": [ - "--", - "--undefok=use-vulkan" - ], "isolate_name": "dcsctp_unittests", "merge": { "args": [], @@ -2778,7 +2171,8 @@ }, "name": "dcsctp_unittests", "resultdb": { - "result_format": "json" + "enable": true, + "has_native_resultdb_integration": true }, "swarming": { "can_use_on_swarming_builders": true, @@ -2793,8 +2187,7 @@ }, { "args": [ - "--", - 
"--undefok=use-vulkan" + "--quick" ], "isolate_name": "low_bandwidth_audio_test", "merge": { @@ -2803,7 +2196,8 @@ }, "name": "low_bandwidth_audio_test", "resultdb": { - "result_format": "json" + "enable": true, + "has_native_resultdb_integration": true }, "swarming": { "can_use_on_swarming_builders": true, @@ -2815,6 +2209,140 @@ ] }, "test_id_prefix": "ninja://audio:low_bandwidth_audio_test/" + }, + { + "isolate_name": "rtc_media_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_media_unittests", + "resultdb": { + "enable": true, + "has_native_resultdb_integration": true + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://media:rtc_media_unittests/" + }, + { + "isolate_name": "rtc_pc_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_pc_unittests", + "resultdb": { + "enable": true, + "has_native_resultdb_integration": true + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://pc:rtc_pc_unittests/" + }, + { + "isolate_name": "svc_tests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "svc_tests", + "resultdb": { + "enable": true, + "has_native_resultdb_integration": true + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ], + "shards": 4 + }, + "test_id_prefix": "ninja://pc:svc_tests/" + }, + { + "isolate_name": "system_wrappers_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "system_wrappers_unittests", + "resultdb": { + "enable": true, + "has_native_resultdb_integration": true + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" + }, + { + "isolate_name": "video_engine_tests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "video_engine_tests", + "resultdb": { + "enable": true, + "has_native_resultdb_integration": true + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ], + "shards": 4 + }, + "test_id_prefix": "ninja://:video_engine_tests/" + }, + { + "isolate_name": "voip_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "voip_unittests", + "resultdb": { + "enable": true, + "has_native_resultdb_integration": true + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://:voip_unittests/" } ] }, @@ -8715,7 +8243,7 @@ "dimension_sets": [ { "cpu": "x86-64", - "os": "Windows-10" + "os": "Windows-10-19042" } ] }, @@ -8736,7 +8264,7 @@ "dimension_sets": [ { "cpu": "x86-64", - "os": "Windows-10" + "os": "Windows-10-19042" } ] }, @@ -8757,7 +8285,7 @@ "dimension_sets": [ { "cpu": "x86-64", - "os": "Windows-10" + "os": "Windows-10-19042" } ] }, @@ -8778,7 +8306,7 @@ "dimension_sets": [ { 
"cpu": "x86-64", - "os": "Windows-10" + "os": "Windows-10-19042" } ] }, @@ -8802,7 +8330,7 @@ "dimension_sets": [ { "cpu": "x86-64", - "os": "Windows-10" + "os": "Windows-10-19042" } ] }, @@ -8823,7 +8351,7 @@ "dimension_sets": [ { "cpu": "x86-64", - "os": "Windows-10" + "os": "Windows-10-19042" } ], "shards": 2 @@ -8845,7 +8373,7 @@ "dimension_sets": [ { "cpu": "x86-64", - "os": "Windows-10" + "os": "Windows-10-19042" } ], "shards": 6 @@ -8867,7 +8395,7 @@ "dimension_sets": [ { "cpu": "x86-64", - "os": "Windows-10" + "os": "Windows-10-19042" } ], "shards": 4 @@ -8889,7 +8417,7 @@ "dimension_sets": [ { "cpu": "x86-64", - "os": "Windows-10" + "os": "Windows-10-19042" } ] }, @@ -8910,7 +8438,7 @@ "dimension_sets": [ { "cpu": "x86-64", - "os": "Windows-10" + "os": "Windows-10-19042" } ] }, @@ -8931,7 +8459,7 @@ "dimension_sets": [ { "cpu": "x86-64", - "os": "Windows-10" + "os": "Windows-10-19042" } ] }, @@ -8952,7 +8480,7 @@ "dimension_sets": [ { "cpu": "x86-64", - "os": "Windows-10" + "os": "Windows-10-19042" } ], "shards": 6 @@ -8974,7 +8502,7 @@ "dimension_sets": [ { "cpu": "x86-64", - "os": "Windows-10" + "os": "Windows-10-19042" } ] }, @@ -8995,7 +8523,7 @@ "dimension_sets": [ { "cpu": "x86-64", - "os": "Windows-10" + "os": "Windows-10-19042" } ], "shards": 4 @@ -9017,7 +8545,7 @@ "dimension_sets": [ { "cpu": "x86-64", - "os": "Windows-10" + "os": "Windows-10-19042" } ] }, @@ -9038,7 +8566,7 @@ "dimension_sets": [ { "cpu": "x86-64", - "os": "Windows-10" + "os": "Windows-10-19042" } ] }, @@ -9059,34 +8587,12 @@ "dimension_sets": [ { "cpu": "x86-64", - "os": "Windows-10" + "os": "Windows-10-19042" } ] }, "test_id_prefix": "ninja://rtc_tools:tools_unittests/" }, - { - "isolate_name": "video_capture_tests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "video_capture_tests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10", - "pool": "WebRTC-baremetal" - } - ] - }, - "test_id_prefix": "ninja://modules/video_capture:video_capture_tests/" - }, { "isolate_name": "video_engine_tests", "merge": { @@ -9102,7 +8608,7 @@ "dimension_sets": [ { "cpu": "x86-64", - "os": "Windows-10" + "os": "Windows-10-19042" } ], "shards": 4 @@ -9124,7 +8630,7 @@ "dimension_sets": [ { "cpu": "x86-64", - "os": "Windows-10" + "os": "Windows-10-19042" } ] }, @@ -9145,7 +8651,7 @@ "dimension_sets": [ { "cpu": "x86-64", - "os": "Windows-10" + "os": "Windows-10-19042" } ] }, @@ -9586,8 +9092,894 @@ } ] }, - "Win64 Debug (Clang)": {}, - "Win64 Release (Clang)": {}, + "Win64 Debug (Clang)": { + "isolated_scripts": [ + { + "isolate_name": "audio_decoder_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "audio_decoder_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10-19042" + } + ] + }, + "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" + }, + { + "isolate_name": "common_audio_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "common_audio_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10-19042" + } + ] + }, + 
"test_id_prefix": "ninja://common_audio:common_audio_unittests/" + }, + { + "isolate_name": "common_video_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "common_video_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10-19042" + } + ] + }, + "test_id_prefix": "ninja://common_video:common_video_unittests/" + }, + { + "isolate_name": "dcsctp_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "dcsctp_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10-19042" + } + ] + }, + "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" + }, + { + "args": [ + "--quick" + ], + "isolate_name": "low_bandwidth_audio_test", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "low_bandwidth_audio_test", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10-19042" + } + ] + }, + "test_id_prefix": "ninja://audio:low_bandwidth_audio_test/" + }, + { + "isolate_name": "modules_tests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "modules_tests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10-19042" + } + ], + "shards": 2 + }, + "test_id_prefix": "ninja://modules:modules_tests/" + }, + { + "isolate_name": "modules_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "modules_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10-19042" + } + ], + "shards": 6 + }, + "test_id_prefix": "ninja://modules:modules_unittests/" + }, + { + "isolate_name": "peerconnection_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "peerconnection_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10-19042" + } + ], + "shards": 4 + }, + "test_id_prefix": "ninja://pc:peerconnection_unittests/" + }, + { + "isolate_name": "rtc_media_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_media_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10-19042" + } + ] + }, + "test_id_prefix": "ninja://media:rtc_media_unittests/" + }, + { + "isolate_name": "rtc_pc_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_pc_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10-19042" + } + ] + }, + 
"test_id_prefix": "ninja://pc:rtc_pc_unittests/" + }, + { + "isolate_name": "rtc_stats_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_stats_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10-19042" + } + ] + }, + "test_id_prefix": "ninja://stats:rtc_stats_unittests/" + }, + { + "isolate_name": "rtc_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10-19042" + } + ], + "shards": 6 + }, + "test_id_prefix": "ninja://:rtc_unittests/" + }, + { + "isolate_name": "slow_peer_connection_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "slow_peer_connection_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10-19042" + } + ] + }, + "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" + }, + { + "isolate_name": "svc_tests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "svc_tests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10-19042" + } + ], + "shards": 4 + }, + "test_id_prefix": "ninja://pc:svc_tests/" + }, + { + "isolate_name": "system_wrappers_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "system_wrappers_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10-19042" + } + ] + }, + "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" + }, + { + "isolate_name": "test_support_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "test_support_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10-19042" + } + ] + }, + "test_id_prefix": "ninja://test:test_support_unittests/" + }, + { + "isolate_name": "tools_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "tools_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10-19042" + } + ] + }, + "test_id_prefix": "ninja://rtc_tools:tools_unittests/" + }, + { + "isolate_name": "video_engine_tests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "video_engine_tests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10-19042" + } + ], + "shards": 4 + }, + "test_id_prefix": "ninja://:video_engine_tests/" + }, + { + 
"isolate_name": "voip_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "voip_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10-19042" + } + ] + }, + "test_id_prefix": "ninja://:voip_unittests/" + }, + { + "isolate_name": "webrtc_nonparallel_tests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "webrtc_nonparallel_tests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10-19042" + } + ] + }, + "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" + } + ] + }, + "Win64 Release (Clang)": { + "isolated_scripts": [ + { + "isolate_name": "audio_decoder_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "audio_decoder_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10" + } + ] + }, + "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" + }, + { + "isolate_name": "common_audio_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "common_audio_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10" + } + ] + }, + "test_id_prefix": "ninja://common_audio:common_audio_unittests/" + }, + { + "isolate_name": "common_video_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "common_video_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10" + } + ] + }, + "test_id_prefix": "ninja://common_video:common_video_unittests/" + }, + { + "isolate_name": "dcsctp_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "dcsctp_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10" + } + ] + }, + "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" + }, + { + "args": [ + "--quick" + ], + "isolate_name": "low_bandwidth_audio_test", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "low_bandwidth_audio_test", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10" + } + ] + }, + "test_id_prefix": "ninja://audio:low_bandwidth_audio_test/" + }, + { + "isolate_name": "modules_tests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "modules_tests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10" + } + ], + "shards": 2 + }, + "test_id_prefix": 
"ninja://modules:modules_tests/" + }, + { + "isolate_name": "modules_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "modules_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10" + } + ], + "shards": 6 + }, + "test_id_prefix": "ninja://modules:modules_unittests/" + }, + { + "isolate_name": "peerconnection_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "peerconnection_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10" + } + ], + "shards": 4 + }, + "test_id_prefix": "ninja://pc:peerconnection_unittests/" + }, + { + "isolate_name": "rtc_media_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_media_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10" + } + ] + }, + "test_id_prefix": "ninja://media:rtc_media_unittests/" + }, + { + "isolate_name": "rtc_pc_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_pc_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10" + } + ] + }, + "test_id_prefix": "ninja://pc:rtc_pc_unittests/" + }, + { + "isolate_name": "rtc_stats_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_stats_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10" + } + ] + }, + "test_id_prefix": "ninja://stats:rtc_stats_unittests/" + }, + { + "isolate_name": "rtc_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10" + } + ], + "shards": 6 + }, + "test_id_prefix": "ninja://:rtc_unittests/" + }, + { + "isolate_name": "slow_peer_connection_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "slow_peer_connection_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10" + } + ] + }, + "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" + }, + { + "isolate_name": "svc_tests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "svc_tests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10" + } + ], + "shards": 4 + }, + "test_id_prefix": "ninja://pc:svc_tests/" + }, + { + "isolate_name": "system_wrappers_unittests", + "merge": { + "args": [], 
+ "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "system_wrappers_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10" + } + ] + }, + "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" + }, + { + "isolate_name": "test_support_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "test_support_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10" + } + ] + }, + "test_id_prefix": "ninja://test:test_support_unittests/" + }, + { + "isolate_name": "tools_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "tools_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10" + } + ] + }, + "test_id_prefix": "ninja://rtc_tools:tools_unittests/" + }, + { + "isolate_name": "video_capture_tests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "video_capture_tests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10", + "pool": "WebRTC-baremetal" + } + ] + }, + "test_id_prefix": "ninja://modules/video_capture:video_capture_tests/" + }, + { + "isolate_name": "video_engine_tests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "video_engine_tests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10" + } + ], + "shards": 4 + }, + "test_id_prefix": "ninja://:video_engine_tests/" + }, + { + "isolate_name": "voip_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "voip_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10" + } + ] + }, + "test_id_prefix": "ninja://:voip_unittests/" + }, + { + "isolate_name": "webrtc_nonparallel_tests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "webrtc_nonparallel_tests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10" + } + ] + }, + "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" + } + ] + }, "iOS64 Debug": {}, "iOS64 Release": {}, "iOS64 Sim Debug (iOS 12)": { @@ -9598,11 +9990,11 @@ "iPhone X", "--version", "12.4", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", "${ISOLATED_OUTDIR}", + "--xctest", "--xcode-parallelization" ], "isolate_name": "apprtcmobile_tests", @@ -9646,11 +10038,11 @@ "iPhone X", "--version", "12.4", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], 
"isolate_name": "audio_decoder_unittests", "merge": { @@ -9693,11 +10085,11 @@ "iPhone X", "--version", "12.4", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "common_audio_unittests", "merge": { @@ -9740,11 +10132,11 @@ "iPhone X", "--version", "12.4", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "common_video_unittests", "merge": { @@ -9787,11 +10179,11 @@ "iPhone X", "--version", "12.4", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "dcsctp_unittests", "merge": { @@ -9834,11 +10226,11 @@ "iPhone X", "--version", "12.4", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "modules_tests", "merge": { @@ -9882,11 +10274,11 @@ "iPhone X", "--version", "12.4", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "modules_unittests", "merge": { @@ -9909,6 +10301,7 @@ ], "dimension_sets": [ { + "cores": "12", "cpu": "x86-64", "os": "Mac-12" } @@ -9930,11 +10323,11 @@ "iPhone X", "--version", "12.4", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "rtc_media_unittests", "merge": { @@ -9977,11 +10370,11 @@ "iPhone X", "--version", "12.4", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "rtc_pc_unittests", "merge": { @@ -10024,11 +10417,11 @@ "iPhone X", "--version", "12.4", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "rtc_stats_unittests", "merge": { @@ -10071,11 +10464,11 @@ "iPhone X", "--version", "12.4", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "rtc_unittests", "merge": { @@ -10119,11 +10512,11 @@ "iPhone X", "--version", "12.4", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", "${ISOLATED_OUTDIR}", + "--xctest", "--xcode-parallelization" ], "isolate_name": "sdk_framework_unittests", @@ -10167,11 +10560,11 @@ "iPhone X", "--version", "12.4", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", "${ISOLATED_OUTDIR}", + "--xctest", "--xcode-parallelization" ], "isolate_name": "sdk_unittests", @@ -10215,11 +10608,11 @@ "iPhone X", "--version", "12.4", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "svc_tests", "merge": { @@ -10264,11 +10657,11 @@ "iPhone X", "--version", "12.4", - "--undefok=write-compiled-tests-json-to-writable-path", 
"--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "system_wrappers_unittests", "merge": { @@ -10311,11 +10704,11 @@ "iPhone X", "--version", "12.4", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "test_support_unittests", "merge": { @@ -10358,11 +10751,11 @@ "iPhone X", "--version", "12.4", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "tools_unittests", "merge": { @@ -10405,11 +10798,11 @@ "iPhone X", "--version", "12.4", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "video_capture_tests", "merge": { @@ -10452,11 +10845,11 @@ "iPhone X", "--version", "12.4", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "video_engine_tests", "merge": { @@ -10500,11 +10893,11 @@ "iPhone X", "--version", "12.4", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "voip_unittests", "merge": { @@ -10547,11 +10940,11 @@ "iPhone X", "--version", "12.4", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "webrtc_nonparallel_tests", "merge": { @@ -10598,11 +10991,11 @@ "iPhone X", "--version", "13.6", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", "${ISOLATED_OUTDIR}", + "--xctest", "--xcode-parallelization" ], "isolate_name": "apprtcmobile_tests", @@ -10646,11 +11039,11 @@ "iPhone X", "--version", "13.6", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "audio_decoder_unittests", "merge": { @@ -10693,11 +11086,11 @@ "iPhone X", "--version", "13.6", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "common_audio_unittests", "merge": { @@ -10740,11 +11133,11 @@ "iPhone X", "--version", "13.6", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "common_video_unittests", "merge": { @@ -10787,11 +11180,11 @@ "iPhone X", "--version", "13.6", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "dcsctp_unittests", "merge": { @@ -10834,11 +11227,11 @@ "iPhone X", "--version", "13.6", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "modules_tests", "merge": { @@ -10882,11 +11275,11 @@ "iPhone X", "--version", "13.6", - 
"--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "modules_unittests", "merge": { @@ -10909,6 +11302,7 @@ ], "dimension_sets": [ { + "cores": "12", "cpu": "x86-64", "os": "Mac-12" } @@ -10930,11 +11324,11 @@ "iPhone X", "--version", "13.6", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "rtc_media_unittests", "merge": { @@ -10977,11 +11371,11 @@ "iPhone X", "--version", "13.6", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "rtc_pc_unittests", "merge": { @@ -11024,11 +11418,11 @@ "iPhone X", "--version", "13.6", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "rtc_stats_unittests", "merge": { @@ -11071,11 +11465,11 @@ "iPhone X", "--version", "13.6", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "rtc_unittests", "merge": { @@ -11119,11 +11513,11 @@ "iPhone X", "--version", "13.6", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", "${ISOLATED_OUTDIR}", + "--xctest", "--xcode-parallelization" ], "isolate_name": "sdk_framework_unittests", @@ -11167,11 +11561,11 @@ "iPhone X", "--version", "13.6", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", "${ISOLATED_OUTDIR}", + "--xctest", "--xcode-parallelization" ], "isolate_name": "sdk_unittests", @@ -11215,11 +11609,11 @@ "iPhone X", "--version", "13.6", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "svc_tests", "merge": { @@ -11264,11 +11658,11 @@ "iPhone X", "--version", "13.6", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "system_wrappers_unittests", "merge": { @@ -11311,11 +11705,11 @@ "iPhone X", "--version", "13.6", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "test_support_unittests", "merge": { @@ -11358,11 +11752,11 @@ "iPhone X", "--version", "13.6", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "tools_unittests", "merge": { @@ -11405,11 +11799,11 @@ "iPhone X", "--version", "13.6", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "video_capture_tests", "merge": { @@ -11452,11 +11846,11 @@ "iPhone X", "--version", "13.6", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + 
"--xctest" ], "isolate_name": "video_engine_tests", "merge": { @@ -11500,11 +11894,11 @@ "iPhone X", "--version", "13.6", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "voip_unittests", "merge": { @@ -11547,11 +11941,11 @@ "iPhone X", "--version", "13.6", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "webrtc_nonparallel_tests", "merge": { @@ -11598,11 +11992,11 @@ "iPhone X", "--version", "14.5", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", "${ISOLATED_OUTDIR}", + "--xctest", "--xcode-parallelization" ], "isolate_name": "apprtcmobile_tests", @@ -11646,11 +12040,11 @@ "iPhone X", "--version", "14.5", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "audio_decoder_unittests", "merge": { @@ -11693,11 +12087,11 @@ "iPhone X", "--version", "14.5", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "common_audio_unittests", "merge": { @@ -11740,11 +12134,11 @@ "iPhone X", "--version", "14.5", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "common_video_unittests", "merge": { @@ -11787,11 +12181,11 @@ "iPhone X", "--version", "14.5", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "dcsctp_unittests", "merge": { @@ -11834,11 +12228,11 @@ "iPhone X", "--version", "14.5", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "modules_tests", "merge": { @@ -11882,11 +12276,11 @@ "iPhone X", "--version", "14.5", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "modules_unittests", "merge": { @@ -11909,6 +12303,7 @@ ], "dimension_sets": [ { + "cores": "12", "cpu": "x86-64", "os": "Mac-12" } @@ -11930,11 +12325,11 @@ "iPhone X", "--version", "14.5", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "rtc_media_unittests", "merge": { @@ -11977,11 +12372,11 @@ "iPhone X", "--version", "14.5", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "rtc_pc_unittests", "merge": { @@ -12024,11 +12419,11 @@ "iPhone X", "--version", "14.5", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "rtc_stats_unittests", "merge": { @@ -12071,11 +12466,11 @@ "iPhone X", "--version", "14.5", - 
"--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "rtc_unittests", "merge": { @@ -12119,11 +12514,11 @@ "iPhone X", "--version", "14.5", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", "${ISOLATED_OUTDIR}", + "--xctest", "--xcode-parallelization" ], "isolate_name": "sdk_framework_unittests", @@ -12167,11 +12562,11 @@ "iPhone X", "--version", "14.5", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", "${ISOLATED_OUTDIR}", + "--xctest", "--xcode-parallelization" ], "isolate_name": "sdk_unittests", @@ -12215,11 +12610,11 @@ "iPhone X", "--version", "14.5", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "svc_tests", "merge": { @@ -12264,11 +12659,11 @@ "iPhone X", "--version", "14.5", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "system_wrappers_unittests", "merge": { @@ -12311,11 +12706,11 @@ "iPhone X", "--version", "14.5", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "test_support_unittests", "merge": { @@ -12358,11 +12753,11 @@ "iPhone X", "--version", "14.5", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "tools_unittests", "merge": { @@ -12405,11 +12800,11 @@ "iPhone X", "--version", "14.5", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "video_capture_tests", "merge": { @@ -12452,11 +12847,11 @@ "iPhone X", "--version", "14.5", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "video_engine_tests", "merge": { @@ -12500,11 +12895,11 @@ "iPhone X", "--version", "14.5", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "voip_unittests", "merge": { @@ -12547,11 +12942,11 @@ "iPhone X", "--version", "14.5", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "webrtc_nonparallel_tests", "merge": { diff --git a/third_party/libwebrtc/infra/specs/client.webrtc.perf.json b/third_party/libwebrtc/infra/specs/client.webrtc.perf.json index e9ff1219f577..3357bf2417eb 100644 --- a/third_party/libwebrtc/infra/specs/client.webrtc.perf.json +++ b/third_party/libwebrtc/infra/specs/client.webrtc.perf.json @@ -30,13 +30,15 @@ "android_devices": "1", "device_os": "M", "device_type": "shamu", + "gce": "0", "os": "Android", "pool": "WebRTC-perf" } ], "hard_timeout": 10800, "idempotent": false, - "io_timeout": 10800 + "io_timeout": 10800, + "service_account": 
"chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "low_bandwidth_audio_perf_test", "test_id_prefix": "ninja://audio:low_bandwidth_audio_perf_test/" @@ -64,13 +66,15 @@ "android_devices": "1", "device_os": "M", "device_type": "shamu", + "gce": "0", "os": "Android", "pool": "WebRTC-perf" } ], "hard_timeout": 10800, "idempotent": false, - "io_timeout": 10800 + "io_timeout": 10800, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "webrtc_perf_tests", "test_id_prefix": "ninja://:webrtc_perf_tests/" @@ -106,13 +110,15 @@ "android_devices": "1", "device_os": "M", "device_type": "hammerhead", + "gce": "0", "os": "Android", "pool": "WebRTC-perf" } ], "hard_timeout": 10800, "idempotent": false, - "io_timeout": 10800 + "io_timeout": 10800, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "low_bandwidth_audio_perf_test", "test_id_prefix": "ninja://audio:low_bandwidth_audio_perf_test/" @@ -140,13 +146,15 @@ "android_devices": "1", "device_os": "M", "device_type": "hammerhead", + "gce": "0", "os": "Android", "pool": "WebRTC-perf" } ], "hard_timeout": 10800, "idempotent": false, - "io_timeout": 10800 + "io_timeout": 10800, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "webrtc_perf_tests", "test_id_prefix": "ninja://:webrtc_perf_tests/" @@ -181,13 +189,15 @@ { "android_devices": "1", "device_type": "walleye", + "gce": "0", "os": "Android", "pool": "WebRTC-perf" } ], "hard_timeout": 10800, "idempotent": false, - "io_timeout": 10800 + "io_timeout": 10800, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "low_bandwidth_audio_perf_test", "test_id_prefix": "ninja://audio:low_bandwidth_audio_perf_test/" @@ -214,13 +224,15 @@ { "android_devices": "1", "device_type": "walleye", + "gce": "0", "os": "Android", "pool": "WebRTC-perf" } ], "hard_timeout": 10800, "idempotent": false, - "io_timeout": 10800 + "io_timeout": 10800, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "webrtc_perf_tests", "test_id_prefix": "ninja://:webrtc_perf_tests/" @@ -255,13 +267,15 @@ { "android_devices": "1", "device_type": "redfin", + "gce": "0", "os": "Android", "pool": "WebRTC-perf" } ], "hard_timeout": 10800, "idempotent": false, - "io_timeout": 10800 + "io_timeout": 10800, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "low_bandwidth_audio_perf_test", "test_id_prefix": "ninja://audio:low_bandwidth_audio_perf_test/" @@ -288,13 +302,15 @@ { "android_devices": "1", "device_type": "redfin", + "gce": "0", "os": "Android", "pool": "WebRTC-perf" } ], "hard_timeout": 10800, "idempotent": false, - "io_timeout": 10800 + "io_timeout": 10800, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "webrtc_perf_tests", "test_id_prefix": "ninja://:webrtc_perf_tests/" @@ -330,13 +346,15 @@ "android_devices": "1", "device_os": "MMB29Q", "device_type": "bullhead", + "gce": "0", "os": "Android", "pool": "WebRTC-perf" } ], "hard_timeout": 10800, "idempotent": false, - "io_timeout": 10800 + "io_timeout": 10800, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "low_bandwidth_audio_perf_test", "test_id_prefix": "ninja://audio:low_bandwidth_audio_perf_test/" @@ -364,13 +382,15 @@ "android_devices": "1", "device_os": "MMB29Q", "device_type": "bullhead", + 
"gce": "0", "os": "Android", "pool": "WebRTC-perf" } ], "hard_timeout": 10800, "idempotent": false, - "io_timeout": 10800 + "io_timeout": 10800, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "webrtc_perf_tests", "test_id_prefix": "ninja://:webrtc_perf_tests/" @@ -405,13 +425,15 @@ { "android_devices": "1", "device_type": "walleye", + "gce": "0", "os": "Android", "pool": "WebRTC-perf" } ], "hard_timeout": 10800, "idempotent": false, - "io_timeout": 10800 + "io_timeout": 10800, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "low_bandwidth_audio_perf_test", "test_id_prefix": "ninja://audio:low_bandwidth_audio_perf_test/" @@ -438,13 +460,15 @@ { "android_devices": "1", "device_type": "walleye", + "gce": "0", "os": "Android", "pool": "WebRTC-perf" } ], "hard_timeout": 10800, "idempotent": false, - "io_timeout": 10800 + "io_timeout": 10800, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "webrtc_perf_tests", "test_id_prefix": "ninja://:webrtc_perf_tests/" @@ -479,13 +503,15 @@ { "android_devices": "1", "device_type": "redfin", + "gce": "0", "os": "Android", "pool": "WebRTC-perf" } ], "hard_timeout": 10800, "idempotent": false, - "io_timeout": 10800 + "io_timeout": 10800, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "low_bandwidth_audio_perf_test", "test_id_prefix": "ninja://audio:low_bandwidth_audio_perf_test/" @@ -512,19 +538,59 @@ { "android_devices": "1", "device_type": "redfin", + "gce": "0", "os": "Android", "pool": "WebRTC-perf" } ], "hard_timeout": 10800, "idempotent": false, - "io_timeout": 10800 + "io_timeout": 10800, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "webrtc_perf_tests", "test_id_prefix": "ninja://:webrtc_perf_tests/" } ] }, + "Perf Fuchsia": { + "isolated_scripts": [ + { + "args": [ + "--nologs" + ], + "isolate_name": "fuchsia_perf_tests", + "merge": { + "args": [ + "--test-suite", + "fuchsia_perf_tests" + ], + "script": "//tools_webrtc/perf/process_perf_results.py" + }, + "name": "fuchsia_perf_tests", + "resultdb": { + "enable": true, + "has_native_resultdb_integration": true + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "gce": "1", + "os": "Ubuntu-18.04", + "pool": "WebRTC-perf" + } + ], + "hard_timeout": 10800, + "idempotent": false, + "io_timeout": 10800, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" + }, + "test_id_prefix": "ninja://:fuchsia_perf_tests/" + } + ] + }, "Perf Linux Bionic": { "isolated_scripts": [ { @@ -550,6 +616,7 @@ "can_use_on_swarming_builders": true, "dimension_sets": [ { + "gce": "0", "os": "Ubuntu-18.04", "pool": "WebRTC-perf" } @@ -584,6 +651,7 @@ "can_use_on_swarming_builders": true, "dimension_sets": [ { + "gce": "0", "os": "Ubuntu-18.04", "pool": "WebRTC-perf" } @@ -622,6 +690,7 @@ "dimension_sets": [ { "cpu": "x86-64", + "gce": "0", "os": "Mac-11", "pool": "WebRTC-perf" } @@ -657,6 +726,7 @@ "dimension_sets": [ { "cpu": "x86-64", + "gce": "0", "os": "Mac-11", "pool": "WebRTC-perf" } @@ -694,6 +764,7 @@ "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", + "gce": "0", "os": "Mac-12", "pool": "WebRTC-perf" } @@ -706,7 +777,7 @@ } ] }, - "Perf Win7": { + "Perf Win 10": { "isolated_scripts": [ { "args": [ @@ -731,6 +802,7 @@ "can_use_on_swarming_builders": true, "dimension_sets": [ { + "gce": "0", 
"os": "Windows-10", "pool": "WebRTC-perf" } @@ -765,6 +837,7 @@ "can_use_on_swarming_builders": true, "dimension_sets": [ { + "gce": "0", "os": "Windows-10", "pool": "WebRTC-perf" } diff --git a/third_party/libwebrtc/infra/specs/gn_isolate_map.pyl b/third_party/libwebrtc/infra/specs/gn_isolate_map.pyl index 25ab4f2e2d67..c7c472fcfa37 100644 --- a/third_party/libwebrtc/infra/specs/gn_isolate_map.pyl +++ b/third_party/libwebrtc/infra/specs/gn_isolate_map.pyl @@ -55,10 +55,18 @@ "label": "//net/dcsctp:dcsctp_unittests", "type": "console_test_launcher", }, + "default": { + "label": "//:default", + "type": "additional_compile_target", + }, "android_instrumentation_test_apk": { "label": "//sdk/android:android_instrumentation_test_apk", "type": "console_test_launcher", }, + "fuchsia_perf_tests": { + "label": "//:fuchsia_perf_tests", + "type": "raw", + }, "low_bandwidth_audio_test": { "label": "//audio:low_bandwidth_audio_test", "type": "console_test_launcher", diff --git a/third_party/libwebrtc/infra/specs/internal.client.webrtc.json b/third_party/libwebrtc/infra/specs/internal.client.webrtc.json index c40ec54b3f75..b300db697b2e 100644 --- a/third_party/libwebrtc/infra/specs/internal.client.webrtc.json +++ b/third_party/libwebrtc/infra/specs/internal.client.webrtc.json @@ -6,7 +6,6 @@ { "args": [ "--xctest", - "--undefok=enable-run-ios-unittests-with-xctest", "--xcode-build-version", "13c100", "--out-dir", @@ -50,7 +49,6 @@ { "args": [ "--xctest", - "--undefok=enable-run-ios-unittests-with-xctest", "--xcode-build-version", "13c100", "--out-dir", @@ -95,7 +93,6 @@ "args": [ "--readline-timeout=1200", "--xctest", - "--undefok=enable-run-ios-unittests-with-xctest", "--xcode-build-version", "13c100", "--out-dir", @@ -142,7 +139,6 @@ { "args": [ "--xctest", - "--undefok=enable-run-ios-unittests-with-xctest", "--xcode-build-version", "13c100", "--out-dir", @@ -187,7 +183,6 @@ { "args": [ "--xctest", - "--undefok=enable-run-ios-unittests-with-xctest", "--xcode-build-version", "13c100", "--out-dir", @@ -231,7 +226,6 @@ { "args": [ "--xctest", - "--undefok=enable-run-ios-unittests-with-xctest", "--xcode-build-version", "13c100", "--out-dir", @@ -275,7 +269,6 @@ { "args": [ "--xctest", - "--undefok=enable-run-ios-unittests-with-xctest", "--xcode-build-version", "13c100", "--out-dir", @@ -319,7 +312,6 @@ { "args": [ "--xctest", - "--undefok=enable-run-ios-unittests-with-xctest", "--xcode-build-version", "13c100", "--out-dir", @@ -363,7 +355,6 @@ { "args": [ "--xctest", - "--undefok=enable-run-ios-unittests-with-xctest", "--xcode-build-version", "13c100", "--out-dir", @@ -407,7 +398,6 @@ { "args": [ "--xctest", - "--undefok=enable-run-ios-unittests-with-xctest", "--xcode-build-version", "13c100", "--out-dir", @@ -451,7 +441,6 @@ { "args": [ "--xctest", - "--undefok=enable-run-ios-unittests-with-xctest", "--xcode-build-version", "13c100", "--out-dir", @@ -555,7 +544,6 @@ { "args": [ "--xctest", - "--undefok=enable-run-ios-unittests-with-xctest", "--xcode-build-version", "13c100", "--out-dir", @@ -599,7 +587,6 @@ { "args": [ "--xctest", - "--undefok=enable-run-ios-unittests-with-xctest", "--xcode-build-version", "13c100", "--out-dir", @@ -644,7 +631,6 @@ "args": [ "--readline-timeout=1200", "--xctest", - "--undefok=enable-run-ios-unittests-with-xctest", "--xcode-build-version", "13c100", "--out-dir", @@ -691,7 +677,6 @@ { "args": [ "--xctest", - "--undefok=enable-run-ios-unittests-with-xctest", "--xcode-build-version", "13c100", "--out-dir", @@ -736,7 +721,6 @@ { "args": [ "--xctest", - 
"--undefok=enable-run-ios-unittests-with-xctest", "--xcode-build-version", "13c100", "--out-dir", @@ -780,7 +764,6 @@ { "args": [ "--xctest", - "--undefok=enable-run-ios-unittests-with-xctest", "--xcode-build-version", "13c100", "--out-dir", @@ -824,7 +807,6 @@ { "args": [ "--xctest", - "--undefok=enable-run-ios-unittests-with-xctest", "--xcode-build-version", "13c100", "--out-dir", @@ -868,7 +850,6 @@ { "args": [ "--xctest", - "--undefok=enable-run-ios-unittests-with-xctest", "--xcode-build-version", "13c100", "--out-dir", @@ -912,7 +893,6 @@ { "args": [ "--xctest", - "--undefok=enable-run-ios-unittests-with-xctest", "--xcode-build-version", "13c100", "--out-dir", @@ -956,7 +936,6 @@ { "args": [ "--xctest", - "--undefok=enable-run-ios-unittests-with-xctest", "--xcode-build-version", "13c100", "--out-dir", @@ -1000,7 +979,6 @@ { "args": [ "--xctest", - "--undefok=enable-run-ios-unittests-with-xctest", "--xcode-build-version", "13c100", "--out-dir", diff --git a/third_party/libwebrtc/infra/specs/mixins.pyl b/third_party/libwebrtc/infra/specs/mixins.pyl index 3e3c9b716e1a..2f11efd3f81e 100644 --- a/third_party/libwebrtc/infra/specs/mixins.pyl +++ b/third_party/libwebrtc/infra/specs/mixins.pyl @@ -59,11 +59,6 @@ } } }, - 'fuchsia': { - '$mixin_append': { - 'args': ['--', '--undefok=use-vulkan'] - } - }, 'hammerhead': { 'swarming': { 'dimensions': { @@ -98,10 +93,7 @@ }, 'ios-simulator-12.4': { '$mixin_append': { - 'args': [ - '--platform', 'iPhone X', '--version', '12.4', - '--undefok=write-compiled-tests-json-to-writable-path' - ], + 'args': ['--platform', 'iPhone X', '--version', '12.4'], 'swarming': { 'named_caches': [{ 'name': 'runtime_ios_12_4', @@ -112,10 +104,7 @@ }, 'ios-simulator-13.6': { '$mixin_append': { - 'args': [ - '--platform', 'iPhone X', '--version', '13.6', - '--undefok=write-compiled-tests-json-to-writable-path' - ], + 'args': ['--platform', 'iPhone X', '--version', '13.6'], 'swarming': { 'named_caches': [{ 'name': 'runtime_ios_13_6', @@ -126,10 +115,7 @@ }, 'ios-simulator-14.5': { '$mixin_append': { - 'args': [ - '--platform', 'iPhone X', '--version', '14.5', - '--undefok=write-compiled-tests-json-to-writable-path' - ], + 'args': ['--platform', 'iPhone X', '--version', '14.5'], 'swarming': { 'named_caches': [{ 'name': 'runtime_ios_14_5', @@ -138,6 +124,9 @@ } } }, + 'isolate_profile_data': { + 'isolate_profile_data': True + }, 'linux-bionic': { 'swarming': { 'dimensions': { @@ -145,18 +134,6 @@ } } }, - 'logdog-butler': { - 'swarming': { - 'cipd_packages': [{ - 'cipd_package': - 'infra/tools/luci/logdog/butler/${platform}', - 'location': - 'bin', - 'revision': - 'git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c' - }] - } - }, 'mac-m1-cpu': { 'swarming': { 'dimensions': { @@ -218,6 +195,15 @@ 'args': ['--out-dir', '${ISOLATED_OUTDIR}'] } }, + 'perf-fuchsia-perf-tests': { + 'merge': { + 'script': '//tools_webrtc/perf/process_perf_results.py', + 'args': ['--test-suite', 'fuchsia_perf_tests'] + }, + '$mixin_append': { + 'args': ['--nologs'] + } + }, 'perf-low-bandwidth-audio-perf-test': { 'merge': { 'script': '//tools_webrtc/perf/process_perf_results.py', @@ -235,7 +221,17 @@ 'swarming': { 'idempotent': False, 'dimensions': { - 'pool': 'WebRTC-perf' + 'pool': 'WebRTC-perf', + 'gce': '0' + } + } + }, + 'perf-pool-vm': { + 'swarming': { + 'idempotent': False, + 'dimensions': { + 'pool': 'WebRTC-perf', + 'gce': '1' } } }, @@ -250,8 +246,7 @@ }, 'quick-perf-tests': { '$mixin_append': { - 'args': - ['--force_fieldtrials=WebRTC-QuickPerfTest/Enabled/', '--nologs'] + 'args': 
['--force_fieldtrials=WebRTC-QuickPerfTest/Enabled/', '--nologs'] } }, 'redfin': { @@ -321,7 +316,7 @@ }, 'webrtc-xctest': { '$mixin_append': { - 'args': ['--xctest', '--undefok=enable-run-ios-unittests-with-xctest'] + 'args': ['--xctest'] } }, 'win10': { diff --git a/third_party/libwebrtc/infra/specs/mixins_webrtc.pyl b/third_party/libwebrtc/infra/specs/mixins_webrtc.pyl index 357b94e5ce38..56de235da73f 100644 --- a/third_party/libwebrtc/infra/specs/mixins_webrtc.pyl +++ b/third_party/libwebrtc/infra/specs/mixins_webrtc.pyl @@ -35,6 +35,14 @@ } } }, + 'hammerhead': { + 'swarming': { + 'dimensions': { + 'device_type': 'hammerhead', + 'os': 'Android' + } + } + }, 'ios-device-15.7': { 'swarming': { 'dimensions': { @@ -60,7 +68,6 @@ 'iPhone X', '--version', '12.4', - '--undefok=write-compiled-tests-json-to-writable-path', ], 'swarming': { 'named_caches': [ @@ -79,7 +86,6 @@ 'iPhone X', '--version', '13.6', - '--undefok=write-compiled-tests-json-to-writable-path', ], 'swarming': { 'named_caches': [ @@ -98,7 +104,6 @@ 'iPhone X', '--version', '14.5', - '--undefok=write-compiled-tests-json-to-writable-path', ], 'swarming': { 'named_caches': [ @@ -110,17 +115,6 @@ }, } }, - 'logdog-butler': { - 'swarming': { - 'cipd_packages': [ - { - "cipd_package": 'infra/tools/luci/logdog/butler/${platform}', - 'location': 'bin', - 'revision': 'git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c', - }, - ], - }, - }, 'mac-m1-cpu': { 'swarming': { 'dimensions': { @@ -135,6 +129,22 @@ } } }, + 'marshmallow_generic': { + 'swarming': { + 'dimensions': { + 'device_os': 'M' + } + } + }, + 'perf-fuchsia-perf-tests': { + 'merge': { + 'script': '//tools_webrtc/perf/process_perf_results.py', + 'args': ['--test-suite', 'fuchsia_perf_tests'], + }, + '$mixin_append': { + 'args': ['--nologs'], + }, + }, 'perf-low-bandwidth-audio-perf-test': { 'merge': { 'script': '//tools_webrtc/perf/process_perf_results.py', @@ -156,6 +166,16 @@ 'idempotent': False, 'dimensions': { 'pool': 'WebRTC-perf', + 'gce': '0', + }, + }, + }, + 'perf-pool-vm': { + 'swarming': { + 'idempotent': False, + 'dimensions': { + 'pool': 'WebRTC-perf', + 'gce': '1', }, }, }, @@ -184,11 +204,6 @@ } } }, - 'fuchsia': { - '$mixin_append': { - 'args': ['--', '--undefok=use-vulkan'] - } - }, 'resultdb-gtest-json-format': { '$mixin_append': { 'args': [ @@ -244,7 +259,6 @@ '$mixin_append': { 'args': [ '--xctest', - '--undefok=enable-run-ios-unittests-with-xctest', ], }, }, diff --git a/third_party/libwebrtc/infra/specs/setup.cfg b/third_party/libwebrtc/infra/specs/setup.cfg index 7dd0a8a68e52..d5ed6957bbd0 100644 --- a/third_party/libwebrtc/infra/specs/setup.cfg +++ b/third_party/libwebrtc/infra/specs/setup.cfg @@ -8,4 +8,5 @@ # This is the style settings used when running yapf on .pyl files. 
[yapf] -continuation_indent_width = 2 \ No newline at end of file +continuation_indent_width = 2 +column_limit = 80 diff --git a/third_party/libwebrtc/infra/specs/test_suites.pyl b/third_party/libwebrtc/infra/specs/test_suites.pyl index 06338d3f32f8..1706f6d94d7a 100644 --- a/third_party/libwebrtc/infra/specs/test_suites.pyl +++ b/third_party/libwebrtc/infra/specs/test_suites.pyl @@ -116,12 +116,39 @@ 'voip_unittests': {}, 'webrtc_nonparallel_tests': {}, }, + 'fuchsia_compatible_perf_tests': { + 'fuchsia_perf_tests': { + 'mixins': ['perf-fuchsia-perf-tests'], + }, + }, 'fuchsia_compatible_tests': { 'audio_decoder_unittests': {}, 'common_audio_unittests': {}, 'common_video_unittests': {}, 'dcsctp_unittests': {}, - 'low_bandwidth_audio_test': {}, + 'low_bandwidth_audio_test': { + 'args': ['--quick'] + }, + 'rtc_media_unittests': {}, + # TODO(bugs.webrtc.org/14705): Enable when NonGlobalFieldTrialsInstanceDoesNotModifyGlobalString is fixed. + # TODO(bugs.webrtc.org/14700): Enable when NetworkTest tests are fixed. + # 'rtc_unittests': {}, + 'rtc_pc_unittests': {}, + 'svc_tests': { + 'mixins': ['shards-4'], + }, + 'system_wrappers_unittests': {}, + # TODO(bugs.webrtc.org/14712): Enable once network issue is fixed. + # 'peerconnection_unittests': {}, + 'video_engine_tests': { + 'mixins': ['shards-4'], + }, + 'voip_unittests': {}, + # TODO(bugs.fuchsia.dev/115601): Enable when cpu time API's are implemented in Fuchsia + # 'test_support_unittests': {}, + # TODO(bugs.webrtc.org/14707): chromium.test component needs to allow creating listening ports. + # 'tools_unittests': {}, + # }, 'ios_device_tests': { # TODO(bugs.webrtc.org/11362): Real XCTests fail to start on devices. @@ -163,7 +190,7 @@ 'mixins': ['shards-2'], }, 'modules_unittests': { - 'mixins': ['shards-6'], + 'mixins': ['shards-6', 'cores-12'], }, 'rtc_media_unittests': {}, 'rtc_pc_unittests': {}, diff --git a/third_party/libwebrtc/infra/specs/tryserver.webrtc.json b/third_party/libwebrtc/infra/specs/tryserver.webrtc.json index 302c94cfcd9e..5f65f32566e8 100644 --- a/third_party/libwebrtc/infra/specs/tryserver.webrtc.json +++ b/third_party/libwebrtc/infra/specs/tryserver.webrtc.json @@ -14,21 +14,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "AppRTCMobile_test_apk", "test_id_prefix": "ninja://examples:AppRTCMobile_test_apk/" @@ -44,21 +37,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "android_instrumentation_test_apk", "test_id_prefix": "ninja://sdk/android:android_instrumentation_test_apk/" @@ -74,21 +60,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": 
"infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" @@ -104,21 +83,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" @@ -134,21 +106,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" @@ -164,21 +129,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" @@ -194,21 +152,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 2 }, "test": "modules_tests", @@ -225,21 +176,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 6 }, "test": "modules_unittests", @@ -256,21 +200,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": 
"infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "peerconnection_unittests", @@ -287,21 +224,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" @@ -317,21 +247,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "rtc_pc_unittests", "test_id_prefix": "ninja://pc:rtc_pc_unittests/" @@ -347,21 +270,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "rtc_stats_unittests", "test_id_prefix": "ninja://stats:rtc_stats_unittests/" @@ -377,21 +293,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 6 }, "test": "rtc_unittests", @@ -408,21 +317,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" @@ -438,21 +340,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": 
"bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "svc_tests", @@ -469,21 +364,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" @@ -499,21 +387,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" @@ -529,21 +410,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" @@ -559,21 +433,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "video_engine_tests", @@ -590,21 +457,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" @@ -620,21 +480,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" 
- } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" @@ -654,21 +507,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "webrtc_perf_tests", "test_id_prefix": "ninja://:webrtc_perf_tests/" @@ -710,21 +556,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "AppRTCMobile_test_apk", "test_id_prefix": "ninja://examples:AppRTCMobile_test_apk/" @@ -740,21 +579,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "android_instrumentation_test_apk", "test_id_prefix": "ninja://sdk/android:android_instrumentation_test_apk/" @@ -770,21 +602,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" @@ -800,21 +625,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" @@ -830,21 +648,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": 
"infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" @@ -860,21 +671,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" @@ -890,21 +694,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 2 }, "test": "modules_tests", @@ -921,21 +718,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 6 }, "test": "modules_unittests", @@ -952,21 +742,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "peerconnection_unittests", @@ -983,21 +766,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" @@ -1013,21 +789,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": 
"git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "rtc_pc_unittests", "test_id_prefix": "ninja://pc:rtc_pc_unittests/" @@ -1043,21 +812,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "rtc_stats_unittests", "test_id_prefix": "ninja://stats:rtc_stats_unittests/" @@ -1073,21 +835,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 6 }, "test": "rtc_unittests", @@ -1104,21 +859,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" @@ -1134,21 +882,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "svc_tests", @@ -1165,21 +906,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" @@ -1195,21 +929,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": 
"git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" @@ -1225,21 +952,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" @@ -1255,21 +975,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "video_engine_tests", @@ -1286,21 +999,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" @@ -1316,21 +1022,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" @@ -1350,21 +1049,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "webrtc_perf_tests", "test_id_prefix": "ninja://:webrtc_perf_tests/" @@ -1406,21 +1098,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": 
"git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "AppRTCMobile_test_apk", "test_id_prefix": "ninja://examples:AppRTCMobile_test_apk/" @@ -1436,21 +1121,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "android_instrumentation_test_apk", "test_id_prefix": "ninja://sdk/android:android_instrumentation_test_apk/" @@ -1466,21 +1144,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" @@ -1496,21 +1167,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" @@ -1526,21 +1190,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" @@ -1556,21 +1213,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" @@ -1586,21 +1236,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - 
"cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 2 }, "test": "modules_tests", @@ -1617,21 +1260,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 6 }, "test": "modules_unittests", @@ -1648,21 +1284,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "peerconnection_unittests", @@ -1679,21 +1308,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" @@ -1709,21 +1331,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "rtc_pc_unittests", "test_id_prefix": "ninja://pc:rtc_pc_unittests/" @@ -1739,21 +1354,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "rtc_stats_unittests", "test_id_prefix": "ninja://stats:rtc_stats_unittests/" @@ -1769,21 +1377,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": 
"git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 6 }, "test": "rtc_unittests", @@ -1800,21 +1401,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" @@ -1830,21 +1424,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "svc_tests", @@ -1861,21 +1448,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" @@ -1891,21 +1471,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" @@ -1921,21 +1494,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" @@ -1951,21 +1517,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": 
"git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "video_engine_tests", @@ -1982,21 +1541,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" @@ -2012,21 +1564,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" @@ -2046,21 +1591,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "webrtc_perf_tests", "test_id_prefix": "ninja://:webrtc_perf_tests/" @@ -2102,21 +1640,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "peerconnection_unittests", @@ -2137,21 +1668,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "AppRTCMobile_test_apk", "test_id_prefix": "ninja://examples:AppRTCMobile_test_apk/" @@ -2167,21 +1691,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], 
"dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "android_instrumentation_test_apk", "test_id_prefix": "ninja://sdk/android:android_instrumentation_test_apk/" @@ -2197,21 +1714,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" @@ -2227,21 +1737,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" @@ -2257,21 +1760,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" @@ -2287,21 +1783,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" @@ -2317,21 +1806,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 2 }, "test": "modules_tests", @@ -2348,21 +1830,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": 
"git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 6 }, "test": "modules_unittests", @@ -2379,21 +1854,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "peerconnection_unittests", @@ -2410,21 +1878,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" @@ -2440,21 +1901,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "rtc_pc_unittests", "test_id_prefix": "ninja://pc:rtc_pc_unittests/" @@ -2470,21 +1924,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "rtc_stats_unittests", "test_id_prefix": "ninja://stats:rtc_stats_unittests/" @@ -2500,21 +1947,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 6 }, "test": "rtc_unittests", @@ -2531,21 +1971,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - 
"device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" @@ -2561,21 +1994,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "svc_tests", @@ -2592,21 +2018,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" @@ -2622,21 +2041,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" @@ -2652,21 +2064,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" @@ -2682,21 +2087,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "video_engine_tests", @@ -2713,21 +2111,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - 
"device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" @@ -2743,21 +2134,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" @@ -2777,21 +2161,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "webrtc_perf_tests", "test_id_prefix": "ninja://:webrtc_perf_tests/" @@ -2831,10 +2208,6 @@ "fuchsia_rel": { "isolated_scripts": [ { - "args": [ - "--", - "--undefok=use-vulkan" - ], "isolate_name": "audio_decoder_unittests", "merge": { "args": [], @@ -2842,7 +2215,8 @@ }, "name": "audio_decoder_unittests", "resultdb": { - "result_format": "json" + "enable": true, + "has_native_resultdb_integration": true }, "swarming": { "can_use_on_swarming_builders": true, @@ -2856,10 +2230,6 @@ "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" }, { - "args": [ - "--", - "--undefok=use-vulkan" - ], "isolate_name": "common_audio_unittests", "merge": { "args": [], @@ -2867,7 +2237,8 @@ }, "name": "common_audio_unittests", "resultdb": { - "result_format": "json" + "enable": true, + "has_native_resultdb_integration": true }, "swarming": { "can_use_on_swarming_builders": true, @@ -2881,10 +2252,6 @@ "test_id_prefix": "ninja://common_audio:common_audio_unittests/" }, { - "args": [ - "--", - "--undefok=use-vulkan" - ], "isolate_name": "common_video_unittests", "merge": { "args": [], @@ -2892,7 +2259,8 @@ }, "name": "common_video_unittests", "resultdb": { - "result_format": "json" + "enable": true, + "has_native_resultdb_integration": true }, "swarming": { "can_use_on_swarming_builders": true, @@ -2906,10 +2274,6 @@ "test_id_prefix": "ninja://common_video:common_video_unittests/" }, { - "args": [ - "--", - "--undefok=use-vulkan" - ], "isolate_name": "dcsctp_unittests", "merge": { "args": [], @@ -2917,7 +2281,8 @@ }, "name": "dcsctp_unittests", "resultdb": { - "result_format": "json" + "enable": true, + "has_native_resultdb_integration": true }, "swarming": { "can_use_on_swarming_builders": true, @@ -2932,8 +2297,7 @@ }, { "args": [ - "--", - "--undefok=use-vulkan" + "--quick" ], "isolate_name": "low_bandwidth_audio_test", "merge": { @@ -2942,7 +2306,8 @@ }, "name": "low_bandwidth_audio_test", "resultdb": { - "result_format": "json" + "enable": true, + "has_native_resultdb_integration": true }, "swarming": { "can_use_on_swarming_builders": true, @@ -2954,6 +2319,140 @@ ] }, "test_id_prefix": "ninja://audio:low_bandwidth_audio_test/" + }, + { + 
"isolate_name": "rtc_media_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_media_unittests", + "resultdb": { + "enable": true, + "has_native_resultdb_integration": true + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://media:rtc_media_unittests/" + }, + { + "isolate_name": "rtc_pc_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_pc_unittests", + "resultdb": { + "enable": true, + "has_native_resultdb_integration": true + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://pc:rtc_pc_unittests/" + }, + { + "isolate_name": "svc_tests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "svc_tests", + "resultdb": { + "enable": true, + "has_native_resultdb_integration": true + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ], + "shards": 4 + }, + "test_id_prefix": "ninja://pc:svc_tests/" + }, + { + "isolate_name": "system_wrappers_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "system_wrappers_unittests", + "resultdb": { + "enable": true, + "has_native_resultdb_integration": true + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" + }, + { + "isolate_name": "video_engine_tests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "video_engine_tests", + "resultdb": { + "enable": true, + "has_native_resultdb_integration": true + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ], + "shards": 4 + }, + "test_id_prefix": "ninja://:video_engine_tests/" + }, + { + "isolate_name": "voip_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "voip_unittests", + "resultdb": { + "enable": true, + "has_native_resultdb_integration": true + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://:voip_unittests/" } ] }, @@ -2967,11 +2466,11 @@ "iPhone X", "--version", "12.4", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", "${ISOLATED_OUTDIR}", + "--xctest", "--xcode-parallelization" ], "isolate_name": "apprtcmobile_tests", @@ -3015,11 +2514,11 @@ "iPhone X", "--version", "12.4", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "audio_decoder_unittests", "merge": { @@ -3062,11 +2561,11 @@ "iPhone X", "--version", "12.4", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": 
"common_audio_unittests", "merge": { @@ -3109,11 +2608,11 @@ "iPhone X", "--version", "12.4", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "common_video_unittests", "merge": { @@ -3156,11 +2655,11 @@ "iPhone X", "--version", "12.4", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "dcsctp_unittests", "merge": { @@ -3203,11 +2702,11 @@ "iPhone X", "--version", "12.4", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "modules_tests", "merge": { @@ -3251,11 +2750,11 @@ "iPhone X", "--version", "12.4", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "modules_unittests", "merge": { @@ -3278,6 +2777,7 @@ ], "dimension_sets": [ { + "cores": "12", "cpu": "x86-64", "os": "Mac-12" } @@ -3299,11 +2799,11 @@ "iPhone X", "--version", "12.4", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "rtc_media_unittests", "merge": { @@ -3346,11 +2846,11 @@ "iPhone X", "--version", "12.4", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "rtc_pc_unittests", "merge": { @@ -3393,11 +2893,11 @@ "iPhone X", "--version", "12.4", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "rtc_stats_unittests", "merge": { @@ -3440,11 +2940,11 @@ "iPhone X", "--version", "12.4", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "rtc_unittests", "merge": { @@ -3488,11 +2988,11 @@ "iPhone X", "--version", "12.4", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", "${ISOLATED_OUTDIR}", + "--xctest", "--xcode-parallelization" ], "isolate_name": "sdk_framework_unittests", @@ -3536,11 +3036,11 @@ "iPhone X", "--version", "12.4", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", "${ISOLATED_OUTDIR}", + "--xctest", "--xcode-parallelization" ], "isolate_name": "sdk_unittests", @@ -3584,11 +3084,11 @@ "iPhone X", "--version", "12.4", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "svc_tests", "merge": { @@ -3633,11 +3133,11 @@ "iPhone X", "--version", "12.4", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "system_wrappers_unittests", "merge": { @@ -3680,11 +3180,11 @@ "iPhone X", "--version", "12.4", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", 
"13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "test_support_unittests", "merge": { @@ -3727,11 +3227,11 @@ "iPhone X", "--version", "12.4", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "tools_unittests", "merge": { @@ -3774,11 +3274,11 @@ "iPhone X", "--version", "12.4", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "video_capture_tests", "merge": { @@ -3821,11 +3321,11 @@ "iPhone X", "--version", "12.4", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "video_engine_tests", "merge": { @@ -3869,11 +3369,11 @@ "iPhone X", "--version", "12.4", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "voip_unittests", "merge": { @@ -3916,11 +3416,11 @@ "iPhone X", "--version", "12.4", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "webrtc_nonparallel_tests", "merge": { @@ -3967,11 +3467,11 @@ "iPhone X", "--version", "13.6", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", "${ISOLATED_OUTDIR}", + "--xctest", "--xcode-parallelization" ], "isolate_name": "apprtcmobile_tests", @@ -4015,11 +3515,11 @@ "iPhone X", "--version", "13.6", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "audio_decoder_unittests", "merge": { @@ -4062,11 +3562,11 @@ "iPhone X", "--version", "13.6", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "common_audio_unittests", "merge": { @@ -4109,11 +3609,11 @@ "iPhone X", "--version", "13.6", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "common_video_unittests", "merge": { @@ -4156,11 +3656,11 @@ "iPhone X", "--version", "13.6", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "dcsctp_unittests", "merge": { @@ -4203,11 +3703,11 @@ "iPhone X", "--version", "13.6", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "modules_tests", "merge": { @@ -4251,11 +3751,11 @@ "iPhone X", "--version", "13.6", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "modules_unittests", "merge": { @@ -4278,6 +3778,7 @@ ], "dimension_sets": [ { + "cores": "12", "cpu": "x86-64", "os": "Mac-12" } @@ -4299,11 +3800,11 
@@ "iPhone X", "--version", "13.6", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "rtc_media_unittests", "merge": { @@ -4346,11 +3847,11 @@ "iPhone X", "--version", "13.6", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "rtc_pc_unittests", "merge": { @@ -4393,11 +3894,11 @@ "iPhone X", "--version", "13.6", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "rtc_stats_unittests", "merge": { @@ -4440,11 +3941,11 @@ "iPhone X", "--version", "13.6", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "rtc_unittests", "merge": { @@ -4488,11 +3989,11 @@ "iPhone X", "--version", "13.6", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", "${ISOLATED_OUTDIR}", + "--xctest", "--xcode-parallelization" ], "isolate_name": "sdk_framework_unittests", @@ -4536,11 +4037,11 @@ "iPhone X", "--version", "13.6", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", "${ISOLATED_OUTDIR}", + "--xctest", "--xcode-parallelization" ], "isolate_name": "sdk_unittests", @@ -4584,11 +4085,11 @@ "iPhone X", "--version", "13.6", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "svc_tests", "merge": { @@ -4633,11 +4134,11 @@ "iPhone X", "--version", "13.6", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "system_wrappers_unittests", "merge": { @@ -4680,11 +4181,11 @@ "iPhone X", "--version", "13.6", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "test_support_unittests", "merge": { @@ -4727,11 +4228,11 @@ "iPhone X", "--version", "13.6", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "tools_unittests", "merge": { @@ -4774,11 +4275,11 @@ "iPhone X", "--version", "13.6", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "video_capture_tests", "merge": { @@ -4821,11 +4322,11 @@ "iPhone X", "--version", "13.6", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "video_engine_tests", "merge": { @@ -4869,11 +4370,11 @@ "iPhone X", "--version", "13.6", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "voip_unittests", "merge": { @@ -4916,11 +4417,11 @@ 
"iPhone X", "--version", "13.6", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "webrtc_nonparallel_tests", "merge": { @@ -4967,11 +4468,11 @@ "iPhone X", "--version", "14.5", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", "${ISOLATED_OUTDIR}", + "--xctest", "--xcode-parallelization" ], "isolate_name": "apprtcmobile_tests", @@ -5015,11 +4516,11 @@ "iPhone X", "--version", "14.5", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "audio_decoder_unittests", "merge": { @@ -5062,11 +4563,11 @@ "iPhone X", "--version", "14.5", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "common_audio_unittests", "merge": { @@ -5109,11 +4610,11 @@ "iPhone X", "--version", "14.5", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "common_video_unittests", "merge": { @@ -5156,11 +4657,11 @@ "iPhone X", "--version", "14.5", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "dcsctp_unittests", "merge": { @@ -5203,11 +4704,11 @@ "iPhone X", "--version", "14.5", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "modules_tests", "merge": { @@ -5251,11 +4752,11 @@ "iPhone X", "--version", "14.5", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "modules_unittests", "merge": { @@ -5278,6 +4779,7 @@ ], "dimension_sets": [ { + "cores": "12", "cpu": "x86-64", "os": "Mac-12" } @@ -5299,11 +4801,11 @@ "iPhone X", "--version", "14.5", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "rtc_media_unittests", "merge": { @@ -5346,11 +4848,11 @@ "iPhone X", "--version", "14.5", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "rtc_pc_unittests", "merge": { @@ -5393,11 +4895,11 @@ "iPhone X", "--version", "14.5", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "rtc_stats_unittests", "merge": { @@ -5440,11 +4942,11 @@ "iPhone X", "--version", "14.5", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "rtc_unittests", "merge": { @@ -5488,11 +4990,11 @@ "iPhone X", "--version", "14.5", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", 
"${ISOLATED_OUTDIR}", + "--xctest", "--xcode-parallelization" ], "isolate_name": "sdk_framework_unittests", @@ -5536,11 +5038,11 @@ "iPhone X", "--version", "14.5", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", "${ISOLATED_OUTDIR}", + "--xctest", "--xcode-parallelization" ], "isolate_name": "sdk_unittests", @@ -5584,11 +5086,11 @@ "iPhone X", "--version", "14.5", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "svc_tests", "merge": { @@ -5633,11 +5135,11 @@ "iPhone X", "--version", "14.5", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "system_wrappers_unittests", "merge": { @@ -5680,11 +5182,11 @@ "iPhone X", "--version", "14.5", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "test_support_unittests", "merge": { @@ -5727,11 +5229,11 @@ "iPhone X", "--version", "14.5", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "tools_unittests", "merge": { @@ -5774,11 +5276,11 @@ "iPhone X", "--version", "14.5", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "video_capture_tests", "merge": { @@ -5821,11 +5323,11 @@ "iPhone X", "--version", "14.5", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "video_engine_tests", "merge": { @@ -5869,11 +5371,11 @@ "iPhone X", "--version", "14.5", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "voip_unittests", "merge": { @@ -5916,11 +5418,11 @@ "iPhone X", "--version", "14.5", - "--undefok=write-compiled-tests-json-to-writable-path", "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "webrtc_nonparallel_tests", "merge": { @@ -6419,6 +5921,532 @@ "linux_compile_arm_rel": {}, "linux_compile_dbg": {}, "linux_compile_rel": {}, + "linux_coverage": { + "isolated_scripts": [ + { + "isolate_name": "audio_decoder_unittests", + "isolate_profile_data": true, + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "audio_decoder_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" + }, + { + "isolate_name": "common_audio_unittests", + "isolate_profile_data": true, + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "common_audio_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + 
{ + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://common_audio:common_audio_unittests/" + }, + { + "isolate_name": "common_video_unittests", + "isolate_profile_data": true, + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "common_video_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://common_video:common_video_unittests/" + }, + { + "isolate_name": "dcsctp_unittests", + "isolate_profile_data": true, + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "dcsctp_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" + }, + { + "args": [ + "--quick" + ], + "isolate_name": "low_bandwidth_audio_test", + "isolate_profile_data": true, + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "low_bandwidth_audio_test", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://audio:low_bandwidth_audio_test/" + }, + { + "isolate_name": "modules_tests", + "isolate_profile_data": true, + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "modules_tests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ], + "shards": 2 + }, + "test_id_prefix": "ninja://modules:modules_tests/" + }, + { + "isolate_name": "modules_unittests", + "isolate_profile_data": true, + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "modules_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ], + "shards": 6 + }, + "test_id_prefix": "ninja://modules:modules_unittests/" + }, + { + "isolate_name": "peerconnection_unittests", + "isolate_profile_data": true, + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "peerconnection_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ], + "shards": 4 + }, + "test_id_prefix": "ninja://pc:peerconnection_unittests/" + }, + { + "isolate_name": "rtc_media_unittests", + "isolate_profile_data": true, + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_media_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://media:rtc_media_unittests/" + }, + { + "isolate_name": "rtc_pc_unittests", + "isolate_profile_data": true, + "merge": { + "args": [], + "script": 
"//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_pc_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://pc:rtc_pc_unittests/" + }, + { + "isolate_name": "rtc_stats_unittests", + "isolate_profile_data": true, + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_stats_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://stats:rtc_stats_unittests/" + }, + { + "isolate_name": "rtc_unittests", + "isolate_profile_data": true, + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ], + "shards": 6 + }, + "test_id_prefix": "ninja://:rtc_unittests/" + }, + { + "isolate_name": "shared_screencast_stream_test", + "isolate_profile_data": true, + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "shared_screencast_stream_test", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://modules/desktop_capture:shared_screencast_stream_test/" + }, + { + "isolate_name": "slow_peer_connection_unittests", + "isolate_profile_data": true, + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "slow_peer_connection_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" + }, + { + "isolate_name": "svc_tests", + "isolate_profile_data": true, + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "svc_tests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ], + "shards": 4 + }, + "test_id_prefix": "ninja://pc:svc_tests/" + }, + { + "isolate_name": "system_wrappers_unittests", + "isolate_profile_data": true, + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "system_wrappers_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" + }, + { + "isolate_name": "test_support_unittests", + "isolate_profile_data": true, + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "test_support_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": 
"Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://test:test_support_unittests/" + }, + { + "isolate_name": "tools_unittests", + "isolate_profile_data": true, + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "tools_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://rtc_tools:tools_unittests/" + }, + { + "isolate_name": "video_capture_tests", + "isolate_profile_data": true, + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "video_capture_tests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04", + "pool": "WebRTC-baremetal-try" + } + ] + }, + "test_id_prefix": "ninja://modules/video_capture:video_capture_tests/" + }, + { + "isolate_name": "video_engine_tests", + "isolate_profile_data": true, + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "video_engine_tests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ], + "shards": 4 + }, + "test_id_prefix": "ninja://:video_engine_tests/" + }, + { + "isolate_name": "voip_unittests", + "isolate_profile_data": true, + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "voip_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://:voip_unittests/" + }, + { + "isolate_name": "webrtc_nonparallel_tests", + "isolate_profile_data": true, + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "webrtc_nonparallel_tests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" + }, + { + "args": [ + "--force_fieldtrials=WebRTC-QuickPerfTest/Enabled/", + "--nologs", + "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json" + ], + "isolate_name": "webrtc_perf_tests", + "isolate_profile_data": true, + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "webrtc_perf_tests", + "resultdb": { + "result_file": "${ISOLATED_OUTDIR}/gtest_output.json", + "result_format": "gtest_json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://:webrtc_perf_tests/" + } + ] + }, "linux_dbg": { "isolated_scripts": [ { @@ -13623,439 +13651,6 @@ } ] }, - "win_x64_clang_dbg_win10": { - "isolated_scripts": [ - { - "isolate_name": "audio_decoder_unittests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "audio_decoder_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": 
"x86-64", - "os": "Windows-10-19042" - } - ] - }, - "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" - }, - { - "isolate_name": "common_audio_unittests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "common_audio_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10-19042" - } - ] - }, - "test_id_prefix": "ninja://common_audio:common_audio_unittests/" - }, - { - "isolate_name": "common_video_unittests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "common_video_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10-19042" - } - ] - }, - "test_id_prefix": "ninja://common_video:common_video_unittests/" - }, - { - "isolate_name": "dcsctp_unittests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "dcsctp_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10-19042" - } - ] - }, - "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" - }, - { - "args": [ - "--quick" - ], - "isolate_name": "low_bandwidth_audio_test", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "low_bandwidth_audio_test", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10-19042" - } - ] - }, - "test_id_prefix": "ninja://audio:low_bandwidth_audio_test/" - }, - { - "isolate_name": "modules_tests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "modules_tests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10-19042" - } - ], - "shards": 2 - }, - "test_id_prefix": "ninja://modules:modules_tests/" - }, - { - "isolate_name": "modules_unittests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "modules_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10-19042" - } - ], - "shards": 6 - }, - "test_id_prefix": "ninja://modules:modules_unittests/" - }, - { - "isolate_name": "peerconnection_unittests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "peerconnection_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10-19042" - } - ], - "shards": 4 - }, - "test_id_prefix": "ninja://pc:peerconnection_unittests/" - }, - { - "isolate_name": "rtc_media_unittests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "rtc_media_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - 
"dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10-19042" - } - ] - }, - "test_id_prefix": "ninja://media:rtc_media_unittests/" - }, - { - "isolate_name": "rtc_pc_unittests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "rtc_pc_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10-19042" - } - ] - }, - "test_id_prefix": "ninja://pc:rtc_pc_unittests/" - }, - { - "isolate_name": "rtc_stats_unittests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "rtc_stats_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10-19042" - } - ] - }, - "test_id_prefix": "ninja://stats:rtc_stats_unittests/" - }, - { - "isolate_name": "rtc_unittests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "rtc_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10-19042" - } - ], - "shards": 6 - }, - "test_id_prefix": "ninja://:rtc_unittests/" - }, - { - "isolate_name": "slow_peer_connection_unittests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "slow_peer_connection_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10-19042" - } - ] - }, - "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" - }, - { - "isolate_name": "svc_tests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "svc_tests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10-19042" - } - ], - "shards": 4 - }, - "test_id_prefix": "ninja://pc:svc_tests/" - }, - { - "isolate_name": "system_wrappers_unittests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "system_wrappers_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10-19042" - } - ] - }, - "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" - }, - { - "isolate_name": "test_support_unittests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "test_support_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10-19042" - } - ] - }, - "test_id_prefix": "ninja://test:test_support_unittests/" - }, - { - "isolate_name": "tools_unittests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "tools_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10-19042" - } - ] 
- }, - "test_id_prefix": "ninja://rtc_tools:tools_unittests/" - }, - { - "isolate_name": "video_engine_tests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "video_engine_tests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10-19042" - } - ], - "shards": 4 - }, - "test_id_prefix": "ninja://:video_engine_tests/" - }, - { - "isolate_name": "voip_unittests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "voip_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10-19042" - } - ] - }, - "test_id_prefix": "ninja://:voip_unittests/" - }, - { - "isolate_name": "webrtc_nonparallel_tests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "webrtc_nonparallel_tests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10-19042" - } - ] - }, - "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" - } - ] - }, "win_x64_clang_rel": { "isolated_scripts": [ { diff --git a/third_party/libwebrtc/infra/specs/waterfalls.pyl b/third_party/libwebrtc/infra/specs/waterfalls.pyl index 2dcaac9262e8..0db19e71e5aa 100644 --- a/third_party/libwebrtc/infra/specs/waterfalls.pyl +++ b/third_party/libwebrtc/infra/specs/waterfalls.pyl @@ -11,20 +11,20 @@ 'name': 'client.webrtc', 'mixins': [], 'machines': { - 'Android32 (M Nexus5X)': { + 'Android32': { 'mixins': [ - 'bullhead', 'marshmallow', 'android-devices', 'logdog-butler', - 'has_native_resultdb_integration' + 'walleye', 'android-devices', 'has_native_resultdb_integration', + 'chromium-tester-service-account' ], 'test_suites': { 'gtest_tests': 'android_tests', 'junit_tests': 'android_junit_tests', }, }, - 'Android32 (M Nexus5X)(dbg)': { + 'Android32 (dbg)': { 'mixins': [ - 'bullhead', 'marshmallow', 'android-devices', 'logdog-butler', - 'has_native_resultdb_integration' + 'walleye', 'android-devices', 'has_native_resultdb_integration', + 'chromium-tester-service-account' ], 'test_suites': { 'gtest_tests': 'android_tests', @@ -33,8 +33,8 @@ }, 'Android32 (more configs)': { 'mixins': [ - 'bullhead', 'marshmallow', 'android-devices', 'logdog-butler', - 'has_native_resultdb_integration' + 'walleye', 'android-devices', 'has_native_resultdb_integration', + 'chromium-tester-service-account' ], 'test_suites': { 'gtest_tests': 'more_configs_tests', @@ -43,20 +43,20 @@ 'Android32 Builder arm': {}, 'Android32 Builder x86': {}, 'Android32 Builder x86 (dbg)': {}, - 'Android64 (M Nexus5X)': { + 'Android64': { 'mixins': [ - 'bullhead', 'marshmallow', 'android-devices', 'logdog-butler', - 'has_native_resultdb_integration' + 'walleye', 'android-devices', 'has_native_resultdb_integration', + 'chromium-tester-service-account' ], 'test_suites': { 'gtest_tests': 'android_tests', 'junit_tests': 'android_junit_tests', }, }, - 'Android64 (M Nexus5X)(dbg)': { + 'Android64 (dbg)': { 'mixins': [ - 'bullhead', 'marshmallow', 'android-devices', 'logdog-butler', - 'has_native_resultdb_integration' + 'walleye', 'android-devices', 'has_native_resultdb_integration', + 'chromium-tester-service-account' ], 'test_suites': { 'gtest_tests': 'android_tests', 
@@ -65,10 +65,10 @@ }, 'Android64 Builder arm64': {}, 'Android64 Builder x64 (dbg)': {}, + 'Fuchsia Builder': {}, 'Fuchsia Release': { 'os_type': 'linux', - 'mixins': - ['linux-bionic', 'x86-64', 'resultdb-json-format', 'fuchsia'], + 'mixins': ['linux-bionic', 'x86-64', 'has_native_resultdb_integration'], 'test_suites': { 'isolated_scripts': 'fuchsia_compatible_tests', }, @@ -198,9 +198,9 @@ 'Win32 Debug (Clang)': {}, 'Win32 Release (Clang)': { 'os_type': 'win', - 'mixins': ['win10-any', 'x86-64', 'resultdb-json-format'], + 'mixins': ['win10', 'x86-64', 'resultdb-json-format'], 'test_suites': { - 'isolated_scripts': 'desktop_tests_with_video_capture', + 'isolated_scripts': 'desktop_tests', }, }, 'Win64 ASan': { @@ -210,15 +210,27 @@ 'isolated_scripts': 'desktop_tests', }, }, - 'Win64 Debug (Clang)': {}, - 'Win64 Release (Clang)': {}, + 'Win64 Debug (Clang)': { + 'os_type': 'win', + 'mixins': ['win10', 'x86-64', 'resultdb-json-format'], + 'test_suites': { + 'isolated_scripts': 'desktop_tests', + }, + }, + 'Win64 Release (Clang)': { + 'os_type': 'win', + 'mixins': ['win10-any', 'x86-64', 'resultdb-json-format'], + 'test_suites': { + 'isolated_scripts': 'desktop_tests_with_video_capture', + }, + }, 'iOS64 Debug': {}, 'iOS64 Release': {}, 'iOS64 Sim Debug (iOS 12)': { 'mixins': [ - 'mac_12_x64', 'chromium-tester-service-account', - 'ios-simulator-12.4', 'xcode_13_main', 'mac_toolchain', - 'has_native_resultdb_integration', 'out_dir_arg' + 'mac_12_x64', 'chromium-tester-service-account', 'ios-simulator-12.4', + 'xcode_13_main', 'mac_toolchain', 'has_native_resultdb_integration', + 'out_dir_arg', 'webrtc-xctest' ], 'test_suites': { 'isolated_scripts': 'ios_simulator_tests', @@ -226,9 +238,9 @@ }, 'iOS64 Sim Debug (iOS 13)': { 'mixins': [ - 'mac_12_x64', 'chromium-tester-service-account', - 'ios-simulator-13.6', 'xcode_13_main', 'mac_toolchain', - 'has_native_resultdb_integration', 'out_dir_arg' + 'mac_12_x64', 'chromium-tester-service-account', 'ios-simulator-13.6', + 'xcode_13_main', 'mac_toolchain', 'has_native_resultdb_integration', + 'out_dir_arg', 'webrtc-xctest' ], 'test_suites': { 'isolated_scripts': 'ios_simulator_tests', @@ -236,9 +248,9 @@ }, 'iOS64 Sim Debug (iOS 14)': { 'mixins': [ - 'mac_12_x64', 'chromium-tester-service-account', - 'ios-simulator-14.5', 'xcode_13_main', 'mac_toolchain', - 'has_native_resultdb_integration', 'out_dir_arg' + 'mac_12_x64', 'chromium-tester-service-account', 'ios-simulator-14.5', + 'xcode_13_main', 'mac_toolchain', 'has_native_resultdb_integration', + 'out_dir_arg', 'webrtc-xctest' ], 'test_suites': { 'isolated_scripts': 'ios_simulator_tests', @@ -253,7 +265,8 @@ 'Perf Android32 (M AOSP Nexus6)': { 'mixins': [ 'shamu', 'marshmallow_generic', 'android-devices', 'perf-pool', - 'perf-output', 'timeout-3h', 'has_native_resultdb_integration' + 'perf-output', 'timeout-3h', 'has_native_resultdb_integration', + 'chromium-tester-service-account' ], 'test_suites': { 'gtest_tests': 'android_perf_tests', @@ -262,7 +275,8 @@ 'Perf Android32 (M Nexus5)': { 'mixins': [ 'hammerhead', 'marshmallow_generic', 'android-devices', 'perf-pool', - 'perf-output', 'timeout-3h', 'has_native_resultdb_integration' + 'perf-output', 'timeout-3h', 'has_native_resultdb_integration', + 'chromium-tester-service-account' ], 'test_suites': { 'gtest_tests': 'android_perf_tests', @@ -271,7 +285,8 @@ 'Perf Android32 (O Pixel2)': { 'mixins': [ 'walleye', 'android-devices', 'perf-pool', 'timeout-3h', - 'perf-output', 'has_native_resultdb_integration' + 'perf-output', 
'has_native_resultdb_integration', + 'chromium-tester-service-account' ], 'test_suites': { 'gtest_tests': 'android_perf_tests', @@ -279,8 +294,8 @@ }, 'Perf Android32 (R Pixel5)': { 'mixins': [ - 'redfin', 'android-devices', 'perf-pool', 'timeout-3h', - 'perf-output', 'has_native_resultdb_integration' + 'redfin', 'android-devices', 'perf-pool', 'timeout-3h', 'perf-output', + 'has_native_resultdb_integration', 'chromium-tester-service-account' ], 'test_suites': { 'gtest_tests': 'android_perf_tests', @@ -289,7 +304,8 @@ 'Perf Android64 (M Nexus5X)': { 'mixins': [ 'bullhead', 'marshmallow', 'android-devices', 'perf-pool', - 'perf-output', 'timeout-3h', 'has_native_resultdb_integration' + 'perf-output', 'timeout-3h', 'has_native_resultdb_integration', + 'chromium-tester-service-account' ], 'test_suites': { 'gtest_tests': 'android_perf_tests', @@ -298,7 +314,8 @@ 'Perf Android64 (O Pixel2)': { 'mixins': [ 'walleye', 'android-devices', 'perf-pool', 'timeout-3h', - 'perf-output', 'has_native_resultdb_integration' + 'perf-output', 'has_native_resultdb_integration', + 'chromium-tester-service-account' ], 'test_suites': { 'gtest_tests': 'android_perf_tests', @@ -306,13 +323,24 @@ }, 'Perf Android64 (R Pixel5)': { 'mixins': [ - 'redfin', 'android-devices', 'perf-pool', 'timeout-3h', - 'perf-output', 'has_native_resultdb_integration' + 'redfin', 'android-devices', 'perf-pool', 'timeout-3h', 'perf-output', + 'has_native_resultdb_integration', 'chromium-tester-service-account' ], 'test_suites': { 'gtest_tests': 'android_perf_tests', }, }, + 'Perf Fuchsia': { + 'os_type': + 'linux', + 'mixins': [ + 'linux-bionic', 'x86-64', 'perf-pool-vm', 'timeout-3h', + 'has_native_resultdb_integration', 'chromium-tester-service-account' + ], + 'test_suites': { + 'isolated_scripts': 'fuchsia_compatible_perf_tests', + } + }, 'Perf Linux Bionic': { 'os_type': 'linux', @@ -346,7 +374,7 @@ 'isolated_scripts': 'webrtc_perf_tests', }, }, - 'Perf Win7': { + 'Perf Win 10': { 'os_type': 'win', 'mixins': @@ -402,8 +430,8 @@ 'machines': { 'android_arm64_dbg': { 'mixins': [ - 'bullhead', 'marshmallow', 'android-devices', 'logdog-butler', - 'has_native_resultdb_integration' + 'walleye', 'android-devices', 'has_native_resultdb_integration', + 'chromium-tester-service-account' ], 'test_suites': { 'gtest_tests': 'android_tests_tryserver', @@ -412,8 +440,8 @@ }, 'android_arm64_rel': { 'mixins': [ - 'bullhead', 'marshmallow', 'android-devices', 'logdog-butler', - 'has_native_resultdb_integration' + 'walleye', 'android-devices', 'has_native_resultdb_integration', + 'chromium-tester-service-account' ], 'test_suites': { 'gtest_tests': 'android_tests_tryserver', @@ -422,8 +450,8 @@ }, 'android_arm_dbg': { 'mixins': [ - 'bullhead', 'marshmallow', 'android-devices', 'logdog-butler', - 'has_native_resultdb_integration' + 'walleye', 'android-devices', 'has_native_resultdb_integration', + 'chromium-tester-service-account' ], 'test_suites': { 'gtest_tests': 'android_tests_tryserver', @@ -432,8 +460,8 @@ }, 'android_arm_more_configs': { 'mixins': [ - 'bullhead', 'marshmallow', 'android-devices', 'logdog-butler', - 'has_native_resultdb_integration' + 'walleye', 'android-devices', 'has_native_resultdb_integration', + 'chromium-tester-service-account' ], 'test_suites': { 'gtest_tests': 'more_configs_tests', @@ -441,8 +469,8 @@ }, 'android_arm_rel': { 'mixins': [ - 'bullhead', 'marshmallow', 'android-devices', 'logdog-butler', - 'has_native_resultdb_integration' + 'walleye', 'android-devices', 'has_native_resultdb_integration', + 
'chromium-tester-service-account' ], 'test_suites': { 'gtest_tests': 'android_tests_tryserver', @@ -459,8 +487,7 @@ 'android_compile_x86_rel': {}, 'fuchsia_rel': { 'os_type': 'linux', - 'mixins': - ['linux-bionic', 'x86-64', 'resultdb-json-format', 'fuchsia'], + 'mixins': ['linux-bionic', 'x86-64', 'has_native_resultdb_integration'], 'test_suites': { 'isolated_scripts': 'fuchsia_compatible_tests', }, @@ -469,9 +496,9 @@ 'ios_compile_arm64_rel': {}, 'ios_sim_x64_dbg_ios12': { 'mixins': [ - 'mac_12_x64', 'chromium-tester-service-account', - 'ios-simulator-12.4', 'xcode_13_main', 'mac_toolchain', - 'has_native_resultdb_integration', 'out_dir_arg' + 'mac_12_x64', 'chromium-tester-service-account', 'ios-simulator-12.4', + 'xcode_13_main', 'mac_toolchain', 'has_native_resultdb_integration', + 'out_dir_arg', 'webrtc-xctest' ], 'test_suites': { 'isolated_scripts': 'ios_simulator_tests', @@ -479,9 +506,9 @@ }, 'ios_sim_x64_dbg_ios13': { 'mixins': [ - 'mac_12_x64', 'chromium-tester-service-account', - 'ios-simulator-13.6', 'xcode_13_main', 'mac_toolchain', - 'has_native_resultdb_integration', 'out_dir_arg' + 'mac_12_x64', 'chromium-tester-service-account', 'ios-simulator-13.6', + 'xcode_13_main', 'mac_toolchain', 'has_native_resultdb_integration', + 'out_dir_arg', 'webrtc-xctest' ], 'test_suites': { 'isolated_scripts': 'ios_simulator_tests', @@ -489,9 +516,9 @@ }, 'ios_sim_x64_dbg_ios14': { 'mixins': [ - 'mac_12_x64', 'chromium-tester-service-account', - 'ios-simulator-14.5', 'xcode_13_main', 'mac_toolchain', - 'has_native_resultdb_integration', 'out_dir_arg' + 'mac_12_x64', 'chromium-tester-service-account', 'ios-simulator-14.5', + 'xcode_13_main', 'mac_toolchain', 'has_native_resultdb_integration', + 'out_dir_arg', 'webrtc-xctest' ], 'test_suites': { 'isolated_scripts': 'ios_simulator_tests', @@ -510,6 +537,17 @@ 'linux_compile_arm_rel': {}, 'linux_compile_dbg': {}, 'linux_compile_rel': {}, + 'linux_coverage': { + 'os_type': + 'linux', + 'mixins': [ + 'linux-bionic', 'x86-64', 'resultdb-json-format', + 'isolate_profile_data' + ], + 'test_suites': { + 'isolated_scripts': 'linux_desktop_tests_tryserver', + }, + }, 'linux_dbg': { 'os_type': 'linux', 'mixins': ['linux-bionic', 'x86-64', 'resultdb-json-format'], @@ -644,13 +682,6 @@ 'isolated_scripts': 'desktop_tests', }, }, - 'win_x64_clang_dbg_win10': { - 'os_type': 'win', - 'mixins': ['win10', 'x86-64', 'resultdb-json-format'], - 'test_suites': { - 'isolated_scripts': 'desktop_tests', - }, - }, 'win_x64_clang_rel': { 'os_type': 'win', 'mixins': ['win10', 'x86-64', 'resultdb-json-format'], diff --git a/third_party/libwebrtc/media/BUILD.gn b/third_party/libwebrtc/media/BUILD.gn index 1d6d274c296d..02f5aa8a3ae9 100644 --- a/third_party/libwebrtc/media/BUILD.gn +++ b/third_party/libwebrtc/media/BUILD.gn @@ -57,13 +57,16 @@ rtc_library("rtc_media_base") { "../api:media_stream_interface", "../api:rtc_error", "../api:rtp_parameters", + "../api:rtp_sender_setparameters_callback", "../api:scoped_refptr", "../api:sequence_checker", + "../api:transport_api", "../api/audio:audio_frame_processor", "../api/audio_codecs:audio_codecs_api", "../api/crypto:frame_decryptor_interface", "../api/crypto:frame_encryptor_interface", "../api/crypto:options", + "../api/task_queue", "../api/task_queue:pending_task_safety_flag", "../api/transport:datagram_transport_interface", "../api/transport:stun_types", @@ -73,6 +76,7 @@ rtc_library("rtc_media_base") { "../api/video:video_bitrate_allocator_factory", "../api/video:video_frame", "../api/video:video_rtp_headers", + 
"../api/video_codecs:scalability_mode", "../api/video_codecs:video_codecs_api", "../call:call_interfaces", "../call:video_stream_api", @@ -92,6 +96,7 @@ rtc_library("rtc_media_base") { "../rtc_base:socket", "../rtc_base:stringutils", "../rtc_base:timeutils", + "../rtc_base/network:sent_packet", "../rtc_base/synchronization:mutex", "../rtc_base/system:file_wrapper", "../rtc_base/system:no_unique_address", @@ -103,6 +108,7 @@ rtc_library("rtc_media_base") { absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/container:inlined_vector", + "//third_party/abseil-cpp/absl/functional:any_invocable", "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] @@ -113,8 +119,9 @@ rtc_library("rtc_media_base") { "base/codec.cc", "base/codec.h", "base/delayable.h", - "base/media_channel.cc", "base/media_channel.h", + "base/media_channel_impl.cc", + "base/media_channel_impl.h", "base/media_constants.cc", "base/media_constants.h", "base/media_engine.cc", @@ -142,8 +149,9 @@ rtc_library("rtc_media_base") { "base/adapted_video_track_source.h", "base/audio_source.h", "base/delayable.h", - "base/media_channel.cc", "base/media_channel.h", + "base/media_channel_impl.cc", + "base/media_channel_impl.h", "base/media_engine.cc", "base/media_engine.h", "base/rid_description.cc", @@ -652,6 +660,7 @@ if (rtc_include_tests) { "../call:call_interfaces", "../common_video", "../modules/audio_device:mock_audio_device", + "../modules/audio_mixer:audio_mixer_impl", "../modules/audio_processing", "../modules/audio_processing:api", "../modules/audio_processing:mocks", diff --git a/third_party/libwebrtc/media/base/fake_media_engine.h b/third_party/libwebrtc/media/base/fake_media_engine.h index ece77e51747c..a03a8a66464c 100644 --- a/third_party/libwebrtc/media/base/fake_media_engine.h +++ b/third_party/libwebrtc/media/base/fake_media_engine.h @@ -30,6 +30,7 @@ #include "modules/audio_processing/include/audio_processing.h" #include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/network_route.h" +#include "rtc_base/thread.h" using webrtc::RtpExtension; @@ -149,20 +150,25 @@ class RtpHelper : public Base { } virtual webrtc::RTCError SetRtpSendParameters( uint32_t ssrc, - const webrtc::RtpParameters& parameters) { + const webrtc::RtpParameters& parameters, + webrtc::SetParametersCallback callback) { auto parameters_iterator = rtp_send_parameters_.find(ssrc); if (parameters_iterator != rtp_send_parameters_.end()) { auto result = CheckRtpParametersInvalidModificationAndValues( parameters_iterator->second, parameters); - if (!result.ok()) - return result; + if (!result.ok()) { + return webrtc::InvokeSetParametersCallback(callback, result); + } parameters_iterator->second = parameters; - return webrtc::RTCError::OK(); + + return webrtc::InvokeSetParametersCallback(callback, + webrtc::RTCError::OK()); } // Replicate the behavior of the real media channel: return false // when setting parameters for unknown SSRCs. 
- return webrtc::RTCError(webrtc::RTCErrorType::INTERNAL_ERROR); + return InvokeSetParametersCallback( + callback, webrtc::RTCError(webrtc::RTCErrorType::INTERNAL_ERROR)); } virtual webrtc::RtpParameters GetRtpReceiveParameters(uint32_t ssrc) const { @@ -232,6 +238,24 @@ class RtpHelper : public Base { rtcp_packets_.push_back(std::string(packet->cdata(), packet->size())); } + // Stuff that deals with encryptors, transformers and the like + void SetFrameEncryptor(uint32_t ssrc, + rtc::scoped_refptr + frame_encryptor) override {} + void SetEncoderToPacketizerFrameTransformer( + uint32_t ssrc, + rtc::scoped_refptr frame_transformer) + override {} + + void SetFrameDecryptor(uint32_t ssrc, + rtc::scoped_refptr + frame_decryptor) override {} + + void SetDepacketizerToDecoderFrameTransformer( + uint32_t ssrc, + rtc::scoped_refptr frame_transformer) + override {} + protected: bool MuteStream(uint32_t ssrc, bool mute) { if (!HasSendStream(ssrc) && ssrc != 0) { diff --git a/third_party/libwebrtc/media/base/fake_network_interface.h b/third_party/libwebrtc/media/base/fake_network_interface.h index 099b7cad381a..53c55639352c 100644 --- a/third_party/libwebrtc/media/base/fake_network_interface.h +++ b/third_party/libwebrtc/media/base/fake_network_interface.h @@ -31,7 +31,7 @@ namespace cricket { // Fake NetworkInterface that sends/receives RTP/RTCP packets. -class FakeNetworkInterface : public MediaChannel::NetworkInterface { +class FakeNetworkInterface : public MediaChannelNetworkInterface { public: FakeNetworkInterface() : thread_(rtc::Thread::Current()), diff --git a/third_party/libwebrtc/media/base/media_channel.h b/third_party/libwebrtc/media/base/media_channel.h index 5f1d5455036f..2bccde104ad0 100644 --- a/third_party/libwebrtc/media/base/media_channel.h +++ b/third_party/libwebrtc/media/base/media_channel.h @@ -26,6 +26,7 @@ #include "api/media_stream_interface.h" #include "api/rtc_error.h" #include "api/rtp_parameters.h" +#include "api/rtp_sender_interface.h" #include "api/task_queue/pending_task_safety_flag.h" #include "api/transport/data_channel_transport_interface.h" #include "api/transport/rtp/rtp_source.h" @@ -34,6 +35,7 @@ #include "api/video/video_sink_interface.h" #include "api/video/video_source_interface.h" #include "api/video/video_timing.h" +#include "api/video_codecs/scalability_mode.h" #include "api/video_codecs/video_encoder_factory.h" #include "call/video_receive_stream.h" #include "common_video/include/quality_limitation_reason.h" @@ -69,6 +71,10 @@ class AudioSource; class VideoCapturer; struct RtpHeader; struct VideoFormat; +class VideoMediaSendChannelInterface; +class VideoMediaReceiveChannelInterface; +class VoiceMediaSendChannelInterface; +class VoiceMediaReceiveChannelInterface; const int kScreencastDefaultFps = 5; @@ -157,29 +163,32 @@ struct VideoOptions { } }; -class MediaChannel { +class MediaChannelNetworkInterface { public: - class NetworkInterface { - public: - enum SocketType { ST_RTP, ST_RTCP }; - virtual bool SendPacket(rtc::CopyOnWriteBuffer* packet, - const rtc::PacketOptions& options) = 0; - virtual bool SendRtcp(rtc::CopyOnWriteBuffer* packet, + enum SocketType { ST_RTP, ST_RTCP }; + virtual bool SendPacket(rtc::CopyOnWriteBuffer* packet, const rtc::PacketOptions& options) = 0; - virtual int SetOption(SocketType type, - rtc::Socket::Option opt, - int option) = 0; - virtual ~NetworkInterface() {} - }; - - explicit MediaChannel(webrtc::TaskQueueBase* network_thread, - bool enable_dscp = false); - virtual ~MediaChannel(); + virtual bool 
SendRtcp(rtc::CopyOnWriteBuffer* packet, + const rtc::PacketOptions& options) = 0; + virtual int SetOption(SocketType type, + rtc::Socket::Option opt, + int option) = 0; + virtual ~MediaChannelNetworkInterface() {} +}; +// Functions shared across all MediaChannel interfaces. +// Because there are implementation types that implement multiple +// interfaces, this is not a base class (no diamond inheritance). +template +class MediaBaseChannelInterface { + public: + virtual ~MediaBaseChannelInterface() = default; virtual cricket::MediaType media_type() const = 0; - // Sets the abstract interface class for sending RTP/RTCP data. - virtual void SetInterface(NetworkInterface* iface); + // Networking functions. We assume that both the send channel and the + // receive channel send RTP packets (RTCP packets in the case of a receive + // channel). + // Called on the network when an RTP packet is received. virtual void OnPacketReceived(rtc::CopyOnWriteBuffer packet, int64_t packet_time_us) = 0; @@ -192,6 +201,29 @@ class MediaChannel { virtual void OnNetworkRouteChanged( absl::string_view transport_name, const rtc::NetworkRoute& network_route) = 0; + + // Corresponds to the SDP attribute extmap-allow-mixed, see RFC8285. + // Set to true if it's allowed to mix one- and two-byte RTP header extensions + // in the same stream. The setter and getter must only be called from + // worker_thread. + virtual void SetExtmapAllowMixed(bool extmap_allow_mixed) = 0; + virtual bool ExtmapAllowMixed() const = 0; +}; + +class MediaSendChannelInterface + : public MediaBaseChannelInterface { + public: + virtual ~MediaSendChannelInterface() = default; + + virtual VideoMediaSendChannelInterface* AsVideoSendChannel() { + RTC_CHECK_NOTREACHED(); + return nullptr; + } + virtual VoiceMediaSendChannelInterface* AsVoiceSendChannel() { + RTC_CHECK_NOTREACHED(); + return nullptr; + } + // Creates a new outgoing media stream with SSRCs and CNAME as described // by sp. virtual bool AddSendStream(const StreamParams& sp) = 0; @@ -200,6 +232,46 @@ class MediaChannel { // multiple SSRCs. In the case of an ssrc of 0, the possibly cached // StreamParams is removed. virtual bool RemoveSendStream(uint32_t ssrc) = 0; + // Set the frame encryptor to use on all outgoing frames. This is optional. + // This pointers lifetime is managed by the set of RtpSender it is attached + // to. + virtual void SetFrameEncryptor( + uint32_t ssrc, + rtc::scoped_refptr frame_encryptor) = 0; + + virtual webrtc::RTCError SetRtpSendParameters( + uint32_t ssrc, + const webrtc::RtpParameters& parameters, + webrtc::SetParametersCallback callback = nullptr) = 0; + + virtual void SetEncoderToPacketizerFrameTransformer( + uint32_t ssrc, + rtc::scoped_refptr + frame_transformer) = 0; + + // note: The encoder_selector object must remain valid for the lifetime of the + // MediaChannel, unless replaced. 
+ virtual void SetEncoderSelector( + uint32_t ssrc, + webrtc::VideoEncoderFactory::EncoderSelectorInterface* encoder_selector) { + } + virtual webrtc::RtpParameters GetRtpSendParameters(uint32_t ssrc) const = 0; +}; + +class MediaReceiveChannelInterface + : public MediaBaseChannelInterface, + public Delayable { + public: + virtual ~MediaReceiveChannelInterface() = default; + + virtual VideoMediaReceiveChannelInterface* AsVideoReceiveChannel() { + RTC_CHECK_NOTREACHED(); + return nullptr; + } + virtual VoiceMediaReceiveChannelInterface* AsVoiceReceiveChannel() { + RTC_CHECK_NOTREACHED(); + return nullptr; + } // Creates a new incoming media stream with SSRCs, CNAME as described // by sp. In the case of a sp without SSRCs, the unsignaled sp is cached // to be used later for unsignaled streams received. @@ -225,107 +297,17 @@ class MediaChannel { // new unsignalled ssrcs. virtual void OnDemuxerCriteriaUpdatePending() = 0; virtual void OnDemuxerCriteriaUpdateComplete() = 0; - // Returns the absoulte sendtime extension id value from media channel. - virtual int GetRtpSendTimeExtnId() const; - // Set the frame encryptor to use on all outgoing frames. This is optional. - // This pointers lifetime is managed by the set of RtpSender it is attached - // to. - // TODO(benwright) make pure virtual once internal supports it. - virtual void SetFrameEncryptor( - uint32_t ssrc, - rtc::scoped_refptr frame_encryptor); // Set the frame decryptor to use on all incoming frames. This is optional. // This pointers lifetimes is managed by the set of RtpReceivers it is // attached to. - // TODO(benwright) make pure virtual once internal supports it. virtual void SetFrameDecryptor( uint32_t ssrc, - rtc::scoped_refptr frame_decryptor); + rtc::scoped_refptr frame_decryptor) = 0; - // Enable network condition based codec switching. - virtual void SetVideoCodecSwitchingEnabled(bool enabled); - - // note: The encoder_selector object must remain valid for the lifetime of the - // MediaChannel, unless replaced. - virtual void SetEncoderSelector( - uint32_t ssrc, - webrtc::VideoEncoderFactory::EncoderSelectorInterface* encoder_selector) { - } - - // Base method to send packet using NetworkInterface. - bool SendPacket(rtc::CopyOnWriteBuffer* packet, - const rtc::PacketOptions& options); - - bool SendRtcp(rtc::CopyOnWriteBuffer* packet, - const rtc::PacketOptions& options); - - int SetOption(NetworkInterface::SocketType type, - rtc::Socket::Option opt, - int option); - - // Corresponds to the SDP attribute extmap-allow-mixed, see RFC8285. - // Set to true if it's allowed to mix one- and two-byte RTP header extensions - // in the same stream. The setter and getter must only be called from - // worker_thread. - void SetExtmapAllowMixed(bool extmap_allow_mixed); - bool ExtmapAllowMixed() const; - - // Returns `true` if a non-null NetworkInterface pointer is held. - // Must be called on the network thread. 
- bool HasNetworkInterface() const; - - virtual webrtc::RtpParameters GetRtpSendParameters(uint32_t ssrc) const = 0; - virtual webrtc::RTCError SetRtpSendParameters( - uint32_t ssrc, - const webrtc::RtpParameters& parameters) = 0; - - virtual void SetEncoderToPacketizerFrameTransformer( - uint32_t ssrc, - rtc::scoped_refptr frame_transformer); virtual void SetDepacketizerToDecoderFrameTransformer( uint32_t ssrc, - rtc::scoped_refptr frame_transformer); - - protected: - int SetOptionLocked(NetworkInterface::SocketType type, - rtc::Socket::Option opt, - int option) RTC_RUN_ON(network_thread_); - - bool DscpEnabled() const; - - // This is the DSCP value used for both RTP and RTCP channels if DSCP is - // enabled. It can be changed at any time via `SetPreferredDscp`. - rtc::DiffServCodePoint PreferredDscp() const; - void SetPreferredDscp(rtc::DiffServCodePoint new_dscp); - - rtc::scoped_refptr network_safety(); - - // Utility implementation for derived classes (video/voice) that applies - // the packet options and passes the data onwards to `SendPacket`. - void SendRtp(const uint8_t* data, - size_t len, - const webrtc::PacketOptions& options); - - void SendRtcp(const uint8_t* data, size_t len); - - private: - // Apply the preferred DSCP setting to the underlying network interface RTP - // and RTCP channels. If DSCP is disabled, then apply the default DSCP value. - void UpdateDscp() RTC_RUN_ON(network_thread_); - - bool DoSendPacket(rtc::CopyOnWriteBuffer* packet, - bool rtcp, - const rtc::PacketOptions& options); - - const bool enable_dscp_; - const rtc::scoped_refptr network_safety_ - RTC_PT_GUARDED_BY(network_thread_); - webrtc::TaskQueueBase* const network_thread_; - NetworkInterface* network_interface_ RTC_GUARDED_BY(network_thread_) = - nullptr; - rtc::DiffServCodePoint preferred_dscp_ RTC_GUARDED_BY(network_thread_) = - rtc::DSCP_DEFAULT; - bool extmap_allow_mixed_ = false; + rtc::scoped_refptr + frame_transformer) = 0; }; // The stats information is structured as follows: @@ -412,6 +394,7 @@ struct MediaSenderInfo { struct MediaReceiverInfo { MediaReceiverInfo(); ~MediaReceiverInfo(); + void add_ssrc(const SsrcReceiverInfo& stat) { local_stats.push_back(stat); } // Temporary utility function for call sites that only provide SSRC. // As more info is added into SsrcSenderInfo, this function should go away. @@ -606,6 +589,7 @@ struct VideoSenderInfo : public MediaSenderInfo { uint32_t aggregated_huge_frames_sent = 0; absl::optional rid; absl::optional power_efficient_encoder; + absl::optional scalability_mode; }; struct VideoReceiverInfo : public MediaReceiverInfo { @@ -803,24 +787,9 @@ struct AudioSendParameters : RtpSendParameters { struct AudioRecvParameters : RtpParameters {}; -class VoiceMediaChannel : public MediaChannel, public Delayable { +class VoiceMediaSendChannelInterface : public MediaSendChannelInterface { public: - VoiceMediaChannel(webrtc::TaskQueueBase* network_thread, - bool enable_dscp = false) - : MediaChannel(network_thread, enable_dscp) {} - ~VoiceMediaChannel() override {} - - cricket::MediaType media_type() const override; virtual bool SetSendParameters(const AudioSendParameters& params) = 0; - virtual bool SetRecvParameters(const AudioRecvParameters& params) = 0; - // Get the receive parameters for the incoming stream identified by `ssrc`. - virtual webrtc::RtpParameters GetRtpReceiveParameters( - uint32_t ssrc) const = 0; - // Retrieve the receive parameters for the default receive - // stream, which is used when SSRCs are not signaled. 
- virtual webrtc::RtpParameters GetDefaultRtpReceiveParameters() const = 0; - // Starts or stops playout of received audio. - virtual void SetPlayout(bool playout) = 0; // Starts or stops sending (and potentially capture) of local audio. virtual void SetSend(bool send) = 0; // Configure stream for sending. @@ -828,10 +797,6 @@ class VoiceMediaChannel : public MediaChannel, public Delayable { bool enable, const AudioOptions* options, AudioSource* source) = 0; - // Set speaker output volume of the specified ssrc. - virtual bool SetOutputVolume(uint32_t ssrc, double volume) = 0; - // Set speaker output volume for future unsignaled streams. - virtual bool SetDefaultOutputVolume(double volume) = 0; // Returns if the telephone-event has been negotiated. virtual bool CanInsertDtmf() = 0; // Send a DTMF `event`. The DTMF out-of-band signal will be used. @@ -839,17 +804,29 @@ class VoiceMediaChannel : public MediaChannel, public Delayable { // The valid value for the `event` are 0 to 15 which corresponding to // DTMF event 0-9, *, #, A-D. virtual bool InsertDtmf(uint32_t ssrc, int event, int duration) = 0; - // Gets quality stats for the channel. - virtual bool GetStats(VoiceMediaInfo* info, - bool get_and_clear_legacy_stats) = 0; +}; +class VoiceMediaReceiveChannelInterface : public MediaReceiveChannelInterface { + public: + virtual bool SetRecvParameters(const AudioRecvParameters& params) = 0; + // Get the receive parameters for the incoming stream identified by `ssrc`. + virtual webrtc::RtpParameters GetRtpReceiveParameters( + uint32_t ssrc) const = 0; + virtual std::vector GetSources(uint32_t ssrc) const = 0; + // Retrieve the receive parameters for the default receive + // stream, which is used when SSRCs are not signaled. + virtual webrtc::RtpParameters GetDefaultRtpReceiveParameters() const = 0; + // Starts or stops playout of received audio. + virtual void SetPlayout(bool playout) = 0; + // Set speaker output volume of the specified ssrc. + virtual bool SetOutputVolume(uint32_t ssrc, double volume) = 0; + // Set speaker output volume for future unsignaled streams. + virtual bool SetDefaultOutputVolume(double volume) = 0; virtual void SetRawAudioSink( uint32_t ssrc, std::unique_ptr sink) = 0; virtual void SetDefaultRawAudioSink( std::unique_ptr sink) = 0; - - virtual std::vector GetSources(uint32_t ssrc) const = 0; }; // TODO(deadbeef): Rename to VideoSenderParameters, since they're intended to @@ -873,22 +850,9 @@ struct VideoSendParameters : RtpSendParameters { // encapsulate all the parameters needed for a video RtpReceiver. struct VideoRecvParameters : RtpParameters {}; -class VideoMediaChannel : public MediaChannel, public Delayable { +class VideoMediaSendChannelInterface : public MediaSendChannelInterface { public: - explicit VideoMediaChannel(webrtc::TaskQueueBase* network_thread, - bool enable_dscp = false) - : MediaChannel(network_thread, enable_dscp) {} - ~VideoMediaChannel() override {} - - cricket::MediaType media_type() const override; virtual bool SetSendParameters(const VideoSendParameters& params) = 0; - virtual bool SetRecvParameters(const VideoRecvParameters& params) = 0; - // Get the receive parameters for the incoming stream identified by `ssrc`. - virtual webrtc::RtpParameters GetRtpReceiveParameters( - uint32_t ssrc) const = 0; - // Retrieve the receive parameters for the default receive - // stream, which is used when SSRCs are not signaled. 
- virtual webrtc::RtpParameters GetDefaultRtpReceiveParameters() const = 0; // Gets the currently set codecs/payload types to be used for outgoing media. virtual bool GetSendCodec(VideoCodec* send_codec) = 0; // Starts or stops transmission (and potentially capture) of local video. @@ -899,37 +863,39 @@ class VideoMediaChannel : public MediaChannel, public Delayable { uint32_t ssrc, const VideoOptions* options, rtc::VideoSourceInterface* source) = 0; + // Cause generation of a keyframe for `ssrc` on a sending channel. + virtual void GenerateSendKeyFrame(uint32_t ssrc, + const std::vector& rids) = 0; + // Enable network condition based codec switching. + virtual void SetVideoCodecSwitchingEnabled(bool enabled) = 0; +}; + +class VideoMediaReceiveChannelInterface : public MediaReceiveChannelInterface { + public: + virtual bool SetRecvParameters(const VideoRecvParameters& params) = 0; + // Get the receive parameters for the incoming stream identified by `ssrc`. + virtual webrtc::RtpParameters GetRtpReceiveParameters( + uint32_t ssrc) const = 0; + // Retrieve the receive parameters for the default receive + // stream, which is used when SSRCs are not signaled. + virtual webrtc::RtpParameters GetDefaultRtpReceiveParameters() const = 0; // Sets the sink object to be used for the specified stream. virtual bool SetSink(uint32_t ssrc, rtc::VideoSinkInterface* sink) = 0; // The sink is used for the 'default' stream. virtual void SetDefaultSink( rtc::VideoSinkInterface* sink) = 0; - // This fills the "bitrate parts" (rtx, video bitrate) of the - // BandwidthEstimationInfo, since that part that isn't possible to get - // through webrtc::Call::GetStats, as they are statistics of the send - // streams. - // TODO(holmer): We should change this so that either BWE graphs doesn't - // need access to bitrates of the streams, or change the (RTC)StatsCollector - // so that it's getting the send stream stats separately by calling - // GetStats(), and merges with BandwidthEstimationInfo by itself. - virtual void FillBitrateInfo(BandwidthEstimationInfo* bwe_info) = 0; - // Gets quality stats for the channel. - virtual bool GetStats(VideoMediaInfo* info) = 0; + // Request generation of a keyframe for `ssrc` on a receiving channel via + // RTCP feedback. + virtual void RequestRecvKeyFrame(uint32_t ssrc) = 0; + + virtual std::vector GetSources(uint32_t ssrc) const = 0; // Set recordable encoded frame callback for `ssrc` virtual void SetRecordableEncodedFrameCallback( uint32_t ssrc, std::function callback) = 0; // Clear recordable encoded frame callback for `ssrc` virtual void ClearRecordableEncodedFrameCallback(uint32_t ssrc) = 0; - // Request generation of a keyframe for `ssrc` on a receiving channel via - // RTCP feedback. - virtual void RequestRecvKeyFrame(uint32_t ssrc) = 0; - // Cause generation of a keyframe for `ssrc` on a sending channel. - virtual void GenerateSendKeyFrame(uint32_t ssrc, - const std::vector& rids) = 0; - - virtual std::vector GetSources(uint32_t ssrc) const = 0; }; // Info about data received in DataMediaChannel. 
For use in diff --git a/third_party/libwebrtc/media/base/media_channel.cc b/third_party/libwebrtc/media/base/media_channel_impl.cc similarity index 88% rename from third_party/libwebrtc/media/base/media_channel.cc rename to third_party/libwebrtc/media/base/media_channel_impl.cc index e01bfb1a8233..02d8575356b5 100644 --- a/third_party/libwebrtc/media/base/media_channel.cc +++ b/third_party/libwebrtc/media/base/media_channel_impl.cc @@ -8,9 +8,26 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "media/base/media_channel.h" +#include "media/base/media_channel_impl.h" +#include +#include +#include + +#include "absl/functional/any_invocable.h" +#include "api/audio_options.h" +#include "api/media_stream_interface.h" +#include "api/rtc_error.h" +#include "api/rtp_sender_interface.h" +#include "api/units/time_delta.h" +#include "api/video/video_timing.h" +#include "common_video/include/quality_limitation_reason.h" +#include "media/base/codec.h" +#include "media/base/media_channel.h" #include "media/base/rtp_utils.h" +#include "media/base/stream_params.h" +#include "modules/rtp_rtcp/include/report_block_data.h" +#include "rtc_base/checks.h" namespace cricket { using webrtc::FrameDecryptorInterface; @@ -34,7 +51,7 @@ MediaChannel::~MediaChannel() { RTC_DCHECK(!network_interface_); } -void MediaChannel::SetInterface(NetworkInterface* iface) { +void MediaChannel::SetInterface(MediaChannelNetworkInterface* iface) { RTC_DCHECK_RUN_ON(network_thread_); iface ? network_safety_->SetAlive() : network_safety_->SetNotAlive(); network_interface_ = iface; @@ -57,8 +74,6 @@ void MediaChannel::SetFrameDecryptor( // Placeholder should be pure virtual once internal supports it. } -void MediaChannel::SetVideoCodecSwitchingEnabled(bool enabled) {} - bool MediaChannel::SendPacket(rtc::CopyOnWriteBuffer* packet, const rtc::PacketOptions& options) { return DoSendPacket(packet, false, options); @@ -69,7 +84,7 @@ bool MediaChannel::SendRtcp(rtc::CopyOnWriteBuffer* packet, return DoSendPacket(packet, true, options); } -int MediaChannel::SetOption(NetworkInterface::SocketType type, +int MediaChannel::SetOption(MediaChannelNetworkInterface::SocketType type, rtc::Socket::Option opt, int option) { RTC_DCHECK_RUN_ON(network_thread_); @@ -101,7 +116,7 @@ void MediaChannel::SetDepacketizerToDecoderFrameTransformer( uint32_t ssrc, rtc::scoped_refptr frame_transformer) {} -int MediaChannel::SetOptionLocked(NetworkInterface::SocketType type, +int MediaChannel::SetOptionLocked(MediaChannelNetworkInterface::SocketType type, rtc::Socket::Option opt, int option) { if (!network_interface_) @@ -145,10 +160,11 @@ rtc::scoped_refptr MediaChannel::network_safety() { void MediaChannel::UpdateDscp() { rtc::DiffServCodePoint value = enable_dscp_ ? 
preferred_dscp_ : rtc::DSCP_DEFAULT; - int ret = - SetOptionLocked(NetworkInterface::ST_RTP, rtc::Socket::OPT_DSCP, value); + int ret = SetOptionLocked(MediaChannelNetworkInterface::ST_RTP, + rtc::Socket::OPT_DSCP, value); if (ret == 0) - SetOptionLocked(NetworkInterface::ST_RTCP, rtc::Socket::OPT_DSCP, value); + SetOptionLocked(MediaChannelNetworkInterface::ST_RTCP, + rtc::Socket::OPT_DSCP, value); } bool MediaChannel::DoSendPacket(rtc::CopyOnWriteBuffer* packet, @@ -260,4 +276,6 @@ cricket::MediaType VideoMediaChannel::media_type() const { return cricket::MediaType::MEDIA_TYPE_VIDEO; } +void VideoMediaChannel::SetVideoCodecSwitchingEnabled(bool enabled) {} + } // namespace cricket diff --git a/third_party/libwebrtc/media/base/media_channel_impl.h b/third_party/libwebrtc/media/base/media_channel_impl.h new file mode 100644 index 000000000000..41bead70b900 --- /dev/null +++ b/third_party/libwebrtc/media/base/media_channel_impl.h @@ -0,0 +1,242 @@ +/* + * Copyright 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MEDIA_BASE_MEDIA_CHANNEL_IMPL_H_ +#define MEDIA_BASE_MEDIA_CHANNEL_IMPL_H_ + +#include +#include + +#include "absl/strings/string_view.h" +#include "api/call/transport.h" +#include "api/crypto/frame_decryptor_interface.h" +#include "api/crypto/frame_encryptor_interface.h" +#include "api/frame_transformer_interface.h" +#include "api/media_types.h" +#include "api/scoped_refptr.h" +#include "api/sequence_checker.h" +#include "api/task_queue/pending_task_safety_flag.h" +#include "api/task_queue/task_queue_base.h" +#include "media/base/media_channel.h" +#include "rtc_base/async_packet_socket.h" +#include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/dscp.h" +#include "rtc_base/network/sent_packet.h" +#include "rtc_base/network_route.h" +#include "rtc_base/socket.h" +#include "rtc_base/thread_annotations.h" +// This file contains the base classes for classes that implement +// the MediaChannel interfaces. +// These implementation classes used to be the exposed interface names, +// but this is in the process of being changed. +// TODO(bugs.webrtc.org/13931): Consider removing these classes. + +namespace cricket { + +class VoiceMediaChannel; +class VideoMediaChannel; + +class MediaChannel : public MediaSendChannelInterface, + public MediaReceiveChannelInterface { + public: + explicit MediaChannel(webrtc::TaskQueueBase* network_thread, + bool enable_dscp = false); + virtual ~MediaChannel(); + + // Downcasting to the implemented interfaces. + MediaSendChannelInterface* AsSendChannel() { return this; } + + MediaReceiveChannelInterface* AsReceiveChannel() { return this; } + + // Downcasting to the subclasses. + virtual VideoMediaChannel* AsVideoChannel() { + RTC_CHECK_NOTREACHED(); + return nullptr; + } + + virtual VoiceMediaChannel* AsVoiceChannel() { + RTC_CHECK_NOTREACHED(); + return nullptr; + } + + // Must declare the methods inherited from the base interface template, + // even when abstract, to tell the compiler that all instances of the name + // referred to by subclasses of this share the same implementation. 
+ cricket::MediaType media_type() const override = 0; + void OnPacketReceived(rtc::CopyOnWriteBuffer packet, + int64_t packet_time_us) override = 0; + void OnPacketSent(const rtc::SentPacket& sent_packet) override = 0; + void OnReadyToSend(bool ready) override = 0; + void OnNetworkRouteChanged(absl::string_view transport_name, + const rtc::NetworkRoute& network_route) override = + 0; + + // Sets the abstract interface class for sending RTP/RTCP data. + virtual void SetInterface(MediaChannelNetworkInterface* iface); + // Returns the absolute sendtime extension id value from media channel. + virtual int GetRtpSendTimeExtnId() const; + // Base method to send packet using MediaChannelNetworkInterface. + bool SendPacket(rtc::CopyOnWriteBuffer* packet, + const rtc::PacketOptions& options); + + bool SendRtcp(rtc::CopyOnWriteBuffer* packet, + const rtc::PacketOptions& options); + + int SetOption(MediaChannelNetworkInterface::SocketType type, + rtc::Socket::Option opt, + int option); + + // Corresponds to the SDP attribute extmap-allow-mixed, see RFC8285. + // Set to true if it's allowed to mix one- and two-byte RTP header extensions + // in the same stream. The setter and getter must only be called from + // worker_thread. + void SetExtmapAllowMixed(bool extmap_allow_mixed) override; + bool ExtmapAllowMixed() const override; + + // Returns `true` if a non-null MediaChannelNetworkInterface pointer is held. + // Must be called on the network thread. + bool HasNetworkInterface() const; + + void SetFrameEncryptor(uint32_t ssrc, + rtc::scoped_refptr + frame_encryptor) override; + void SetFrameDecryptor(uint32_t ssrc, + rtc::scoped_refptr + frame_decryptor) override; + + void SetEncoderToPacketizerFrameTransformer( + uint32_t ssrc, + rtc::scoped_refptr frame_transformer) + override; + void SetDepacketizerToDecoderFrameTransformer( + uint32_t ssrc, + rtc::scoped_refptr frame_transformer) + override; + + protected: + int SetOptionLocked(MediaChannelNetworkInterface::SocketType type, + rtc::Socket::Option opt, + int option) RTC_RUN_ON(network_thread_); + + bool DscpEnabled() const; + + // This is the DSCP value used for both RTP and RTCP channels if DSCP is + // enabled. It can be changed at any time via `SetPreferredDscp`. + rtc::DiffServCodePoint PreferredDscp() const; + void SetPreferredDscp(rtc::DiffServCodePoint new_dscp); + + rtc::scoped_refptr network_safety(); + + // Utility implementation for derived classes (video/voice) that applies + // the packet options and passes the data onwards to `SendPacket`. + void SendRtp(const uint8_t* data, + size_t len, + const webrtc::PacketOptions& options); + + void SendRtcp(const uint8_t* data, size_t len); + + private: + // Apply the preferred DSCP setting to the underlying network interface RTP + // and RTCP channels. If DSCP is disabled, then apply the default DSCP value. 
+ void UpdateDscp() RTC_RUN_ON(network_thread_); + + bool DoSendPacket(rtc::CopyOnWriteBuffer* packet, + bool rtcp, + const rtc::PacketOptions& options); + + const bool enable_dscp_; + const rtc::scoped_refptr network_safety_ + RTC_PT_GUARDED_BY(network_thread_); + webrtc::TaskQueueBase* const network_thread_; + MediaChannelNetworkInterface* network_interface_ + RTC_GUARDED_BY(network_thread_) = nullptr; + rtc::DiffServCodePoint preferred_dscp_ RTC_GUARDED_BY(network_thread_) = + rtc::DSCP_DEFAULT; + bool extmap_allow_mixed_ = false; +}; + +// Base class for implementation classes + +class VideoMediaChannel : public MediaChannel, + public VideoMediaSendChannelInterface, + public VideoMediaReceiveChannelInterface { + public: + explicit VideoMediaChannel(webrtc::TaskQueueBase* network_thread, + bool enable_dscp = false) + : MediaChannel(network_thread, enable_dscp) {} + ~VideoMediaChannel() override {} + + // Downcasting to the implemented interfaces. + VideoMediaSendChannelInterface* AsVideoSendChannel() override { return this; } + + VideoMediaReceiveChannelInterface* AsVideoReceiveChannel() override { + return this; + } + cricket::MediaType media_type() const override; + + // Downcasting to the subclasses. + VideoMediaChannel* AsVideoChannel() override { return this; } + + void SetExtmapAllowMixed(bool mixed) override { + MediaChannel::SetExtmapAllowMixed(mixed); + } + bool ExtmapAllowMixed() const override { + return MediaChannel::ExtmapAllowMixed(); + } + // This fills the "bitrate parts" (rtx, video bitrate) of the + // BandwidthEstimationInfo, since that part that isn't possible to get + // through webrtc::Call::GetStats, as they are statistics of the send + // streams. + // TODO(holmer): We should change this so that either BWE graphs doesn't + // need access to bitrates of the streams, or change the (RTC)StatsCollector + // so that it's getting the send stream stats separately by calling + // GetStats(), and merges with BandwidthEstimationInfo by itself. + virtual void FillBitrateInfo(BandwidthEstimationInfo* bwe_info) = 0; + // Gets quality stats for the channel. + virtual bool GetStats(VideoMediaInfo* info) = 0; + // Enable network condition based codec switching. + void SetVideoCodecSwitchingEnabled(bool enabled) override; +}; + +// Base class for implementation classes +class VoiceMediaChannel : public MediaChannel, + public VoiceMediaSendChannelInterface, + public VoiceMediaReceiveChannelInterface { + public: + MediaType media_type() const override; + VoiceMediaChannel(webrtc::TaskQueueBase* network_thread, + bool enable_dscp = false) + : MediaChannel(network_thread, enable_dscp) {} + ~VoiceMediaChannel() override {} + + // Downcasting to the implemented interfaces. + VoiceMediaSendChannelInterface* AsVoiceSendChannel() override { return this; } + + VoiceMediaReceiveChannelInterface* AsVoiceReceiveChannel() override { + return this; + } + + VoiceMediaChannel* AsVoiceChannel() override { return this; } + + void SetExtmapAllowMixed(bool mixed) override { + MediaChannel::SetExtmapAllowMixed(mixed); + } + bool ExtmapAllowMixed() const override { + return MediaChannel::ExtmapAllowMixed(); + } + + // Gets quality stats for the channel. 
+ virtual bool GetStats(VoiceMediaInfo* info, + bool get_and_clear_legacy_stats) = 0; +}; + +} // namespace cricket + +#endif // MEDIA_BASE_MEDIA_CHANNEL_IMPL_H_ diff --git a/third_party/libwebrtc/media/base/media_engine.h b/third_party/libwebrtc/media/base/media_engine.h index e533691751c4..96b54babcc4e 100644 --- a/third_party/libwebrtc/media/base/media_engine.h +++ b/third_party/libwebrtc/media/base/media_engine.h @@ -24,6 +24,7 @@ #include "call/audio_state.h" #include "media/base/codec.h" #include "media/base/media_channel.h" +#include "media/base/media_channel_impl.h" #include "media/base/media_config.h" #include "media/base/video_common.h" #include "rtc_base/system/file_wrapper.h" diff --git a/third_party/libwebrtc/media/engine/fake_webrtc_call.cc b/third_party/libwebrtc/media/engine/fake_webrtc_call.cc index 48a8b12092b7..8046c3ad3a2a 100644 --- a/third_party/libwebrtc/media/engine/fake_webrtc_call.cc +++ b/third_party/libwebrtc/media/engine/fake_webrtc_call.cc @@ -15,6 +15,7 @@ #include "absl/algorithm/container.h" #include "absl/strings/string_view.h" #include "api/call/audio_sink.h" +#include "media/base/media_channel.h" #include "modules/rtp_rtcp/source/rtp_util.h" #include "rtc_base/checks.h" #include "rtc_base/gunit.h" @@ -31,8 +32,10 @@ FakeAudioSendStream::FakeAudioSendStream( : id_(id), config_(config) {} void FakeAudioSendStream::Reconfigure( - const webrtc::AudioSendStream::Config& config) { + const webrtc::AudioSendStream::Config& config, + webrtc::SetParametersCallback callback) { config_ = config; + webrtc::InvokeSetParametersCallback(callback, webrtc::RTCError::OK()); } const webrtc::AudioSendStream::Config& FakeAudioSendStream::GetConfig() const { @@ -275,6 +278,12 @@ webrtc::VideoSendStream::Stats FakeVideoSendStream::GetStats() { void FakeVideoSendStream::ReconfigureVideoEncoder( webrtc::VideoEncoderConfig config) { + ReconfigureVideoEncoder(std::move(config), nullptr); +} + +void FakeVideoSendStream::ReconfigureVideoEncoder( + webrtc::VideoEncoderConfig config, + webrtc::SetParametersCallback callback) { int width, height; if (last_frame_) { width = last_frame_->width(); @@ -326,9 +335,10 @@ void FakeVideoSendStream::ReconfigureVideoEncoder( codec_settings_set_ = config.encoder_specific_settings != nullptr; encoder_config_ = std::move(config); ++num_encoder_reconfigurations_; + webrtc::InvokeSetParametersCallback(callback, webrtc::RTCError::OK()); } -void FakeVideoSendStream::UpdateActiveSimulcastLayers( +void FakeVideoSendStream::StartPerRtpStream( const std::vector active_layers) { sending_ = false; for (const bool active_layer : active_layers) { diff --git a/third_party/libwebrtc/media/engine/fake_webrtc_call.h b/third_party/libwebrtc/media/engine/fake_webrtc_call.h index 1e0568e46e13..370b70700f62 100644 --- a/third_party/libwebrtc/media/engine/fake_webrtc_call.h +++ b/third_party/libwebrtc/media/engine/fake_webrtc_call.h @@ -62,7 +62,8 @@ class FakeAudioSendStream final : public webrtc::AudioSendStream { private: // webrtc::AudioSendStream implementation. - void Reconfigure(const webrtc::AudioSendStream::Config& config) override; + void Reconfigure(const webrtc::AudioSendStream::Config& config, + webrtc::SetParametersCallback callback) override; void Start() override { sending_ = true; } void Stop() override { sending_ = false; } void SendAudioData(std::unique_ptr audio_frame) override { @@ -201,7 +202,7 @@ class FakeVideoSendStream final void OnFrame(const webrtc::VideoFrame& frame) override; // webrtc::VideoSendStream implementation. 
- void UpdateActiveSimulcastLayers(std::vector active_layers) override; + void StartPerRtpStream(std::vector active_layers) override; void Start() override; void Stop() override; bool started() override { return IsSending(); } @@ -213,7 +214,10 @@ class FakeVideoSendStream final rtc::VideoSourceInterface* source, const webrtc::DegradationPreference& degradation_preference) override; webrtc::VideoSendStream::Stats GetStats() override; + void ReconfigureVideoEncoder(webrtc::VideoEncoderConfig config) override; + void ReconfigureVideoEncoder(webrtc::VideoEncoderConfig config, + webrtc::SetParametersCallback callback) override; bool sending_; webrtc::VideoSendStream::Config config_; diff --git a/third_party/libwebrtc/media/engine/internal_decoder_factory_unittest.cc b/third_party/libwebrtc/media/engine/internal_decoder_factory_unittest.cc index d37c1a8247a5..53811b4879b6 100644 --- a/third_party/libwebrtc/media/engine/internal_decoder_factory_unittest.cc +++ b/third_party/libwebrtc/media/engine/internal_decoder_factory_unittest.cc @@ -16,6 +16,7 @@ #include "api/video_codecs/vp9_profile.h" #include "media/base/media_constants.h" #include "system_wrappers/include/field_trial.h" +#include "test/field_trial.h" #include "test/gmock.h" #include "test/gtest.h" @@ -122,7 +123,7 @@ TEST(InternalDecoderFactoryTest, Av1Profile1_Dav1dDecoderTrialEnabled) { } TEST(InternalDecoderFactoryTest, Av1Profile1_Dav1dDecoderTrialDisabled) { - InitFieldTrialsFromString(kDav1dDecoderFieldTrialDisabled); + test::ScopedFieldTrials disable_dav1d(kDav1dDecoderFieldTrialDisabled); InternalDecoderFactory factory; std::unique_ptr decoder = factory.CreateVideoDecoder( SdpVideoFormat(cricket::kAv1CodecName, diff --git a/third_party/libwebrtc/media/engine/simulcast_encoder_adapter.cc b/third_party/libwebrtc/media/engine/simulcast_encoder_adapter.cc index e7f6205ab661..3a73a4ac1094 100644 --- a/third_party/libwebrtc/media/engine/simulcast_encoder_adapter.cc +++ b/third_party/libwebrtc/media/engine/simulcast_encoder_adapter.cc @@ -86,22 +86,24 @@ int CountActiveStreams(const webrtc::VideoCodec& codec) { return active_streams_count; } -int VerifyCodec(const webrtc::VideoCodec* inst) { - if (inst == nullptr) { +int VerifyCodec(const webrtc::VideoCodec* codec_settings) { + if (codec_settings == nullptr) { return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; } - if (inst->maxFramerate < 1) { + if (codec_settings->maxFramerate < 1) { return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; } // allow zero to represent an unspecified maxBitRate - if (inst->maxBitrate > 0 && inst->startBitrate > inst->maxBitrate) { + if (codec_settings->maxBitrate > 0 && + codec_settings->startBitrate > codec_settings->maxBitrate) { return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; } - if (inst->width <= 1 || inst->height <= 1) { + if (codec_settings->width <= 1 || codec_settings->height <= 1) { return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; } - if (inst->codecType == webrtc::kVideoCodecVP8 && - inst->VP8().automaticResizeOn && CountActiveStreams(*inst) > 1) { + if (codec_settings->codecType == webrtc::kVideoCodecVP8 && + codec_settings->VP8().automaticResizeOn && + CountActiveStreams(*codec_settings) > 1) { return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; } return WEBRTC_VIDEO_CODEC_OK; @@ -299,7 +301,7 @@ int SimulcastEncoderAdapter::Release() { } int SimulcastEncoderAdapter::InitEncode( - const VideoCodec* inst, + const VideoCodec* codec_settings, const VideoEncoder::Settings& settings) { RTC_DCHECK_RUN_ON(&encoder_queue_); @@ -307,15 +309,15 @@ int SimulcastEncoderAdapter::InitEncode( return 
WEBRTC_VIDEO_CODEC_ERR_PARAMETER; } - int ret = VerifyCodec(inst); + int ret = VerifyCodec(codec_settings); if (ret < 0) { return ret; } Release(); - codec_ = *inst; - total_streams_count_ = CountAllStreams(*inst); + codec_ = *codec_settings; + total_streams_count_ = CountAllStreams(*codec_settings); // TODO(ronghuawu): Remove once this is handled in LibvpxVp8Encoder. if (codec_.qpMax < kDefaultMinQp) { @@ -349,7 +351,7 @@ int SimulcastEncoderAdapter::InitEncode( // (active_streams_count >= 1). SEA creates N=active_streams_count encoders // and configures each to produce a single stream. - int active_streams_count = CountActiveStreams(*inst); + int active_streams_count = CountActiveStreams(*codec_settings); // If we only have a single active layer it is better to create an encoder // with only one configured layer than creating it with all-but-one disabled // layers because that way we control scaling. @@ -461,24 +463,14 @@ int SimulcastEncoderAdapter::Encode( } } - // All active streams should generate a key frame if - // a key frame is requested by any stream. bool is_keyframe_needed = false; - if (frame_types) { - for (const auto& frame_type : *frame_types) { - if (frame_type == VideoFrameType::kVideoFrameKey) { - is_keyframe_needed = true; - break; - } - } - } - - if (!is_keyframe_needed) { - for (const auto& layer : stream_contexts_) { - if (layer.is_keyframe_needed()) { - is_keyframe_needed = true; - break; - } + for (const auto& layer : stream_contexts_) { + if (layer.is_keyframe_needed()) { + // This is legacy behavior, generating a keyframe on all layers + // when generating one for a layer that became active for the first time + // or after being disabled. + is_keyframe_needed = true; + break; } } @@ -501,17 +493,38 @@ int SimulcastEncoderAdapter::Encode( // frame types for all streams should be passed to the encoder unchanged. // Otherwise a single per-encoder frame type is passed. std::vector stream_frame_types( - bypass_mode_ ? total_streams_count_ : 1); + bypass_mode_ + ? std::max(codec_.numberOfSimulcastStreams, 1) + : 1, + VideoFrameType::kVideoFrameDelta); + + bool keyframe_requested = false; if (is_keyframe_needed) { std::fill(stream_frame_types.begin(), stream_frame_types.end(), VideoFrameType::kVideoFrameKey); - layer.OnKeyframe(frame_timestamp); - } else { - if (layer.ShouldDropFrame(frame_timestamp)) { - continue; + keyframe_requested = true; + } else if (frame_types) { + if (bypass_mode_) { + // In bypass mode, we effectively pass on frame_types. 
+ RTC_DCHECK_EQ(frame_types->size(), stream_frame_types.size()); + stream_frame_types = *frame_types; + keyframe_requested = + absl::c_any_of(*frame_types, [](const VideoFrameType frame_type) { + return frame_type == VideoFrameType::kVideoFrameKey; + }); + } else { + size_t stream_idx = static_cast(layer.stream_idx()); + if (frame_types->size() >= stream_idx && + (*frame_types)[stream_idx] == VideoFrameType::kVideoFrameKey) { + stream_frame_types[0] = VideoFrameType::kVideoFrameKey; + keyframe_requested = true; + } } - std::fill(stream_frame_types.begin(), stream_frame_types.end(), - VideoFrameType::kVideoFrameDelta); + } + if (keyframe_requested) { + layer.OnKeyframe(frame_timestamp); + } else if (layer.ShouldDropFrame(frame_timestamp)) { + continue; } // If scaling isn't required, because the input resolution diff --git a/third_party/libwebrtc/media/engine/simulcast_encoder_adapter_unittest.cc b/third_party/libwebrtc/media/engine/simulcast_encoder_adapter_unittest.cc index e0e3ea86eced..15a8aeb71ef1 100644 --- a/third_party/libwebrtc/media/engine/simulcast_encoder_adapter_unittest.cc +++ b/third_party/libwebrtc/media/engine/simulcast_encoder_adapter_unittest.cc @@ -176,7 +176,7 @@ class MockVideoEncoderFactory : public VideoEncoderFactory { } void set_init_encode_return_value(int32_t value); void set_requested_resolution_alignments( - std::vector requested_resolution_alignments) { + std::vector requested_resolution_alignments) { requested_resolution_alignments_ = requested_resolution_alignments; } void set_supports_simulcast(bool supports_simulcast) { @@ -195,7 +195,7 @@ class MockVideoEncoderFactory : public VideoEncoderFactory { std::vector encoders_; std::vector encoder_names_; // Keep number of entries in sync with `kMaxSimulcastStreams`. - std::vector requested_resolution_alignments_ = {1, 1, 1}; + std::vector requested_resolution_alignments_ = {1, 1, 1}; bool supports_simulcast_ = false; std::vector resolution_bitrate_limits_; }; @@ -284,7 +284,8 @@ class MockVideoEncoder : public VideoEncoder { scaling_settings_ = settings; } - void set_requested_resolution_alignment(int requested_resolution_alignment) { + void set_requested_resolution_alignment( + uint32_t requested_resolution_alignment) { requested_resolution_alignment_ = requested_resolution_alignment; } @@ -332,7 +333,7 @@ class MockVideoEncoder : public VideoEncoder { bool supports_native_handle_ = false; std::string implementation_name_ = "unknown"; VideoEncoder::ScalingSettings scaling_settings_; - int requested_resolution_alignment_ = 1; + uint32_t requested_resolution_alignment_ = 1; bool apply_alignment_to_all_simulcast_layers_ = false; bool has_trusted_rate_controller_ = false; bool is_hardware_accelerated_ = false; @@ -1089,6 +1090,89 @@ TEST_F(TestSimulcastEncoderAdapterFake, NativeHandleForwardingOnlyIfSupported) { EXPECT_EQ(0, adapter_->Encode(input_frame, &frame_types)); } +TEST_F(TestSimulcastEncoderAdapterFake, GeneratesKeyFramesOnRequestedLayers) { + // Set up common settings for three streams. + SimulcastTestFixtureImpl::DefaultSettings( + &codec_, static_cast(kTestTemporalLayerProfile), + kVideoCodecVP8); + rate_allocator_.reset(new SimulcastRateAllocator(codec_)); + adapter_->RegisterEncodeCompleteCallback(this); + + // Input data. + rtc::scoped_refptr buffer(I420Buffer::Create(1280, 720)); + + // Encode with three streams. 
+ codec_.startBitrate = 3000; + EXPECT_EQ(0, adapter_->InitEncode(&codec_, kSettings)); + + std::vector frame_types; + frame_types.resize(3, VideoFrameType::kVideoFrameKey); + + std::vector expected_keyframe(1, + VideoFrameType::kVideoFrameKey); + std::vector expected_deltaframe( + 1, VideoFrameType::kVideoFrameDelta); + + std::vector original_encoders = + helper_->factory()->encoders(); + ASSERT_EQ(3u, original_encoders.size()); + EXPECT_CALL(*original_encoders[0], + Encode(_, ::testing::Pointee(::testing::Eq(expected_keyframe)))) + .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK)); + EXPECT_CALL(*original_encoders[1], + Encode(_, ::testing::Pointee(::testing::Eq(expected_keyframe)))) + .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK)); + EXPECT_CALL(*original_encoders[2], + Encode(_, ::testing::Pointee(::testing::Eq(expected_keyframe)))) + .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK)); + VideoFrame first_frame = VideoFrame::Builder() + .set_video_frame_buffer(buffer) + .set_timestamp_rtp(0) + .set_timestamp_ms(0) + .build(); + EXPECT_EQ(0, adapter_->Encode(first_frame, &frame_types)); + + // Request [key, delta, delta]. + EXPECT_CALL(*original_encoders[0], + Encode(_, ::testing::Pointee(::testing::Eq(expected_keyframe)))) + .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK)); + EXPECT_CALL(*original_encoders[1], + Encode(_, ::testing::Pointee(::testing::Eq(expected_deltaframe)))) + .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK)); + EXPECT_CALL(*original_encoders[2], + Encode(_, ::testing::Pointee(::testing::Eq(expected_deltaframe)))) + .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK)); + frame_types[1] = VideoFrameType::kVideoFrameKey; + frame_types[1] = VideoFrameType::kVideoFrameDelta; + frame_types[2] = VideoFrameType::kVideoFrameDelta; + VideoFrame second_frame = VideoFrame::Builder() + .set_video_frame_buffer(buffer) + .set_timestamp_rtp(10000) + .set_timestamp_ms(100000) + .build(); + EXPECT_EQ(0, adapter_->Encode(second_frame, &frame_types)); + + // Request [delta, key, delta]. 
+ EXPECT_CALL(*original_encoders[0], + Encode(_, ::testing::Pointee(::testing::Eq(expected_deltaframe)))) + .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK)); + EXPECT_CALL(*original_encoders[1], + Encode(_, ::testing::Pointee(::testing::Eq(expected_keyframe)))) + .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK)); + EXPECT_CALL(*original_encoders[2], + Encode(_, ::testing::Pointee(::testing::Eq(expected_deltaframe)))) + .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK)); + frame_types[0] = VideoFrameType::kVideoFrameDelta; + frame_types[1] = VideoFrameType::kVideoFrameKey; + frame_types[2] = VideoFrameType::kVideoFrameDelta; + VideoFrame third_frame = VideoFrame::Builder() + .set_video_frame_buffer(buffer) + .set_timestamp_rtp(20000) + .set_timestamp_ms(200000) + .build(); + EXPECT_EQ(0, adapter_->Encode(third_frame, &frame_types)); +} + TEST_F(TestSimulcastEncoderAdapterFake, TestFailureReturnCodesFromEncodeCalls) { SimulcastTestFixtureImpl::DefaultSettings( &codec_, static_cast(kTestTemporalLayerProfile), @@ -1313,7 +1397,7 @@ TEST_F(TestSimulcastEncoderAdapterFake, helper_->factory()->set_requested_resolution_alignments({2, 4, 7}); EXPECT_EQ(0, adapter_->InitEncode(&codec_, kSettings)); - EXPECT_EQ(adapter_->GetEncoderInfo().requested_resolution_alignment, 28); + EXPECT_EQ(adapter_->GetEncoderInfo().requested_resolution_alignment, 28u); } TEST_F(TestSimulcastEncoderAdapterFake, @@ -1384,7 +1468,7 @@ TEST_F(TestSimulcastEncoderAdapterFake, EncoderInfoFromFieldTrial) { EXPECT_EQ(0, adapter_->InitEncode(&codec_, kSettings)); ASSERT_EQ(3u, helper_->factory()->encoders().size()); - EXPECT_EQ(8, adapter_->GetEncoderInfo().requested_resolution_alignment); + EXPECT_EQ(8u, adapter_->GetEncoderInfo().requested_resolution_alignment); EXPECT_TRUE( adapter_->GetEncoderInfo().apply_alignment_to_all_simulcast_layers); EXPECT_TRUE(adapter_->GetEncoderInfo().resolution_bitrate_limits.empty()); @@ -1407,7 +1491,7 @@ TEST_F(TestSimulcastEncoderAdapterFake, EXPECT_EQ(0, adapter_->InitEncode(&codec_, kSettings)); ASSERT_EQ(1u, helper_->factory()->encoders().size()); - EXPECT_EQ(9, adapter_->GetEncoderInfo().requested_resolution_alignment); + EXPECT_EQ(9u, adapter_->GetEncoderInfo().requested_resolution_alignment); EXPECT_FALSE( adapter_->GetEncoderInfo().apply_alignment_to_all_simulcast_layers); EXPECT_THAT( diff --git a/third_party/libwebrtc/media/engine/webrtc_video_engine.cc b/third_party/libwebrtc/media/engine/webrtc_video_engine.cc index cfb15b0576e5..34beacdab728 100644 --- a/third_party/libwebrtc/media/engine/webrtc_video_engine.cc +++ b/third_party/libwebrtc/media/engine/webrtc_video_engine.cc @@ -1042,7 +1042,8 @@ webrtc::RtpParameters WebRtcVideoChannel::GetRtpSendParameters( webrtc::RTCError WebRtcVideoChannel::SetRtpSendParameters( uint32_t ssrc, - const webrtc::RtpParameters& parameters) { + const webrtc::RtpParameters& parameters, + webrtc::SetParametersCallback callback) { RTC_DCHECK_RUN_ON(&thread_checker_); TRACE_EVENT0("webrtc", "WebRtcVideoChannel::SetRtpSendParameters"); auto it = send_streams_.find(ssrc); @@ -1050,7 +1051,8 @@ webrtc::RTCError WebRtcVideoChannel::SetRtpSendParameters( RTC_LOG(LS_ERROR) << "Attempting to set RTP send parameters for stream " "with ssrc " << ssrc << " which doesn't exist."; - return webrtc::RTCError(webrtc::RTCErrorType::INTERNAL_ERROR); + return webrtc::InvokeSetParametersCallback( + callback, webrtc::RTCError(webrtc::RTCErrorType::INTERNAL_ERROR)); } // TODO(deadbeef): Handle setting parameters with a list of codecs in a @@ -1059,7 +1061,8 @@ webrtc::RTCError 
WebRtcVideoChannel::SetRtpSendParameters( if (current_parameters.codecs != parameters.codecs) { RTC_DLOG(LS_ERROR) << "Using SetParameters to change the set of codecs " "is not currently supported."; - return webrtc::RTCError(webrtc::RTCErrorType::INTERNAL_ERROR); + return webrtc::InvokeSetParametersCallback( + callback, webrtc::RTCError(webrtc::RTCErrorType::INTERNAL_ERROR)); } if (!parameters.encodings.empty()) { @@ -1085,7 +1088,7 @@ webrtc::RTCError WebRtcVideoChannel::SetRtpSendParameters( SetPreferredDscp(new_dscp); } - return it->second->SetRtpParameters(parameters); + return it->second->SetRtpParameters(parameters, std::move(callback)); } webrtc::RtpParameters WebRtcVideoChannel::GetRtpReceiveParameters( @@ -1884,12 +1887,12 @@ void WebRtcVideoChannel::OnNetworkRouteChanged( })); } -void WebRtcVideoChannel::SetInterface(NetworkInterface* iface) { +void WebRtcVideoChannel::SetInterface(MediaChannelNetworkInterface* iface) { RTC_DCHECK_RUN_ON(&network_thread_checker_); MediaChannel::SetInterface(iface); // Set the RTP recv/send buffer to a bigger size. - MediaChannel::SetOption(NetworkInterface::ST_RTP, rtc::Socket::OPT_RCVBUF, - kVideoRtpRecvBufferSize); + MediaChannel::SetOption(MediaChannelNetworkInterface::ST_RTP, + rtc::Socket::OPT_RCVBUF, kVideoRtpRecvBufferSize); // Speculative change to increase the outbound socket buffer size. // In b/15152257, we are seeing a significant number of packets discarded @@ -1906,8 +1909,8 @@ void WebRtcVideoChannel::SetInterface(NetworkInterface* iface) { send_buffer_size = kVideoRtpSendBufferSize; } - MediaChannel::SetOption(NetworkInterface::ST_RTP, rtc::Socket::OPT_SNDBUF, - send_buffer_size); + MediaChannel::SetOption(MediaChannelNetworkInterface::ST_RTP, + rtc::Socket::OPT_SNDBUF, send_buffer_size); } void WebRtcVideoChannel::SetFrameDecryptor( @@ -2156,7 +2159,7 @@ bool WebRtcVideoChannel::WebRtcVideoSendStream::SetVideoSend( old_options.is_screencast = options->is_screencast; } if (parameters_.options != old_options) { - ReconfigureEncoder(); + ReconfigureEncoder(nullptr); } } @@ -2283,7 +2286,7 @@ void WebRtcVideoChannel::WebRtcVideoSendStream::SetSendParameters( } if (params.max_bandwidth_bps) { parameters_.max_bitrate_bps = *params.max_bandwidth_bps; - ReconfigureEncoder(); + ReconfigureEncoder(nullptr); } if (params.conference_mode) { parameters_.conference_mode = *params.conference_mode; @@ -2305,7 +2308,8 @@ void WebRtcVideoChannel::WebRtcVideoSendStream::SetSendParameters( } webrtc::RTCError WebRtcVideoChannel::WebRtcVideoSendStream::SetRtpParameters( - const webrtc::RtpParameters& new_parameters) { + const webrtc::RtpParameters& new_parameters, + webrtc::SetParametersCallback callback) { RTC_DCHECK_RUN_ON(&thread_checker_); // This is checked higher in the stack (RtpSender), so this is only checking // for users accessing the private APIs or tests, not specification @@ -2366,7 +2370,9 @@ webrtc::RTCError WebRtcVideoChannel::WebRtcVideoSendStream::SetRtpParameters( // Codecs are currently handled at the WebRtcVideoChannel level. 
rtp_parameters_.codecs.clear(); if (reconfigure_encoder || new_send_state) { - ReconfigureEncoder(); + // Callback responsibility is delegated to ReconfigureEncoder() + ReconfigureEncoder(std::move(callback)); + callback = nullptr; } if (new_send_state) { UpdateSendState(); @@ -2376,7 +2382,7 @@ webrtc::RTCError WebRtcVideoChannel::WebRtcVideoSendStream::SetRtpParameters( stream_->SetSource(source_, GetDegradationPreference()); } } - return webrtc::RTCError::OK(); + return webrtc::InvokeSetParametersCallback(callback, webrtc::RTCError::OK()); } webrtc::RtpParameters @@ -2433,7 +2439,7 @@ void WebRtcVideoChannel::WebRtcVideoSendStream::UpdateSendState() { } // This updates what simulcast layers are sending, and possibly starts // or stops the VideoSendStream. - stream_->UpdateActiveSimulcastLayers(active_layers); + stream_->StartPerRtpStream(active_layers); } else { if (stream_ != nullptr) { stream_->Stop(); @@ -2564,11 +2570,13 @@ WebRtcVideoChannel::WebRtcVideoSendStream::CreateVideoEncoderConfig( return encoder_config; } -void WebRtcVideoChannel::WebRtcVideoSendStream::ReconfigureEncoder() { +void WebRtcVideoChannel::WebRtcVideoSendStream::ReconfigureEncoder( + webrtc::SetParametersCallback callback) { RTC_DCHECK_RUN_ON(&thread_checker_); if (!stream_) { // The webrtc::VideoSendStream `stream_` has not yet been created but other // parameters has changed. + webrtc::InvokeSetParametersCallback(callback, webrtc::RTCError::OK()); return; } @@ -2583,7 +2591,7 @@ void WebRtcVideoChannel::WebRtcVideoSendStream::ReconfigureEncoder() { encoder_config.encoder_specific_settings = ConfigureVideoEncoderSettings(codec_settings.codec); - stream_->ReconfigureVideoEncoder(encoder_config.Copy()); + stream_->ReconfigureVideoEncoder(encoder_config.Copy(), std::move(callback)); encoder_config.encoder_specific_settings = NULL; @@ -2723,6 +2731,7 @@ WebRtcVideoChannel::WebRtcVideoSendStream::GetPerLayerVideoSenderInfos( info.total_encode_time_ms = stream_stats.total_encode_time_ms; info.total_encoded_bytes_target = stream_stats.total_encoded_bytes_target; info.huge_frames_sent = stream_stats.huge_frames_sent; + info.scalability_mode = stream_stats.scalability_mode; infos.push_back(info); } return infos; @@ -2838,7 +2847,7 @@ void WebRtcVideoChannel::WebRtcVideoSendStream::RecreateWebRtcStream() { parameters_.encoder_config.encoder_specific_settings = NULL; - // Calls stream_->UpdateActiveSimulcastLayers() to start the VideoSendStream + // Calls stream_->StartPerRtpStream() to start the VideoSendStream // if necessary conditions are met. 
UpdateSendState(); diff --git a/third_party/libwebrtc/media/engine/webrtc_video_engine.h b/third_party/libwebrtc/media/engine/webrtc_video_engine.h index ee5b8c3b5abf..03732330e501 100644 --- a/third_party/libwebrtc/media/engine/webrtc_video_engine.h +++ b/third_party/libwebrtc/media/engine/webrtc_video_engine.h @@ -149,7 +149,8 @@ class WebRtcVideoChannel : public VideoMediaChannel, webrtc::RtpParameters GetRtpSendParameters(uint32_t ssrc) const override; webrtc::RTCError SetRtpSendParameters( uint32_t ssrc, - const webrtc::RtpParameters& parameters) override; + const webrtc::RtpParameters& parameters, + webrtc::SetParametersCallback callback) override; webrtc::RtpParameters GetRtpReceiveParameters(uint32_t ssrc) const override; webrtc::RtpParameters GetDefaultRtpReceiveParameters() const override; bool GetSendCodec(VideoCodec* send_codec) override; @@ -179,7 +180,7 @@ class WebRtcVideoChannel : public VideoMediaChannel, void OnReadyToSend(bool ready) override; void OnNetworkRouteChanged(absl::string_view transport_name, const rtc::NetworkRoute& network_route) override; - void SetInterface(NetworkInterface* iface) override; + void SetInterface(MediaChannelNetworkInterface* iface) override; // E2E Encrypted Video Frame API // Set a frame decryptor to a particular ssrc that will intercept all @@ -363,7 +364,8 @@ class WebRtcVideoChannel : public VideoMediaChannel, ~WebRtcVideoSendStream(); void SetSendParameters(const ChangedSendParameters& send_params); - webrtc::RTCError SetRtpParameters(const webrtc::RtpParameters& parameters); + webrtc::RTCError SetRtpParameters(const webrtc::RtpParameters& parameters, + webrtc::SetParametersCallback callback); webrtc::RtpParameters GetRtpParameters() const; void SetFrameEncryptor( @@ -422,7 +424,7 @@ class WebRtcVideoChannel : public VideoMediaChannel, void RecreateWebRtcStream(); webrtc::VideoEncoderConfig CreateVideoEncoderConfig( const VideoCodec& codec) const; - void ReconfigureEncoder(); + void ReconfigureEncoder(webrtc::SetParametersCallback callback); // Calls Start or Stop according to whether or not `sending_` is true, // and whether or not the encoding in `rtp_parameters_` is active. diff --git a/third_party/libwebrtc/media/engine/webrtc_video_engine_unittest.cc b/third_party/libwebrtc/media/engine/webrtc_video_engine_unittest.cc index 053fd173bee6..91cd59ab37a2 100644 --- a/third_party/libwebrtc/media/engine/webrtc_video_engine_unittest.cc +++ b/third_party/libwebrtc/media/engine/webrtc_video_engine_unittest.cc @@ -437,7 +437,8 @@ TEST_F(WebRtcVideoEngineTest, CVOSetHeaderExtensionBeforeCapturer) { std::unique_ptr channel( SetSendParamsWithAllSupportedCodecs()); - EXPECT_TRUE(channel->AddSendStream(StreamParams::CreateLegacy(kSsrc))); + EXPECT_TRUE(channel->AsSendChannel()->AddSendStream( + StreamParams::CreateLegacy(kSsrc))); // Add CVO extension. const int id = 1; @@ -481,7 +482,8 @@ TEST_F(WebRtcVideoEngineTest, CVOSetHeaderExtensionBeforeAddSendStream) { parameters.extensions.push_back( RtpExtension(RtpExtension::kVideoRotationUri, id)); EXPECT_TRUE(channel->SetSendParameters(parameters)); - EXPECT_TRUE(channel->AddSendStream(StreamParams::CreateLegacy(kSsrc))); + EXPECT_TRUE(channel->AsSendChannel()->AddSendStream( + StreamParams::CreateLegacy(kSsrc))); // Set source. 
EXPECT_CALL( @@ -498,7 +500,8 @@ TEST_F(WebRtcVideoEngineTest, CVOSetHeaderExtensionAfterCapturer) { std::unique_ptr channel( SetSendParamsWithAllSupportedCodecs()); - EXPECT_TRUE(channel->AddSendStream(StreamParams::CreateLegacy(kSsrc))); + EXPECT_TRUE(channel->AsSendChannel()->AddSendStream( + StreamParams::CreateLegacy(kSsrc))); // Set capturer. EXPECT_CALL( @@ -541,7 +544,8 @@ TEST_F(WebRtcVideoEngineTest, SetSendFailsBeforeSettingCodecs) { call_.get(), GetMediaConfig(), VideoOptions(), webrtc::CryptoOptions(), video_bitrate_allocator_factory_.get())); - EXPECT_TRUE(channel->AddSendStream(StreamParams::CreateLegacy(123))); + EXPECT_TRUE( + channel->AsSendChannel()->AddSendStream(StreamParams::CreateLegacy(123))); EXPECT_FALSE(channel->SetSend(true)) << "Channel should not start without codecs."; @@ -555,7 +559,8 @@ TEST_F(WebRtcVideoEngineTest, GetStatsWithoutSendCodecsSetDoesNotCrash) { std::unique_ptr channel(engine_.CreateMediaChannel( call_.get(), GetMediaConfig(), VideoOptions(), webrtc::CryptoOptions(), video_bitrate_allocator_factory_.get())); - EXPECT_TRUE(channel->AddSendStream(StreamParams::CreateLegacy(123))); + EXPECT_TRUE( + channel->AsSendChannel()->AddSendStream(StreamParams::CreateLegacy(123))); VideoMediaInfo info; channel->GetStats(&info); } @@ -565,10 +570,10 @@ TEST_F(WebRtcVideoEngineTest, UseFactoryForVp8WhenSupported) { std::unique_ptr channel( SetSendParamsWithAllSupportedCodecs()); - channel->OnReadyToSend(true); + channel->AsSendChannel()->OnReadyToSend(true); - EXPECT_TRUE( - channel->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrc))); + EXPECT_TRUE(channel->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(kSsrc))); EXPECT_EQ(0, encoder_factory_->GetNumCreatedEncoders()); EXPECT_TRUE(channel->SetSend(true)); webrtc::test::FrameForwarder frame_forwarder; @@ -593,7 +598,7 @@ TEST_F(WebRtcVideoEngineTest, UseFactoryForVp8WhenSupported) { EXPECT_EQ(num_created_encoders, encoder_factory_->GetNumCreatedEncoders()); // Remove stream previously added to free the external encoder instance. - EXPECT_TRUE(channel->RemoveSendStream(kSsrc)); + EXPECT_TRUE(channel->AsSendChannel()->RemoveSendStream(kSsrc)); EXPECT_EQ(0u, encoder_factory_->encoders().size()); } @@ -645,8 +650,8 @@ TEST_F(WebRtcVideoEngineTest, CanConstructDecoderForVp9EncoderFactory) { std::unique_ptr channel( SetSendParamsWithAllSupportedCodecs()); - EXPECT_TRUE( - channel->AddRecvStream(cricket::StreamParams::CreateLegacy(kSsrc))); + EXPECT_TRUE(channel->AsReceiveChannel()->AddRecvStream( + cricket::StreamParams::CreateLegacy(kSsrc))); } #endif // defined(RTC_ENABLE_VP9) @@ -657,8 +662,8 @@ TEST_F(WebRtcVideoEngineTest, PropagatesInputFrameTimestamp) { std::unique_ptr channel( SetSendParamsWithAllSupportedCodecs()); - EXPECT_TRUE( - channel->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrc))); + EXPECT_TRUE(channel->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(kSsrc))); webrtc::test::FrameForwarder frame_forwarder; cricket::FakeFrameSource frame_source(1280, 720, @@ -701,7 +706,7 @@ TEST_F(WebRtcVideoEngineTest, PropagatesInputFrameTimestamp) { } // Remove stream previously added to free the external encoder instance. 
- EXPECT_TRUE(channel->RemoveSendStream(kSsrc)); + EXPECT_TRUE(channel->AsSendChannel()->RemoveSendStream(kSsrc)); } void WebRtcVideoEngineTest::AssignDefaultAptRtxTypes() { @@ -819,7 +824,8 @@ TEST_F(WebRtcVideoEngineTest, UsesSimulcastAdapterForVp8Factories) { std::vector ssrcs = MAKE_VECTOR(kSsrcs3); - EXPECT_TRUE(channel->AddSendStream(CreateSimStreamParams("cname", ssrcs))); + EXPECT_TRUE(channel->AsSendChannel()->AddSendStream( + CreateSimStreamParams("cname", ssrcs))); EXPECT_TRUE(channel->SetSend(true)); webrtc::test::FrameForwarder frame_forwarder; @@ -864,8 +870,8 @@ TEST_F(WebRtcVideoEngineTest, ChannelWithH264CanChangeToVp8) { parameters.codecs.push_back(GetEngineCodec("H264")); EXPECT_TRUE(channel->SetSendParameters(parameters)); - EXPECT_TRUE( - channel->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrc))); + EXPECT_TRUE(channel->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(kSsrc))); EXPECT_TRUE(channel->SetVideoSend(kSsrc, nullptr, &frame_forwarder)); // Sending one frame will have allocate the encoder. frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); @@ -898,7 +904,8 @@ TEST_F(WebRtcVideoEngineTest, std::vector ssrcs = MAKE_VECTOR(kSsrcs3); - EXPECT_TRUE(channel->AddSendStream(CreateSimStreamParams("cname", ssrcs))); + EXPECT_TRUE(channel->AsSendChannel()->AddSendStream( + CreateSimStreamParams("cname", ssrcs))); EXPECT_TRUE(channel->SetSend(true)); // Send a fake frame, or else the media engine will configure the simulcast @@ -933,8 +940,8 @@ TEST_F(WebRtcVideoEngineTest, parameters.codecs.push_back(GetEngineCodec("H264")); EXPECT_TRUE(channel->SetSendParameters(parameters)); - EXPECT_TRUE( - channel->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrc))); + EXPECT_TRUE(channel->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(kSsrc))); // Send a frame of 720p. This should trigger a "real" encoder initialization. webrtc::test::FrameForwarder frame_forwarder; @@ -967,8 +974,8 @@ TEST_F(WebRtcVideoEngineTest, SimulcastEnabledForH264BehindFieldTrial) { EXPECT_TRUE(channel->SetSendParameters(parameters)); const std::vector ssrcs = MAKE_VECTOR(kSsrcs3); - EXPECT_TRUE( - channel->AddSendStream(cricket::CreateSimStreamParams("cname", ssrcs))); + EXPECT_TRUE(channel->AsSendChannel()->AddSendStream( + cricket::CreateSimStreamParams("cname", ssrcs))); // Send a frame of 720p. This should trigger a "real" encoder initialization. webrtc::test::FrameForwarder frame_forwarder; @@ -1086,8 +1093,8 @@ TEST_F(WebRtcVideoEngineTest, RegisterDecodersIfSupported) { std::unique_ptr channel( SetRecvParamsWithSupportedCodecs(parameters.codecs)); - EXPECT_TRUE( - channel->AddRecvStream(cricket::StreamParams::CreateLegacy(kSsrc))); + EXPECT_TRUE(channel->AsReceiveChannel()->AddRecvStream( + cricket::StreamParams::CreateLegacy(kSsrc))); // Decoders are not created until they are used. time_controller_.AdvanceTime(webrtc::TimeDelta::Zero()); EXPECT_EQ(0u, decoder_factory_->decoders().size()); @@ -1097,7 +1104,7 @@ TEST_F(WebRtcVideoEngineTest, RegisterDecodersIfSupported) { EXPECT_EQ(0, decoder_factory_->GetNumCreatedDecoders()); // Remove stream previously added to free the external decoder instance. 
- EXPECT_TRUE(channel->RemoveRecvStream(kSsrc)); + EXPECT_TRUE(channel->AsReceiveChannel()->RemoveRecvStream(kSsrc)); EXPECT_EQ(0u, decoder_factory_->decoders().size()); } @@ -1114,8 +1121,8 @@ TEST_F(WebRtcVideoEngineTest, RegisterH264DecoderIfSupported) { std::unique_ptr channel( SetRecvParamsWithSupportedCodecs(codecs)); - EXPECT_TRUE( - channel->AddRecvStream(cricket::StreamParams::CreateLegacy(kSsrc))); + EXPECT_TRUE(channel->AsReceiveChannel()->AddRecvStream( + cricket::StreamParams::CreateLegacy(kSsrc))); // Decoders are not created until they are used. time_controller_.AdvanceTime(webrtc::TimeDelta::Zero()); ASSERT_EQ(0u, decoder_factory_->decoders().size()); @@ -1131,8 +1138,8 @@ TEST_F(WebRtcVideoEngineTest, GetSourcesWithNonExistingSsrc) { std::unique_ptr channel( SetRecvParamsWithSupportedCodecs(parameters.codecs)); - EXPECT_TRUE( - channel->AddRecvStream(cricket::StreamParams::CreateLegacy(kSsrc))); + EXPECT_TRUE(channel->AsReceiveChannel()->AddRecvStream( + cricket::StreamParams::CreateLegacy(kSsrc))); // Call GetSources with |kSsrc + 1| which doesn't exist. std::vector sources = channel->GetSources(kSsrc + 1); @@ -1258,9 +1265,9 @@ TEST(WebRtcVideoEngineNewVideoCodecFactoryTest, Vp8) { cricket::VideoSendParameters send_parameters; send_parameters.codecs.push_back(engine_codecs.at(0)); EXPECT_TRUE(send_channel->SetSendParameters(send_parameters)); - send_channel->OnReadyToSend(true); - EXPECT_TRUE( - send_channel->AddSendStream(StreamParams::CreateLegacy(send_ssrc))); + send_channel->AsSendChannel()->OnReadyToSend(true); + EXPECT_TRUE(send_channel->AsSendChannel()->AddSendStream( + StreamParams::CreateLegacy(send_ssrc))); EXPECT_TRUE(send_channel->SetSend(true)); // Set capturer. @@ -1280,15 +1287,15 @@ TEST(WebRtcVideoEngineNewVideoCodecFactoryTest, Vp8) { cricket::VideoRecvParameters recv_parameters; recv_parameters.codecs.push_back(engine_codecs.at(0)); EXPECT_TRUE(recv_channel->SetRecvParameters(recv_parameters)); - EXPECT_TRUE(recv_channel->AddRecvStream( + EXPECT_TRUE(recv_channel->AsReceiveChannel()->AddRecvStream( cricket::StreamParams::CreateLegacy(recv_ssrc))); // Remove streams previously added to free the encoder and decoder instance. 
EXPECT_CALL(*encoder_factory, Die()); EXPECT_CALL(*decoder_factory, Die()); EXPECT_CALL(*rate_allocator_factory, Die()); - EXPECT_TRUE(send_channel->RemoveSendStream(send_ssrc)); - EXPECT_TRUE(recv_channel->RemoveRecvStream(recv_ssrc)); + EXPECT_TRUE(send_channel->AsSendChannel()->RemoveSendStream(send_ssrc)); + EXPECT_TRUE(recv_channel->AsReceiveChannel()->RemoveRecvStream(recv_ssrc)); } TEST_F(WebRtcVideoEngineTest, DISABLED_RecreatesEncoderOnContentTypeChange) { @@ -1296,12 +1303,12 @@ TEST_F(WebRtcVideoEngineTest, DISABLED_RecreatesEncoderOnContentTypeChange) { std::unique_ptr fake_call(new FakeCall()); std::unique_ptr channel( SetSendParamsWithAllSupportedCodecs()); - ASSERT_TRUE( - channel->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrc))); + ASSERT_TRUE(channel->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(kSsrc))); cricket::VideoCodec codec = GetEngineCodec("VP8"); cricket::VideoSendParameters parameters; parameters.codecs.push_back(codec); - channel->OnReadyToSend(true); + channel->AsSendChannel()->OnReadyToSend(true); channel->SetSend(true); ASSERT_TRUE(channel->SetSendParameters(parameters)); @@ -1347,7 +1354,7 @@ TEST_F(WebRtcVideoEngineTest, DISABLED_RecreatesEncoderOnContentTypeChange) { encoder_factory_->encoders().back()->GetCodecSettings().mode); // Remove stream previously added to free the external encoder instance. - EXPECT_TRUE(channel->RemoveSendStream(kSsrc)); + EXPECT_TRUE(channel->AsSendChannel()->RemoveSendStream(kSsrc)); EXPECT_EQ(0u, encoder_factory_->encoders().size()); } @@ -1467,7 +1474,7 @@ TEST_F(WebRtcVideoChannelEncodedFrameCallbackTest, EXPECT_TRUE(channel_->SetSink(kSsrc, &renderer_)); DeliverKeyFrame(kSsrc); EXPECT_EQ_WAIT(1, renderer_.num_rendered_frames(), kTimeout); - channel_->RemoveRecvStream(kSsrc); + channel_->AsReceiveChannel()->RemoveRecvStream(kSsrc); } TEST_F(WebRtcVideoChannelEncodedFrameCallbackTest, @@ -1480,7 +1487,7 @@ TEST_F(WebRtcVideoChannelEncodedFrameCallbackTest, channel_->SetRecordableEncodedFrameCallback(kSsrc, callback.AsStdFunction()); DeliverKeyFrame(kSsrc); EXPECT_EQ_WAIT(1, renderer_.num_rendered_frames(), kTimeout); - channel_->RemoveRecvStream(kSsrc); + channel_->AsReceiveChannel()->RemoveRecvStream(kSsrc); } TEST_F(WebRtcVideoChannelEncodedFrameCallbackTest, @@ -1493,7 +1500,7 @@ TEST_F(WebRtcVideoChannelEncodedFrameCallbackTest, channel_->SetRecordableEncodedFrameCallback(kSsrc, callback.AsStdFunction()); DeliverKeyFrame(kSsrc); EXPECT_EQ_WAIT(1, renderer_.num_rendered_frames(), kTimeout); - channel_->RemoveRecvStream(kSsrc); + channel_->AsReceiveChannel()->RemoveRecvStream(kSsrc); } TEST_F(WebRtcVideoChannelEncodedFrameCallbackTest, @@ -1508,7 +1515,7 @@ TEST_F(WebRtcVideoChannelEncodedFrameCallbackTest, channel_->SetRecordableEncodedFrameCallback(kSsrc, callback.AsStdFunction()); DeliverKeyFrame(kSsrc); // Expected to not cause function to fire. DeliverKeyFrameAndWait(kSsrc + 1); - channel_->RemoveRecvStream(kSsrc + 1); + channel_->AsReceiveChannel()->RemoveRecvStream(kSsrc + 1); } TEST_F(WebRtcVideoChannelEncodedFrameCallbackTest, @@ -1523,7 +1530,7 @@ TEST_F(WebRtcVideoChannelEncodedFrameCallbackTest, channel_->SetRecordableEncodedFrameCallback(kSsrc, callback.AsStdFunction()); DeliverKeyFrame(kSsrc); // Expected to not cause function to fire. 
DeliverKeyFrameAndWait(kSsrc + 1); - channel_->RemoveRecvStream(kSsrc + 1); + channel_->AsReceiveChannel()->RemoveRecvStream(kSsrc + 1); } class WebRtcVideoChannelBaseTest : public ::testing::Test { @@ -1554,14 +1561,15 @@ class WebRtcVideoChannelBaseTest : public ::testing::Test { static_cast(engine_.CreateMediaChannel( call_.get(), media_config, cricket::VideoOptions(), webrtc::CryptoOptions(), video_bitrate_allocator_factory_.get()))); - channel_->OnReadyToSend(true); + channel_->AsSendChannel()->OnReadyToSend(true); EXPECT_TRUE(channel_.get() != NULL); network_interface_.SetDestination(channel_.get()); channel_->SetInterface(&network_interface_); cricket::VideoRecvParameters parameters; parameters.codecs = engine_.send_codecs(); channel_->SetRecvParameters(parameters); - EXPECT_TRUE(channel_->AddSendStream(DefaultSendStreamParams())); + EXPECT_TRUE( + channel_->AsSendChannel()->AddSendStream(DefaultSendStreamParams())); frame_forwarder_ = std::make_unique(); frame_source_ = std::make_unique( 640, 480, rtc::kNumMicrosecsPerSec / kFramerate); @@ -1573,7 +1581,7 @@ class WebRtcVideoChannelBaseTest : public ::testing::Test { void SetUpSecondStream() { SetUpSecondStreamWithNoRecv(); // Setup recv for second stream. - EXPECT_TRUE(channel_->AddRecvStream( + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream( cricket::StreamParams::CreateLegacy(kSsrc + 2))); // Make the second renderer available for use by a new stream. EXPECT_TRUE(channel_->SetSink(kSsrc + 2, &renderer2_)); @@ -1583,12 +1591,12 @@ class WebRtcVideoChannelBaseTest : public ::testing::Test { // This is required if you want to test unsignalled recv of video rtp packets. void SetUpSecondStreamWithNoRecv() { // SetUp() already added kSsrc make sure duplicate SSRCs cant be added. - EXPECT_TRUE( - channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(kSsrc))); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream( + cricket::StreamParams::CreateLegacy(kSsrc))); EXPECT_TRUE(channel_->SetSink(kSsrc, &renderer_)); - EXPECT_FALSE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrc))); - EXPECT_TRUE(channel_->AddSendStream( + EXPECT_FALSE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(kSsrc))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( cricket::StreamParams::CreateLegacy(kSsrc + 2))); // We dont add recv for the second stream. 
@@ -1841,8 +1849,10 @@ TEST_F(WebRtcVideoChannelBaseTest, GetStatsMultipleRecvStreams) { parameters.conference_mode = true; EXPECT_TRUE(channel_->SetSendParameters(parameters)); EXPECT_TRUE(SetSend(true)); - EXPECT_TRUE(channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(1))); - EXPECT_TRUE(channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(2))); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream( + cricket::StreamParams::CreateLegacy(1))); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream( + cricket::StreamParams::CreateLegacy(2))); EXPECT_TRUE(channel_->SetSink(1, &renderer1)); EXPECT_TRUE(channel_->SetSink(2, &renderer2)); EXPECT_EQ(0, renderer1.num_rendered_frames()); @@ -1891,8 +1901,8 @@ TEST_F(WebRtcVideoChannelBaseTest, GetStatsMultipleSendStreams) { parameters.codecs.push_back(DefaultCodec()); parameters.conference_mode = true; EXPECT_TRUE(channel_->SetSendParameters(parameters)); - EXPECT_TRUE( - channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(kSsrc))); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream( + cricket::StreamParams::CreateLegacy(kSsrc))); EXPECT_TRUE(channel_->SetSink(kSsrc, &renderer_)); EXPECT_TRUE(SetSend(true)); SendFrame(); @@ -1906,11 +1916,11 @@ TEST_F(WebRtcVideoChannelBaseTest, GetStatsMultipleSendStreams) { const int kTestHeight = 120; cricket::FakeFrameSource frame_source(kTestWidth, kTestHeight, rtc::kNumMicrosecsPerSec / 5); - EXPECT_TRUE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(5678))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(5678))); EXPECT_TRUE(channel_->SetVideoSend(5678, nullptr, &frame_forwarder)); - EXPECT_TRUE( - channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(5678))); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream( + cricket::StreamParams::CreateLegacy(5678))); EXPECT_TRUE(channel_->SetSink(5678, &renderer2)); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); EXPECT_FRAME_ON_RENDERER_WAIT(renderer2, 1, kTestWidth, kTestHeight, @@ -1978,10 +1988,10 @@ TEST_F(WebRtcVideoChannelBaseTest, SetSendSsrc) { // Test that we can set the SSRC even after codecs are set. TEST_F(WebRtcVideoChannelBaseTest, SetSendSsrcAfterSetCodecs) { // Remove stream added in Setup. - EXPECT_TRUE(channel_->RemoveSendStream(kSsrc)); + EXPECT_TRUE(channel_->AsSendChannel()->RemoveSendStream(kSsrc)); EXPECT_TRUE(SetDefaultCodec()); - EXPECT_TRUE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(999))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(999))); EXPECT_TRUE(channel_->SetVideoSend(999u, nullptr, frame_forwarder_.get())); EXPECT_TRUE(SetSend(true)); EXPECT_TRUE(WaitAndSendFrame(0)); @@ -2027,11 +2037,11 @@ TEST_F(WebRtcVideoChannelBaseTest, AddRemoveSendStreams) { EXPECT_EQ(kSsrc, header.Ssrc()); // Remove the send stream that was added during Setup. - EXPECT_TRUE(channel_->RemoveSendStream(kSsrc)); + EXPECT_TRUE(channel_->AsSendChannel()->RemoveSendStream(kSsrc)); int rtp_packets = NumRtpPackets(); - EXPECT_TRUE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(789u))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(789u))); EXPECT_TRUE(channel_->SetVideoSend(789u, nullptr, frame_forwarder_.get())); EXPECT_EQ(rtp_packets, NumRtpPackets()); // Wait 30ms to guarantee the engine does not drop the frame. 
@@ -2052,8 +2062,10 @@ TEST_F(WebRtcVideoChannelBaseTest, SimulateConference) { parameters.conference_mode = true; EXPECT_TRUE(channel_->SetSendParameters(parameters)); EXPECT_TRUE(SetSend(true)); - EXPECT_TRUE(channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(1))); - EXPECT_TRUE(channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(2))); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream( + cricket::StreamParams::CreateLegacy(1))); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream( + cricket::StreamParams::CreateLegacy(2))); EXPECT_TRUE(channel_->SetSink(1, &renderer1)); EXPECT_TRUE(channel_->SetSink(2, &renderer2)); EXPECT_EQ(0, renderer1.num_rendered_frames()); @@ -2073,8 +2085,8 @@ TEST_F(WebRtcVideoChannelBaseTest, SimulateConference) { EXPECT_EQ(kVideoHeight, renderer1.height()); EXPECT_EQ(kVideoWidth, renderer2.width()); EXPECT_EQ(kVideoHeight, renderer2.height()); - EXPECT_TRUE(channel_->RemoveRecvStream(2)); - EXPECT_TRUE(channel_->RemoveRecvStream(1)); + EXPECT_TRUE(channel_->AsReceiveChannel()->RemoveRecvStream(2)); + EXPECT_TRUE(channel_->AsReceiveChannel()->RemoveRecvStream(1)); } // Tests that we can add and remove capturers and frames are sent out properly @@ -2172,8 +2184,8 @@ TEST_F(WebRtcVideoChannelBaseTest, AddRemoveCapturerMultipleSources) { // WebRTC implementation will drop frames if pushed to quickly. Wait the // interval time to avoid that. // Set up the stream associated with the engine. - EXPECT_TRUE( - channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(kSsrc))); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream( + cricket::StreamParams::CreateLegacy(kSsrc))); EXPECT_TRUE(channel_->SetSink(kSsrc, &renderer_)); cricket::VideoFormat capture_format( kVideoWidth, kVideoHeight, @@ -2181,9 +2193,11 @@ TEST_F(WebRtcVideoChannelBaseTest, AddRemoveCapturerMultipleSources) { // Set up additional stream 1. cricket::FakeVideoRenderer renderer1; EXPECT_FALSE(channel_->SetSink(1, &renderer1)); - EXPECT_TRUE(channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(1))); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream( + cricket::StreamParams::CreateLegacy(1))); EXPECT_TRUE(channel_->SetSink(1, &renderer1)); - EXPECT_TRUE(channel_->AddSendStream(cricket::StreamParams::CreateLegacy(1))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(1))); webrtc::test::FrameForwarder frame_forwarder1; cricket::FakeFrameSource frame_source(kVideoWidth, kVideoHeight, @@ -2192,9 +2206,11 @@ TEST_F(WebRtcVideoChannelBaseTest, AddRemoveCapturerMultipleSources) { // Set up additional stream 2. cricket::FakeVideoRenderer renderer2; EXPECT_FALSE(channel_->SetSink(2, &renderer2)); - EXPECT_TRUE(channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(2))); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream( + cricket::StreamParams::CreateLegacy(2))); EXPECT_TRUE(channel_->SetSink(2, &renderer2)); - EXPECT_TRUE(channel_->AddSendStream(cricket::StreamParams::CreateLegacy(2))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(2))); webrtc::test::FrameForwarder frame_forwarder2; // State for all the streams. @@ -2230,29 +2246,31 @@ TEST_F(WebRtcVideoChannelBaseTest, AddRemoveCapturerMultipleSources) { // Tests empty StreamParams is rejected. TEST_F(WebRtcVideoChannelBaseTest, RejectEmptyStreamParams) { // Remove the send stream that was added during Setup. 
- EXPECT_TRUE(channel_->RemoveSendStream(kSsrc)); + EXPECT_TRUE(channel_->AsSendChannel()->RemoveSendStream(kSsrc)); cricket::StreamParams empty; - EXPECT_FALSE(channel_->AddSendStream(empty)); - EXPECT_TRUE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(789u))); + EXPECT_FALSE(channel_->AsSendChannel()->AddSendStream(empty)); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(789u))); } // Test that multiple send streams can be created and deleted properly. TEST_F(WebRtcVideoChannelBaseTest, MultipleSendStreams) { // Remove stream added in Setup. I.e. remove stream corresponding to default // channel. - EXPECT_TRUE(channel_->RemoveSendStream(kSsrc)); + EXPECT_TRUE(channel_->AsSendChannel()->RemoveSendStream(kSsrc)); const unsigned int kSsrcsSize = sizeof(kSsrcs4) / sizeof(kSsrcs4[0]); for (unsigned int i = 0; i < kSsrcsSize; ++i) { - EXPECT_TRUE(channel_->AddSendStream( + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( cricket::StreamParams::CreateLegacy(kSsrcs4[i]))); } // Delete one of the non default channel streams, let the destructor delete // the remaining ones. - EXPECT_TRUE(channel_->RemoveSendStream(kSsrcs4[kSsrcsSize - 1])); + EXPECT_TRUE( + channel_->AsSendChannel()->RemoveSendStream(kSsrcs4[kSsrcsSize - 1])); // Stream should already be deleted. - EXPECT_FALSE(channel_->RemoveSendStream(kSsrcs4[kSsrcsSize - 1])); + EXPECT_FALSE( + channel_->AsSendChannel()->RemoveSendStream(kSsrcs4[kSsrcsSize - 1])); } TEST_F(WebRtcVideoChannelBaseTest, SendAndReceiveVp8Vga) { @@ -2366,7 +2384,8 @@ TEST_F(WebRtcVideoChannelBaseTest, SendCodecIsMovedToFrontInRtpParameters) { EXPECT_TRUE(channel_->SetSendParameters(parameters)); channel_->SetVideoCodecSwitchingEnabled(true); - auto send_codecs = channel_->GetRtpSendParameters(kSsrc).codecs; + auto send_codecs = + channel_->AsSendChannel()->GetRtpSendParameters(kSsrc).codecs; ASSERT_EQ(send_codecs.size(), 2u); EXPECT_THAT("VP9", send_codecs[0].name); @@ -2375,7 +2394,7 @@ TEST_F(WebRtcVideoChannelBaseTest, SendCodecIsMovedToFrontInRtpParameters) { channel_->RequestEncoderFallback(); rtc::Thread::Current()->ProcessMessages(30); - send_codecs = channel_->GetRtpSendParameters(kSsrc).codecs; + send_codecs = channel_->AsSendChannel()->GetRtpSendParameters(kSsrc).codecs; ASSERT_EQ(send_codecs.size(), 2u); EXPECT_THAT("VP8", send_codecs[0].name); } @@ -2400,7 +2419,7 @@ class WebRtcVideoChannelTest : public WebRtcVideoEngineTest { channel_.reset(engine_.CreateMediaChannel( fake_call_.get(), GetMediaConfig(), VideoOptions(), webrtc::CryptoOptions(), video_bitrate_allocator_factory_.get())); - channel_->OnReadyToSend(true); + channel_->AsSendChannel()->OnReadyToSend(true); last_ssrc_ = 123; send_parameters_.codecs = engine_.send_codecs(); recv_parameters_.codecs = engine_.recv_codecs(); @@ -2434,7 +2453,7 @@ class WebRtcVideoChannelTest : public WebRtcVideoEngineTest { // the unsignalled receive stream cooldown is no longer in effect. 
void ReceivePacketAndAdvanceTime(rtc::CopyOnWriteBuffer packet, int64_t packet_time_us) { - channel_->OnPacketReceived(packet, packet_time_us); + channel_->AsReceiveChannel()->OnPacketReceived(packet, packet_time_us); rtc::Thread::Current()->ProcessMessages(0); time_controller_.AdvanceTime( webrtc::TimeDelta::Millis(kUnsignalledReceiveStreamCooldownMs)); @@ -2447,7 +2466,7 @@ class WebRtcVideoChannelTest : public WebRtcVideoEngineTest { FakeVideoSendStream* AddSendStream(const StreamParams& sp) { size_t num_streams = fake_call_->GetVideoSendStreams().size(); - EXPECT_TRUE(channel_->AddSendStream(sp)); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream(sp)); std::vector streams = fake_call_->GetVideoSendStreams(); EXPECT_EQ(num_streams + 1, streams.size()); @@ -2464,7 +2483,7 @@ class WebRtcVideoChannelTest : public WebRtcVideoEngineTest { FakeVideoReceiveStream* AddRecvStream(const StreamParams& sp) { size_t num_streams = fake_call_->GetVideoReceiveStreams().size(); - EXPECT_TRUE(channel_->AddRecvStream(sp)); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream(sp)); std::vector streams = fake_call_->GetVideoReceiveStreams(); EXPECT_EQ(num_streams + 1, streams.size()); @@ -2509,8 +2528,8 @@ class WebRtcVideoChannelTest : public WebRtcVideoEngineTest { void TestExtmapAllowMixedCaller(bool extmap_allow_mixed) { // For a caller, the answer will be applied in set remote description // where SetSendParameters() is called. - EXPECT_TRUE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrc))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(kSsrc))); send_parameters_.extmap_allow_mixed = extmap_allow_mixed; EXPECT_TRUE(channel_->SetSendParameters(send_parameters_)); const webrtc::VideoSendStream::Config& config = @@ -2521,9 +2540,9 @@ class WebRtcVideoChannelTest : public WebRtcVideoEngineTest { void TestExtmapAllowMixedCallee(bool extmap_allow_mixed) { // For a callee, the answer will be applied in set local description // where SetExtmapAllowMixed() and AddSendStream() are called. 
- channel_->SetExtmapAllowMixed(extmap_allow_mixed); - EXPECT_TRUE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrc))); + channel_->AsSendChannel()->SetExtmapAllowMixed(extmap_allow_mixed); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(kSsrc))); const webrtc::VideoSendStream::Config& config = fake_call_->GetVideoSendStreams()[0]->GetConfig(); EXPECT_EQ(extmap_allow_mixed, config.rtp.extmap_allow_mixed); @@ -2702,12 +2721,14 @@ class WebRtcVideoChannelTest : public WebRtcVideoEngineTest { limited_send_params.max_bandwidth_bps = global_max; EXPECT_TRUE(channel_->SetSendParameters(limited_send_params)); webrtc::RtpParameters parameters = - channel_->GetRtpSendParameters(last_ssrc_); + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1UL, parameters.encodings.size()); parameters.encodings[0].max_bitrate_bps = stream_max; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); // Read back the parameteres and verify they have the correct value - parameters = channel_->GetRtpSendParameters(last_ssrc_); + parameters = channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1UL, parameters.encodings.size()); EXPECT_EQ(stream_max, parameters.encodings[0].max_bitrate_bps); // Verify that the new value propagated down to the encoder @@ -2743,7 +2764,7 @@ TEST_F(WebRtcVideoChannelTest, SetsSyncGroupFromSyncLabel) { cricket::StreamParams sp = cricket::StreamParams::CreateLegacy(kVideoSsrc); sp.set_stream_ids({kSyncLabel}); - EXPECT_TRUE(channel_->AddRecvStream(sp)); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream(sp)); EXPECT_EQ(1u, fake_call_->GetVideoReceiveStreams().size()); EXPECT_EQ(kSyncLabel, @@ -3037,7 +3058,8 @@ TEST_F(WebRtcVideoChannelTest, SetRecvRtpHeaderExtensionsRejectsDuplicateIds) { } TEST_F(WebRtcVideoChannelTest, AddRecvStreamOnlyUsesOneReceiveStream) { - EXPECT_TRUE(channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(1))); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream( + cricket::StreamParams::CreateLegacy(1))); EXPECT_EQ(1u, fake_call_->GetVideoReceiveStreams().size()); } @@ -3318,7 +3340,7 @@ TEST_F(WebRtcVideoChannelTest, SetMediaConfigSuspendBelowMinBitrate) { channel_.reset(engine_.CreateMediaChannel( fake_call_.get(), media_config, VideoOptions(), webrtc::CryptoOptions(), video_bitrate_allocator_factory_.get())); - channel_->OnReadyToSend(true); + channel_->AsSendChannel()->OnReadyToSend(true); channel_->SetSendParameters(send_parameters_); @@ -3329,7 +3351,7 @@ TEST_F(WebRtcVideoChannelTest, SetMediaConfigSuspendBelowMinBitrate) { channel_.reset(engine_.CreateMediaChannel( fake_call_.get(), media_config, VideoOptions(), webrtc::CryptoOptions(), video_bitrate_allocator_factory_.get())); - channel_->OnReadyToSend(true); + channel_->AsSendChannel()->OnReadyToSend(true); channel_->SetSendParameters(send_parameters_); @@ -3498,13 +3520,15 @@ TEST_F(Vp9SettingsTest, VerifyVp9SpecificSettings) { EXPECT_TRUE(vp9_settings.automaticResizeOn); webrtc::RtpParameters rtp_parameters = - channel_->GetRtpSendParameters(last_ssrc_); + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_THAT( rtp_parameters.encodings, ElementsAre(Field(&webrtc::RtpEncodingParameters::scalability_mode, absl::nullopt))); rtp_parameters.encodings[0].scalability_mode = "L2T1"; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, 
rtp_parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, rtp_parameters) + .ok()); ASSERT_TRUE(stream->GetVp9Settings(&vp9_settings)) << "No VP9 config set."; EXPECT_TRUE(vp9_settings.denoisingOn); @@ -3512,12 +3536,14 @@ TEST_F(Vp9SettingsTest, VerifyVp9SpecificSettings) { EXPECT_FALSE(vp9_settings.automaticResizeOn) << "Automatic resize off for multiple spatial layers."; - rtp_parameters = channel_->GetRtpSendParameters(last_ssrc_); + rtp_parameters = channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_THAT(rtp_parameters.encodings, ElementsAre(Field( &webrtc::RtpEncodingParameters::scalability_mode, "L2T1"))); rtp_parameters.encodings[0].scalability_mode = "L1T1"; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, rtp_parameters) + .ok()); ASSERT_TRUE(stream->GetVp9Settings(&vp9_settings)) << "No VP9 config set."; EXPECT_TRUE(vp9_settings.denoisingOn); @@ -3619,14 +3645,17 @@ TEST_F(Vp9SettingsTest, AllEncodingParametersCopied) { FakeVideoSendStream* stream = AddSendStream(CreateSimStreamParams("cname", ssrcs)); - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(ssrcs[0]); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(ssrcs[0]); ASSERT_EQ(kNumSpatialLayers, parameters.encodings.size()); ASSERT_TRUE(parameters.encodings[0].active); ASSERT_TRUE(parameters.encodings[1].active); ASSERT_TRUE(parameters.encodings[2].active); // Invert value to verify copying. parameters.encodings[1].active = false; - EXPECT_TRUE(channel_->SetRtpSendParameters(ssrcs[0], parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(ssrcs[0], parameters) + .ok()); webrtc::VideoEncoderConfig encoder_config = stream->GetEncoderConfig().Copy(); @@ -3859,7 +3888,7 @@ TEST_F(WebRtcVideoChannelTest, PreviousAdaptationDoesNotApplyToScreenshare) { channel_.reset(engine_.CreateMediaChannel( fake_call_.get(), media_config, VideoOptions(), webrtc::CryptoOptions(), video_bitrate_allocator_factory_.get())); - channel_->OnReadyToSend(true); + channel_->AsSendChannel()->OnReadyToSend(true); ASSERT_TRUE(channel_->SetSendParameters(parameters)); AddSendStream(); @@ -3909,7 +3938,7 @@ void WebRtcVideoChannelTest::TestDegradationPreference( channel_.reset(engine_.CreateMediaChannel( fake_call_.get(), media_config, VideoOptions(), webrtc::CryptoOptions(), video_bitrate_allocator_factory_.get())); - channel_->OnReadyToSend(true); + channel_->AsSendChannel()->OnReadyToSend(true); EXPECT_TRUE(channel_->SetSendParameters(parameters)); @@ -3942,7 +3971,7 @@ void WebRtcVideoChannelTest::TestCpuAdaptation(bool enable_overuse, channel_.reset(engine_.CreateMediaChannel( fake_call_.get(), media_config, VideoOptions(), webrtc::CryptoOptions(), video_bitrate_allocator_factory_.get())); - channel_->OnReadyToSend(true); + channel_->AsSendChannel()->OnReadyToSend(true); EXPECT_TRUE(channel_->SetSendParameters(parameters)); @@ -4673,10 +4702,13 @@ TEST_F(WebRtcVideoChannelTest, EXPECT_EQ(300000, video_send_stream->GetVideoStreams()[0].max_bitrate_bps); // The RtpParameter max bitrate overrides the codec's. 
- webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); ASSERT_EQ(1u, parameters.encodings.size()); parameters.encodings[0].max_bitrate_bps = 500000; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); ASSERT_EQ(1u, video_send_stream->GetVideoStreams().size()); EXPECT_EQ(parameters.encodings[0].max_bitrate_bps, video_send_stream->GetVideoStreams()[0].max_bitrate_bps); @@ -4693,16 +4725,21 @@ TEST_F(WebRtcVideoChannelTest, stream->GetVideoStreams()[0].max_bitrate_bps); // Get and set the rtp encoding parameters. - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1u, parameters.encodings.size()); parameters.encodings[0].max_bitrate_bps = 99999 - 1; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); EXPECT_EQ(parameters.encodings[0].max_bitrate_bps, stream->GetVideoStreams()[0].max_bitrate_bps); parameters.encodings[0].max_bitrate_bps = 99999 + 1; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); EXPECT_EQ(send_parameters_.max_bandwidth_bps, stream->GetVideoStreams()[0].max_bitrate_bps); } @@ -5288,16 +5325,21 @@ TEST_F(WebRtcVideoChannelTest, TestSetDscpOptions) { EXPECT_EQ(rtc::DSCP_DEFAULT, network_interface->dscp()); // Create a send stream to configure - EXPECT_TRUE(channel->AddSendStream(StreamParams::CreateLegacy(kSsrc))); - parameters = channel->GetRtpSendParameters(kSsrc); + EXPECT_TRUE(channel->AsSendChannel()->AddSendStream( + StreamParams::CreateLegacy(kSsrc))); + parameters = channel->AsSendChannel()->GetRtpSendParameters(kSsrc); ASSERT_FALSE(parameters.encodings.empty()); // Various priorities map to various dscp values. parameters.encodings[0].network_priority = webrtc::Priority::kHigh; - ASSERT_TRUE(channel->SetRtpSendParameters(kSsrc, parameters).ok()); + ASSERT_TRUE(channel->AsSendChannel() + ->SetRtpSendParameters(kSsrc, parameters, nullptr) + .ok()); EXPECT_EQ(rtc::DSCP_AF41, network_interface->dscp()); parameters.encodings[0].network_priority = webrtc::Priority::kVeryLow; - ASSERT_TRUE(channel->SetRtpSendParameters(kSsrc, parameters).ok()); + ASSERT_TRUE(channel->AsSendChannel() + ->SetRtpSendParameters(kSsrc, parameters, nullptr) + .ok()); EXPECT_EQ(rtc::DSCP_CS1, network_interface->dscp()); // Packets should also self-identify their dscp in PacketOptions. @@ -5326,7 +5368,7 @@ TEST_F(WebRtcVideoChannelTest, TestSetSendRtcpReducedSize) { FakeVideoSendStream* stream1 = AddSendStream(); EXPECT_EQ(webrtc::RtcpMode::kCompound, stream1->GetConfig().rtp.rtcp_mode); webrtc::RtpParameters rtp_parameters = - channel_->GetRtpSendParameters(last_ssrc_); + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_FALSE(rtp_parameters.rtcp.reduced_size); // Now enable reduced size mode. 
@@ -5334,7 +5376,7 @@ TEST_F(WebRtcVideoChannelTest, TestSetSendRtcpReducedSize) { EXPECT_TRUE(channel_->SetSendParameters(send_parameters_)); stream1 = fake_call_->GetVideoSendStreams()[0]; EXPECT_EQ(webrtc::RtcpMode::kReducedSize, stream1->GetConfig().rtp.rtcp_mode); - rtp_parameters = channel_->GetRtpSendParameters(last_ssrc_); + rtp_parameters = channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_TRUE(rtp_parameters.rtcp.reduced_size); // Create a new stream and ensure it picks up the reduced size mode. @@ -5368,13 +5410,13 @@ TEST_F(WebRtcVideoChannelTest, OnReadyToSendSignalsNetworkState) { EXPECT_EQ(webrtc::kNetworkUp, fake_call_->GetNetworkState(webrtc::MediaType::AUDIO)); - channel_->OnReadyToSend(false); + channel_->AsSendChannel()->OnReadyToSend(false); EXPECT_EQ(webrtc::kNetworkDown, fake_call_->GetNetworkState(webrtc::MediaType::VIDEO)); EXPECT_EQ(webrtc::kNetworkUp, fake_call_->GetNetworkState(webrtc::MediaType::AUDIO)); - channel_->OnReadyToSend(true); + channel_->AsSendChannel()->OnReadyToSend(true); EXPECT_EQ(webrtc::kNetworkUp, fake_call_->GetNetworkState(webrtc::MediaType::VIDEO)); EXPECT_EQ(webrtc::kNetworkUp, @@ -5834,11 +5876,12 @@ TEST_F(WebRtcVideoChannelTest, FakeVideoSendStream* stream = AddSendStream(cricket::CreateSimStreamParams("cname", {kSsrc1, kSsrc2})); - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(kSsrc1); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(kSsrc1); ASSERT_EQ(2u, parameters.encodings.size()); parameters.encodings[0].active = false; parameters.encodings[1].active = true; - channel_->SetRtpSendParameters(kSsrc1, parameters); + channel_->AsSendChannel()->SetRtpSendParameters(kSsrc1, parameters); // Fill in dummy stats. auto stats = GetInitialisedStats(); @@ -5876,12 +5919,13 @@ TEST_F(WebRtcVideoChannelTest, OutboundRtpIsActiveComesFromAnyEncodingInSvc) { ASSERT_TRUE(stream->GetVp9Settings(&vp9_settings)); EXPECT_EQ(vp9_settings.numberOfSpatialLayers, 3u); - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(kSsrc1); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(kSsrc1); ASSERT_EQ(3u, parameters.encodings.size()); parameters.encodings[0].active = false; parameters.encodings[1].active = true; parameters.encodings[2].active = false; - channel_->SetRtpSendParameters(kSsrc1, parameters); + channel_->AsSendChannel()->SetRtpSendParameters(kSsrc1, parameters); // Fill in dummy stats. auto stats = GetInitialisedStats(); @@ -5896,12 +5940,12 @@ TEST_F(WebRtcVideoChannelTest, OutboundRtpIsActiveComesFromAnyEncodingInSvc) { ASSERT_TRUE(video_media_info.senders[0].active.has_value()); EXPECT_TRUE(video_media_info.senders[0].active.value()); - parameters = channel_->GetRtpSendParameters(kSsrc1); + parameters = channel_->AsSendChannel()->GetRtpSendParameters(kSsrc1); ASSERT_EQ(3u, parameters.encodings.size()); parameters.encodings[0].active = false; parameters.encodings[1].active = false; parameters.encodings[2].active = false; - channel_->SetRtpSendParameters(kSsrc1, parameters); + channel_->AsSendChannel()->SetRtpSendParameters(kSsrc1, parameters); ASSERT_TRUE(channel_->GetStats(&video_media_info)); ASSERT_EQ(video_media_info.senders.size(), 1u); // No layer is active. 
@@ -6392,7 +6436,7 @@ TEST_F(WebRtcVideoChannelTest, DefaultReceiveStreamReconfiguresToUseRtx) { EXPECT_EQ(0u, recv_stream->GetConfig().rtp.rtx_ssrc) << "Default receive stream should not have configured RTX"; - EXPECT_TRUE(channel_->AddRecvStream( + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream( cricket::CreateSimWithRtxStreamParams("cname", ssrcs, rtx_ssrcs))); ASSERT_EQ(1u, fake_call_->GetVideoReceiveStreams().size()) << "AddRecvStream should have reconfigured, not added a new receiver."; @@ -6417,8 +6461,8 @@ TEST_F(WebRtcVideoChannelTest, RejectsAddingStreamsWithMissingSsrcsForRtx) { cricket::CreateSimWithRtxStreamParams("cname", ssrcs, rtx_ssrcs); sp.ssrcs = ssrcs; // Without RTXs, this is the important part. - EXPECT_FALSE(channel_->AddSendStream(sp)); - EXPECT_FALSE(channel_->AddRecvStream(sp)); + EXPECT_FALSE(channel_->AsSendChannel()->AddSendStream(sp)); + EXPECT_FALSE(channel_->AsReceiveChannel()->AddRecvStream(sp)); } TEST_F(WebRtcVideoChannelTest, RejectsAddingStreamsWithOverlappingRtxSsrcs) { @@ -6430,20 +6474,20 @@ TEST_F(WebRtcVideoChannelTest, RejectsAddingStreamsWithOverlappingRtxSsrcs) { StreamParams sp = cricket::CreateSimWithRtxStreamParams("cname", ssrcs, rtx_ssrcs); - EXPECT_TRUE(channel_->AddSendStream(sp)); - EXPECT_TRUE(channel_->AddRecvStream(sp)); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream(sp)); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream(sp)); // The RTX SSRC is already used in previous streams, using it should fail. sp = cricket::StreamParams::CreateLegacy(rtx_ssrcs[0]); - EXPECT_FALSE(channel_->AddSendStream(sp)); - EXPECT_FALSE(channel_->AddRecvStream(sp)); + EXPECT_FALSE(channel_->AsSendChannel()->AddSendStream(sp)); + EXPECT_FALSE(channel_->AsReceiveChannel()->AddRecvStream(sp)); // After removing the original stream this should be fine to add (makes sure // that RTX ssrcs are not forever taken). - EXPECT_TRUE(channel_->RemoveSendStream(ssrcs[0])); - EXPECT_TRUE(channel_->RemoveRecvStream(ssrcs[0])); - EXPECT_TRUE(channel_->AddSendStream(sp)); - EXPECT_TRUE(channel_->AddRecvStream(sp)); + EXPECT_TRUE(channel_->AsSendChannel()->RemoveSendStream(ssrcs[0])); + EXPECT_TRUE(channel_->AsReceiveChannel()->RemoveRecvStream(ssrcs[0])); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream(sp)); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream(sp)); } TEST_F(WebRtcVideoChannelTest, @@ -6455,21 +6499,23 @@ TEST_F(WebRtcVideoChannelTest, StreamParams sp = cricket::CreateSimStreamParams("cname", MAKE_VECTOR(kFirstStreamSsrcs)); - EXPECT_TRUE(channel_->AddSendStream(sp)); - EXPECT_TRUE(channel_->AddRecvStream(sp)); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream(sp)); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream(sp)); // One of the SSRCs is already used in previous streams, using it should fail. sp = cricket::CreateSimStreamParams("cname", MAKE_VECTOR(kOverlappingStreamSsrcs)); - EXPECT_FALSE(channel_->AddSendStream(sp)); - EXPECT_FALSE(channel_->AddRecvStream(sp)); + EXPECT_FALSE(channel_->AsSendChannel()->AddSendStream(sp)); + EXPECT_FALSE(channel_->AsReceiveChannel()->AddRecvStream(sp)); // After removing the original stream this should be fine to add (makes sure // that RTX ssrcs are not forever taken). 
- EXPECT_TRUE(channel_->RemoveSendStream(kFirstStreamSsrcs[0])); - EXPECT_TRUE(channel_->RemoveRecvStream(kFirstStreamSsrcs[0])); - EXPECT_TRUE(channel_->AddSendStream(sp)); - EXPECT_TRUE(channel_->AddRecvStream(sp)); + EXPECT_TRUE( + channel_->AsSendChannel()->RemoveSendStream(kFirstStreamSsrcs[0])); + EXPECT_TRUE( + channel_->AsReceiveChannel()->RemoveRecvStream(kFirstStreamSsrcs[0])); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream(sp)); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream(sp)); } TEST_F(WebRtcVideoChannelTest, ReportsSsrcGroupsInStats) { @@ -6481,14 +6527,14 @@ TEST_F(WebRtcVideoChannelTest, ReportsSsrcGroupsInStats) { StreamParams sender_sp = cricket::CreateSimWithRtxStreamParams( "cname", MAKE_VECTOR(kSenderSsrcs), MAKE_VECTOR(kSenderRtxSsrcs)); - EXPECT_TRUE(channel_->AddSendStream(sender_sp)); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream(sender_sp)); static const uint32_t kReceiverSsrcs[] = {3}; static const uint32_t kReceiverRtxSsrcs[] = {2}; StreamParams receiver_sp = cricket::CreateSimWithRtxStreamParams( "cname", MAKE_VECTOR(kReceiverSsrcs), MAKE_VECTOR(kReceiverRtxSsrcs)); - EXPECT_TRUE(channel_->AddRecvStream(receiver_sp)); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream(receiver_sp)); cricket::VideoMediaInfo info; ASSERT_TRUE(channel_->GetStats(&info)); @@ -6531,9 +6577,9 @@ TEST_F(WebRtcVideoChannelTest, RecvUnsignaledSsrcWithSignaledStreamId) { const char kSyncLabel[] = "sync_label"; cricket::StreamParams unsignaled_stream; unsignaled_stream.set_stream_ids({kSyncLabel}); - ASSERT_TRUE(channel_->AddRecvStream(unsignaled_stream)); - channel_->OnDemuxerCriteriaUpdatePending(); - channel_->OnDemuxerCriteriaUpdateComplete(); + ASSERT_TRUE(channel_->AsReceiveChannel()->AddRecvStream(unsignaled_stream)); + channel_->AsReceiveChannel()->OnDemuxerCriteriaUpdatePending(); + channel_->AsReceiveChannel()->OnDemuxerCriteriaUpdateComplete(); rtc::Thread::Current()->ProcessMessages(0); // The stream shouldn't have been created at this point because it doesn't // have any SSRCs. @@ -6551,8 +6597,8 @@ TEST_F(WebRtcVideoChannelTest, RecvUnsignaledSsrcWithSignaledStreamId) { // Reset the unsignaled stream to clear the cache. This deletes the receive // stream. - channel_->ResetUnsignaledRecvStream(); - channel_->OnDemuxerCriteriaUpdatePending(); + channel_->AsReceiveChannel()->ResetUnsignaledRecvStream(); + channel_->AsReceiveChannel()->OnDemuxerCriteriaUpdatePending(); EXPECT_EQ(0u, fake_call_->GetVideoReceiveStreams().size()); // Until the demuxer criteria has been updated, we ignore in-flight ssrcs of @@ -6563,7 +6609,7 @@ TEST_F(WebRtcVideoChannelTest, RecvUnsignaledSsrcWithSignaledStreamId) { // After the demuxer criteria has been updated, we should proceed to create // unsignalled receive streams. This time when a default video receive stream // is created it won't have a sync_group. - channel_->OnDemuxerCriteriaUpdateComplete(); + channel_->AsReceiveChannel()->OnDemuxerCriteriaUpdateComplete(); ReceivePacketAndAdvanceTime(packet.Buffer(), /* packet_time_us */ -1); EXPECT_EQ(1u, fake_call_->GetVideoReceiveStreams().size()); EXPECT_TRUE( @@ -6587,9 +6633,9 @@ TEST_F(WebRtcVideoChannelTest, kIncomingUnsignalledSsrc); // Stream with another SSRC gets signaled. 
- channel_->ResetUnsignaledRecvStream(); + channel_->AsReceiveChannel()->ResetUnsignaledRecvStream(); constexpr uint32_t kIncomingSignalledSsrc = kIncomingUnsignalledSsrc + 1; - ASSERT_TRUE(channel_->AddRecvStream( + ASSERT_TRUE(channel_->AsReceiveChannel()->AddRecvStream( cricket::StreamParams::CreateLegacy(kIncomingSignalledSsrc))); // New receiver is for the signaled stream. @@ -6604,9 +6650,10 @@ TEST_F(WebRtcVideoChannelTest, const uint32_t kSsrc2 = 2; // Starting point: receiving kSsrc1. - EXPECT_TRUE(channel_->AddRecvStream(StreamParams::CreateLegacy(kSsrc1))); - channel_->OnDemuxerCriteriaUpdatePending(); - channel_->OnDemuxerCriteriaUpdateComplete(); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream( + StreamParams::CreateLegacy(kSsrc1))); + channel_->AsReceiveChannel()->OnDemuxerCriteriaUpdatePending(); + channel_->AsReceiveChannel()->OnDemuxerCriteriaUpdateComplete(); rtc::Thread::Current()->ProcessMessages(0); EXPECT_EQ(fake_call_->GetVideoReceiveStreams().size(), 1u); @@ -6617,7 +6664,7 @@ TEST_F(WebRtcVideoChannelTest, // Emulate a second m= section being created by updating the demuxer criteria // without adding any streams. - channel_->OnDemuxerCriteriaUpdatePending(); + channel_->AsReceiveChannel()->OnDemuxerCriteriaUpdatePending(); // Emulate there being in-flight packets for kSsrc1 and kSsrc2 arriving before // the demuxer is updated. @@ -6643,7 +6690,7 @@ TEST_F(WebRtcVideoChannelTest, // Signal that the demuxer update is complete. Because there are no more // pending demuxer updates, receiving unknown ssrcs (kSsrc2) should again // result in unsignalled receive streams being created. - channel_->OnDemuxerCriteriaUpdateComplete(); + channel_->AsReceiveChannel()->OnDemuxerCriteriaUpdateComplete(); rtc::Thread::Current()->ProcessMessages(0); // Receive packets for kSsrc1 and kSsrc2 again. @@ -6673,10 +6720,12 @@ TEST_F(WebRtcVideoChannelTest, const uint32_t kSsrc2 = 2; // Starting point: receiving kSsrc1 and kSsrc2. - EXPECT_TRUE(channel_->AddRecvStream(StreamParams::CreateLegacy(kSsrc1))); - EXPECT_TRUE(channel_->AddRecvStream(StreamParams::CreateLegacy(kSsrc2))); - channel_->OnDemuxerCriteriaUpdatePending(); - channel_->OnDemuxerCriteriaUpdateComplete(); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream( + StreamParams::CreateLegacy(kSsrc1))); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream( + StreamParams::CreateLegacy(kSsrc2))); + channel_->AsReceiveChannel()->OnDemuxerCriteriaUpdatePending(); + channel_->AsReceiveChannel()->OnDemuxerCriteriaUpdateComplete(); rtc::Thread::Current()->ProcessMessages(0); EXPECT_EQ(fake_call_->GetVideoReceiveStreams().size(), 2u); EXPECT_EQ(fake_call_->GetDeliveredPacketsForSsrc(kSsrc1), 0u); @@ -6684,8 +6733,8 @@ TEST_F(WebRtcVideoChannelTest, // Remove kSsrc1, signal that a demuxer criteria update is pending, but not // completed yet. - EXPECT_TRUE(channel_->RemoveRecvStream(kSsrc1)); - channel_->OnDemuxerCriteriaUpdatePending(); + EXPECT_TRUE(channel_->AsReceiveChannel()->RemoveRecvStream(kSsrc1)); + channel_->AsReceiveChannel()->OnDemuxerCriteriaUpdatePending(); // We only have a receiver for kSsrc2 now. EXPECT_EQ(fake_call_->GetVideoReceiveStreams().size(), 1u); @@ -6713,7 +6762,7 @@ TEST_F(WebRtcVideoChannelTest, // Signal that the demuxer update is complete. This means we should stop // ignorning kSsrc1. 
- channel_->OnDemuxerCriteriaUpdateComplete(); + channel_->AsReceiveChannel()->OnDemuxerCriteriaUpdateComplete(); rtc::Thread::Current()->ProcessMessages(0); // Receive packets for kSsrc1 and kSsrc2 again. @@ -6741,20 +6790,22 @@ TEST_F(WebRtcVideoChannelTest, MultiplePendingDemuxerCriteriaUpdates) { const uint32_t kSsrc = 1; // Starting point: receiving kSsrc. - EXPECT_TRUE(channel_->AddRecvStream(StreamParams::CreateLegacy(kSsrc))); - channel_->OnDemuxerCriteriaUpdatePending(); - channel_->OnDemuxerCriteriaUpdateComplete(); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream( + StreamParams::CreateLegacy(kSsrc))); + channel_->AsReceiveChannel()->OnDemuxerCriteriaUpdatePending(); + channel_->AsReceiveChannel()->OnDemuxerCriteriaUpdateComplete(); rtc::Thread::Current()->ProcessMessages(0); ASSERT_EQ(fake_call_->GetVideoReceiveStreams().size(), 1u); // Remove kSsrc... - EXPECT_TRUE(channel_->RemoveRecvStream(kSsrc)); - channel_->OnDemuxerCriteriaUpdatePending(); + EXPECT_TRUE(channel_->AsReceiveChannel()->RemoveRecvStream(kSsrc)); + channel_->AsReceiveChannel()->OnDemuxerCriteriaUpdatePending(); EXPECT_EQ(fake_call_->GetVideoReceiveStreams().size(), 0u); // And then add it back again, before the demuxer knows about the new // criteria! - EXPECT_TRUE(channel_->AddRecvStream(StreamParams::CreateLegacy(kSsrc))); - channel_->OnDemuxerCriteriaUpdatePending(); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream( + StreamParams::CreateLegacy(kSsrc))); + channel_->AsReceiveChannel()->OnDemuxerCriteriaUpdatePending(); EXPECT_EQ(fake_call_->GetVideoReceiveStreams().size(), 1u); // In-flight packets should arrive because the stream was recreated, even @@ -6767,7 +6818,7 @@ TEST_F(WebRtcVideoChannelTest, MultiplePendingDemuxerCriteriaUpdates) { EXPECT_EQ(fake_call_->GetDeliveredPacketsForSsrc(kSsrc), 1u); // Signal that the demuxer knows about the first update: the removal. - channel_->OnDemuxerCriteriaUpdateComplete(); + channel_->AsReceiveChannel()->OnDemuxerCriteriaUpdateComplete(); rtc::Thread::Current()->ProcessMessages(0); // This still should not prevent in-flight packets from arriving because we @@ -6780,8 +6831,8 @@ TEST_F(WebRtcVideoChannelTest, MultiplePendingDemuxerCriteriaUpdates) { EXPECT_EQ(fake_call_->GetDeliveredPacketsForSsrc(kSsrc), 2u); // Remove the kSsrc again while previous demuxer updates are still pending. - EXPECT_TRUE(channel_->RemoveRecvStream(kSsrc)); - channel_->OnDemuxerCriteriaUpdatePending(); + EXPECT_TRUE(channel_->AsReceiveChannel()->RemoveRecvStream(kSsrc)); + channel_->AsReceiveChannel()->OnDemuxerCriteriaUpdatePending(); EXPECT_EQ(fake_call_->GetVideoReceiveStreams().size(), 0u); // Now the packet should be dropped and not create an unsignalled receive @@ -6795,7 +6846,7 @@ TEST_F(WebRtcVideoChannelTest, MultiplePendingDemuxerCriteriaUpdates) { EXPECT_EQ(fake_call_->GetDeliveredPacketsForSsrc(kSsrc), 2u); // Signal that the demuxer knows about the second update: adding it back. - channel_->OnDemuxerCriteriaUpdateComplete(); + channel_->AsReceiveChannel()->OnDemuxerCriteriaUpdateComplete(); rtc::Thread::Current()->ProcessMessages(0); // The packets should continue to be dropped because removal happened after @@ -6809,7 +6860,7 @@ TEST_F(WebRtcVideoChannelTest, MultiplePendingDemuxerCriteriaUpdates) { EXPECT_EQ(fake_call_->GetDeliveredPacketsForSsrc(kSsrc), 2u); // Signal that the demuxer knows about the last update: the second removal. 
- channel_->OnDemuxerCriteriaUpdateComplete(); + channel_->AsReceiveChannel()->OnDemuxerCriteriaUpdateComplete(); rtc::Thread::Current()->ProcessMessages(0); // If packets still arrive after the demuxer knows about the latest removal we @@ -6832,7 +6883,8 @@ TEST_F(WebRtcVideoChannelTest, UnsignalledSsrcHasACooldown) { // Receive a packet for kSsrc1. RtpPacket packet; packet.SetSsrc(kSsrc1); - channel_->OnPacketReceived(packet.Buffer(), /* packet_time_us */ -1); + channel_->AsReceiveChannel()->OnPacketReceived(packet.Buffer(), + /* packet_time_us */ -1); } rtc::Thread::Current()->ProcessMessages(0); time_controller_.AdvanceTime( @@ -6847,7 +6899,8 @@ TEST_F(WebRtcVideoChannelTest, UnsignalledSsrcHasACooldown) { // Receive a packet for kSsrc2. RtpPacket packet; packet.SetSsrc(kSsrc2); - channel_->OnPacketReceived(packet.Buffer(), /* packet_time_us */ -1); + channel_->AsReceiveChannel()->OnPacketReceived(packet.Buffer(), + /* packet_time_us */ -1); } rtc::Thread::Current()->ProcessMessages(0); @@ -6864,7 +6917,8 @@ TEST_F(WebRtcVideoChannelTest, UnsignalledSsrcHasACooldown) { // Receive a packet for kSsrc2. RtpPacket packet; packet.SetSsrc(kSsrc2); - channel_->OnPacketReceived(packet.Buffer(), /* packet_time_us */ -1); + channel_->AsReceiveChannel()->OnPacketReceived(packet.Buffer(), + /* packet_time_us */ -1); } rtc::Thread::Current()->ProcessMessages(0); @@ -6879,17 +6933,22 @@ TEST_F(WebRtcVideoChannelTest, UnsignalledSsrcHasACooldown) { // Test BaseMinimumPlayoutDelayMs on receive streams. TEST_F(WebRtcVideoChannelTest, BaseMinimumPlayoutDelayMs) { // Test that set won't work for non-existing receive streams. - EXPECT_FALSE(channel_->SetBaseMinimumPlayoutDelayMs(kSsrc + 2, 200)); + EXPECT_FALSE(channel_->AsReceiveChannel()->SetBaseMinimumPlayoutDelayMs( + kSsrc + 2, 200)); // Test that get won't work for non-existing receive streams. - EXPECT_FALSE(channel_->GetBaseMinimumPlayoutDelayMs(kSsrc + 2)); + EXPECT_FALSE( + channel_->AsReceiveChannel()->GetBaseMinimumPlayoutDelayMs(kSsrc + 2)); EXPECT_TRUE(AddRecvStream()); // Test that set works for the existing receive stream. - EXPECT_TRUE(channel_->SetBaseMinimumPlayoutDelayMs(last_ssrc_, 200)); + EXPECT_TRUE(channel_->AsReceiveChannel()->SetBaseMinimumPlayoutDelayMs( + last_ssrc_, 200)); auto* recv_stream = fake_call_->GetVideoReceiveStream(last_ssrc_); EXPECT_TRUE(recv_stream); EXPECT_EQ(recv_stream->base_mininum_playout_delay_ms(), 200); - EXPECT_EQ(channel_->GetBaseMinimumPlayoutDelayMs(last_ssrc_).value_or(0), + EXPECT_EQ(channel_->AsReceiveChannel() + ->GetBaseMinimumPlayoutDelayMs(last_ssrc_) + .value_or(0), 200); } @@ -6899,8 +6958,12 @@ TEST_F(WebRtcVideoChannelTest, BaseMinimumPlayoutDelayMsUnsignaledRecvStream) { const FakeVideoReceiveStream* recv_stream; // Set default stream with SSRC 0 - EXPECT_TRUE(channel_->SetBaseMinimumPlayoutDelayMs(0, 200)); - EXPECT_EQ(200, channel_->GetBaseMinimumPlayoutDelayMs(0).value_or(0)); + EXPECT_TRUE( + channel_->AsReceiveChannel()->SetBaseMinimumPlayoutDelayMs(0, 200)); + EXPECT_EQ( + 200, + channel_->AsReceiveChannel()->GetBaseMinimumPlayoutDelayMs(0).value_or( + 0)); // Spawn an unsignaled stream by sending a packet, it should inherit // default delay 200. 
@@ -6910,14 +6973,20 @@ TEST_F(WebRtcVideoChannelTest, BaseMinimumPlayoutDelayMsUnsignaledRecvStream) { recv_stream = fake_call_->GetVideoReceiveStream(kIncomingUnsignalledSsrc); EXPECT_EQ(recv_stream->base_mininum_playout_delay_ms(), 200); - delay_ms = channel_->GetBaseMinimumPlayoutDelayMs(kIncomingUnsignalledSsrc); + delay_ms = channel_->AsReceiveChannel()->GetBaseMinimumPlayoutDelayMs( + kIncomingUnsignalledSsrc); EXPECT_EQ(200, delay_ms.value_or(0)); // Check that now if we change delay for SSRC 0 it will change delay for the // default receiving stream as well. - EXPECT_TRUE(channel_->SetBaseMinimumPlayoutDelayMs(0, 300)); - EXPECT_EQ(300, channel_->GetBaseMinimumPlayoutDelayMs(0).value_or(0)); - delay_ms = channel_->GetBaseMinimumPlayoutDelayMs(kIncomingUnsignalledSsrc); + EXPECT_TRUE( + channel_->AsReceiveChannel()->SetBaseMinimumPlayoutDelayMs(0, 300)); + EXPECT_EQ( + 300, + channel_->AsReceiveChannel()->GetBaseMinimumPlayoutDelayMs(0).value_or( + 0)); + delay_ms = channel_->AsReceiveChannel()->GetBaseMinimumPlayoutDelayMs( + kIncomingUnsignalledSsrc); EXPECT_EQ(300, delay_ms.value_or(0)); recv_stream = fake_call_->GetVideoReceiveStream(kIncomingUnsignalledSsrc); EXPECT_EQ(recv_stream->base_mininum_playout_delay_ms(), 300); @@ -7139,8 +7208,8 @@ TEST_F(WebRtcVideoChannelTest, EXPECT_EQ(kSsrcs3[0], recv_stream0->GetConfig().rtp.remote_ssrc); // Signal the SSRC. - EXPECT_TRUE( - channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(kSsrcs3[0]))); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream( + cricket::StreamParams::CreateLegacy(kSsrcs3[0]))); ASSERT_EQ(1u, fake_call_->GetVideoReceiveStreams().size()); recv_stream0 = fake_call_->GetVideoReceiveStreams()[0]; EXPECT_EQ(kSsrcs3[0], recv_stream0->GetConfig().rtp.remote_ssrc); @@ -7185,22 +7254,26 @@ TEST_F(WebRtcVideoChannelTest, CanSetMaxBitrateForExistingStream) { TEST_F(WebRtcVideoChannelTest, CannotSetMaxBitrateForNonexistentStream) { webrtc::RtpParameters nonexistent_parameters = - channel_->GetRtpSendParameters(last_ssrc_); + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(0u, nonexistent_parameters.encodings.size()); nonexistent_parameters.encodings.push_back(webrtc::RtpEncodingParameters()); - EXPECT_FALSE( - channel_->SetRtpSendParameters(last_ssrc_, nonexistent_parameters).ok()); + EXPECT_FALSE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, nonexistent_parameters) + .ok()); } TEST_F(WebRtcVideoChannelTest, SetLowMaxBitrateOverwritesVideoStreamMinBitrate) { FakeVideoSendStream* stream = AddSendStream(); - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1UL, parameters.encodings.size()); EXPECT_FALSE(parameters.encodings[0].max_bitrate_bps.has_value()); - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); // Note that this is testing the behavior of the FakeVideoSendStream, which // also calls to CreateEncoderStreams to get the VideoStreams, so essentially @@ -7212,10 +7285,12 @@ TEST_F(WebRtcVideoChannelTest, // Set a low max bitrate & check that VideoStream.min_bitrate_bps is limited // by this amount. 
- parameters = channel_->GetRtpSendParameters(last_ssrc_); + parameters = channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); int low_max_bitrate_bps = webrtc::kDefaultMinVideoBitrateBps - 1000; parameters.encodings[0].max_bitrate_bps = low_max_bitrate_bps; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); ASSERT_EQ(1UL, stream->GetVideoStreams().size()); EXPECT_EQ(low_max_bitrate_bps, stream->GetVideoStreams()[0].min_bitrate_bps); @@ -7234,10 +7309,13 @@ TEST_F(WebRtcVideoChannelTest, int high_min_bitrate_bps = stream->GetVideoStreams()[0].max_bitrate_bps + 1; // Set a high min bitrate and check that max_bitrate_bps is adjusted up. - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1UL, parameters.encodings.size()); parameters.encodings[0].min_bitrate_bps = high_min_bitrate_bps; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); ASSERT_EQ(1UL, stream->GetVideoStreams().size()); EXPECT_EQ(high_min_bitrate_bps, stream->GetVideoStreams()[0].min_bitrate_bps); @@ -7258,10 +7336,13 @@ TEST_F(WebRtcVideoChannelTest, // Set min bitrate above global max bitrate and check that min_bitrate_bps is // adjusted down. - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1UL, parameters.encodings.size()); parameters.encodings[0].min_bitrate_bps = 99999 + 1; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); ASSERT_EQ(1UL, stream->GetVideoStreams().size()); EXPECT_EQ(send_parameters_.max_bandwidth_bps, stream->GetVideoStreams()[0].min_bitrate_bps); @@ -7272,10 +7353,13 @@ TEST_F(WebRtcVideoChannelTest, TEST_F(WebRtcVideoChannelTest, SetMaxFramerateOneStream) { FakeVideoSendStream* stream = AddSendStream(); - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1UL, parameters.encodings.size()); EXPECT_FALSE(parameters.encodings[0].max_framerate.has_value()); - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); // Note that this is testing the behavior of the FakeVideoSendStream, which // also calls to CreateEncoderStreams to get the VideoStreams, so essentially @@ -7287,9 +7371,11 @@ TEST_F(WebRtcVideoChannelTest, SetMaxFramerateOneStream) { // Set max framerate and check that VideoStream.max_framerate is set. 
const int kNewMaxFramerate = kDefaultVideoMaxFramerate - 1; - parameters = channel_->GetRtpSendParameters(last_ssrc_); + parameters = channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); parameters.encodings[0].max_framerate = kNewMaxFramerate; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); ASSERT_EQ(1UL, stream->GetVideoStreams().size()); EXPECT_EQ(kNewMaxFramerate, stream->GetVideoStreams()[0].max_framerate); @@ -7298,10 +7384,13 @@ TEST_F(WebRtcVideoChannelTest, SetMaxFramerateOneStream) { TEST_F(WebRtcVideoChannelTest, SetNumTemporalLayersForSingleStream) { FakeVideoSendStream* stream = AddSendStream(); - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1UL, parameters.encodings.size()); EXPECT_FALSE(parameters.encodings[0].num_temporal_layers.has_value()); - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); // Note that this is testing the behavior of the FakeVideoSendStream, which // also calls to CreateEncoderStreams to get the VideoStreams, so essentially @@ -7311,9 +7400,11 @@ TEST_F(WebRtcVideoChannelTest, SetNumTemporalLayersForSingleStream) { EXPECT_FALSE(stream->GetVideoStreams()[0].num_temporal_layers.has_value()); // Set temporal layers and check that VideoStream.num_temporal_layers is set. - parameters = channel_->GetRtpSendParameters(last_ssrc_); + parameters = channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); parameters.encodings[0].num_temporal_layers = 2; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); ASSERT_EQ(1UL, stream->GetVideoStreams().size()); EXPECT_EQ(2UL, stream->GetVideoStreams()[0].num_temporal_layers); @@ -7322,13 +7413,18 @@ TEST_F(WebRtcVideoChannelTest, SetNumTemporalLayersForSingleStream) { TEST_F(WebRtcVideoChannelTest, CannotSetRtpSendParametersWithIncorrectNumberOfEncodings) { AddSendStream(); - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); // Two or more encodings should result in failure. parameters.encodings.push_back(webrtc::RtpEncodingParameters()); - EXPECT_FALSE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_FALSE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); // Zero encodings should also fail. parameters.encodings.clear(); - EXPECT_FALSE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_FALSE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); } TEST_F(WebRtcVideoChannelTest, @@ -7337,44 +7433,58 @@ TEST_F(WebRtcVideoChannelTest, StreamParams sp = CreateSimStreamParams("cname", ssrcs); AddSendStream(sp); - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); // Additional encodings should result in failure. 
parameters.encodings.push_back(webrtc::RtpEncodingParameters()); - EXPECT_FALSE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_FALSE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); // Zero encodings should also fail. parameters.encodings.clear(); - EXPECT_FALSE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_FALSE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); } // Changing the SSRC through RtpParameters is not allowed. TEST_F(WebRtcVideoChannelTest, CannotSetSsrcInRtpSendParameters) { AddSendStream(); - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); parameters.encodings[0].ssrc = 0xdeadbeef; - EXPECT_FALSE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_FALSE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); } // Tests that when RTCRtpEncodingParameters.bitrate_priority gets set to // a value <= 0, setting the parameters returns false. TEST_F(WebRtcVideoChannelTest, SetRtpSendParametersInvalidBitratePriority) { AddSendStream(); - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1UL, parameters.encodings.size()); EXPECT_EQ(webrtc::kDefaultBitratePriority, parameters.encodings[0].bitrate_priority); parameters.encodings[0].bitrate_priority = 0; - EXPECT_FALSE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_FALSE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); parameters.encodings[0].bitrate_priority = -2; - EXPECT_FALSE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_FALSE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); } // Tests when the the RTCRtpEncodingParameters.bitrate_priority gets set // properly on the VideoChannel and propogates down to the video encoder. TEST_F(WebRtcVideoChannelTest, SetRtpSendParametersPriorityOneStream) { AddSendStream(); - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1UL, parameters.encodings.size()); EXPECT_EQ(webrtc::kDefaultBitratePriority, parameters.encodings[0].bitrate_priority); @@ -7382,11 +7492,13 @@ TEST_F(WebRtcVideoChannelTest, SetRtpSendParametersPriorityOneStream) { // Change the value and set it on the VideoChannel. double new_bitrate_priority = 2.0; parameters.encodings[0].bitrate_priority = new_bitrate_priority; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); // Verify that the encoding parameters bitrate_priority is set for the // VideoChannel. - parameters = channel_->GetRtpSendParameters(last_ssrc_); + parameters = channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1UL, parameters.encodings.size()); EXPECT_EQ(new_bitrate_priority, parameters.encodings[0].bitrate_priority); @@ -7430,17 +7542,19 @@ TEST_F(WebRtcVideoChannelTest, SetRtpSendParametersPrioritySimulcastStreams) { // Get and set the rtp encoding parameters. 
webrtc::RtpParameters parameters = - channel_->GetRtpSendParameters(primary_ssrc); + channel_->AsSendChannel()->GetRtpSendParameters(primary_ssrc); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); EXPECT_EQ(webrtc::kDefaultBitratePriority, parameters.encodings[0].bitrate_priority); // Change the value and set it on the VideoChannel. double new_bitrate_priority = 2.0; parameters.encodings[0].bitrate_priority = new_bitrate_priority; - EXPECT_TRUE(channel_->SetRtpSendParameters(primary_ssrc, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(primary_ssrc, parameters) + .ok()); // Verify that the encoding parameters priority is set on the VideoChannel. - parameters = channel_->GetRtpSendParameters(primary_ssrc); + parameters = channel_->AsSendChannel()->GetRtpSendParameters(primary_ssrc); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); EXPECT_EQ(new_bitrate_priority, parameters.encodings[0].bitrate_priority); @@ -7486,12 +7600,14 @@ TEST_F(WebRtcVideoChannelTest, // Try layers in natural order (smallest to largest). { - auto rtp_parameters = channel_->GetRtpSendParameters(last_ssrc_); + auto rtp_parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); ASSERT_EQ(3u, rtp_parameters.encodings.size()); rtp_parameters.encodings[0].scale_resolution_down_by = 4.0; rtp_parameters.encodings[1].scale_resolution_down_by = 2.0; rtp_parameters.encodings[2].scale_resolution_down_by = 1.0; - auto result = channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); + auto result = channel_->AsSendChannel()->SetRtpSendParameters( + last_ssrc_, rtp_parameters); ASSERT_TRUE(result.ok()); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); @@ -7508,12 +7624,14 @@ TEST_F(WebRtcVideoChannelTest, // Try layers in reverse natural order (largest to smallest). { - auto rtp_parameters = channel_->GetRtpSendParameters(last_ssrc_); + auto rtp_parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); ASSERT_EQ(3u, rtp_parameters.encodings.size()); rtp_parameters.encodings[0].scale_resolution_down_by = 1.0; rtp_parameters.encodings[1].scale_resolution_down_by = 2.0; rtp_parameters.encodings[2].scale_resolution_down_by = 4.0; - auto result = channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); + auto result = channel_->AsSendChannel()->SetRtpSendParameters( + last_ssrc_, rtp_parameters); ASSERT_TRUE(result.ok()); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); @@ -7530,12 +7648,14 @@ TEST_F(WebRtcVideoChannelTest, // Try layers in mixed order. { - auto rtp_parameters = channel_->GetRtpSendParameters(last_ssrc_); + auto rtp_parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); ASSERT_EQ(3u, rtp_parameters.encodings.size()); rtp_parameters.encodings[0].scale_resolution_down_by = 10.0; rtp_parameters.encodings[1].scale_resolution_down_by = 2.0; rtp_parameters.encodings[2].scale_resolution_down_by = 4.0; - auto result = channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); + auto result = channel_->AsSendChannel()->SetRtpSendParameters( + last_ssrc_, rtp_parameters); ASSERT_TRUE(result.ok()); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); @@ -7552,12 +7672,14 @@ TEST_F(WebRtcVideoChannelTest, // Try with a missing scale setting, defaults to 1.0 if any other is set. 
{ - auto rtp_parameters = channel_->GetRtpSendParameters(last_ssrc_); + auto rtp_parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); ASSERT_EQ(3u, rtp_parameters.encodings.size()); rtp_parameters.encodings[0].scale_resolution_down_by = 1.0; rtp_parameters.encodings[1].scale_resolution_down_by.reset(); rtp_parameters.encodings[2].scale_resolution_down_by = 4.0; - auto result = channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); + auto result = channel_->AsSendChannel()->SetRtpSendParameters( + last_ssrc_, rtp_parameters); ASSERT_TRUE(result.ok()); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); @@ -7595,13 +7717,14 @@ TEST_F(WebRtcVideoChannelTest, channel_->SetSend(true); // Set `scale_resolution_down_by`'s. - auto rtp_parameters = channel_->GetRtpSendParameters(last_ssrc_); + auto rtp_parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); ASSERT_EQ(rtp_parameters.encodings.size(), 3u); rtp_parameters.encodings[0].scale_resolution_down_by = 1.0; rtp_parameters.encodings[1].scale_resolution_down_by = 2.0; rtp_parameters.encodings[2].scale_resolution_down_by = 4.0; - const auto result = - channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); + const auto result = channel_->AsSendChannel()->SetRtpSendParameters( + last_ssrc_, rtp_parameters); ASSERT_TRUE(result.ok()); // Use a capture resolution whose width and height are not divisible by 2^3. @@ -7642,12 +7765,14 @@ TEST_F(WebRtcVideoChannelTest, // Try layers in natural order (smallest to largest). { - auto rtp_parameters = channel_->GetRtpSendParameters(last_ssrc_); + auto rtp_parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); ASSERT_EQ(3u, rtp_parameters.encodings.size()); rtp_parameters.encodings[0].scale_resolution_down_by = 4.0; rtp_parameters.encodings[1].scale_resolution_down_by = 2.0; rtp_parameters.encodings[2].scale_resolution_down_by = 1.0; - auto result = channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); + auto result = channel_->AsSendChannel()->SetRtpSendParameters( + last_ssrc_, rtp_parameters); ASSERT_TRUE(result.ok()); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); @@ -7664,12 +7789,14 @@ TEST_F(WebRtcVideoChannelTest, // Try layers in reverse natural order (largest to smallest). { - auto rtp_parameters = channel_->GetRtpSendParameters(last_ssrc_); + auto rtp_parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); ASSERT_EQ(3u, rtp_parameters.encodings.size()); rtp_parameters.encodings[0].scale_resolution_down_by = 1.0; rtp_parameters.encodings[1].scale_resolution_down_by = 2.0; rtp_parameters.encodings[2].scale_resolution_down_by = 4.0; - auto result = channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); + auto result = channel_->AsSendChannel()->SetRtpSendParameters( + last_ssrc_, rtp_parameters); ASSERT_TRUE(result.ok()); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); @@ -7686,12 +7813,14 @@ TEST_F(WebRtcVideoChannelTest, // Try layers in mixed order. 
{ - auto rtp_parameters = channel_->GetRtpSendParameters(last_ssrc_); + auto rtp_parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); ASSERT_EQ(3u, rtp_parameters.encodings.size()); rtp_parameters.encodings[0].scale_resolution_down_by = 10.0; rtp_parameters.encodings[1].scale_resolution_down_by = 2.0; rtp_parameters.encodings[2].scale_resolution_down_by = 4.0; - auto result = channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); + auto result = channel_->AsSendChannel()->SetRtpSendParameters( + last_ssrc_, rtp_parameters); ASSERT_TRUE(result.ok()); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); @@ -7708,12 +7837,14 @@ TEST_F(WebRtcVideoChannelTest, // Try with a missing scale setting, defaults to 1.0 if any other is set. { - auto rtp_parameters = channel_->GetRtpSendParameters(last_ssrc_); + auto rtp_parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); ASSERT_EQ(3u, rtp_parameters.encodings.size()); rtp_parameters.encodings[0].scale_resolution_down_by = 1.0; rtp_parameters.encodings[1].scale_resolution_down_by.reset(); rtp_parameters.encodings[2].scale_resolution_down_by = 4.0; - auto result = channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); + auto result = channel_->AsSendChannel()->SetRtpSendParameters( + last_ssrc_, rtp_parameters); ASSERT_TRUE(result.ok()); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); @@ -7751,13 +7882,14 @@ TEST_F(WebRtcVideoChannelTest, channel_->SetSend(true); // Set `scale_resolution_down_by`'s. - auto rtp_parameters = channel_->GetRtpSendParameters(last_ssrc_); + auto rtp_parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); ASSERT_EQ(rtp_parameters.encodings.size(), 3u); rtp_parameters.encodings[0].scale_resolution_down_by = 1.0; rtp_parameters.encodings[1].scale_resolution_down_by = 2.0; rtp_parameters.encodings[2].scale_resolution_down_by = 4.0; - const auto result = - channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); + const auto result = channel_->AsSendChannel()->SetRtpSendParameters( + last_ssrc_, rtp_parameters); ASSERT_TRUE(result.ok()); // Use a capture resolution whose width and height are not divisible by 2^3. @@ -7786,7 +7918,8 @@ TEST_F(WebRtcVideoChannelTest, GetAndSetRtpSendParametersMaxFramerate) { SetUpSimulcast(true, false); // Get and set the rtp encoding parameters. - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); for (const auto& encoding : parameters.encodings) { EXPECT_FALSE(encoding.max_framerate); @@ -7796,10 +7929,12 @@ TEST_F(WebRtcVideoChannelTest, GetAndSetRtpSendParametersMaxFramerate) { parameters.encodings[0].max_framerate = 10; parameters.encodings[1].max_framerate = 20; parameters.encodings[2].max_framerate = 25; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); // Verify that the bitrates are set on the VideoChannel. 
- parameters = channel_->GetRtpSendParameters(last_ssrc_); + parameters = channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); EXPECT_EQ(10, parameters.encodings[0].max_framerate); EXPECT_EQ(20, parameters.encodings[1].max_framerate); @@ -7812,16 +7947,21 @@ TEST_F(WebRtcVideoChannelTest, SetUpSimulcast(true, false); // Get and set the rtp encoding parameters. - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); // Num temporal layers should be in the range [1, kMaxTemporalStreams]. parameters.encodings[0].num_temporal_layers = 0; EXPECT_EQ(webrtc::RTCErrorType::INVALID_RANGE, - channel_->SetRtpSendParameters(last_ssrc_, parameters).type()); + channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .type()); parameters.encodings[0].num_temporal_layers = webrtc::kMaxTemporalStreams + 1; EXPECT_EQ(webrtc::RTCErrorType::INVALID_RANGE, - channel_->SetRtpSendParameters(last_ssrc_, parameters).type()); + channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .type()); } TEST_F(WebRtcVideoChannelTest, GetAndSetRtpSendParametersNumTemporalLayers) { @@ -7829,7 +7969,8 @@ TEST_F(WebRtcVideoChannelTest, GetAndSetRtpSendParametersNumTemporalLayers) { SetUpSimulcast(true, false); // Get and set the rtp encoding parameters. - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); for (const auto& encoding : parameters.encodings) EXPECT_FALSE(encoding.num_temporal_layers); @@ -7838,10 +7979,12 @@ TEST_F(WebRtcVideoChannelTest, GetAndSetRtpSendParametersNumTemporalLayers) { parameters.encodings[0].num_temporal_layers = 3; parameters.encodings[1].num_temporal_layers = 3; parameters.encodings[2].num_temporal_layers = 3; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); // Verify that the number of temporal layers are set on the VideoChannel. - parameters = channel_->GetRtpSendParameters(last_ssrc_); + parameters = channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); EXPECT_EQ(3, parameters.encodings[0].num_temporal_layers); EXPECT_EQ(3, parameters.encodings[1].num_temporal_layers); @@ -7861,12 +8004,15 @@ TEST_F(WebRtcVideoChannelTest, NumTemporalLayersPropagatedToEncoder) { // Get and set the rtp encoding parameters. // Change the value and set it on the VideoChannel. - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); parameters.encodings[0].num_temporal_layers = 3; parameters.encodings[1].num_temporal_layers = 2; parameters.encodings[2].num_temporal_layers = 1; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); // Verify that the new value is propagated down to the encoder. 
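
The hunks above all follow the same shape: RTP send parameters are no longer read and written on the media channel directly, but through its send half obtained with AsSendChannel(), while receive-side calls (GetBaseMinimumPlayoutDelayMs, AddRecvStream, and friends) go through AsReceiveChannel(). A minimal illustrative sketch of that call shape, assuming a channel_ of the combined channel type these tests use (not taken from this patch):

  // Illustrative only; mirrors the calls exercised by the tests above.
  webrtc::RtpParameters parameters =
      channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_);
  parameters.encodings[0].max_bitrate_bps = 300000;
  webrtc::RTCError result =
      channel_->AsSendChannel()->SetRtpSendParameters(last_ssrc_, parameters);
  EXPECT_TRUE(result.ok());

  // Receive-side calls move to the receive half in the same way.
  EXPECT_TRUE(channel_->AsReceiveChannel()->SetBaseMinimumPlayoutDelayMs(
      last_ssrc_, 200));
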
// Check that WebRtcVideoSendStream updates VideoEncoderConfig correctly. @@ -7886,7 +8032,9 @@ TEST_F(WebRtcVideoChannelTest, NumTemporalLayersPropagatedToEncoder) { EXPECT_EQ(1UL, stream->GetVideoStreams()[2].num_temporal_layers); // No parameter changed, encoder should not be reconfigured. - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); EXPECT_EQ(2, stream->num_encoder_reconfigurations()); EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); @@ -7906,11 +8054,14 @@ TEST_F(WebRtcVideoChannelTest, frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); // Change rtp encoding parameters. - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); parameters.encodings[0].num_temporal_layers = 2; parameters.encodings[2].num_temporal_layers = 1; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); // Verify that no value is propagated down to the encoder. webrtc::VideoEncoderConfig encoder_config = stream->GetEncoderConfig().Copy(); @@ -7946,11 +8097,14 @@ TEST_F(WebRtcVideoChannelTest, // Get and set the rtp encoding parameters. // Change the value and set it on the VideoChannel. - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); parameters.encodings[0].max_framerate = 15; parameters.encodings[2].max_framerate = 20; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); // Verify that the new value propagated down to the encoder. // Check that WebRtcVideoSendStream updates VideoEncoderConfig correctly. @@ -7978,7 +8132,8 @@ TEST_F(WebRtcVideoChannelTest, GetAndSetRtpSendParametersMinAndMaxBitrate) { SetUpSimulcast(true, false); // Get and set the rtp encoding parameters. - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); for (const auto& encoding : parameters.encodings) { EXPECT_FALSE(encoding.min_bitrate_bps); @@ -7992,10 +8147,12 @@ TEST_F(WebRtcVideoChannelTest, GetAndSetRtpSendParametersMinAndMaxBitrate) { parameters.encodings[1].max_bitrate_bps = 400000; parameters.encodings[2].min_bitrate_bps = 500000; parameters.encodings[2].max_bitrate_bps = 600000; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); // Verify that the bitrates are set on the VideoChannel. 
- parameters = channel_->GetRtpSendParameters(last_ssrc_); + parameters = channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); EXPECT_EQ(100000, parameters.encodings[0].min_bitrate_bps); EXPECT_EQ(200000, parameters.encodings[0].max_bitrate_bps); @@ -8010,14 +8167,17 @@ TEST_F(WebRtcVideoChannelTest, SetRtpSendParametersFailsWithIncorrectBitrate) { SetUpSimulcast(true, false); // Get and set the rtp encoding parameters. - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); // Max bitrate lower than min bitrate should fail. parameters.encodings[2].min_bitrate_bps = 100000; parameters.encodings[2].max_bitrate_bps = 100000 - 1; EXPECT_EQ(webrtc::RTCErrorType::INVALID_RANGE, - channel_->SetRtpSendParameters(last_ssrc_, parameters).type()); + channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .type()); } // Test that min and max bitrate values set via RtpParameters are correctly @@ -8037,7 +8197,8 @@ TEST_F(WebRtcVideoChannelTest, MinAndMaxSimulcastBitratePropagatedToEncoder) { // Get and set the rtp encoding parameters. // Change the value and set it on the VideoChannel. - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); parameters.encodings[0].min_bitrate_bps = 100000; parameters.encodings[0].max_bitrate_bps = 200000; @@ -8045,7 +8206,9 @@ TEST_F(WebRtcVideoChannelTest, MinAndMaxSimulcastBitratePropagatedToEncoder) { parameters.encodings[1].max_bitrate_bps = 400000; parameters.encodings[2].min_bitrate_bps = 500000; parameters.encodings[2].max_bitrate_bps = 600000; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); // Verify that the new value propagated down to the encoder. // Check that WebRtcVideoSendStream updates VideoEncoderConfig correctly. @@ -8077,7 +8240,9 @@ TEST_F(WebRtcVideoChannelTest, MinAndMaxSimulcastBitratePropagatedToEncoder) { EXPECT_EQ(600000, stream->GetVideoStreams()[2].max_bitrate_bps); // No parameter changed, encoder should not be reconfigured. - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); EXPECT_EQ(2, stream->num_encoder_reconfigurations()); EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); @@ -8099,7 +8264,8 @@ TEST_F(WebRtcVideoChannelTest, MinOrMaxSimulcastBitratePropagatedToEncoder) { frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); // Get and set the rtp encoding parameters. - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); // Change the value and set it on the VideoChannel. @@ -8109,7 +8275,9 @@ TEST_F(WebRtcVideoChannelTest, MinOrMaxSimulcastBitratePropagatedToEncoder) { // Layer 1: only configure max bitrate. 
const int kMaxBpsLayer1 = kDefault[1].max_bitrate_bps - 1; parameters.encodings[1].max_bitrate_bps = kMaxBpsLayer1; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); // Verify that the new value propagated down to the encoder. // Check that WebRtcVideoSendStream updates VideoEncoderConfig correctly. @@ -8168,7 +8336,8 @@ TEST_F(WebRtcVideoChannelTest, SetMinAndMaxSimulcastBitrateAboveBelowDefault) { frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); // Get and set the rtp encoding parameters. - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); // Change the value and set it on the VideoChannel. @@ -8178,7 +8347,9 @@ TEST_F(WebRtcVideoChannelTest, SetMinAndMaxSimulcastBitrateAboveBelowDefault) { // For layer 1, set the max bitrate below the default min. const int kMaxBpsLayer1 = kDefault[1].min_bitrate_bps - 1; parameters.encodings[1].max_bitrate_bps = kMaxBpsLayer1; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); // Verify that the new value propagated down to the encoder. // FakeVideoSendStream calls CreateEncoderStreams, test that the vector of @@ -8216,11 +8387,14 @@ TEST_F(WebRtcVideoChannelTest, BandwidthAboveTotalMaxBitrateGivenToMaxLayer) { frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); // Set max bitrate for all but the highest layer. - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); parameters.encodings[0].max_bitrate_bps = kDefault[0].max_bitrate_bps; parameters.encodings[1].max_bitrate_bps = kDefault[1].max_bitrate_bps; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); // Set max bandwidth equal to total max bitrate. send_parameters_.max_bandwidth_bps = @@ -8265,10 +8439,13 @@ TEST_F(WebRtcVideoChannelTest, frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); // Set max bitrate for the highest layer. - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); parameters.encodings[2].max_bitrate_bps = kDefault[2].max_bitrate_bps; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); // Set max bandwidth above the total max bitrate. send_parameters_.max_bandwidth_bps = @@ -8293,11 +8470,14 @@ TEST_F(WebRtcVideoChannelTest, MinAndMaxBitratePropagatedToEncoder) { EXPECT_TRUE(stream->IsSending()); // Set min and max bitrate. 
- webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1u, parameters.encodings.size()); parameters.encodings[0].min_bitrate_bps = 80000; parameters.encodings[0].max_bitrate_bps = 150000; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); // Check that WebRtcVideoSendStream updates VideoEncoderConfig correctly. webrtc::VideoEncoderConfig encoder_config = stream->GetEncoderConfig().Copy(); @@ -8348,16 +8528,21 @@ TEST_F(WebRtcVideoChannelTest, SetRtpSendParametersOneEncodingActive) { EXPECT_TRUE(stream->IsSending()); // Get current parameters and change "active" to false. - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); ASSERT_EQ(1u, parameters.encodings.size()); ASSERT_TRUE(parameters.encodings[0].active); parameters.encodings[0].active = false; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); EXPECT_FALSE(stream->IsSending()); // Now change it back to active and verify we resume sending. parameters.encodings[0].active = true; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); EXPECT_TRUE(stream->IsSending()); } @@ -8385,7 +8570,7 @@ TEST_F(WebRtcVideoChannelTest, SetRtpSendParametersMultipleEncodingsActive) { // Check that all encodings are initially active. webrtc::RtpParameters parameters = - channel_->GetRtpSendParameters(primary_ssrc); + channel_->AsSendChannel()->GetRtpSendParameters(primary_ssrc); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); EXPECT_TRUE(parameters.encodings[0].active); EXPECT_TRUE(parameters.encodings[1].active); @@ -8396,9 +8581,11 @@ TEST_F(WebRtcVideoChannelTest, SetRtpSendParametersMultipleEncodingsActive) { parameters.encodings[0].active = false; parameters.encodings[1].active = true; parameters.encodings[2].active = false; - EXPECT_TRUE(channel_->SetRtpSendParameters(primary_ssrc, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(primary_ssrc, parameters) + .ok()); // Verify that the active fields are set on the VideoChannel. - parameters = channel_->GetRtpSendParameters(primary_ssrc); + parameters = channel_->AsSendChannel()->GetRtpSendParameters(primary_ssrc); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); EXPECT_FALSE(parameters.encodings[0].active); EXPECT_TRUE(parameters.encodings[1].active); @@ -8417,9 +8604,11 @@ TEST_F(WebRtcVideoChannelTest, SetRtpSendParametersMultipleEncodingsActive) { parameters.encodings[0].active = false; parameters.encodings[1].active = false; parameters.encodings[2].active = false; - EXPECT_TRUE(channel_->SetRtpSendParameters(primary_ssrc, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(primary_ssrc, parameters) + .ok()); // Verify that the active fields are set on the VideoChannel. 
- parameters = channel_->GetRtpSendParameters(primary_ssrc); + parameters = channel_->AsSendChannel()->GetRtpSendParameters(primary_ssrc); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); EXPECT_FALSE(parameters.encodings[0].active); EXPECT_FALSE(parameters.encodings[1].active); @@ -8459,7 +8648,7 @@ TEST_F(WebRtcVideoChannelTest, // Check that all encodings are initially active. webrtc::RtpParameters parameters = - channel_->GetRtpSendParameters(primary_ssrc); + channel_->AsSendChannel()->GetRtpSendParameters(primary_ssrc); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); EXPECT_TRUE(parameters.encodings[0].active); EXPECT_TRUE(parameters.encodings[1].active); @@ -8470,7 +8659,9 @@ TEST_F(WebRtcVideoChannelTest, parameters.encodings[0].active = false; parameters.encodings[1].active = false; parameters.encodings[2].active = true; - EXPECT_TRUE(channel_->SetRtpSendParameters(primary_ssrc, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(primary_ssrc, parameters) + .ok()); // Check that the VideoSendStream is updated appropriately. This means its // send state was updated and it was reconfigured. @@ -8503,13 +8694,16 @@ TEST_F(WebRtcVideoChannelTest, EXPECT_TRUE(stream->IsSending()); // Get current parameters and change "active" to false. - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); ASSERT_EQ(1u, parameters.encodings.size()); ASSERT_TRUE(parameters.encodings[0].active); parameters.encodings[0].active = false; EXPECT_EQ(1u, GetFakeSendStreams().size()); EXPECT_EQ(1, fake_call_->GetNumCreatedSendStreams()); - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); EXPECT_FALSE(stream->IsSending()); // Reorder the codec list, causing the stream to be reconfigured. 
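
Taken together, the simulcast tests above exercise per-encoding control through RtpParameters: toggling active, and setting max_framerate, num_temporal_layers and min/max bitrates on individual layers before handing the parameters back via the send channel. A condensed sketch of that flow (illustrative only, using the same three-layer setup as the tests):

  // Illustrative only; field names match those used by the tests above.
  webrtc::RtpParameters parameters =
      channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_);
  ASSERT_EQ(3u, parameters.encodings.size());
  parameters.encodings[0].num_temporal_layers = 3;
  parameters.encodings[0].min_bitrate_bps = 100000;
  parameters.encodings[0].max_bitrate_bps = 200000;
  parameters.encodings[1].active = false;  // e.g. disable the middle layer
  parameters.encodings[2].max_framerate = 20;
  EXPECT_TRUE(channel_->AsSendChannel()
                  ->SetRtpSendParameters(last_ssrc_, parameters)
                  .ok());
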
@@ -8537,7 +8731,7 @@ TEST_F(WebRtcVideoChannelTest, GetRtpSendParametersCodecs) { EXPECT_TRUE(channel_->SetSendParameters(parameters)); webrtc::RtpParameters rtp_parameters = - channel_->GetRtpSendParameters(last_ssrc_); + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); ASSERT_EQ(2u, rtp_parameters.codecs.size()); EXPECT_EQ(GetEngineCodec("VP8").ToCodecParameters(), rtp_parameters.codecs[0]); @@ -8551,7 +8745,8 @@ TEST_F(WebRtcVideoChannelTest, GetRtpSendParametersRtcpCname) { params.cname = "rtcpcname"; AddSendStream(params); - webrtc::RtpParameters rtp_parameters = channel_->GetRtpSendParameters(kSsrc); + webrtc::RtpParameters rtp_parameters = + channel_->AsSendChannel()->GetRtpSendParameters(kSsrc); EXPECT_STREQ("rtcpcname", rtp_parameters.rtcp.cname.c_str()); } @@ -8561,7 +8756,7 @@ TEST_F(WebRtcVideoChannelTest, GetRtpSendParametersSsrc) { AddSendStream(); webrtc::RtpParameters rtp_parameters = - channel_->GetRtpSendParameters(last_ssrc_); + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); ASSERT_EQ(1u, rtp_parameters.encodings.size()); EXPECT_EQ(last_ssrc_, rtp_parameters.encodings[0].ssrc); } @@ -8570,13 +8765,13 @@ TEST_F(WebRtcVideoChannelTest, DetectRtpSendParameterHeaderExtensionsChange) { AddSendStream(); webrtc::RtpParameters rtp_parameters = - channel_->GetRtpSendParameters(last_ssrc_); + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); rtp_parameters.header_extensions.emplace_back(); EXPECT_NE(0u, rtp_parameters.header_extensions.size()); - webrtc::RTCError result = - channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); + webrtc::RTCError result = channel_->AsSendChannel()->SetRtpSendParameters( + last_ssrc_, rtp_parameters); EXPECT_EQ(webrtc::RTCErrorType::INVALID_MODIFICATION, result.type()); } @@ -8587,15 +8782,17 @@ TEST_F(WebRtcVideoChannelTest, GetRtpSendParametersDegradationPreference) { EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, &frame_forwarder)); webrtc::RtpParameters rtp_parameters = - channel_->GetRtpSendParameters(last_ssrc_); + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_FALSE(rtp_parameters.degradation_preference.has_value()); rtp_parameters.degradation_preference = webrtc::DegradationPreference::MAINTAIN_FRAMERATE; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, rtp_parameters) + .ok()); webrtc::RtpParameters updated_rtp_parameters = - channel_->GetRtpSendParameters(last_ssrc_); + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(updated_rtp_parameters.degradation_preference, webrtc::DegradationPreference::MAINTAIN_FRAMERATE); @@ -8612,13 +8809,16 @@ TEST_F(WebRtcVideoChannelTest, SetAndGetRtpSendParameters) { EXPECT_TRUE(channel_->SetSendParameters(parameters)); webrtc::RtpParameters initial_params = - channel_->GetRtpSendParameters(last_ssrc_); + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); // We should be able to set the params we just got. - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, initial_params).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, initial_params) + .ok()); // ... And this shouldn't change the params returned by GetRtpSendParameters. 
- EXPECT_EQ(initial_params, channel_->GetRtpSendParameters(last_ssrc_)); + EXPECT_EQ(initial_params, + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_)); } // Test that GetRtpReceiveParameters returns the currently configured codecs. @@ -8757,7 +8957,7 @@ TEST_F(WebRtcVideoChannelTest, cricket::StreamParams params = cricket::StreamParams::CreateLegacy(1); params.AddFidSsrc(1, 2); - EXPECT_TRUE(channel_->AddRecvStream(params)); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream(params)); } void WebRtcVideoChannelTest::TestReceiverLocalSsrcConfiguration( @@ -8789,13 +8989,13 @@ void WebRtcVideoChannelTest::TestReceiverLocalSsrcConfiguration( // Removing first sender should fall back to another (in this case the second) // local send stream's SSRC. AddSendStream(StreamParams::CreateLegacy(kSecondSenderSsrc)); - ASSERT_TRUE(channel_->RemoveSendStream(kSenderSsrc)); + ASSERT_TRUE(channel_->AsSendChannel()->RemoveSendStream(kSenderSsrc)); receive_streams = fake_call_->GetVideoReceiveStreams(); ASSERT_EQ(1u, receive_streams.size()); EXPECT_EQ(kSecondSenderSsrc, receive_streams[0]->GetConfig().rtp.local_ssrc); // Removing the last sender should fall back to default local SSRC. - ASSERT_TRUE(channel_->RemoveSendStream(kSecondSenderSsrc)); + ASSERT_TRUE(channel_->AsSendChannel()->RemoveSendStream(kSecondSenderSsrc)); receive_streams = fake_call_->GetVideoReceiveStreams(); ASSERT_EQ(1u, receive_streams.size()); EXPECT_EQ(kExpectedDefaultReceiverSsrc, @@ -8841,14 +9041,16 @@ TEST_F(WebRtcVideoChannelTest, FakeVideoSendStream* stream = SetUpSimulcast(true, false); webrtc::RtpParameters rtp_parameters = - channel_->GetRtpSendParameters(last_ssrc_); + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); ASSERT_EQ(3u, rtp_parameters.encodings.size()); ASSERT_TRUE(rtp_parameters.encodings[0].active); ASSERT_TRUE(rtp_parameters.encodings[1].active); ASSERT_TRUE(rtp_parameters.encodings[2].active); rtp_parameters.encodings[0].active = false; rtp_parameters.encodings[1].active = false; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, rtp_parameters) + .ok()); EXPECT_TRUE(stream->GetEncoderConfig().is_quality_scaling_allowed); } @@ -8873,7 +9075,7 @@ class WebRtcVideoChannelSimulcastTest : public ::testing::Test { channel_.reset(engine_.CreateMediaChannel( &fake_call_, GetMediaConfig(), VideoOptions(), webrtc::CryptoOptions(), mock_rate_allocator_factory_.get())); - channel_->OnReadyToSend(true); + channel_->AsSendChannel()->OnReadyToSend(true); last_ssrc_ = 123; } @@ -8912,7 +9114,8 @@ class WebRtcVideoChannelSimulcastTest : public ::testing::Test { channel_->SetSend(true); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); - auto rtp_parameters = channel_->GetRtpSendParameters(kSsrcs3[0]); + auto rtp_parameters = + channel_->AsSendChannel()->GetRtpSendParameters(kSsrcs3[0]); EXPECT_EQ(num_configured_streams, rtp_parameters.encodings.size()); std::vector video_streams = stream->GetVideoStreams(); @@ -8988,7 +9191,7 @@ class WebRtcVideoChannelSimulcastTest : public ::testing::Test { FakeVideoSendStream* AddSendStream(const StreamParams& sp) { size_t num_streams = fake_call_.GetVideoSendStreams().size(); - EXPECT_TRUE(channel_->AddSendStream(sp)); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream(sp)); std::vector streams = fake_call_.GetVideoSendStreams(); EXPECT_EQ(num_streams + 1, streams.size()); @@ -9005,7 +9208,7 @@ class WebRtcVideoChannelSimulcastTest : 
public ::testing::Test { FakeVideoReceiveStream* AddRecvStream(const StreamParams& sp) { size_t num_streams = fake_call_.GetVideoReceiveStreams().size(); - EXPECT_TRUE(channel_->AddRecvStream(sp)); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream(sp)); std::vector streams = fake_call_.GetVideoReceiveStreams(); EXPECT_EQ(num_streams + 1, streams.size()); @@ -9105,7 +9308,7 @@ TEST_F(WebRtcVideoChannelTest, SetsRidsOnSendStream) { } sp.set_rids(rid_descriptions); - ASSERT_TRUE(channel_->AddSendStream(sp)); + ASSERT_TRUE(channel_->AsSendChannel()->AddSendStream(sp)); const auto& streams = fake_call_->GetVideoSendStreams(); ASSERT_EQ(1u, streams.size()); auto stream = streams[0]; @@ -9155,11 +9358,11 @@ TEST_F(WebRtcVideoChannelTest, RequestedResolutionSinglecast) { { // TEST requested_resolution < frame size webrtc::RtpParameters rtp_parameters = - channel_->GetRtpSendParameters(last_ssrc_); + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1UL, rtp_parameters.encodings.size()); rtp_parameters.encodings[0].requested_resolution = {.width = 640, .height = 360}; - channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); + channel_->AsSendChannel()->SetRtpSendParameters(last_ssrc_, rtp_parameters); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); @@ -9170,11 +9373,12 @@ TEST_F(WebRtcVideoChannelTest, RequestedResolutionSinglecast) { } { // TEST requested_resolution == frame size - auto rtp_parameters = channel_->GetRtpSendParameters(last_ssrc_); + auto rtp_parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1UL, rtp_parameters.encodings.size()); rtp_parameters.encodings[0].requested_resolution = {.width = 1280, .height = 720}; - channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); + channel_->AsSendChannel()->SetRtpSendParameters(last_ssrc_, rtp_parameters); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); auto streams = stream->GetVideoStreams(); @@ -9184,11 +9388,12 @@ TEST_F(WebRtcVideoChannelTest, RequestedResolutionSinglecast) { } { // TEST requested_resolution > frame size - auto rtp_parameters = channel_->GetRtpSendParameters(last_ssrc_); + auto rtp_parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1UL, rtp_parameters.encodings.size()); rtp_parameters.encodings[0].requested_resolution = {.width = 2 * 1280, .height = 2 * 720}; - channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); + channel_->AsSendChannel()->SetRtpSendParameters(last_ssrc_, rtp_parameters); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); auto streams = stream->GetVideoStreams(); @@ -9212,11 +9417,12 @@ TEST_F(WebRtcVideoChannelTest, RequestedResolutionSinglecastCropping) { EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, &frame_forwarder)); { - auto rtp_parameters = channel_->GetRtpSendParameters(last_ssrc_); + auto rtp_parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1UL, rtp_parameters.encodings.size()); rtp_parameters.encodings[0].requested_resolution = {.width = 720, .height = 720}; - channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); + channel_->AsSendChannel()->SetRtpSendParameters(last_ssrc_, rtp_parameters); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); @@ -9227,11 +9433,12 @@ TEST_F(WebRtcVideoChannelTest, RequestedResolutionSinglecastCropping) { } { - auto rtp_parameters = channel_->GetRtpSendParameters(last_ssrc_); + auto rtp_parameters = + 
channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1UL, rtp_parameters.encodings.size()); rtp_parameters.encodings[0].requested_resolution = {.width = 1280, .height = 1280}; - channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); + channel_->AsSendChannel()->SetRtpSendParameters(last_ssrc_, rtp_parameters); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); @@ -9242,11 +9449,12 @@ TEST_F(WebRtcVideoChannelTest, RequestedResolutionSinglecastCropping) { } { - auto rtp_parameters = channel_->GetRtpSendParameters(last_ssrc_); + auto rtp_parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1UL, rtp_parameters.encodings.size()); rtp_parameters.encodings[0].requested_resolution = {.width = 650, .height = 650}; - channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); + channel_->AsSendChannel()->SetRtpSendParameters(last_ssrc_, rtp_parameters); auto streams = stream->GetVideoStreams(); ASSERT_EQ(streams.size(), 1u); @@ -9270,7 +9478,7 @@ TEST_F(WebRtcVideoChannelTest, RequestedResolutionSimulcast) { { webrtc::RtpParameters rtp_parameters = - channel_->GetRtpSendParameters(last_ssrc_); + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(3UL, rtp_parameters.encodings.size()); rtp_parameters.encodings[0].requested_resolution = {.width = 320, .height = 180}; @@ -9278,7 +9486,7 @@ TEST_F(WebRtcVideoChannelTest, RequestedResolutionSimulcast) { .height = 360}; rtp_parameters.encodings[2].requested_resolution = {.width = 1280, .height = 720}; - channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); + channel_->AsSendChannel()->SetRtpSendParameters(last_ssrc_, rtp_parameters); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); @@ -9292,7 +9500,7 @@ TEST_F(WebRtcVideoChannelTest, RequestedResolutionSimulcast) { { webrtc::RtpParameters rtp_parameters = - channel_->GetRtpSendParameters(last_ssrc_); + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(3UL, rtp_parameters.encodings.size()); rtp_parameters.encodings[0].requested_resolution = {.width = 320, .height = 180}; @@ -9300,7 +9508,7 @@ TEST_F(WebRtcVideoChannelTest, RequestedResolutionSimulcast) { rtp_parameters.encodings[2].requested_resolution = {.width = 1280, .height = 720}; - channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); + channel_->AsSendChannel()->SetRtpSendParameters(last_ssrc_, rtp_parameters); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); @@ -9313,7 +9521,7 @@ TEST_F(WebRtcVideoChannelTest, RequestedResolutionSimulcast) { { webrtc::RtpParameters rtp_parameters = - channel_->GetRtpSendParameters(last_ssrc_); + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(3UL, rtp_parameters.encodings.size()); rtp_parameters.encodings[0].requested_resolution = {.width = 320, .height = 180}; @@ -9322,7 +9530,7 @@ TEST_F(WebRtcVideoChannelTest, RequestedResolutionSimulcast) { .height = 360}; rtp_parameters.encodings[2].requested_resolution = {.width = 960, .height = 540}; - channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); + channel_->AsSendChannel()->SetRtpSendParameters(last_ssrc_, rtp_parameters); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); diff --git a/third_party/libwebrtc/media/engine/webrtc_voice_engine.cc b/third_party/libwebrtc/media/engine/webrtc_voice_engine.cc index ef729f4f8724..b6296a199362 100644 --- a/third_party/libwebrtc/media/engine/webrtc_voice_engine.cc +++ b/third_party/libwebrtc/media/engine/webrtc_voice_engine.cc @@ 
-789,19 +789,19 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream void SetSendCodecSpec( const webrtc::AudioSendStream::Config::SendCodecSpec& send_codec_spec) { UpdateSendCodecSpec(send_codec_spec); - ReconfigureAudioSendStream(); + ReconfigureAudioSendStream(nullptr); } void SetRtpExtensions(const std::vector& extensions) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); config_.rtp.extensions = extensions; rtp_parameters_.header_extensions = extensions; - ReconfigureAudioSendStream(); + ReconfigureAudioSendStream(nullptr); } void SetExtmapAllowMixed(bool extmap_allow_mixed) { config_.rtp.extmap_allow_mixed = extmap_allow_mixed; - ReconfigureAudioSendStream(); + ReconfigureAudioSendStream(nullptr); } void SetMid(const std::string& mid) { @@ -810,14 +810,14 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream return; } config_.rtp.mid = mid; - ReconfigureAudioSendStream(); + ReconfigureAudioSendStream(nullptr); } void SetFrameEncryptor( rtc::scoped_refptr frame_encryptor) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); config_.frame_encryptor = frame_encryptor; - ReconfigureAudioSendStream(); + ReconfigureAudioSendStream(nullptr); } void SetAudioNetworkAdaptorConfig( @@ -830,7 +830,7 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream audio_network_adaptor_config_from_options_ = audio_network_adaptor_config; UpdateAudioNetworkAdaptorConfig(); UpdateAllowedBitrateRange(); - ReconfigureAudioSendStream(); + ReconfigureAudioSendStream(nullptr); } bool SetMaxSendBitrate(int bps) { @@ -848,7 +848,7 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream if (send_rate != config_.send_codec_spec->target_bitrate_bps) { config_.send_codec_spec->target_bitrate_bps = send_rate; - ReconfigureAudioSendStream(); + ReconfigureAudioSendStream(nullptr); } return true; } @@ -958,11 +958,12 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream return rtp_parameters_; } - webrtc::RTCError SetRtpParameters(const webrtc::RtpParameters& parameters) { + webrtc::RTCError SetRtpParameters(const webrtc::RtpParameters& parameters, + webrtc::SetParametersCallback callback) { webrtc::RTCError error = CheckRtpParametersInvalidModificationAndValues( rtp_parameters_, parameters); if (!error.ok()) { - return error; + return webrtc::InvokeSetParametersCallback(callback, error); } absl::optional send_rate; @@ -971,7 +972,8 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream parameters.encodings[0].max_bitrate_bps, *audio_codec_spec_); if (!send_rate) { - return webrtc::RTCError(webrtc::RTCErrorType::INTERNAL_ERROR); + return webrtc::InvokeSetParametersCallback( + callback, webrtc::RTCError(webrtc::RTCErrorType::INTERNAL_ERROR)); } } @@ -1001,7 +1003,9 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream // used. 
UpdateAudioNetworkAdaptorConfig(); UpdateAllowedBitrateRange(); - ReconfigureAudioSendStream(); + ReconfigureAudioSendStream(std::move(callback)); + } else { + webrtc::InvokeSetParametersCallback(callback, webrtc::RTCError::OK()); } rtp_parameters_.rtcp.cname = config_.rtp.c_name; @@ -1016,7 +1020,7 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream rtc::scoped_refptr frame_transformer) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); config_.frame_transformer = std::move(frame_transformer); - ReconfigureAudioSendStream(); + ReconfigureAudioSendStream(nullptr); } private: @@ -1037,7 +1041,6 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream // - a reasonable default of 32kbps min/max // - fixed target bitrate from codec spec // - lower min bitrate if adaptive ptime is enabled - // - bitrate configured in the rtp_parameter encodings settings const int kDefaultBitrateBps = 32000; config_.min_bitrate_bps = kDefaultBitrateBps; config_.max_bitrate_bps = kDefaultBitrateBps; @@ -1053,13 +1056,6 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream config_.min_bitrate_bps, static_cast(adaptive_ptime_config_.min_encoder_bitrate.bps())); } - - if (rtp_parameters_.encodings[0].min_bitrate_bps) { - config_.min_bitrate_bps = *rtp_parameters_.encodings[0].min_bitrate_bps; - } - if (rtp_parameters_.encodings[0].max_bitrate_bps) { - config_.max_bitrate_bps = *rtp_parameters_.encodings[0].max_bitrate_bps; - } } void UpdateSendCodecSpec( @@ -1106,10 +1102,10 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream audio_network_adaptor_config_from_options_; } - void ReconfigureAudioSendStream() { + void ReconfigureAudioSendStream(webrtc::SetParametersCallback callback) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); RTC_DCHECK(stream_); - stream_->Reconfigure(config_); + stream_->Reconfigure(config_, std::move(callback)); } int NumPreferredChannels() const override { return num_encoded_channels_; } @@ -1389,14 +1385,16 @@ webrtc::RtpParameters WebRtcVoiceMediaChannel::GetRtpSendParameters( webrtc::RTCError WebRtcVoiceMediaChannel::SetRtpSendParameters( uint32_t ssrc, - const webrtc::RtpParameters& parameters) { + const webrtc::RtpParameters& parameters, + webrtc::SetParametersCallback callback) { RTC_DCHECK_RUN_ON(worker_thread_); auto it = send_streams_.find(ssrc); if (it == send_streams_.end()) { RTC_LOG(LS_WARNING) << "Attempting to set RTP send parameters for stream " "with ssrc " << ssrc << " which doesn't exist."; - return webrtc::RTCError(webrtc::RTCErrorType::INTERNAL_ERROR); + return webrtc::InvokeSetParametersCallback( + callback, webrtc::RTCError(webrtc::RTCErrorType::INTERNAL_ERROR)); } // TODO(deadbeef): Handle setting parameters with a list of codecs in a @@ -1405,7 +1403,8 @@ webrtc::RTCError WebRtcVoiceMediaChannel::SetRtpSendParameters( if (current_parameters.codecs != parameters.codecs) { RTC_DLOG(LS_ERROR) << "Using SetParameters to change the set of codecs " "is not currently supported."; - return webrtc::RTCError(webrtc::RTCErrorType::UNSUPPORTED_PARAMETER); + return webrtc::InvokeSetParametersCallback( + callback, webrtc::RTCError(webrtc::RTCErrorType::INTERNAL_ERROR)); } if (!parameters.encodings.empty()) { @@ -1440,7 +1439,7 @@ webrtc::RTCError WebRtcVoiceMediaChannel::SetRtpSendParameters( // Codecs are handled at the WebRtcVoiceMediaChannel level. 
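
The audio send path now threads a webrtc::SetParametersCallback through SetRtpParameters and ReconfigureAudioSendStream into AudioSendStream::Reconfigure, so errors raised during the asynchronous reconfigure can reach the caller, while synchronous failures are reported immediately via webrtc::InvokeSetParametersCallback. A sketch of what a caller might look like, assuming (as the InvokeSetParametersCallback calls above suggest) that the callback is invocable with a webrtc::RTCError; voice_channel and ssrc are hypothetical placeholders, not names from this patch:

  // Illustrative only; voice_channel is a hypothetical
  // WebRtcVoiceMediaChannel* and ssrc a known send SSRC.
  webrtc::RtpParameters params = voice_channel->GetRtpSendParameters(ssrc);
  params.encodings[0].max_bitrate_bps = 32000;
  voice_channel->SetRtpSendParameters(
      ssrc, params, [](webrtc::RTCError error) {
        if (!error.ok()) {
          RTC_LOG(LS_WARNING) << "SetRtpSendParameters failed: "
                              << error.message();
        }
      });
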
webrtc::RtpParameters reduced_params = parameters; reduced_params.codecs.clear(); - return it->second->SetRtpParameters(reduced_params); + return it->second->SetRtpParameters(reduced_params, std::move(callback)); } webrtc::RtpParameters WebRtcVoiceMediaChannel::GetRtpReceiveParameters( diff --git a/third_party/libwebrtc/media/engine/webrtc_voice_engine.h b/third_party/libwebrtc/media/engine/webrtc_voice_engine.h index 0a501bea0a1d..daa964a655d5 100644 --- a/third_party/libwebrtc/media/engine/webrtc_voice_engine.h +++ b/third_party/libwebrtc/media/engine/webrtc_voice_engine.h @@ -156,7 +156,8 @@ class WebRtcVoiceMediaChannel final : public VoiceMediaChannel, webrtc::RtpParameters GetRtpSendParameters(uint32_t ssrc) const override; webrtc::RTCError SetRtpSendParameters( uint32_t ssrc, - const webrtc::RtpParameters& parameters) override; + const webrtc::RtpParameters& parameters, + webrtc::SetParametersCallback callback) override; webrtc::RtpParameters GetRtpReceiveParameters(uint32_t ssrc) const override; webrtc::RtpParameters GetDefaultRtpReceiveParameters() const override; diff --git a/third_party/libwebrtc/media/engine/webrtc_voice_engine_unittest.cc b/third_party/libwebrtc/media/engine/webrtc_voice_engine_unittest.cc index 9644fbdae2f9..2c638be7a4a5 100644 --- a/third_party/libwebrtc/media/engine/webrtc_voice_engine_unittest.cc +++ b/third_party/libwebrtc/media/engine/webrtc_voice_engine_unittest.cc @@ -29,6 +29,7 @@ #include "media/base/media_constants.h" #include "media/engine/fake_webrtc_call.h" #include "modules/audio_device/include/mock_audio_device.h" +#include "modules/audio_mixer/audio_mixer_impl.h" #include "modules/audio_processing/include/mock_audio_processing.h" #include "rtc_base/arraysize.h" #include "rtc_base/byte_order.h" @@ -53,7 +54,6 @@ using webrtc::BitrateConstraints; constexpr uint32_t kMaxUnsignaledRecvStreams = 4; const cricket::AudioCodec kPcmuCodec(0, "PCMU", 8000, 64000, 1); -const cricket::AudioCodec kIsacCodec(103, "ISAC", 16000, 32000, 1); const cricket::AudioCodec kOpusCodec(111, "opus", 48000, 32000, 2); const cricket::AudioCodec kG722CodecVoE(9, "G722", 16000, 64000, 1); const cricket::AudioCodec kG722CodecSdp(9, "G722", 8000, 64000, 1); @@ -247,7 +247,7 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam { if (!SetupChannel()) { return false; } - if (!channel_->AddSendStream(sp)) { + if (!channel_->AsSendChannel()->AddSendStream(sp)) { return false; } if (!use_null_apm_) { @@ -258,21 +258,23 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam { bool AddRecvStream(uint32_t ssrc) { EXPECT_TRUE(channel_); - return channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(ssrc)); + return channel_->AsReceiveChannel()->AddRecvStream( + cricket::StreamParams::CreateLegacy(ssrc)); } void SetupForMultiSendStream() { EXPECT_TRUE(SetupSendStream()); // Remove stream added in Setup. EXPECT_TRUE(call_.GetAudioSendStream(kSsrcX)); - EXPECT_TRUE(channel_->RemoveSendStream(kSsrcX)); + EXPECT_TRUE(channel_->AsSendChannel()->RemoveSendStream(kSsrcX)); // Verify the channel does not exist. 
EXPECT_FALSE(call_.GetAudioSendStream(kSsrcX)); } void DeliverPacket(const void* data, int len) { rtc::CopyOnWriteBuffer packet(reinterpret_cast(data), len); - channel_->OnPacketReceived(packet, /* packet_time_us */ -1); + channel_->AsReceiveChannel()->OnPacketReceived(packet, + /* packet_time_us */ -1); rtc::Thread::Current()->ProcessMessages(0); } @@ -338,8 +340,8 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam { if (caller) { // If this is a caller, local description will be applied and add the // send stream. - EXPECT_TRUE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrcX))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(kSsrcX))); } // Test we can only InsertDtmf when the other side supports telephone-event. @@ -354,8 +356,8 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam { if (!caller) { // If this is callee, there's no active send channel yet. EXPECT_FALSE(channel_->InsertDtmf(ssrc, 2, 123)); - EXPECT_TRUE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrcX))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(kSsrcX))); } // Check we fail if the ssrc is invalid. @@ -377,8 +379,8 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam { // For a caller, the answer will be applied in set remote description // where SetSendParameters() is called. EXPECT_TRUE(SetupChannel()); - EXPECT_TRUE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrcX))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(kSsrcX))); send_parameters_.extmap_allow_mixed = extmap_allow_mixed; SetSendParameters(send_parameters_); const webrtc::AudioSendStream::Config& config = GetSendStreamConfig(kSsrcX); @@ -390,8 +392,8 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam { // where SetExtmapAllowMixed() and AddSendStream() are called. EXPECT_TRUE(SetupChannel()); channel_->SetExtmapAllowMixed(extmap_allow_mixed); - EXPECT_TRUE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrcX))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(kSsrcX))); const webrtc::AudioSendStream::Config& config = GetSendStreamConfig(kSsrcX); EXPECT_EQ(extmap_allow_mixed, config.rtp.extmap_allow_mixed); @@ -419,11 +421,14 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam { // Sets the per-stream maximum bitrate limit for the specified SSRC. 
bool SetMaxBitrateForStream(int32_t ssrc, int bitrate) { - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(ssrc); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(ssrc); EXPECT_EQ(1UL, parameters.encodings.size()); parameters.encodings[0].max_bitrate_bps = bitrate; - return channel_->SetRtpSendParameters(ssrc, parameters).ok(); + return channel_->AsSendChannel() + ->SetRtpSendParameters(ssrc, parameters) + .ok(); } void SetGlobalMaxBitrate(const cricket::AudioCodec& codec, int bitrate) { @@ -445,6 +450,10 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam { return GetSendStreamConfig(ssrc).send_codec_spec->target_bitrate_bps; } + int GetMaxBitrate(int32_t ssrc) { + return GetSendStreamConfig(ssrc).max_bitrate_bps; + } + const absl::optional& GetAudioNetworkAdaptorConfig( int32_t ssrc) { return GetSendStreamConfig(ssrc).audio_network_adaptor_config; @@ -465,13 +474,14 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam { // Verify that reading back the parameters gives results // consistent with the Set() result. webrtc::RtpParameters resulting_parameters = - channel_->GetRtpSendParameters(kSsrcX); + channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX); EXPECT_EQ(1UL, resulting_parameters.encodings.size()); EXPECT_EQ(expected_result ? stream_max : -1, resulting_parameters.encodings[0].max_bitrate_bps); // Verify that the codec settings have the expected bitrate. EXPECT_EQ(expected_codec_bitrate, GetCodecBitrate(kSsrcX)); + EXPECT_EQ(expected_codec_bitrate, GetMaxBitrate(kSsrcX)); } void SetSendCodecsShouldWorkForBitrates(const char* min_bitrate_kbps, @@ -525,8 +535,8 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam { EXPECT_EQ(id, GetSendStreamConfig(kSsrcX).rtp.extensions[0].id); // Ensure extension is set properly on new stream. - EXPECT_TRUE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrcY))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(kSsrcY))); EXPECT_NE(call_.GetAudioSendStream(kSsrcX), call_.GetAudioSendStream(kSsrcY)); EXPECT_EQ(1u, GetSendStreamConfig(kSsrcY).rtp.extensions.size()); @@ -811,8 +821,8 @@ TEST_P(WebRtcVoiceEngineTestFake, CreateMediaChannel) { // Test that we can add a send stream and that it has the correct defaults. 
TEST_P(WebRtcVoiceEngineTestFake, CreateSendStream) { EXPECT_TRUE(SetupChannel()); - EXPECT_TRUE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrcX))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(kSsrcX))); const webrtc::AudioSendStream::Config& config = GetSendStreamConfig(kSsrcX); EXPECT_EQ(kSsrcX, config.rtp.ssrc); EXPECT_EQ("", config.rtp.c_name); @@ -852,7 +862,7 @@ TEST_P(WebRtcVoiceEngineTestFake, OpusSupportsTransportCc) { TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecs) { EXPECT_TRUE(SetupChannel()); cricket::AudioRecvParameters parameters; - parameters.codecs.push_back(kIsacCodec); + parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kTelephoneEventCodec1); parameters.codecs.push_back(kTelephoneEventCodec2); @@ -863,7 +873,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecs) { EXPECT_THAT(GetRecvStreamConfig(kSsrcX).decoder_map, (ContainerEq>( {{0, {"PCMU", 8000, 1}}, - {106, {"ISAC", 16000, 1}}, + {106, {"OPUS", 48000, 2}}, {126, {"telephone-event", 8000, 1}}, {107, {"telephone-event", 32000, 1}}}))); } @@ -872,7 +882,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecs) { TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsUnsupportedCodec) { EXPECT_TRUE(SetupChannel()); cricket::AudioRecvParameters parameters; - parameters.codecs.push_back(kIsacCodec); + parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(cricket::AudioCodec(127, "XYZ", 32000, 0, 1)); EXPECT_FALSE(channel_->SetRecvParameters(parameters)); } @@ -881,9 +891,9 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsUnsupportedCodec) { TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsDuplicatePayloadType) { EXPECT_TRUE(SetupChannel()); cricket::AudioRecvParameters parameters; - parameters.codecs.push_back(kIsacCodec); + parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kCn16000Codec); - parameters.codecs[1].id = kIsacCodec.id; + parameters.codecs[1].id = kOpusCodec.id; EXPECT_FALSE(channel_->SetRecvParameters(parameters)); } @@ -891,32 +901,27 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsDuplicatePayloadType) { TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsWithOpusNoStereo) { EXPECT_TRUE(SetupChannel()); cricket::AudioRecvParameters parameters; - parameters.codecs.push_back(kIsacCodec); parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kOpusCodec); EXPECT_TRUE(channel_->SetRecvParameters(parameters)); EXPECT_TRUE(AddRecvStream(kSsrcX)); EXPECT_THAT(GetRecvStreamConfig(kSsrcX).decoder_map, (ContainerEq>( - {{0, {"PCMU", 8000, 1}}, - {103, {"ISAC", 16000, 1}}, - {111, {"opus", 48000, 2}}}))); + {{0, {"PCMU", 8000, 1}}, {111, {"opus", 48000, 2}}}))); } // Test that we can decode OPUS with stereo = 0. 
TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsWithOpus0Stereo) { EXPECT_TRUE(SetupChannel()); cricket::AudioRecvParameters parameters; - parameters.codecs.push_back(kIsacCodec); parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kOpusCodec); - parameters.codecs[2].params["stereo"] = "0"; + parameters.codecs[1].params["stereo"] = "0"; EXPECT_TRUE(channel_->SetRecvParameters(parameters)); EXPECT_TRUE(AddRecvStream(kSsrcX)); EXPECT_THAT(GetRecvStreamConfig(kSsrcX).decoder_map, (ContainerEq>( {{0, {"PCMU", 8000, 1}}, - {103, {"ISAC", 16000, 1}}, {111, {"opus", 48000, 2, {{"stereo", "0"}}}}}))); } @@ -924,16 +929,14 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsWithOpus0Stereo) { TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsWithOpus1Stereo) { EXPECT_TRUE(SetupChannel()); cricket::AudioRecvParameters parameters; - parameters.codecs.push_back(kIsacCodec); parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kOpusCodec); - parameters.codecs[2].params["stereo"] = "1"; + parameters.codecs[1].params["stereo"] = "1"; EXPECT_TRUE(channel_->SetRecvParameters(parameters)); EXPECT_TRUE(AddRecvStream(kSsrcX)); EXPECT_THAT(GetRecvStreamConfig(kSsrcX).decoder_map, (ContainerEq>( {{0, {"PCMU", 8000, 1}}, - {103, {"ISAC", 16000, 1}}, {111, {"opus", 48000, 2, {{"stereo", "1"}}}}}))); } @@ -941,7 +944,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsWithOpus1Stereo) { TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsWithMultipleStreams) { EXPECT_TRUE(SetupChannel()); cricket::AudioRecvParameters parameters; - parameters.codecs.push_back(kIsacCodec); + parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kTelephoneEventCodec1); parameters.codecs.push_back(kTelephoneEventCodec2); @@ -953,7 +956,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsWithMultipleStreams) { EXPECT_THAT(GetRecvStreamConfig(ssrc).decoder_map, (ContainerEq>( {{0, {"PCMU", 8000, 1}}, - {106, {"ISAC", 16000, 1}}, + {106, {"OPUS", 48000, 2}}, {126, {"telephone-event", 8000, 1}}, {107, {"telephone-event", 32000, 1}}}))); } @@ -962,20 +965,20 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsWithMultipleStreams) { TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsAfterAddingStreams) { EXPECT_TRUE(SetupRecvStream()); cricket::AudioRecvParameters parameters; - parameters.codecs.push_back(kIsacCodec); + parameters.codecs.push_back(kOpusCodec); parameters.codecs[0].id = 106; // collide with existing CN 32k EXPECT_TRUE(channel_->SetRecvParameters(parameters)); const auto& dm = GetRecvStreamConfig(kSsrcX).decoder_map; ASSERT_EQ(1u, dm.count(106)); - EXPECT_EQ(webrtc::SdpAudioFormat("isac", 16000, 1), dm.at(106)); + EXPECT_EQ(webrtc::SdpAudioFormat("opus", 48000, 2), dm.at(106)); } // Test that we can apply the same set of codecs again while playing. TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsWhilePlaying) { EXPECT_TRUE(SetupRecvStream()); cricket::AudioRecvParameters parameters; - parameters.codecs.push_back(kIsacCodec); + parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kCn16000Codec); EXPECT_TRUE(channel_->SetRecvParameters(parameters)); channel_->SetPlayout(true); @@ -983,7 +986,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsWhilePlaying) { // Remapping a payload type to a different codec should fail. 
parameters.codecs[0] = kOpusCodec; - parameters.codecs[0].id = kIsacCodec.id; + parameters.codecs[0].id = kPcmuCodec.id; EXPECT_FALSE(channel_->SetRecvParameters(parameters)); EXPECT_TRUE(GetRecvStream(kSsrcX).started()); } @@ -992,7 +995,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsWhilePlaying) { TEST_P(WebRtcVoiceEngineTestFake, AddRecvCodecsWhilePlaying) { EXPECT_TRUE(SetupRecvStream()); cricket::AudioRecvParameters parameters; - parameters.codecs.push_back(kIsacCodec); + parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kCn16000Codec); EXPECT_TRUE(channel_->SetRecvParameters(parameters)); channel_->SetPlayout(true); @@ -1007,7 +1010,7 @@ TEST_P(WebRtcVoiceEngineTestFake, AddRecvCodecsWhilePlaying) { TEST_P(WebRtcVoiceEngineTestFake, ChangeRecvCodecPayloadType) { EXPECT_TRUE(SetupRecvStream()); cricket::AudioRecvParameters parameters; - parameters.codecs.push_back(kIsacCodec); + parameters.codecs.push_back(kOpusCodec); EXPECT_TRUE(channel_->SetRecvParameters(parameters)); ++parameters.codecs[0].id; @@ -1035,9 +1038,6 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendBandwidthAuto) { // value. autobw is enabled for the following tests because the target // bitrate is <= 0. - // ISAC, default bitrate == 32000. - TestMaxSendBandwidth(kIsacCodec, 0, true, 32000); - // PCMU, default bitrate == 64000. TestMaxSendBandwidth(kPcmuCodec, -1, true, 64000); @@ -1048,11 +1048,6 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendBandwidthAuto) { TEST_P(WebRtcVoiceEngineTestFake, SetMaxSendBandwidthMultiRateAsCaller) { EXPECT_TRUE(SetupSendStream()); - // ISAC, default bitrate == 32000. - TestMaxSendBandwidth(kIsacCodec, 16000, true, 16000); - // Rates above the max (56000) should be capped. - TestMaxSendBandwidth(kIsacCodec, 100000, true, 32000); - // opus, default bitrate == 64000. TestMaxSendBandwidth(kOpusCodec, 96000, true, 96000); TestMaxSendBandwidth(kOpusCodec, 48000, true, 48000); @@ -1084,8 +1079,8 @@ TEST_P(WebRtcVoiceEngineTestFake, SetMaxSendBandwidthMultiRateAsCallee) { parameters.max_bandwidth_bps = kDesiredBitrate; SetSendParameters(parameters); - EXPECT_TRUE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrcX))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(kSsrcX))); EXPECT_EQ(kDesiredBitrate, GetCodecBitrate(kSsrcX)); } @@ -1136,12 +1131,13 @@ TEST_P(WebRtcVoiceEngineTestFake, SetMaxBitratePerStream) { TEST_P(WebRtcVoiceEngineTestFake, CannotSetMaxBitrateForNonexistentStream) { EXPECT_TRUE(SetupChannel()); webrtc::RtpParameters nonexistent_parameters = - channel_->GetRtpSendParameters(kSsrcX); + channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX); EXPECT_EQ(0u, nonexistent_parameters.encodings.size()); nonexistent_parameters.encodings.push_back(webrtc::RtpEncodingParameters()); - EXPECT_FALSE( - channel_->SetRtpSendParameters(kSsrcX, nonexistent_parameters).ok()); + EXPECT_FALSE(channel_->AsSendChannel() + ->SetRtpSendParameters(kSsrcX, nonexistent_parameters) + .ok()); } TEST_P(WebRtcVoiceEngineTestFake, @@ -1152,21 +1148,26 @@ TEST_P(WebRtcVoiceEngineTestFake, // for each encoding individually. EXPECT_TRUE(SetupSendStream()); - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(kSsrcX); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX); // Two or more encodings should result in failure. 
parameters.encodings.push_back(webrtc::RtpEncodingParameters()); - EXPECT_FALSE(channel_->SetRtpSendParameters(kSsrcX, parameters).ok()); + EXPECT_FALSE( + channel_->AsSendChannel()->SetRtpSendParameters(kSsrcX, parameters).ok()); // Zero encodings should also fail. parameters.encodings.clear(); - EXPECT_FALSE(channel_->SetRtpSendParameters(kSsrcX, parameters).ok()); + EXPECT_FALSE( + channel_->AsSendChannel()->SetRtpSendParameters(kSsrcX, parameters).ok()); } // Changing the SSRC through RtpParameters is not allowed. TEST_P(WebRtcVoiceEngineTestFake, CannotSetSsrcInRtpSendParameters) { EXPECT_TRUE(SetupSendStream()); - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(kSsrcX); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX); parameters.encodings[0].ssrc = 0xdeadbeef; - EXPECT_FALSE(channel_->SetRtpSendParameters(kSsrcX, parameters).ok()); + EXPECT_FALSE( + channel_->AsSendChannel()->SetRtpSendParameters(kSsrcX, parameters).ok()); } // Test that a stream will not be sending if its encoding is made @@ -1176,34 +1177,40 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRtpParametersEncodingsActive) { SetSend(true); EXPECT_TRUE(GetSendStream(kSsrcX).IsSending()); // Get current parameters and change "active" to false. - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(kSsrcX); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX); ASSERT_EQ(1u, parameters.encodings.size()); ASSERT_TRUE(parameters.encodings[0].active); parameters.encodings[0].active = false; - EXPECT_TRUE(channel_->SetRtpSendParameters(kSsrcX, parameters).ok()); + EXPECT_TRUE( + channel_->AsSendChannel()->SetRtpSendParameters(kSsrcX, parameters).ok()); EXPECT_FALSE(GetSendStream(kSsrcX).IsSending()); // Now change it back to active and verify we resume sending. // This should occur even when other parameters are updated. parameters.encodings[0].active = true; parameters.encodings[0].max_bitrate_bps = absl::optional(6000); - EXPECT_TRUE(channel_->SetRtpSendParameters(kSsrcX, parameters).ok()); + EXPECT_TRUE( + channel_->AsSendChannel()->SetRtpSendParameters(kSsrcX, parameters).ok()); EXPECT_TRUE(GetSendStream(kSsrcX).IsSending()); } TEST_P(WebRtcVoiceEngineTestFake, SetRtpParametersAdaptivePtime) { EXPECT_TRUE(SetupSendStream()); // Get current parameters and change "adaptive_ptime" to true. 
- webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(kSsrcX); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX); ASSERT_EQ(1u, parameters.encodings.size()); ASSERT_FALSE(parameters.encodings[0].adaptive_ptime); parameters.encodings[0].adaptive_ptime = true; - EXPECT_TRUE(channel_->SetRtpSendParameters(kSsrcX, parameters).ok()); + EXPECT_TRUE( + channel_->AsSendChannel()->SetRtpSendParameters(kSsrcX, parameters).ok()); EXPECT_TRUE(GetAudioNetworkAdaptorConfig(kSsrcX)); EXPECT_EQ(16000, GetSendStreamConfig(kSsrcX).min_bitrate_bps); parameters.encodings[0].adaptive_ptime = false; - EXPECT_TRUE(channel_->SetRtpSendParameters(kSsrcX, parameters).ok()); + EXPECT_TRUE( + channel_->AsSendChannel()->SetRtpSendParameters(kSsrcX, parameters).ok()); EXPECT_FALSE(GetAudioNetworkAdaptorConfig(kSsrcX)); EXPECT_EQ(32000, GetSendStreamConfig(kSsrcX).min_bitrate_bps); } @@ -1217,9 +1224,11 @@ TEST_P(WebRtcVoiceEngineTestFake, EXPECT_EQ(send_parameters_.options.audio_network_adaptor_config, GetAudioNetworkAdaptorConfig(kSsrcX)); - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(kSsrcX); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX); parameters.encodings[0].adaptive_ptime = false; - EXPECT_TRUE(channel_->SetRtpSendParameters(kSsrcX, parameters).ok()); + EXPECT_TRUE( + channel_->AsSendChannel()->SetRtpSendParameters(kSsrcX, parameters).ok()); EXPECT_EQ(send_parameters_.options.audio_network_adaptor_config, GetAudioNetworkAdaptorConfig(kSsrcX)); } @@ -1237,8 +1246,8 @@ TEST_P(WebRtcVoiceEngineTestFake, RtpParametersArePerStream) { SetupForMultiSendStream(); // Create send streams. for (uint32_t ssrc : kSsrcs4) { - EXPECT_TRUE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(ssrc))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(ssrc))); } // Configure one stream to be limited by the stream config, another to be // limited by the global max, and the third one with no per-stream limit @@ -1264,13 +1273,14 @@ TEST_P(WebRtcVoiceEngineTestFake, RtpParametersArePerStream) { TEST_P(WebRtcVoiceEngineTestFake, GetRtpSendParametersCodecs) { EXPECT_TRUE(SetupSendStream()); cricket::AudioSendParameters parameters; - parameters.codecs.push_back(kIsacCodec); + parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kPcmuCodec); SetSendParameters(parameters); - webrtc::RtpParameters rtp_parameters = channel_->GetRtpSendParameters(kSsrcX); + webrtc::RtpParameters rtp_parameters = + channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX); ASSERT_EQ(2u, rtp_parameters.codecs.size()); - EXPECT_EQ(kIsacCodec.ToCodecParameters(), rtp_parameters.codecs[0]); + EXPECT_EQ(kOpusCodec.ToCodecParameters(), rtp_parameters.codecs[0]); EXPECT_EQ(kPcmuCodec.ToCodecParameters(), rtp_parameters.codecs[1]); } @@ -1280,7 +1290,8 @@ TEST_P(WebRtcVoiceEngineTestFake, GetRtpSendParametersRtcpCname) { params.cname = "rtcpcname"; EXPECT_TRUE(SetupSendStream(params)); - webrtc::RtpParameters rtp_parameters = channel_->GetRtpSendParameters(kSsrcX); + webrtc::RtpParameters rtp_parameters = + channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX); EXPECT_STREQ("rtcpcname", rtp_parameters.rtcp.cname.c_str()); } @@ -1288,20 +1299,22 @@ TEST_P(WebRtcVoiceEngineTestFake, DetectRtpSendParameterHeaderExtensionsChange) { EXPECT_TRUE(SetupSendStream()); - webrtc::RtpParameters rtp_parameters = channel_->GetRtpSendParameters(kSsrcX); + webrtc::RtpParameters 
rtp_parameters = + channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX); rtp_parameters.header_extensions.emplace_back(); EXPECT_NE(0u, rtp_parameters.header_extensions.size()); webrtc::RTCError result = - channel_->SetRtpSendParameters(kSsrcX, rtp_parameters); + channel_->AsSendChannel()->SetRtpSendParameters(kSsrcX, rtp_parameters); EXPECT_EQ(webrtc::RTCErrorType::INVALID_MODIFICATION, result.type()); } // Test that GetRtpSendParameters returns an SSRC. TEST_P(WebRtcVoiceEngineTestFake, GetRtpSendParametersSsrc) { EXPECT_TRUE(SetupSendStream()); - webrtc::RtpParameters rtp_parameters = channel_->GetRtpSendParameters(kSsrcX); + webrtc::RtpParameters rtp_parameters = + channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX); ASSERT_EQ(1u, rtp_parameters.encodings.size()); EXPECT_EQ(kSsrcX, rtp_parameters.encodings[0].ssrc); } @@ -1310,18 +1323,23 @@ TEST_P(WebRtcVoiceEngineTestFake, GetRtpSendParametersSsrc) { TEST_P(WebRtcVoiceEngineTestFake, SetAndGetRtpSendParameters) { EXPECT_TRUE(SetupSendStream()); cricket::AudioSendParameters parameters; - parameters.codecs.push_back(kIsacCodec); + parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kPcmuCodec); SetSendParameters(parameters); - webrtc::RtpParameters initial_params = channel_->GetRtpSendParameters(kSsrcX); + webrtc::RtpParameters initial_params = + channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX); // We should be able to set the params we just got. - EXPECT_TRUE(channel_->SetRtpSendParameters(kSsrcX, initial_params).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(kSsrcX, initial_params) + .ok()); // ... And this shouldn't change the params returned by GetRtpSendParameters. - webrtc::RtpParameters new_params = channel_->GetRtpSendParameters(kSsrcX); - EXPECT_EQ(initial_params, channel_->GetRtpSendParameters(kSsrcX)); + webrtc::RtpParameters new_params = + channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX); + EXPECT_EQ(initial_params, + channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX)); } // Test that max_bitrate_bps in send stream config gets updated correctly when @@ -1332,13 +1350,16 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRtpSendParameterUpdatesMaxBitrate) { send_parameters.codecs.push_back(kOpusCodec); SetSendParameters(send_parameters); - webrtc::RtpParameters rtp_parameters = channel_->GetRtpSendParameters(kSsrcX); + webrtc::RtpParameters rtp_parameters = + channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX); // Expect empty on parameters.encodings[0].max_bitrate_bps; EXPECT_FALSE(rtp_parameters.encodings[0].max_bitrate_bps); constexpr int kMaxBitrateBps = 6000; rtp_parameters.encodings[0].max_bitrate_bps = kMaxBitrateBps; - EXPECT_TRUE(channel_->SetRtpSendParameters(kSsrcX, rtp_parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(kSsrcX, rtp_parameters) + .ok()); const int max_bitrate = GetSendStreamConfig(kSsrcX).max_bitrate_bps; EXPECT_EQ(max_bitrate, kMaxBitrateBps); @@ -1348,35 +1369,44 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRtpSendParameterUpdatesMaxBitrate) { // a value <= 0, setting the parameters returns false. 
TEST_P(WebRtcVoiceEngineTestFake, SetRtpSendParameterInvalidBitratePriority) { EXPECT_TRUE(SetupSendStream()); - webrtc::RtpParameters rtp_parameters = channel_->GetRtpSendParameters(kSsrcX); + webrtc::RtpParameters rtp_parameters = + channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX); EXPECT_EQ(1UL, rtp_parameters.encodings.size()); EXPECT_EQ(webrtc::kDefaultBitratePriority, rtp_parameters.encodings[0].bitrate_priority); rtp_parameters.encodings[0].bitrate_priority = 0; - EXPECT_FALSE(channel_->SetRtpSendParameters(kSsrcX, rtp_parameters).ok()); + EXPECT_FALSE(channel_->AsSendChannel() + ->SetRtpSendParameters(kSsrcX, rtp_parameters) + .ok()); rtp_parameters.encodings[0].bitrate_priority = -1.0; - EXPECT_FALSE(channel_->SetRtpSendParameters(kSsrcX, rtp_parameters).ok()); + EXPECT_FALSE(channel_->AsSendChannel() + ->SetRtpSendParameters(kSsrcX, rtp_parameters) + .ok()); } // Test that the bitrate_priority in the send stream config gets updated when // SetRtpSendParameters is set for the VoiceMediaChannel. TEST_P(WebRtcVoiceEngineTestFake, SetRtpSendParameterUpdatesBitratePriority) { EXPECT_TRUE(SetupSendStream()); - webrtc::RtpParameters rtp_parameters = channel_->GetRtpSendParameters(kSsrcX); + webrtc::RtpParameters rtp_parameters = + channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX); EXPECT_EQ(1UL, rtp_parameters.encodings.size()); EXPECT_EQ(webrtc::kDefaultBitratePriority, rtp_parameters.encodings[0].bitrate_priority); double new_bitrate_priority = 2.0; rtp_parameters.encodings[0].bitrate_priority = new_bitrate_priority; - EXPECT_TRUE(channel_->SetRtpSendParameters(kSsrcX, rtp_parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(kSsrcX, rtp_parameters) + .ok()); // The priority should get set for both the audio channel's rtp parameters // and the audio send stream's audio config. - EXPECT_EQ( - new_bitrate_priority, - channel_->GetRtpSendParameters(kSsrcX).encodings[0].bitrate_priority); + EXPECT_EQ(new_bitrate_priority, channel_->AsSendChannel() + ->GetRtpSendParameters(kSsrcX) + .encodings[0] + .bitrate_priority); EXPECT_EQ(new_bitrate_priority, GetSendStreamConfig(kSsrcX).bitrate_priority); } @@ -1384,14 +1414,14 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRtpSendParameterUpdatesBitratePriority) { TEST_P(WebRtcVoiceEngineTestFake, GetRtpReceiveParametersCodecs) { EXPECT_TRUE(SetupRecvStream()); cricket::AudioRecvParameters parameters; - parameters.codecs.push_back(kIsacCodec); + parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kPcmuCodec); EXPECT_TRUE(channel_->SetRecvParameters(parameters)); webrtc::RtpParameters rtp_parameters = channel_->GetRtpReceiveParameters(kSsrcX); ASSERT_EQ(2u, rtp_parameters.codecs.size()); - EXPECT_EQ(kIsacCodec.ToCodecParameters(), rtp_parameters.codecs[0]); + EXPECT_EQ(kOpusCodec.ToCodecParameters(), rtp_parameters.codecs[0]); EXPECT_EQ(kPcmuCodec.ToCodecParameters(), rtp_parameters.codecs[1]); } @@ -1408,7 +1438,7 @@ TEST_P(WebRtcVoiceEngineTestFake, GetRtpReceiveParametersSsrc) { TEST_P(WebRtcVoiceEngineTestFake, SetAndGetRtpReceiveParameters) { EXPECT_TRUE(SetupRecvStream()); cricket::AudioRecvParameters parameters; - parameters.codecs.push_back(kIsacCodec); + parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kPcmuCodec); EXPECT_TRUE(channel_->SetRecvParameters(parameters)); @@ -1430,7 +1460,7 @@ TEST_P(WebRtcVoiceEngineTestFake, GetRtpReceiveParametersWithUnsignaledSsrc) { // Call necessary methods to configure receiving a default stream as // soon as it arrives. 
cricket::AudioRecvParameters parameters; - parameters.codecs.push_back(kIsacCodec); + parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kPcmuCodec); EXPECT_TRUE(channel_->SetRecvParameters(parameters)); @@ -1462,7 +1492,7 @@ TEST_P(WebRtcVoiceEngineTestFake, GetRtpReceiveParametersWithUnsignaledSsrc) { TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecs) { EXPECT_TRUE(SetupSendStream()); cricket::AudioSendParameters parameters; - parameters.codecs.push_back(kIsacCodec); + parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kCn8000Codec); parameters.codecs[0].id = 96; @@ -1471,7 +1501,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecs) { const auto& send_codec_spec = *GetSendStreamConfig(kSsrcX).send_codec_spec; EXPECT_EQ(96, send_codec_spec.payload_type); EXPECT_EQ(22000, send_codec_spec.target_bitrate_bps); - EXPECT_STRCASEEQ("ISAC", send_codec_spec.format.name.c_str()); + EXPECT_STRCASEEQ("OPUS", send_codec_spec.format.name.c_str()); EXPECT_NE(send_codec_spec.format.clockrate_hz, 8000); EXPECT_EQ(absl::nullopt, send_codec_spec.cng_payload_type); EXPECT_FALSE(channel_->CanInsertDtmf()); @@ -1567,7 +1597,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsRedFmtpAmountOfRedundancy) { TEST_P(WebRtcVoiceEngineTestFake, DontRecreateSendStream) { EXPECT_TRUE(SetupSendStream()); cricket::AudioSendParameters parameters; - parameters.codecs.push_back(kIsacCodec); + parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kCn8000Codec); parameters.codecs[0].id = 96; @@ -1802,8 +1832,8 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecEnableNackAsCallee) { // NACK should be enabled even with no send stream. EXPECT_EQ(kRtpHistoryMs, GetRecvStreamConfig(kSsrcX).rtp.nack.rtp_history_ms); - EXPECT_TRUE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrcX))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(kSsrcX))); } // Test that we can enable NACK on receive streams. @@ -1840,7 +1870,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecDisableNackRecvStreams) { TEST_P(WebRtcVoiceEngineTestFake, AddRecvStreamEnableNack) { EXPECT_TRUE(SetupSendStream()); cricket::AudioSendParameters parameters; - parameters.codecs.push_back(kIsacCodec); + parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kCn16000Codec); parameters.codecs[0].AddFeedbackParam(cricket::FeedbackParam( cricket::kRtcpFbParamNack, cricket::kParamValueEmpty)); @@ -1860,7 +1890,7 @@ TEST_P(WebRtcVoiceEngineTestFake, TransportCcCanBeEnabledAndDisabled) { SetSendParameters(send_parameters); cricket::AudioRecvParameters recv_parameters; - recv_parameters.codecs.push_back(kIsacCodec); + recv_parameters.codecs.push_back(kOpusCodec); EXPECT_TRUE(channel_->SetRecvParameters(recv_parameters)); EXPECT_TRUE(AddRecvStream(kSsrcX)); ASSERT_TRUE(call_.GetAudioReceiveStream(kSsrcX) != nullptr); @@ -1872,8 +1902,8 @@ TEST_P(WebRtcVoiceEngineTestFake, TransportCcCanBeEnabledAndDisabled) { EXPECT_TRUE(call_.GetAudioReceiveStream(kSsrcX)->transport_cc()); } -// Test that we can switch back and forth between Opus and ISAC with CN. -TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsIsacOpusSwitching) { +// Test that we can switch back and forth between Opus and PCMU with CN. 
+TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsOpusPcmuSwitching) { EXPECT_TRUE(SetupSendStream()); cricket::AudioSendParameters opus_parameters; @@ -1885,15 +1915,15 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsIsacOpusSwitching) { EXPECT_STRCASEEQ("opus", spec.format.name.c_str()); } - cricket::AudioSendParameters isac_parameters; - isac_parameters.codecs.push_back(kIsacCodec); - isac_parameters.codecs.push_back(kCn16000Codec); - isac_parameters.codecs.push_back(kOpusCodec); - SetSendParameters(isac_parameters); + cricket::AudioSendParameters pcmu_parameters; + pcmu_parameters.codecs.push_back(kPcmuCodec); + pcmu_parameters.codecs.push_back(kCn16000Codec); + pcmu_parameters.codecs.push_back(kOpusCodec); + SetSendParameters(pcmu_parameters); { const auto& spec = *GetSendStreamConfig(kSsrcX).send_codec_spec; - EXPECT_EQ(103, spec.payload_type); - EXPECT_STRCASEEQ("ISAC", spec.format.name.c_str()); + EXPECT_EQ(0, spec.payload_type); + EXPECT_STRCASEEQ("PCMU", spec.format.name.c_str()); } SetSendParameters(opus_parameters); @@ -1908,33 +1938,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsIsacOpusSwitching) { TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsBitrate) { EXPECT_TRUE(SetupSendStream()); cricket::AudioSendParameters parameters; - parameters.codecs.push_back(kIsacCodec); // bitrate == 32000 - SetSendParameters(parameters); - { - const auto& spec = *GetSendStreamConfig(kSsrcX).send_codec_spec; - EXPECT_EQ(103, spec.payload_type); - EXPECT_STRCASEEQ("ISAC", spec.format.name.c_str()); - EXPECT_EQ(32000, spec.target_bitrate_bps); - } - - parameters.codecs[0].bitrate = 0; // bitrate == default - SetSendParameters(parameters); - { - const auto& spec = *GetSendStreamConfig(kSsrcX).send_codec_spec; - EXPECT_EQ(103, spec.payload_type); - EXPECT_STRCASEEQ("ISAC", spec.format.name.c_str()); - EXPECT_EQ(32000, spec.target_bitrate_bps); - } - parameters.codecs[0].bitrate = 28000; // bitrate == 28000 - SetSendParameters(parameters); - { - const auto& spec = *GetSendStreamConfig(kSsrcX).send_codec_spec; - EXPECT_EQ(103, spec.payload_type); - EXPECT_STRCASEEQ("ISAC", spec.format.name.c_str()); - EXPECT_EQ(28000, spec.target_bitrate_bps); - } - - parameters.codecs[0] = kPcmuCodec; // bitrate == 64000 + parameters.codecs.push_back(kPcmuCodec); SetSendParameters(parameters); { const auto& spec = *GetSendStreamConfig(kSsrcX).send_codec_spec; @@ -1976,14 +1980,14 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsDTMFOnTop) { EXPECT_TRUE(SetupSendStream()); cricket::AudioSendParameters parameters; parameters.codecs.push_back(kTelephoneEventCodec1); - parameters.codecs.push_back(kIsacCodec); + parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kPcmuCodec); parameters.codecs[0].id = 98; // DTMF parameters.codecs[1].id = 96; SetSendParameters(parameters); const auto& spec = *GetSendStreamConfig(kSsrcX).send_codec_spec; EXPECT_EQ(96, spec.payload_type); - EXPECT_STRCASEEQ("ISAC", spec.format.name.c_str()); + EXPECT_STRCASEEQ("OPUS", spec.format.name.c_str()); SetSend(true); EXPECT_TRUE(channel_->CanInsertDtmf()); } @@ -2009,7 +2013,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsDTMFPayloadTypeOutOfRange) { EXPECT_TRUE(SetupSendStream()); cricket::AudioSendParameters parameters; parameters.codecs.push_back(kTelephoneEventCodec2); - parameters.codecs.push_back(kIsacCodec); + parameters.codecs.push_back(kOpusCodec); parameters.codecs[0].id = 0; // DTMF parameters.codecs[1].id = 96; SetSendParameters(parameters); @@ -2031,15 +2035,13 @@ TEST_P(WebRtcVoiceEngineTestFake, 
SetSendCodecsDTMFPayloadTypeOutOfRange) { TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsCNOnTop) { EXPECT_TRUE(SetupSendStream()); cricket::AudioSendParameters parameters; - parameters.codecs.push_back(kCn16000Codec); - parameters.codecs.push_back(kIsacCodec); + parameters.codecs.push_back(kCn8000Codec); parameters.codecs.push_back(kPcmuCodec); - parameters.codecs[0].id = 98; // wideband CN - parameters.codecs[1].id = 96; + parameters.codecs[0].id = 98; // narrowband CN SetSendParameters(parameters); const auto& send_codec_spec = *GetSendStreamConfig(kSsrcX).send_codec_spec; - EXPECT_EQ(96, send_codec_spec.payload_type); - EXPECT_STRCASEEQ("ISAC", send_codec_spec.format.name.c_str()); + EXPECT_EQ(0, send_codec_spec.payload_type); + EXPECT_STRCASEEQ("PCMU", send_codec_spec.format.name.c_str()); EXPECT_EQ(98, send_codec_spec.cng_payload_type); } @@ -2047,19 +2049,17 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsCNOnTop) { TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsCNandDTMFAsCaller) { EXPECT_TRUE(SetupSendStream()); cricket::AudioSendParameters parameters; - parameters.codecs.push_back(kIsacCodec); parameters.codecs.push_back(kPcmuCodec); - // TODO(juberti): cn 32000 parameters.codecs.push_back(kCn16000Codec); parameters.codecs.push_back(kCn8000Codec); parameters.codecs.push_back(kTelephoneEventCodec1); parameters.codecs[0].id = 96; - parameters.codecs[2].id = 97; // wideband CN - parameters.codecs[4].id = 98; // DTMF + parameters.codecs[2].id = 97; // narrowband CN + parameters.codecs[3].id = 98; // DTMF SetSendParameters(parameters); const auto& send_codec_spec = *GetSendStreamConfig(kSsrcX).send_codec_spec; EXPECT_EQ(96, send_codec_spec.payload_type); - EXPECT_STRCASEEQ("ISAC", send_codec_spec.format.name.c_str()); + EXPECT_STRCASEEQ("PCMU", send_codec_spec.format.name.c_str()); EXPECT_EQ(1u, send_codec_spec.format.num_channels); EXPECT_EQ(97, send_codec_spec.cng_payload_type); SetSend(true); @@ -2070,22 +2070,20 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsCNandDTMFAsCaller) { TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsCNandDTMFAsCallee) { EXPECT_TRUE(SetupChannel()); cricket::AudioSendParameters parameters; - parameters.codecs.push_back(kIsacCodec); parameters.codecs.push_back(kPcmuCodec); - // TODO(juberti): cn 32000 parameters.codecs.push_back(kCn16000Codec); parameters.codecs.push_back(kCn8000Codec); parameters.codecs.push_back(kTelephoneEventCodec2); parameters.codecs[0].id = 96; - parameters.codecs[2].id = 97; // wideband CN - parameters.codecs[4].id = 98; // DTMF + parameters.codecs[2].id = 97; // narrowband CN + parameters.codecs[3].id = 98; // DTMF SetSendParameters(parameters); - EXPECT_TRUE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrcX))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(kSsrcX))); const auto& send_codec_spec = *GetSendStreamConfig(kSsrcX).send_codec_spec; EXPECT_EQ(96, send_codec_spec.payload_type); - EXPECT_STRCASEEQ("ISAC", send_codec_spec.format.name.c_str()); + EXPECT_STRCASEEQ("PCMU", send_codec_spec.format.name.c_str()); EXPECT_EQ(1u, send_codec_spec.format.num_channels); EXPECT_EQ(97, send_codec_spec.cng_payload_type); SetSend(true); @@ -2097,20 +2095,11 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsCNandDTMFAsCallee) { TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsCNNoMatch) { EXPECT_TRUE(SetupSendStream()); cricket::AudioSendParameters parameters; - // Set ISAC(16K) and CN(16K). VAD should be activated. 
- parameters.codecs.push_back(kIsacCodec); + // Set PCMU(8K) and CN(16K). VAD should not be activated. + parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kCn16000Codec); parameters.codecs[1].id = 97; SetSendParameters(parameters); - { - const auto& send_codec_spec = *GetSendStreamConfig(kSsrcX).send_codec_spec; - EXPECT_STRCASEEQ("ISAC", send_codec_spec.format.name.c_str()); - EXPECT_EQ(1u, send_codec_spec.format.num_channels); - EXPECT_EQ(97, send_codec_spec.cng_payload_type); - } - // Set PCMU(8K) and CN(16K). VAD should not be activated. - parameters.codecs[0] = kPcmuCodec; - SetSendParameters(parameters); { const auto& send_codec_spec = *GetSendStreamConfig(kSsrcX).send_codec_spec; EXPECT_STRCASEEQ("PCMU", send_codec_spec.format.name.c_str()); @@ -2125,12 +2114,12 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsCNNoMatch) { EXPECT_EQ(1u, send_codec_spec.format.num_channels); EXPECT_EQ(13, send_codec_spec.cng_payload_type); } - // Set ISAC(16K) and CN(8K). VAD should not be activated. - parameters.codecs[0] = kIsacCodec; + // Set OPUS(48K) and CN(8K). VAD should not be activated. + parameters.codecs[0] = kOpusCodec; SetSendParameters(parameters); { const auto& send_codec_spec = *GetSendStreamConfig(kSsrcX).send_codec_spec; - EXPECT_STRCASEEQ("ISAC", send_codec_spec.format.name.c_str()); + EXPECT_STRCASEEQ("OPUS", send_codec_spec.format.name.c_str()); EXPECT_EQ(absl::nullopt, send_codec_spec.cng_payload_type); } } @@ -2139,19 +2128,18 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsCNNoMatch) { TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsCaseInsensitive) { EXPECT_TRUE(SetupSendStream()); cricket::AudioSendParameters parameters; - parameters.codecs.push_back(kIsacCodec); parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kCn16000Codec); parameters.codecs.push_back(kCn8000Codec); parameters.codecs.push_back(kTelephoneEventCodec1); - parameters.codecs[0].name = "iSaC"; + parameters.codecs[0].name = "PcMu"; parameters.codecs[0].id = 96; - parameters.codecs[2].id = 97; // wideband CN - parameters.codecs[4].id = 98; // DTMF + parameters.codecs[2].id = 97; // narrowband CN + parameters.codecs[3].id = 98; // DTMF SetSendParameters(parameters); const auto& send_codec_spec = *GetSendStreamConfig(kSsrcX).send_codec_spec; EXPECT_EQ(96, send_codec_spec.payload_type); - EXPECT_STRCASEEQ("ISAC", send_codec_spec.format.name.c_str()); + EXPECT_STRCASEEQ("PCMU", send_codec_spec.format.name.c_str()); EXPECT_EQ(1u, send_codec_spec.format.num_channels); EXPECT_EQ(97, send_codec_spec.cng_payload_type); SetSend(true); @@ -2264,8 +2252,8 @@ TEST_P(WebRtcVoiceEngineTestFake, CreateAndDeleteMultipleSendStreams) { SetSend(true); for (uint32_t ssrc : kSsrcs4) { - EXPECT_TRUE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(ssrc))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(ssrc))); SetAudioSend(ssrc, true, &fake_source_); // Verify that we are in a sending state for all the created streams. EXPECT_TRUE(GetSendStream(ssrc).IsSending()); @@ -2274,9 +2262,9 @@ TEST_P(WebRtcVoiceEngineTestFake, CreateAndDeleteMultipleSendStreams) { // Delete the send streams. 
for (uint32_t ssrc : kSsrcs4) { - EXPECT_TRUE(channel_->RemoveSendStream(ssrc)); + EXPECT_TRUE(channel_->AsSendChannel()->RemoveSendStream(ssrc)); EXPECT_FALSE(call_.GetAudioSendStream(ssrc)); - EXPECT_FALSE(channel_->RemoveSendStream(ssrc)); + EXPECT_FALSE(channel_->AsSendChannel()->RemoveSendStream(ssrc)); } EXPECT_EQ(0u, call_.GetAudioSendStreams().size()); } @@ -2287,29 +2275,30 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsWithMultipleSendStreams) { // Create send streams. for (uint32_t ssrc : kSsrcs4) { - EXPECT_TRUE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(ssrc))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(ssrc))); } cricket::AudioSendParameters parameters; - // Set ISAC(16K) and CN(16K). VAD should be activated. - parameters.codecs.push_back(kIsacCodec); - parameters.codecs.push_back(kCn16000Codec); + // Set PCMU and CN(8K). VAD should be activated. + parameters.codecs.push_back(kPcmuCodec); + parameters.codecs.push_back(kCn8000Codec); parameters.codecs[1].id = 97; SetSendParameters(parameters); - // Verify ISAC and VAD are corrected configured on all send channels. + // Verify PCMU and VAD are correctly configured on all send channels. for (uint32_t ssrc : kSsrcs4) { ASSERT_TRUE(call_.GetAudioSendStream(ssrc) != nullptr); const auto& send_codec_spec = *call_.GetAudioSendStream(ssrc)->GetConfig().send_codec_spec; - EXPECT_STRCASEEQ("ISAC", send_codec_spec.format.name.c_str()); + EXPECT_STRCASEEQ("PCMU", send_codec_spec.format.name.c_str()); EXPECT_EQ(1u, send_codec_spec.format.num_channels); EXPECT_EQ(97, send_codec_spec.cng_payload_type); } // Change to PCMU(8K) and CN(16K). parameters.codecs[0] = kPcmuCodec; + parameters.codecs[1] = kCn16000Codec; SetSendParameters(parameters); for (uint32_t ssrc : kSsrcs4) { ASSERT_TRUE(call_.GetAudioSendStream(ssrc) != nullptr); @@ -2326,8 +2315,8 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendWithMultipleSendStreams) { // Create the send channels and they should be a "not sending" date. for (uint32_t ssrc : kSsrcs4) { - EXPECT_TRUE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(ssrc))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(ssrc))); SetAudioSend(ssrc, true, &fake_source_); EXPECT_FALSE(GetSendStream(ssrc).IsSending()); } @@ -2353,8 +2342,8 @@ TEST_P(WebRtcVoiceEngineTestFake, GetStatsWithMultipleSendStreams) { // Create send streams. for (uint32_t ssrc : kSsrcs4) { - EXPECT_TRUE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(ssrc))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(ssrc))); } // Create a receive stream to check that none of the send streams end up in @@ -2388,7 +2377,7 @@ TEST_P(WebRtcVoiceEngineTestFake, GetStatsWithMultipleSendStreams) { // Remove the kSsrcY stream. No receiver stats. { cricket::VoiceMediaInfo info; - EXPECT_TRUE(channel_->RemoveRecvStream(kSsrcY)); + EXPECT_TRUE(channel_->AsReceiveChannel()->RemoveRecvStream(kSsrcY)); EXPECT_CALL(*adm_, GetPlayoutUnderrunCount()).WillOnce(Return(0)); EXPECT_EQ(true, channel_->GetStats(&info, /*get_and_clear_legacy_stats=*/true)); @@ -2449,8 +2438,8 @@ TEST_P(WebRtcVoiceEngineTestFake, PlayoutWithMultipleStreams) { EXPECT_TRUE(GetRecvStream(kSsrcZ).started()); // Now remove the recv streams.
- EXPECT_TRUE(channel_->RemoveRecvStream(kSsrcZ)); - EXPECT_TRUE(channel_->RemoveRecvStream(kSsrcY)); + EXPECT_TRUE(channel_->AsReceiveChannel()->RemoveRecvStream(kSsrcZ)); + EXPECT_TRUE(channel_->AsReceiveChannel()->RemoveRecvStream(kSsrcY)); } TEST_P(WebRtcVoiceEngineTestFake, SetAudioNetworkAdaptorViaOptions) { @@ -2540,7 +2529,7 @@ TEST_P(WebRtcVoiceEngineTestFake, GetStats) { // Remove the kSsrcY stream. No receiver stats. { cricket::VoiceMediaInfo info; - EXPECT_TRUE(channel_->RemoveRecvStream(kSsrcY)); + EXPECT_TRUE(channel_->AsReceiveChannel()->RemoveRecvStream(kSsrcY)); EXPECT_CALL(*adm_, GetPlayoutUnderrunCount()).WillOnce(Return(0)); EXPECT_EQ(true, channel_->GetStats(&info, /*get_and_clear_legacy_stats=*/true)); @@ -2578,8 +2567,8 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendSsrcWithMultipleStreams) { TEST_P(WebRtcVoiceEngineTestFake, SetSendSsrcAfterCreatingReceiveChannel) { EXPECT_TRUE(SetupChannel()); EXPECT_TRUE(AddRecvStream(kSsrcY)); - EXPECT_TRUE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrcX))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(kSsrcX))); EXPECT_TRUE(call_.GetAudioSendStream(kSsrcX)); EXPECT_EQ(kSsrcX, GetRecvStreamConfig(kSsrcY).rtp.local_ssrc); } @@ -2641,9 +2630,9 @@ TEST_P(WebRtcVoiceEngineTestFake, RecvWithMultipleStreams) { EXPECT_EQ(s3.received_packets(), 1); EXPECT_TRUE(s3.VerifyLastPacket(packets[3], sizeof(packets[3]))); - EXPECT_TRUE(channel_->RemoveRecvStream(ssrc3)); - EXPECT_TRUE(channel_->RemoveRecvStream(ssrc2)); - EXPECT_TRUE(channel_->RemoveRecvStream(ssrc1)); + EXPECT_TRUE(channel_->AsReceiveChannel()->RemoveRecvStream(ssrc3)); + EXPECT_TRUE(channel_->AsReceiveChannel()->RemoveRecvStream(ssrc2)); + EXPECT_TRUE(channel_->AsReceiveChannel()->RemoveRecvStream(ssrc1)); } // Test that receiving on an unsignaled stream works (a stream is created). @@ -2666,7 +2655,7 @@ TEST_P(WebRtcVoiceEngineTestFake, RecvUnsignaledSsrcWithSignaledStreamId) { EXPECT_TRUE(SetupChannel()); cricket::StreamParams unsignaled_stream; unsignaled_stream.set_stream_ids({kSyncLabel}); - ASSERT_TRUE(channel_->AddRecvStream(unsignaled_stream)); + ASSERT_TRUE(channel_->AsReceiveChannel()->AddRecvStream(unsignaled_stream)); // The stream shouldn't have been created at this point because it doesn't // have any SSRCs. EXPECT_EQ(0u, call_.GetAudioReceiveStreams().size()); @@ -2680,8 +2669,8 @@ TEST_P(WebRtcVoiceEngineTestFake, RecvUnsignaledSsrcWithSignaledStreamId) { // Remset the unsignaled stream to clear the cached parameters. If a new // default unsignaled receive stream is created it will not have a sync group. - channel_->ResetUnsignaledRecvStream(); - channel_->RemoveRecvStream(kSsrc1); + channel_->AsReceiveChannel()->ResetUnsignaledRecvStream(); + channel_->AsReceiveChannel()->RemoveRecvStream(kSsrc1); DeliverPacket(kPcmuFrame, sizeof(kPcmuFrame)); @@ -2710,7 +2699,7 @@ TEST_P(WebRtcVoiceEngineTestFake, ASSERT_EQ(receivers1.size(), 2u); // Should remove all default streams. 
- channel_->ResetUnsignaledRecvStream(); + channel_->AsReceiveChannel()->ResetUnsignaledRecvStream(); const auto& receivers2 = call_.GetAudioReceiveStreams(); EXPECT_EQ(0u, receivers2.size()); } @@ -2839,7 +2828,7 @@ TEST_P(WebRtcVoiceEngineTestFake, AddRecvStreamAfterUnsignaled_Updates) { stream_params.ssrcs.push_back(1); stream_params.set_stream_ids({new_stream_id}); - EXPECT_TRUE(channel_->AddRecvStream(stream_params)); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream(stream_params)); EXPECT_EQ(1u, streams.size()); // The audio receive stream should not have been recreated. EXPECT_EQ(audio_receive_stream_id, streams.front()->id()); @@ -2859,13 +2848,13 @@ TEST_P(WebRtcVoiceEngineTestFake, AddRecvStream) { TEST_P(WebRtcVoiceEngineTestFake, AddRecvStreamUnsupportedCodec) { EXPECT_TRUE(SetupSendStream()); cricket::AudioRecvParameters parameters; - parameters.codecs.push_back(kIsacCodec); + parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kPcmuCodec); EXPECT_TRUE(channel_->SetRecvParameters(parameters)); EXPECT_TRUE(AddRecvStream(kSsrcX)); EXPECT_THAT(GetRecvStreamConfig(kSsrcX).decoder_map, (ContainerEq>( - {{0, {"PCMU", 8000, 1}}, {103, {"ISAC", 16000, 1}}}))); + {{0, {"PCMU", 8000, 1}}, {111, {"OPUS", 48000, 2}}}))); } // Test that we properly clean up any streams that were added, even if @@ -3207,10 +3196,10 @@ TEST_P(WebRtcVoiceEngineTestFake, TestSetDscpOptions) { // Various priorities map to various dscp values. parameters.encodings[0].network_priority = webrtc::Priority::kHigh; - ASSERT_TRUE(channel->SetRtpSendParameters(kSsrcZ, parameters).ok()); + ASSERT_TRUE(channel->SetRtpSendParameters(kSsrcZ, parameters, nullptr).ok()); EXPECT_EQ(rtc::DSCP_EF, network_interface.dscp()); parameters.encodings[0].network_priority = webrtc::Priority::kVeryLow; - ASSERT_TRUE(channel->SetRtpSendParameters(kSsrcZ, parameters).ok()); + ASSERT_TRUE(channel->SetRtpSendParameters(kSsrcZ, parameters, nullptr).ok()); EXPECT_EQ(rtc::DSCP_CS1, network_interface.dscp()); // Packets should also self-identify their dscp in PacketOptions. 
@@ -3237,7 +3226,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetOutputVolume) { EXPECT_FALSE(channel_->SetOutputVolume(kSsrcY, 0.5)); cricket::StreamParams stream; stream.ssrcs.push_back(kSsrcY); - EXPECT_TRUE(channel_->AddRecvStream(stream)); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream(stream)); EXPECT_DOUBLE_EQ(1, GetRecvStream(kSsrcY).gain()); EXPECT_TRUE(channel_->SetOutputVolume(kSsrcY, 3)); EXPECT_DOUBLE_EQ(3, GetRecvStream(kSsrcY).gain()); @@ -3279,14 +3268,18 @@ TEST_P(WebRtcVoiceEngineTestFake, SetOutputVolumeUnsignaledRecvStream) { TEST_P(WebRtcVoiceEngineTestFake, BaseMinimumPlayoutDelayMs) { EXPECT_TRUE(SetupChannel()); - EXPECT_FALSE(channel_->SetBaseMinimumPlayoutDelayMs(kSsrcY, 200)); - EXPECT_FALSE(channel_->GetBaseMinimumPlayoutDelayMs(kSsrcY).has_value()); + EXPECT_FALSE( + channel_->AsReceiveChannel()->SetBaseMinimumPlayoutDelayMs(kSsrcY, 200)); + EXPECT_FALSE(channel_->AsReceiveChannel() + ->GetBaseMinimumPlayoutDelayMs(kSsrcY) + .has_value()); cricket::StreamParams stream; stream.ssrcs.push_back(kSsrcY); - EXPECT_TRUE(channel_->AddRecvStream(stream)); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream(stream)); EXPECT_EQ(0, GetRecvStream(kSsrcY).base_mininum_playout_delay_ms()); - EXPECT_TRUE(channel_->SetBaseMinimumPlayoutDelayMs(kSsrcY, 300)); + EXPECT_TRUE( + channel_->AsReceiveChannel()->SetBaseMinimumPlayoutDelayMs(kSsrcY, 300)); EXPECT_EQ(300, GetRecvStream(kSsrcY).base_mininum_playout_delay_ms()); } @@ -3297,43 +3290,70 @@ TEST_P(WebRtcVoiceEngineTestFake, // Spawn an unsignaled stream by sending a packet - delay should be 0. DeliverPacket(kPcmuFrame, sizeof(kPcmuFrame)); - EXPECT_EQ(0, channel_->GetBaseMinimumPlayoutDelayMs(kSsrc1).value_or(-1)); + EXPECT_EQ(0, channel_->AsReceiveChannel() + ->GetBaseMinimumPlayoutDelayMs(kSsrc1) + .value_or(-1)); // Check that it doesn't provide default values for unknown ssrc. - EXPECT_FALSE(channel_->GetBaseMinimumPlayoutDelayMs(kSsrcY).has_value()); + EXPECT_FALSE(channel_->AsReceiveChannel() + ->GetBaseMinimumPlayoutDelayMs(kSsrcY) + .has_value()); // Check that default value for unsignaled streams is 0. - EXPECT_EQ(0, channel_->GetBaseMinimumPlayoutDelayMs(kSsrc0).value_or(-1)); + EXPECT_EQ(0, channel_->AsReceiveChannel() + ->GetBaseMinimumPlayoutDelayMs(kSsrc0) + .value_or(-1)); // Should remember the delay 100 which will be set on new unsignaled streams, // and also set the delay to 100 on existing unsignaled streams. - EXPECT_TRUE(channel_->SetBaseMinimumPlayoutDelayMs(kSsrc0, 100)); - EXPECT_EQ(100, channel_->GetBaseMinimumPlayoutDelayMs(kSsrc0).value_or(-1)); + EXPECT_TRUE( + channel_->AsReceiveChannel()->SetBaseMinimumPlayoutDelayMs(kSsrc0, 100)); + EXPECT_EQ(100, channel_->AsReceiveChannel() + ->GetBaseMinimumPlayoutDelayMs(kSsrc0) + .value_or(-1)); // Check that it doesn't provide default values for unknown ssrc. - EXPECT_FALSE(channel_->GetBaseMinimumPlayoutDelayMs(kSsrcY).has_value()); + EXPECT_FALSE(channel_->AsReceiveChannel() + ->GetBaseMinimumPlayoutDelayMs(kSsrcY) + .has_value()); // Spawn an unsignaled stream by sending a packet - delay should be 100. unsigned char pcmuFrame2[sizeof(kPcmuFrame)]; memcpy(pcmuFrame2, kPcmuFrame, sizeof(kPcmuFrame)); rtc::SetBE32(&pcmuFrame2[8], kSsrcX); DeliverPacket(pcmuFrame2, sizeof(pcmuFrame2)); - EXPECT_EQ(100, channel_->GetBaseMinimumPlayoutDelayMs(kSsrcX).value_or(-1)); + EXPECT_EQ(100, channel_->AsReceiveChannel() + ->GetBaseMinimumPlayoutDelayMs(kSsrcX) + .value_or(-1)); // Setting delay with SSRC=0 should affect all unsignaled streams. 
- EXPECT_TRUE(channel_->SetBaseMinimumPlayoutDelayMs(kSsrc0, 300)); + EXPECT_TRUE( + channel_->AsReceiveChannel()->SetBaseMinimumPlayoutDelayMs(kSsrc0, 300)); if (kMaxUnsignaledRecvStreams > 1) { - EXPECT_EQ(300, channel_->GetBaseMinimumPlayoutDelayMs(kSsrc1).value_or(-1)); + EXPECT_EQ(300, channel_->AsReceiveChannel() + ->GetBaseMinimumPlayoutDelayMs(kSsrc1) + .value_or(-1)); } - EXPECT_EQ(300, channel_->GetBaseMinimumPlayoutDelayMs(kSsrcX).value_or(-1)); + EXPECT_EQ(300, channel_->AsReceiveChannel() + ->GetBaseMinimumPlayoutDelayMs(kSsrcX) + .value_or(-1)); // Setting delay on an individual stream affects only that. - EXPECT_TRUE(channel_->SetBaseMinimumPlayoutDelayMs(kSsrcX, 400)); + EXPECT_TRUE( + channel_->AsReceiveChannel()->SetBaseMinimumPlayoutDelayMs(kSsrcX, 400)); if (kMaxUnsignaledRecvStreams > 1) { - EXPECT_EQ(300, channel_->GetBaseMinimumPlayoutDelayMs(kSsrc1).value_or(-1)); + EXPECT_EQ(300, channel_->AsReceiveChannel() + ->GetBaseMinimumPlayoutDelayMs(kSsrc1) + .value_or(-1)); } - EXPECT_EQ(400, channel_->GetBaseMinimumPlayoutDelayMs(kSsrcX).value_or(-1)); - EXPECT_EQ(300, channel_->GetBaseMinimumPlayoutDelayMs(kSsrc0).value_or(-1)); + EXPECT_EQ(400, channel_->AsReceiveChannel() + ->GetBaseMinimumPlayoutDelayMs(kSsrcX) + .value_or(-1)); + EXPECT_EQ(300, channel_->AsReceiveChannel() + ->GetBaseMinimumPlayoutDelayMs(kSsrc0) + .value_or(-1)); // Check that it doesn't provide default values for unknown ssrc. - EXPECT_FALSE(channel_->GetBaseMinimumPlayoutDelayMs(kSsrcY).has_value()); + EXPECT_FALSE(channel_->AsReceiveChannel() + ->GetBaseMinimumPlayoutDelayMs(kSsrcY) + .has_value()); } TEST_P(WebRtcVoiceEngineTestFake, SetsSyncGroupFromStreamId) { @@ -3345,9 +3365,9 @@ TEST_P(WebRtcVoiceEngineTestFake, SetsSyncGroupFromStreamId) { sp.set_stream_ids({kStreamId}); // Creating two channels to make sure that sync label is set properly for both // the default voice channel and following ones. 
- EXPECT_TRUE(channel_->AddRecvStream(sp)); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream(sp)); sp.ssrcs[0] += 1; - EXPECT_TRUE(channel_->AddRecvStream(sp)); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream(sp)); ASSERT_EQ(2u, call_.GetAudioReceiveStreams().size()); EXPECT_EQ(kStreamId, @@ -3370,8 +3390,8 @@ TEST_P(WebRtcVoiceEngineTestFake, ConfiguresAudioReceiveStreamRtpExtensions) { EXPECT_TRUE(SetupSendStream()); SetSendParameters(send_parameters_); for (uint32_t ssrc : ssrcs) { - EXPECT_TRUE( - channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(ssrc))); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream( + cricket::StreamParams::CreateLegacy(ssrc))); } EXPECT_EQ(2u, call_.GetAudioReceiveStreams().size()); @@ -3432,7 +3452,8 @@ TEST_P(WebRtcVoiceEngineTestFake, DeliverAudioPacket_Call) { const cricket::FakeAudioReceiveStream* s = call_.GetAudioReceiveStream(kAudioSsrc); EXPECT_EQ(0, s->received_packets()); - channel_->OnPacketReceived(kPcmuPacket, /* packet_time_us */ -1); + channel_->AsReceiveChannel()->OnPacketReceived(kPcmuPacket, + /* packet_time_us */ -1); rtc::Thread::Current()->ProcessMessages(0); EXPECT_EQ(1, s->received_packets()); @@ -3444,8 +3465,8 @@ TEST_P(WebRtcVoiceEngineTestFake, AssociateFirstSendChannel_SendCreatedFirst) { EXPECT_TRUE(SetupSendStream()); EXPECT_TRUE(AddRecvStream(kSsrcY)); EXPECT_EQ(kSsrcX, GetRecvStreamConfig(kSsrcY).rtp.local_ssrc); - EXPECT_TRUE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrcZ))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(kSsrcZ))); EXPECT_EQ(kSsrcX, GetRecvStreamConfig(kSsrcY).rtp.local_ssrc); EXPECT_TRUE(AddRecvStream(kSsrcW)); EXPECT_EQ(kSsrcX, GetRecvStreamConfig(kSsrcW).rtp.local_ssrc); @@ -3454,13 +3475,13 @@ TEST_P(WebRtcVoiceEngineTestFake, AssociateFirstSendChannel_SendCreatedFirst) { TEST_P(WebRtcVoiceEngineTestFake, AssociateFirstSendChannel_RecvCreatedFirst) { EXPECT_TRUE(SetupRecvStream()); EXPECT_EQ(0xFA17FA17u, GetRecvStreamConfig(kSsrcX).rtp.local_ssrc); - EXPECT_TRUE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrcY))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(kSsrcY))); EXPECT_EQ(kSsrcY, GetRecvStreamConfig(kSsrcX).rtp.local_ssrc); EXPECT_TRUE(AddRecvStream(kSsrcZ)); EXPECT_EQ(kSsrcY, GetRecvStreamConfig(kSsrcZ).rtp.local_ssrc); - EXPECT_TRUE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrcW))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(kSsrcW))); EXPECT_EQ(kSsrcY, GetRecvStreamConfig(kSsrcX).rtp.local_ssrc); EXPECT_EQ(kSsrcY, GetRecvStreamConfig(kSsrcZ).rtp.local_ssrc); } @@ -3508,7 +3529,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRawAudioSinkUnsignaledRecvStream) { EXPECT_NE(nullptr, GetRecvStream(kSsrc1).sink()); // If we remove and add a default stream, it should get the same sink. 
- EXPECT_TRUE(channel_->RemoveRecvStream(kSsrc1)); + EXPECT_TRUE(channel_->AsReceiveChannel()->RemoveRecvStream(kSsrc1)); DeliverPacket(kPcmuFrame, sizeof(kPcmuFrame)); EXPECT_NE(nullptr, GetRecvStream(kSsrc1).sink()); @@ -3558,13 +3579,13 @@ TEST_P(WebRtcVoiceEngineTestFake, OnReadyToSendSignalsNetworkState) { EXPECT_EQ(webrtc::kNetworkUp, call_.GetNetworkState(webrtc::MediaType::VIDEO)); - channel_->OnReadyToSend(false); + channel_->AsSendChannel()->OnReadyToSend(false); EXPECT_EQ(webrtc::kNetworkDown, call_.GetNetworkState(webrtc::MediaType::AUDIO)); EXPECT_EQ(webrtc::kNetworkUp, call_.GetNetworkState(webrtc::MediaType::VIDEO)); - channel_->OnReadyToSend(true); + channel_->AsSendChannel()->OnReadyToSend(true); EXPECT_EQ(webrtc::kNetworkUp, call_.GetNetworkState(webrtc::MediaType::AUDIO)); EXPECT_EQ(webrtc::kNetworkUp, @@ -3666,7 +3687,7 @@ TEST(WebRtcVoiceEngineTest, StartupShutdownWithExternalADM) { } } -// Verify the payload id of common audio codecs, including CN, ISAC, and G722. +// Verify the payload id of common audio codecs, including CN and G722. TEST(WebRtcVoiceEngineTest, HasCorrectPayloadTypeMapping) { for (bool use_null_apm : {false, true}) { std::unique_ptr task_queue_factory = @@ -3693,10 +3714,6 @@ TEST(WebRtcVoiceEngineTest, HasCorrectPayloadTypeMapping) { EXPECT_EQ(105, codec.id); } else if (is_codec("CN", 32000)) { EXPECT_EQ(106, codec.id); - } else if (is_codec("ISAC", 16000)) { - EXPECT_EQ(103, codec.id); - } else if (is_codec("ISAC", 32000)) { - EXPECT_EQ(104, codec.id); } else if (is_codec("G722", 8000)) { EXPECT_EQ(9, codec.id); } else if (is_codec("telephone-event", 8000)) { @@ -3802,6 +3819,57 @@ TEST(WebRtcVoiceEngineTest, SetRecvCodecs) { } } +TEST(WebRtcVoiceEngineTest, SetRtpSendParametersMaxBitrate) { + rtc::AutoThread main_thread; + std::unique_ptr task_queue_factory = + webrtc::CreateDefaultTaskQueueFactory(); + rtc::scoped_refptr adm = + webrtc::test::MockAudioDeviceModule::CreateNice(); + webrtc::FieldTrialBasedConfig field_trials; + FakeAudioSource source; + cricket::WebRtcVoiceEngine engine(task_queue_factory.get(), adm.get(), + webrtc::CreateBuiltinAudioEncoderFactory(), + webrtc::CreateBuiltinAudioDecoderFactory(), + nullptr, nullptr, nullptr, field_trials); + engine.Init(); + webrtc::RtcEventLogNull event_log; + webrtc::Call::Config call_config(&event_log); + call_config.trials = &field_trials; + call_config.task_queue_factory = task_queue_factory.get(); + { + webrtc::AudioState::Config config; + config.audio_mixer = webrtc::AudioMixerImpl::Create(); + config.audio_device_module = + webrtc::test::MockAudioDeviceModule::CreateNice(); + call_config.audio_state = webrtc::AudioState::Create(config); + } + auto call = absl::WrapUnique(webrtc::Call::Create(call_config)); + cricket::WebRtcVoiceMediaChannel channel(&engine, cricket::MediaConfig(), + cricket::AudioOptions(), + webrtc::CryptoOptions(), call.get()); + { + cricket::AudioSendParameters params; + params.codecs.push_back(cricket::AudioCodec(1, "opus", 48000, 32000, 2)); + params.extensions.push_back(webrtc::RtpExtension( + webrtc::RtpExtension::kTransportSequenceNumberUri, 1)); + EXPECT_TRUE(channel.SetSendParameters(params)); + } + constexpr int kSsrc = 1234; + { + cricket::StreamParams params; + params.add_ssrc(kSsrc); + channel.AddSendStream(params); + } + channel.SetAudioSend(kSsrc, true, nullptr, &source); + channel.SetSend(true); + webrtc::RtpParameters params = channel.GetRtpSendParameters(kSsrc); + for (int max_bitrate : {-10, -1, 0, 10000}) { + params.encodings[0].max_bitrate_bps = 
max_bitrate; + channel.SetRtpSendParameters( + kSsrc, params, [](webrtc::RTCError error) { EXPECT_TRUE(error.ok()); }); + } +} + TEST(WebRtcVoiceEngineTest, CollectRecvCodecs) { for (bool use_null_apm : {false, true}) { std::vector specs; diff --git a/third_party/libwebrtc/modules/audio_coding/BUILD.gn b/third_party/libwebrtc/modules/audio_coding/BUILD.gn index ad43772ef1fa..e4a3c70f10f2 100644 --- a/third_party/libwebrtc/modules/audio_coding/BUILD.gn +++ b/third_party/libwebrtc/modules/audio_coding/BUILD.gn @@ -381,50 +381,8 @@ rtc_library("ilbc_c") { absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers" ] } -rtc_source_set("isac_common") { - poisonous = [ "audio_codecs" ] - sources = [ - "codecs/isac/audio_decoder_isac_t.h", - "codecs/isac/audio_decoder_isac_t_impl.h", - "codecs/isac/audio_encoder_isac_t.h", - "codecs/isac/audio_encoder_isac_t_impl.h", - ] - deps = [ - ":isac_bwinfo", - "../../api:scoped_refptr", - "../../api/audio_codecs:audio_codecs_api", - "../../api/units:time_delta", - "../../rtc_base:checks", - "../../rtc_base:safe_minmax", - "../../system_wrappers:field_trial", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] -} - -rtc_library("isac") { - visibility += [ "*" ] - poisonous = [ "audio_codecs" ] - sources = [ - "codecs/isac/main/include/audio_decoder_isac.h", - "codecs/isac/main/include/audio_encoder_isac.h", - "codecs/isac/main/source/audio_decoder_isac.cc", - "codecs/isac/main/source/audio_encoder_isac.cc", - ] - - deps = [ - ":isac_common", - "../../api/audio_codecs:audio_codecs_api", - ] - public_deps = [ ":isac_c" ] # no-presubmit-check TODO(webrtc:8603) -} - -rtc_source_set("isac_bwinfo") { - sources = [ "codecs/isac/bandwidth_info.h" ] - deps = [] -} - rtc_library("isac_vad") { - visibility += webrtc_default_visibility + visibility += [ "../audio_processing/vad:*" ] sources = [ "codecs/isac/main/source/filter_functions.c", "codecs/isac/main/source/filter_functions.h", @@ -447,247 +405,9 @@ rtc_library("isac_vad") { ] } -rtc_library("isac_c") { - poisonous = [ "audio_codecs" ] - sources = [ - "codecs/isac/main/include/isac.h", - "codecs/isac/main/source/arith_routines.c", - "codecs/isac/main/source/arith_routines.h", - "codecs/isac/main/source/arith_routines_hist.c", - "codecs/isac/main/source/arith_routines_logist.c", - "codecs/isac/main/source/bandwidth_estimator.c", - "codecs/isac/main/source/bandwidth_estimator.h", - "codecs/isac/main/source/codec.h", - "codecs/isac/main/source/crc.c", - "codecs/isac/main/source/crc.h", - "codecs/isac/main/source/decode.c", - "codecs/isac/main/source/decode_bwe.c", - "codecs/isac/main/source/encode.c", - "codecs/isac/main/source/encode_lpc_swb.c", - "codecs/isac/main/source/encode_lpc_swb.h", - "codecs/isac/main/source/entropy_coding.c", - "codecs/isac/main/source/entropy_coding.h", - "codecs/isac/main/source/filterbanks.c", - "codecs/isac/main/source/intialize.c", - "codecs/isac/main/source/isac.c", - "codecs/isac/main/source/isac_float_type.h", - "codecs/isac/main/source/lattice.c", - "codecs/isac/main/source/lpc_analysis.c", - "codecs/isac/main/source/lpc_analysis.h", - "codecs/isac/main/source/lpc_gain_swb_tables.c", - "codecs/isac/main/source/lpc_gain_swb_tables.h", - "codecs/isac/main/source/lpc_shape_swb12_tables.c", - "codecs/isac/main/source/lpc_shape_swb12_tables.h", - "codecs/isac/main/source/lpc_shape_swb16_tables.c", - "codecs/isac/main/source/lpc_shape_swb16_tables.h", - "codecs/isac/main/source/lpc_tables.c", - "codecs/isac/main/source/lpc_tables.h", - 
"codecs/isac/main/source/pitch_gain_tables.c", - "codecs/isac/main/source/pitch_gain_tables.h", - "codecs/isac/main/source/pitch_lag_tables.c", - "codecs/isac/main/source/pitch_lag_tables.h", - "codecs/isac/main/source/spectrum_ar_model_tables.c", - "codecs/isac/main/source/spectrum_ar_model_tables.h", - "codecs/isac/main/source/transform.c", - ] - - if (is_linux || is_chromeos) { - libs = [ "m" ] - } - - deps = [ - ":isac_bwinfo", - ":isac_vad", - "../../common_audio", - "../../common_audio:common_audio_c", - "../../rtc_base:checks", - "../../rtc_base:compile_assert_c", - "../../rtc_base/system:arch", - "../third_party/fft", - ] -} - -rtc_library("isac_fix") { - visibility += [ "*" ] - poisonous = [ "audio_codecs" ] - sources = [ - "codecs/isac/fix/source/audio_decoder_isacfix.cc", - "codecs/isac/fix/source/audio_encoder_isacfix.cc", - ] - - deps = [ - ":isac_common", - "../../api/audio_codecs:audio_codecs_api", - "../../common_audio", - "../../system_wrappers", - ] - public_deps = [ ":isac_fix_c" ] # no-presubmit-check TODO(webrtc:8603) - - if (rtc_build_with_neon) { - deps += [ ":isac_neon" ] - } -} - -rtc_library("isac_fix_common") { - poisonous = [ "audio_codecs" ] - sources = [ - "codecs/isac/fix/source/codec.h", - "codecs/isac/fix/source/entropy_coding.h", - "codecs/isac/fix/source/fft.c", - "codecs/isac/fix/source/fft.h", - "codecs/isac/fix/source/filterbank_internal.h", - "codecs/isac/fix/source/settings.h", - "codecs/isac/fix/source/structs.h", - "codecs/isac/fix/source/transform_tables.c", - ] - deps = [ - ":isac_bwinfo", - "../../common_audio", - "../../common_audio:common_audio_c", - ] -} - -rtc_source_set("isac_fix_c_arm_asm") { - poisonous = [ "audio_codecs" ] - sources = [] - if (target_cpu == "arm" && arm_version >= 7) { - sources += [ - "codecs/isac/fix/source/lattice_armv7.S", - "codecs/isac/fix/source/pitch_filter_armv6.S", - ] - deps = [ - ":isac_fix_common", - "../../rtc_base/system:asm_defines", - ] - } -} - -rtc_library("isac_fix_c") { - poisonous = [ "audio_codecs" ] - sources = [ - "codecs/isac/fix/include/audio_decoder_isacfix.h", - "codecs/isac/fix/include/audio_encoder_isacfix.h", - "codecs/isac/fix/include/isacfix.h", - "codecs/isac/fix/source/arith_routines.c", - "codecs/isac/fix/source/arith_routines_hist.c", - "codecs/isac/fix/source/arith_routines_logist.c", - "codecs/isac/fix/source/arith_routins.h", - "codecs/isac/fix/source/bandwidth_estimator.c", - "codecs/isac/fix/source/bandwidth_estimator.h", - "codecs/isac/fix/source/decode.c", - "codecs/isac/fix/source/decode_bwe.c", - "codecs/isac/fix/source/decode_plc.c", - "codecs/isac/fix/source/encode.c", - "codecs/isac/fix/source/entropy_coding.c", - "codecs/isac/fix/source/filterbank_tables.c", - "codecs/isac/fix/source/filterbank_tables.h", - "codecs/isac/fix/source/filterbanks.c", - "codecs/isac/fix/source/filters.c", - "codecs/isac/fix/source/initialize.c", - "codecs/isac/fix/source/isac_fix_type.h", - "codecs/isac/fix/source/isacfix.c", - "codecs/isac/fix/source/lattice.c", - "codecs/isac/fix/source/lattice_c.c", - "codecs/isac/fix/source/lpc_masking_model.c", - "codecs/isac/fix/source/lpc_masking_model.h", - "codecs/isac/fix/source/lpc_tables.c", - "codecs/isac/fix/source/lpc_tables.h", - "codecs/isac/fix/source/pitch_estimator.c", - "codecs/isac/fix/source/pitch_estimator.h", - "codecs/isac/fix/source/pitch_estimator_c.c", - "codecs/isac/fix/source/pitch_filter.c", - "codecs/isac/fix/source/pitch_filter_c.c", - "codecs/isac/fix/source/pitch_gain_tables.c", - 
"codecs/isac/fix/source/pitch_gain_tables.h", - "codecs/isac/fix/source/pitch_lag_tables.c", - "codecs/isac/fix/source/pitch_lag_tables.h", - "codecs/isac/fix/source/spectrum_ar_model_tables.c", - "codecs/isac/fix/source/spectrum_ar_model_tables.h", - "codecs/isac/fix/source/transform.c", - ] - - deps = [ - ":isac_bwinfo", - ":isac_common", - ":isac_fix_common", - "../../api/audio_codecs:audio_codecs_api", - "../../common_audio", - "../../common_audio:common_audio_c", - "../../rtc_base:checks", - "../../rtc_base:compile_assert_c", - "../../rtc_base:sanitizer", - "../../system_wrappers", - "../third_party/fft", - ] - - if (rtc_build_with_neon) { - deps += [ ":isac_neon" ] - - # TODO(bugs.webrtc.org/9579): Consider moving the usage of NEON from - # pitch_estimator_c.c into the "isac_neon" target and delete this flag: - if (target_cpu != "arm64") { - suppressed_configs += [ "//build/config/compiler:compiler_arm_fpu" ] - cflags = [ "-mfpu=neon" ] - } - } - - if (target_cpu == "arm" && arm_version >= 7) { - sources -= [ - "codecs/isac/fix/source/lattice_c.c", - "codecs/isac/fix/source/pitch_filter_c.c", - ] - deps += [ ":isac_fix_c_arm_asm" ] - } - - if (target_cpu == "mipsel") { - sources += [ - "codecs/isac/fix/source/entropy_coding_mips.c", - "codecs/isac/fix/source/filters_mips.c", - "codecs/isac/fix/source/lattice_mips.c", - "codecs/isac/fix/source/pitch_estimator_mips.c", - "codecs/isac/fix/source/transform_mips.c", - ] - sources -= [ - "codecs/isac/fix/source/lattice_c.c", - "codecs/isac/fix/source/pitch_estimator_c.c", - ] - if (mips_dsp_rev > 0) { - sources += [ "codecs/isac/fix/source/filterbanks_mips.c" ] - } - if (mips_dsp_rev > 1) { - sources += [ - "codecs/isac/fix/source/lpc_masking_model_mips.c", - "codecs/isac/fix/source/pitch_filter_mips.c", - ] - sources -= [ "codecs/isac/fix/source/pitch_filter_c.c" ] - } - } -} - -if (rtc_build_with_neon) { - rtc_library("isac_neon") { - poisonous = [ "audio_codecs" ] - sources = [ - "codecs/isac/fix/source/entropy_coding_neon.c", - "codecs/isac/fix/source/filterbanks_neon.c", - "codecs/isac/fix/source/filters_neon.c", - "codecs/isac/fix/source/lattice_neon.c", - "codecs/isac/fix/source/transform_neon.c", - ] - - if (target_cpu != "arm64") { - # Enable compilation for the NEON instruction set. 
- suppressed_configs += [ "//build/config/compiler:compiler_arm_fpu" ] - cflags = [ "-mfpu=neon" ] - } - - deps = [ - ":isac_fix_common", - "../../common_audio", - "../../common_audio:common_audio_c", - "../../rtc_base:checks", - ] - } +rtc_source_set("isac_bwinfo") { + sources = [ "codecs/isac/bandwidth_info.h" ] + deps = [] } rtc_library("pcm16b") { @@ -1291,11 +1011,6 @@ if (rtc_include_tests) { if (rtc_include_opus) { audio_coding_deps += [ ":webrtc_opus" ] } - if (target_cpu == "arm") { - audio_coding_deps += [ ":isac_fix" ] - } else { - audio_coding_deps += [ ":isac" ] - } if (!build_with_mozilla && !build_with_chromium) { audio_coding_deps += [ ":red" ] } @@ -1327,11 +1042,7 @@ if (rtc_include_tests) { ":g711_test", ":g722_test", ":ilbc_test", - ":isac_api_test", - ":isac_switch_samprate_test", - ":isac_test", ":neteq_ilbc_quality_test", - ":neteq_isac_quality_test", ":neteq_opus_quality_test", ":neteq_pcm16b_quality_test", ":neteq_pcmu_quality_test", @@ -1371,8 +1082,6 @@ if (rtc_include_tests) { "test/Tester.cc", "test/TwoWayCommunication.cc", "test/TwoWayCommunication.h", - "test/iSACTest.cc", - "test/iSACTest.h", "test/target_delay_unittest.cc", ] deps = [ @@ -1397,8 +1106,6 @@ if (rtc_include_tests) { "../../api/audio_codecs/g722:audio_encoder_g722", "../../api/audio_codecs/ilbc:audio_decoder_ilbc", "../../api/audio_codecs/ilbc:audio_encoder_ilbc", - "../../api/audio_codecs/isac:audio_decoder_isac_float", - "../../api/audio_codecs/isac:audio_encoder_isac_float", "../../api/audio_codecs/opus:audio_decoder_opus", "../../api/audio_codecs/opus:audio_encoder_opus", "../../common_audio", @@ -1513,8 +1220,6 @@ if (rtc_include_tests) { deps = [ ":ilbc", - ":isac", - ":isac_fix", ":neteq", ":neteq_input_audio_tools", ":neteq_tools", @@ -1619,12 +1324,10 @@ if (rtc_include_tests) { testonly = true defines = [] deps = [ - ":isac_fix_common", "../../rtc_base:macromagic", "../../test:fileutils", ] sources = [ - "codecs/isac/fix/test/isac_speed_test.cc", "codecs/opus/opus_speed_test.cc", "codecs/tools/audio_codec_speed_test.cc", "codecs/tools/audio_codec_speed_test.h", @@ -1647,7 +1350,6 @@ if (rtc_include_tests) { } deps += [ - ":isac_fix", ":webrtc_opus", "../../rtc_base:checks", "../../test:test_main", @@ -1723,7 +1425,6 @@ if (rtc_include_tests) { "../../api/audio_codecs/g711:audio_encoder_g711", "../../api/audio_codecs/g722:audio_encoder_g722", "../../api/audio_codecs/ilbc:audio_encoder_ilbc", - "../../api/audio_codecs/isac:audio_encoder_isac", "../../api/audio_codecs/opus:audio_encoder_opus", "../../rtc_base:safe_conversions", "//third_party/abseil-cpp/absl/flags:flag", @@ -1832,21 +1533,6 @@ if (rtc_include_tests) { ] } - rtc_executable("neteq_isac_quality_test") { - testonly = true - - sources = [ "neteq/test/neteq_isac_quality_test.cc" ] - - deps = [ - ":isac_fix", - ":neteq", - ":neteq_quality_test_support", - "../../test:test_main", - "//testing/gtest", - "//third_party/abseil-cpp/absl/flags:flag", - ] - } - rtc_executable("neteq_pcmu_quality_test") { testonly = true @@ -1884,28 +1570,6 @@ if (rtc_include_tests) { } } - rtc_library("isac_test_util") { - testonly = true - sources = [ - "codecs/isac/main/util/utility.c", - "codecs/isac/main/util/utility.h", - ] - } - - if (!build_with_chromium) { - rtc_executable("isac_test") { - testonly = true - - sources = [ "codecs/isac/main/test/simpleKenny.c" ] - - deps = [ - ":isac", - ":isac_test_util", - "../../rtc_base:macromagic", - ] - } - } - rtc_executable("g711_test") { testonly = true @@ -1923,32 +1587,6 @@ if (rtc_include_tests) { } 
if (!build_with_chromium) { - rtc_executable("isac_api_test") { - testonly = true - - sources = [ "codecs/isac/main/test/ReleaseTest-API/ReleaseTest-API.cc" ] - - deps = [ - ":isac", - ":isac_test_util", - "../../rtc_base:macromagic", - ] - } - - rtc_executable("isac_switch_samprate_test") { - testonly = true - - sources = - [ "codecs/isac/main/test/SwitchingSampRate/SwitchingSampRate.cc" ] - - deps = [ - ":isac", - ":isac_test_util", - "../../common_audio", - "../../common_audio:common_audio_c", - ] - } - rtc_executable("ilbc_test") { testonly = true @@ -1997,13 +1635,6 @@ if (rtc_include_tests) { "codecs/cng/audio_encoder_cng_unittest.cc", "codecs/cng/cng_unittest.cc", "codecs/ilbc/ilbc_unittest.cc", - "codecs/isac/fix/source/filterbanks_unittest.cc", - "codecs/isac/fix/source/filters_unittest.cc", - "codecs/isac/fix/source/lpc_masking_model_unittest.cc", - "codecs/isac/fix/source/transform_unittest.cc", - "codecs/isac/isac_webrtc_api_test.cc", - "codecs/isac/main/source/audio_encoder_isac_unittest.cc", - "codecs/isac/main/source/isac_unittest.cc", "codecs/legacy_encoded_audio_frame_unittest.cc", "codecs/opus/audio_decoder_multi_channel_opus_unittest.cc", "codecs/opus/audio_encoder_multi_channel_opus_unittest.cc", @@ -2070,11 +1701,6 @@ if (rtc_include_tests) { ":default_neteq_factory", ":g711", ":ilbc", - ":isac", - ":isac_c", - ":isac_common", - ":isac_fix", - ":isac_fix_common", ":legacy_encoded_audio_frame", ":mocks", ":neteq", @@ -2094,10 +1720,6 @@ if (rtc_include_tests) { "../../api/audio_codecs:audio_codecs_api", "../../api/audio_codecs:builtin_audio_decoder_factory", "../../api/audio_codecs:builtin_audio_encoder_factory", - "../../api/audio_codecs/isac:audio_decoder_isac_fix", - "../../api/audio_codecs/isac:audio_decoder_isac_float", - "../../api/audio_codecs/isac:audio_encoder_isac_fix", - "../../api/audio_codecs/isac:audio_encoder_isac_float", "../../api/audio_codecs/opus:audio_decoder_multiopus", "../../api/audio_codecs/opus:audio_decoder_opus", "../../api/audio_codecs/opus:audio_encoder_multiopus", diff --git a/third_party/libwebrtc/modules/audio_coding/acm2/acm_receiver_unittest.cc b/third_party/libwebrtc/modules/audio_coding/acm2/acm_receiver_unittest.cc index e73acc23382f..6dd44b696e69 100644 --- a/third_party/libwebrtc/modules/audio_coding/acm2/acm_receiver_unittest.cc +++ b/third_party/libwebrtc/modules/audio_coding/acm2/acm_receiver_unittest.cc @@ -13,6 +13,7 @@ #include // std::min #include +#include "absl/types/optional.h" #include "api/audio_codecs/builtin_audio_decoder_factory.h" #include "api/audio_codecs/builtin_audio_encoder_factory.h" #include "modules/audio_coding/codecs/cng/audio_encoder_cng.h" @@ -64,12 +65,14 @@ class AcmReceiverTestOldApi : public AudioPacketizationCallback, const SdpAudioFormat& format, const std::map cng_payload_types = {}) { // Create the speech encoder. - AudioCodecInfo info = encoder_factory_->QueryAudioEncoder(format).value(); + absl::optional info = + encoder_factory_->QueryAudioEncoder(format); + RTC_CHECK(info.has_value()); std::unique_ptr enc = encoder_factory_->MakeAudioEncoder(payload_type, format, absl::nullopt); // If we have a compatible CN specification, stack a CNG on top. - auto it = cng_payload_types.find(info.sample_rate_hz); + auto it = cng_payload_types.find(info->sample_rate_hz); if (it != cng_payload_types.end()) { AudioEncoderCngConfig config; config.speech_encoder = std::move(enc); @@ -81,7 +84,7 @@ class AcmReceiverTestOldApi : public AudioPacketizationCallback, // Actually start using the new encoder. 
acm_->SetEncoder(std::move(enc)); - return info; + return *info; } int InsertOnePacketOfSilence(const AudioCodecInfo& info) { @@ -148,8 +151,7 @@ class AcmReceiverTestOldApi : public AudioPacketizationCallback, #define MAYBE_SampleRate SampleRate #endif TEST_F(AcmReceiverTestOldApi, MAYBE_SampleRate) { - const std::map codecs = {{0, {"ISAC", 16000, 1}}, - {1, {"ISAC", 32000, 1}}}; + const std::map codecs = {{0, {"OPUS", 48000, 2}}}; receiver_->SetCodecs(codecs); constexpr int kOutSampleRateHz = 8000; // Different than codec sample rate. @@ -232,15 +234,6 @@ TEST_F(AcmReceiverTestFaxModeOldApi, MAYBE_VerifyAudioFramePCMU) { RunVerifyAudioFrame({"PCMU", 8000, 1}); } -#if defined(WEBRTC_ANDROID) -#define MAYBE_VerifyAudioFrameISAC DISABLED_VerifyAudioFrameISAC -#else -#define MAYBE_VerifyAudioFrameISAC VerifyAudioFrameISAC -#endif -TEST_F(AcmReceiverTestFaxModeOldApi, MAYBE_VerifyAudioFrameISAC) { - RunVerifyAudioFrame({"ISAC", 16000, 1}); -} - #if defined(WEBRTC_ANDROID) #define MAYBE_VerifyAudioFrameOpus DISABLED_VerifyAudioFrameOpus #else @@ -310,12 +303,10 @@ TEST_F(AcmReceiverTestPostDecodeVadPassiveOldApi, MAYBE_PostdecodingVad) { #else #define MAYBE_LastAudioCodec LastAudioCodec #endif -#if defined(WEBRTC_CODEC_ISAC) +#if defined(WEBRTC_CODEC_OPUS) TEST_F(AcmReceiverTestOldApi, MAYBE_LastAudioCodec) { - const std::map codecs = {{0, {"ISAC", 16000, 1}}, - {1, {"PCMA", 8000, 1}}, - {2, {"ISAC", 32000, 1}}, - {3, {"L16", 32000, 1}}}; + const std::map codecs = { + {0, {"PCMU", 8000, 1}}, {1, {"PCMA", 8000, 1}}, {2, {"L16", 32000, 1}}}; const std::map cng_payload_types = { {8000, 100}, {16000, 101}, {32000, 102}}; { diff --git a/third_party/libwebrtc/modules/audio_coding/acm2/audio_coding_module_unittest.cc b/third_party/libwebrtc/modules/audio_coding/acm2/audio_coding_module_unittest.cc index 7e4b764aeda3..f1eb81c0155a 100644 --- a/third_party/libwebrtc/modules/audio_coding/acm2/audio_coding_module_unittest.cc +++ b/third_party/libwebrtc/modules/audio_coding/acm2/audio_coding_module_unittest.cc @@ -30,7 +30,6 @@ #include "modules/audio_coding/codecs/cng/audio_encoder_cng.h" #include "modules/audio_coding/codecs/g711/audio_decoder_pcm.h" #include "modules/audio_coding/codecs/g711/audio_encoder_pcm.h" -#include "modules/audio_coding/codecs/isac/main/include/audio_encoder_isac.h" #include "modules/audio_coding/include/audio_coding_module_typedefs.h" #include "modules/audio_coding/neteq/tools/audio_checksum.h" #include "modules/audio_coding/neteq/tools/audio_loop.h" @@ -302,44 +301,6 @@ TEST_F(AudioCodingModuleTestOldApi, TransportCallbackIsInvokedForEachPacket) { EXPECT_EQ(AudioFrameType::kAudioFrameSpeech, packet_cb_.last_frame_type()); } -#if defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX) -// Verifies that the RTP timestamp series is not reset when the codec is -// changed. -TEST_F(AudioCodingModuleTestOldApi, TimestampSeriesContinuesWhenCodecChanges) { - RegisterCodec(); // This registers the default codec. - uint32_t expected_ts = input_frame_.timestamp_; - int blocks_per_packet = pac_size_ / (kSampleRateHz / 100); - // Encode 5 packets of the first codec type. - const int kNumPackets1 = 5; - for (int j = 0; j < kNumPackets1; ++j) { - for (int i = 0; i < blocks_per_packet; ++i) { - EXPECT_EQ(j, packet_cb_.num_calls()); - InsertAudio(); - } - EXPECT_EQ(j + 1, packet_cb_.num_calls()); - EXPECT_EQ(expected_ts, packet_cb_.last_timestamp()); - expected_ts += pac_size_; - } - - // Change codec. 
- audio_format_ = SdpAudioFormat("ISAC", kSampleRateHz, 1); - pac_size_ = 480; - RegisterCodec(); - blocks_per_packet = pac_size_ / (kSampleRateHz / 100); - // Encode another 5 packets. - const int kNumPackets2 = 5; - for (int j = 0; j < kNumPackets2; ++j) { - for (int i = 0; i < blocks_per_packet; ++i) { - EXPECT_EQ(kNumPackets1 + j, packet_cb_.num_calls()); - InsertAudio(); - } - EXPECT_EQ(kNumPackets1 + j + 1, packet_cb_.num_calls()); - EXPECT_EQ(expected_ts, packet_cb_.last_timestamp()); - expected_ts += pac_size_; - } -} -#endif - // Introduce this class to set different expectations on the number of encoded // bytes. This class expects all encoded packets to be 9 bytes (matching one // CNG SID frame) or 0 bytes. This test depends on `input_frame_` containing @@ -420,8 +381,7 @@ TEST_F(AudioCodingModuleTestWithComfortNoiseOldApi, DoTest(k10MsBlocksPerPacket, kCngPayloadType); } -// A multi-threaded test for ACM. This base class is using the PCM16b 16 kHz -// codec, while the derive class AcmIsacMtTest is using iSAC. +// A multi-threaded test for ACM that uses the PCM16b 16 kHz codec. class AudioCodingModuleMtTestOldApi : public AudioCodingModuleTestOldApi { protected: static const int kNumPackets = 500; @@ -560,272 +520,6 @@ TEST_F(AudioCodingModuleMtTestOldApi, MAYBE_DoTest) { EXPECT_TRUE(RunTest()); } -// This is a multi-threaded ACM test using iSAC. The test encodes audio -// from a PCM file. The most recent encoded frame is used as input to the -// receiving part. Depending on timing, it may happen that the same RTP packet -// is inserted into the receiver multiple times, but this is a valid use-case, -// and simplifies the test code a lot. -class AcmIsacMtTestOldApi : public AudioCodingModuleMtTestOldApi { - protected: - static const int kNumPackets = 500; - static const int kNumPullCalls = 500; - - AcmIsacMtTestOldApi() - : AudioCodingModuleMtTestOldApi(), last_packet_number_(0) {} - - ~AcmIsacMtTestOldApi() {} - - void SetUp() override { - AudioCodingModuleTestOldApi::SetUp(); - RegisterCodec(); // Must be called before the threads start below. - - // Set up input audio source to read from specified file, loop after 5 - // seconds, and deliver blocks of 10 ms. - const std::string input_file_name = - webrtc::test::ResourcePath("audio_coding/speech_mono_16kHz", "pcm"); - audio_loop_.Init(input_file_name, 5 * kSampleRateHz, kNumSamples10ms); - - // Generate one packet to have something to insert. - int loop_counter = 0; - while (packet_cb_.last_payload_len_bytes() == 0) { - InsertAudio(); - ASSERT_LT(loop_counter++, 10); - } - // Set `last_packet_number_` to one less that `num_calls` so that the packet - // will be fetched in the next InsertPacket() call. - last_packet_number_ = packet_cb_.num_calls() - 1; - - StartThreads(); - } - - void RegisterCodec() override { - static_assert(kSampleRateHz == 16000, "test designed for iSAC 16 kHz"); - audio_format_ = SdpAudioFormat("isac", kSampleRateHz, 1); - pac_size_ = 480; - - // Register iSAC codec in ACM, effectively unregistering the PCM16B codec - // registered in AudioCodingModuleTestOldApi::SetUp(); - acm_->SetReceiveCodecs({{kPayloadType, *audio_format_}}); - acm_->SetEncoder(CreateBuiltinAudioEncoderFactory()->MakeAudioEncoder( - kPayloadType, *audio_format_, absl::nullopt)); - } - - void InsertPacket() override { - int num_calls = packet_cb_.num_calls(); // Store locally for thread safety. - if (num_calls > last_packet_number_) { - // Get the new payload out from the callback handler. 
- // Note that since we swap buffers here instead of directly inserting - // a pointer to the data in `packet_cb_`, we avoid locking the callback - // for the duration of the IncomingPacket() call. - packet_cb_.SwapBuffers(&last_payload_vec_); - ASSERT_GT(last_payload_vec_.size(), 0u); - rtp_utility_->Forward(&rtp_header_); - last_packet_number_ = num_calls; - } - ASSERT_GT(last_payload_vec_.size(), 0u); - ASSERT_EQ(0, acm_->IncomingPacket(&last_payload_vec_[0], - last_payload_vec_.size(), rtp_header_)); - } - - void InsertAudio() override { - // TODO(kwiberg): Use std::copy here. Might be complications because AFAICS - // this call confuses the number of samples with the number of bytes, and - // ends up copying only half of what it should. - memcpy(input_frame_.mutable_data(), audio_loop_.GetNextBlock().data(), - kNumSamples10ms); - AudioCodingModuleTestOldApi::InsertAudio(); - } - - // Override the verification function with no-op, since iSAC produces variable - // payload sizes. - void VerifyEncoding() override {} - - // This method is the same as AudioCodingModuleMtTestOldApi::TestDone(), but - // here it is using the constants defined in this class (i.e., shorter test - // run). - bool TestDone() override { - if (packet_cb_.num_calls() > kNumPackets) { - MutexLock lock(&mutex_); - if (pull_audio_count_ > kNumPullCalls) { - // Both conditions for completion are met. End the test. - return true; - } - } - return false; - } - - int last_packet_number_; - std::vector last_payload_vec_; - test::AudioLoop audio_loop_; -}; - -#if defined(WEBRTC_IOS) -#define MAYBE_DoTest DISABLED_DoTest -#else -#define MAYBE_DoTest DoTest -#endif -#if defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX) -TEST_F(AcmIsacMtTestOldApi, MAYBE_DoTest) { - EXPECT_TRUE(RunTest()); -} -#endif - -class AcmReRegisterIsacMtTestOldApi : public AudioCodingModuleTestOldApi { - protected: - static const int kRegisterAfterNumPackets = 5; - static const int kNumPackets = 10; - static const int kPacketSizeMs = 30; - static const int kPacketSizeSamples = kPacketSizeMs * 16; - - AcmReRegisterIsacMtTestOldApi() - : AudioCodingModuleTestOldApi(), - codec_registered_(false), - receive_packet_count_(0), - next_insert_packet_time_ms_(0), - fake_clock_(new SimulatedClock(0)) { - AudioEncoderIsacFloatImpl::Config config; - config.payload_type = kPayloadType; - isac_encoder_.reset(new AudioEncoderIsacFloatImpl(config)); - clock_ = fake_clock_.get(); - } - - void SetUp() override { - AudioCodingModuleTestOldApi::SetUp(); - // Set up input audio source to read from specified file, loop after 5 - // seconds, and deliver blocks of 10 ms. - const std::string input_file_name = - webrtc::test::ResourcePath("audio_coding/speech_mono_16kHz", "pcm"); - audio_loop_.Init(input_file_name, 5 * kSampleRateHz, kNumSamples10ms); - RegisterCodec(); // Must be called before the threads start below. - StartThreads(); - } - - void RegisterCodec() override { - // Register iSAC codec in ACM, effectively unregistering the PCM16B codec - // registered in AudioCodingModuleTestOldApi::SetUp(); - // Only register the decoder for now. The encoder is registered later. 
- static_assert(kSampleRateHz == 16000, "test designed for iSAC 16 kHz"); - acm_->SetReceiveCodecs({{kPayloadType, {"ISAC", kSampleRateHz, 1}}}); - } - - void StartThreads() { - quit_.store(false); - const auto attributes = - rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kRealtime); - receive_thread_ = rtc::PlatformThread::SpawnJoinable( - [this] { - while (!quit_.load() && CbReceiveImpl()) { - } - }, - "receive", attributes); - codec_registration_thread_ = rtc::PlatformThread::SpawnJoinable( - [this] { - while (!quit_.load()) { - CbCodecRegistrationImpl(); - } - }, - "codec_registration", attributes); - } - - void TearDown() override { - AudioCodingModuleTestOldApi::TearDown(); - quit_.store(true); - receive_thread_.Finalize(); - codec_registration_thread_.Finalize(); - } - - bool RunTest() { return test_complete_.Wait(TimeDelta::Minutes(10)); } - - bool CbReceiveImpl() { - SleepMs(1); - rtc::Buffer encoded; - AudioEncoder::EncodedInfo info; - { - MutexLock lock(&mutex_); - if (clock_->TimeInMilliseconds() < next_insert_packet_time_ms_) { - return true; - } - next_insert_packet_time_ms_ += kPacketSizeMs; - ++receive_packet_count_; - - // Encode new frame. - uint32_t input_timestamp = rtp_header_.timestamp; - while (info.encoded_bytes == 0) { - info = isac_encoder_->Encode(input_timestamp, - audio_loop_.GetNextBlock(), &encoded); - input_timestamp += 160; // 10 ms at 16 kHz. - } - EXPECT_EQ(rtp_header_.timestamp + kPacketSizeSamples, input_timestamp); - EXPECT_EQ(rtp_header_.timestamp, info.encoded_timestamp); - EXPECT_EQ(rtp_header_.payloadType, info.payload_type); - } - // Now we're not holding the crit sect when calling ACM. - - // Insert into ACM. - EXPECT_EQ(0, acm_->IncomingPacket(encoded.data(), info.encoded_bytes, - rtp_header_)); - - // Pull audio. - for (int i = 0; i < rtc::CheckedDivExact(kPacketSizeMs, 10); ++i) { - AudioFrame audio_frame; - bool muted; - EXPECT_EQ(0, acm_->PlayoutData10Ms(-1 /* default output frequency */, - &audio_frame, &muted)); - if (muted) { - ADD_FAILURE(); - return false; - } - fake_clock_->AdvanceTimeMilliseconds(10); - } - rtp_utility_->Forward(&rtp_header_); - return true; - } - - void CbCodecRegistrationImpl() { - SleepMs(1); - if (HasFatalFailure()) { - // End the test early if a fatal failure (ASSERT_*) has occurred. - test_complete_.Set(); - } - MutexLock lock(&mutex_); - if (!codec_registered_ && - receive_packet_count_ > kRegisterAfterNumPackets) { - // Register the iSAC encoder. - acm_->SetEncoder(CreateBuiltinAudioEncoderFactory()->MakeAudioEncoder( - kPayloadType, *audio_format_, absl::nullopt)); - codec_registered_ = true; - } - if (codec_registered_ && receive_packet_count_ > kNumPackets) { - test_complete_.Set(); - } - } - - rtc::PlatformThread receive_thread_; - rtc::PlatformThread codec_registration_thread_; - // Used to force worker threads to stop looping. - std::atomic quit_; - - rtc::Event test_complete_; - Mutex mutex_; - bool codec_registered_ RTC_GUARDED_BY(mutex_); - int receive_packet_count_ RTC_GUARDED_BY(mutex_); - int64_t next_insert_packet_time_ms_ RTC_GUARDED_BY(mutex_); - std::unique_ptr isac_encoder_; - std::unique_ptr fake_clock_; - test::AudioLoop audio_loop_; -}; - -#if defined(WEBRTC_IOS) -#define MAYBE_DoTest DISABLED_DoTest -#else -#define MAYBE_DoTest DoTest -#endif -#if defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX) -TEST_F(AcmReRegisterIsacMtTestOldApi, MAYBE_DoTest) { - EXPECT_TRUE(RunTest()); -} -#endif - // Disabling all of these tests on iOS until file support has been added. 
// See https://code.google.com/p/webrtc/issues/detail?id=4752 for details. #if !defined(WEBRTC_IOS) @@ -1025,38 +719,6 @@ class AcmSenderBitExactnessOldApi : public ::testing::Test, class AcmSenderBitExactnessNewApi : public AcmSenderBitExactnessOldApi {}; -// Run bit exactness tests only for release builds. -#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX)) && \ - defined(NDEBUG) && defined(WEBRTC_LINUX) && defined(WEBRTC_ARCH_X86_64) -TEST_F(AcmSenderBitExactnessOldApi, IsacWb30ms) { - ASSERT_NO_FATAL_FAILURE(SetUpTest("ISAC", 16000, 1, 103, 480, 480)); - Run(/*audio_checksum_ref=*/"37ecdabad1698a857cf811e6d1fa91df", - /*payload_checksum_ref=*/"3c79f16f34218271f3dca4e2b1dfe1bb", - /*expected_packets=*/33, - /*expected_channels=*/test::AcmReceiveTestOldApi::kMonoOutput); -} - -TEST_F(AcmSenderBitExactnessOldApi, IsacWb60ms) { - ASSERT_NO_FATAL_FAILURE(SetUpTest("ISAC", 16000, 1, 103, 960, 960)); - Run(/*audio_checksum_ref=*/"0e9078d23454901496a88362ba0740c3", - /*payload_checksum_ref=*/"9e0a0ab743ad987b55b8e14802769c56", - /*expected_packets=*/16, - /*expected_channels=*/test::AcmReceiveTestOldApi::kMonoOutput); -} -#endif - -// Run bit exactness test only for release build. -#if defined(WEBRTC_CODEC_ISAC) && defined(NDEBUG) && defined(WEBRTC_LINUX) && \ - defined(WEBRTC_ARCH_X86_64) -TEST_F(AcmSenderBitExactnessOldApi, IsacSwb30ms) { - ASSERT_NO_FATAL_FAILURE(SetUpTest("ISAC", 32000, 1, 104, 960, 960)); - Run(/*audio_checksum_ref=*/"f4cf577f28a0dcbac33358b757518e0c", - /*payload_checksum_ref=*/"ce86106a93419aefb063097108ec94ab", - /*expected_packets=*/33, - /*expected_channels=*/test::AcmReceiveTestOldApi::kMonoOutput); -} -#endif - TEST_F(AcmSenderBitExactnessOldApi, Pcm16_8000khz_10ms) { ASSERT_NO_FATAL_FAILURE(SetUpTest("L16", 8000, 1, 107, 80, 80)); Run(/*audio_checksum_ref=*/"69118ed438ac76252d023e0463819471", diff --git a/third_party/libwebrtc/modules/audio_coding/audio_coding.gni b/third_party/libwebrtc/modules/audio_coding/audio_coding.gni index 78460e642004..3b147091defe 100644 --- a/third_party/libwebrtc/modules/audio_coding/audio_coding.gni +++ b/third_party/libwebrtc/modules/audio_coding/audio_coding.gni @@ -20,11 +20,6 @@ if (rtc_opus_support_120ms_ptime) { } else { audio_codec_defines += [ "WEBRTC_OPUS_SUPPORT_120MS_PTIME=0" ] } -if (target_cpu == "arm") { - audio_codec_defines += [ "WEBRTC_CODEC_ISACFX" ] -} else { - audio_codec_defines += [ "WEBRTC_CODEC_ISAC" ] -} audio_coding_defines = audio_codec_defines neteq_defines = audio_codec_defines diff --git a/third_party/libwebrtc/modules/audio_coding/audio_network_adaptor/bitrate_controller_unittest.cc b/third_party/libwebrtc/modules/audio_coding/audio_network_adaptor/bitrate_controller_unittest.cc index 3155f198a43f..9c593b818b44 100644 --- a/third_party/libwebrtc/modules/audio_coding/audio_network_adaptor/bitrate_controller_unittest.cc +++ b/third_party/libwebrtc/modules/audio_coding/audio_network_adaptor/bitrate_controller_unittest.cc @@ -74,8 +74,6 @@ TEST(AnaBitrateControllerTest, OutputInitValueWhenOverheadUnknown) { } TEST(AnaBitrateControllerTest, ChangeBitrateOnTargetBitrateChanged) { - test::ScopedFieldTrials override_field_trials( - "WebRTC-SendSideBwe-WithOverhead/Enabled/"); constexpr int kInitialFrameLengthMs = 20; BitrateController controller( BitrateController::Config(32000, kInitialFrameLengthMs, 0, 0)); @@ -98,8 +96,6 @@ TEST(AnaBitrateControllerTest, UpdateMultipleNetworkMetricsAtOnce) { // BitrateController::UpdateNetworkMetrics(...) can handle multiple // network updates at once. 
This is, however, not a common use case in current // audio_network_adaptor_impl.cc. - test::ScopedFieldTrials override_field_trials( - "WebRTC-SendSideBwe-WithOverhead/Enabled/"); constexpr int kInitialFrameLengthMs = 20; BitrateController controller( BitrateController::Config(32000, kInitialFrameLengthMs, 0, 0)); @@ -116,8 +112,6 @@ TEST(AnaBitrateControllerTest, UpdateMultipleNetworkMetricsAtOnce) { } TEST(AnaBitrateControllerTest, TreatUnknownFrameLengthAsFrameLengthUnchanged) { - test::ScopedFieldTrials override_field_trials( - "WebRTC-SendSideBwe-WithOverhead/Enabled/"); constexpr int kInitialFrameLengthMs = 20; BitrateController controller( BitrateController::Config(32000, kInitialFrameLengthMs, 0, 0)); @@ -131,8 +125,6 @@ TEST(AnaBitrateControllerTest, TreatUnknownFrameLengthAsFrameLengthUnchanged) { } TEST(AnaBitrateControllerTest, IncreaseBitrateOnFrameLengthIncreased) { - test::ScopedFieldTrials override_field_trials( - "WebRTC-SendSideBwe-WithOverhead/Enabled/"); constexpr int kInitialFrameLengthMs = 20; BitrateController controller( BitrateController::Config(32000, kInitialFrameLengthMs, 0, 0)); @@ -155,8 +147,6 @@ TEST(AnaBitrateControllerTest, IncreaseBitrateOnFrameLengthIncreased) { } TEST(AnaBitrateControllerTest, DecreaseBitrateOnFrameLengthDecreased) { - test::ScopedFieldTrials override_field_trials( - "WebRTC-SendSideBwe-WithOverhead/Enabled/"); constexpr int kInitialFrameLengthMs = 60; BitrateController controller( BitrateController::Config(32000, kInitialFrameLengthMs, 0, 0)); @@ -179,8 +169,6 @@ TEST(AnaBitrateControllerTest, DecreaseBitrateOnFrameLengthDecreased) { } TEST(AnaBitrateControllerTest, BitrateNeverBecomesNegative) { - test::ScopedFieldTrials override_field_trials( - "WebRTC-SendSideBwe-WithOverhead/Enabled/"); BitrateController controller(BitrateController::Config(32000, 20, 0, 0)); constexpr size_t kOverheadBytesPerPacket = 64; constexpr int kFrameLengthMs = 60; @@ -192,8 +180,6 @@ TEST(AnaBitrateControllerTest, BitrateNeverBecomesNegative) { } TEST(AnaBitrateControllerTest, CheckBehaviorOnChangingCondition) { - test::ScopedFieldTrials override_field_trials( - "WebRTC-SendSideBwe-WithOverhead/Enabled/"); BitrateController controller(BitrateController::Config(32000, 20, 0, 0)); // Start from an arbitrary overall bitrate. diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/builtin_audio_decoder_factory_unittest.cc b/third_party/libwebrtc/modules/audio_coding/codecs/builtin_audio_decoder_factory_unittest.cc index 109da78eea46..4a2b261a5997 100644 --- a/third_party/libwebrtc/modules/audio_coding/codecs/builtin_audio_decoder_factory_unittest.cc +++ b/third_party/libwebrtc/modules/audio_coding/codecs/builtin_audio_decoder_factory_unittest.cc @@ -75,31 +75,6 @@ TEST(AudioDecoderFactoryTest, CreateIlbc) { adf->MakeAudioDecoder(SdpAudioFormat("ilbc", 16000, 1), absl::nullopt)); } -TEST(AudioDecoderFactoryTest, CreateIsac) { - rtc::scoped_refptr adf = - CreateBuiltinAudioDecoderFactory(); - ASSERT_TRUE(adf); - // iSAC supports 16 kHz, 1 channel. The float implementation additionally - // supports 32 kHz, 1 channel. 
- EXPECT_FALSE( - adf->MakeAudioDecoder(SdpAudioFormat("isac", 16000, 0), absl::nullopt)); - EXPECT_TRUE( - adf->MakeAudioDecoder(SdpAudioFormat("isac", 16000, 1), absl::nullopt)); - EXPECT_FALSE( - adf->MakeAudioDecoder(SdpAudioFormat("isac", 16000, 2), absl::nullopt)); - EXPECT_FALSE( - adf->MakeAudioDecoder(SdpAudioFormat("isac", 8000, 1), absl::nullopt)); - EXPECT_FALSE( - adf->MakeAudioDecoder(SdpAudioFormat("isac", 48000, 1), absl::nullopt)); -#ifdef WEBRTC_ARCH_ARM - EXPECT_FALSE( - adf->MakeAudioDecoder(SdpAudioFormat("isac", 32000, 1), absl::nullopt)); -#else - EXPECT_TRUE( - adf->MakeAudioDecoder(SdpAudioFormat("isac", 32000, 1), absl::nullopt)); -#endif -} - TEST(AudioDecoderFactoryTest, CreateL16) { rtc::scoped_refptr adf = CreateBuiltinAudioDecoderFactory(); @@ -125,9 +100,6 @@ TEST(AudioDecoderFactoryTest, MaxNrOfChannels) { #ifdef WEBRTC_CODEC_OPUS "opus", #endif -#if defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX) - "isac", -#endif #ifdef WEBRTC_CODEC_ILBC "ilbc", #endif diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/audio_decoder_isac_t.h b/third_party/libwebrtc/modules/audio_coding/codecs/isac/audio_decoder_isac_t.h deleted file mode 100644 index aae708f295ed..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/audio_decoder_isac_t.h +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_AUDIO_DECODER_ISAC_T_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_AUDIO_DECODER_ISAC_T_H_ - -#include - -#include "absl/types/optional.h" -#include "api/audio_codecs/audio_decoder.h" -#include "api/scoped_refptr.h" - -namespace webrtc { - -template -class AudioDecoderIsacT final : public AudioDecoder { - public: - struct Config { - bool IsOk() const; - int sample_rate_hz = 16000; - }; - explicit AudioDecoderIsacT(const Config& config); - virtual ~AudioDecoderIsacT() override; - - AudioDecoderIsacT(const AudioDecoderIsacT&) = delete; - AudioDecoderIsacT& operator=(const AudioDecoderIsacT&) = delete; - - bool HasDecodePlc() const override; - size_t DecodePlc(size_t num_frames, int16_t* decoded) override; - void Reset() override; - int ErrorCode() override; - int SampleRateHz() const override; - size_t Channels() const override; - int DecodeInternal(const uint8_t* encoded, - size_t encoded_len, - int sample_rate_hz, - int16_t* decoded, - SpeechType* speech_type) override; - - private: - typename T::instance_type* isac_state_; - int sample_rate_hz_; -}; - -} // namespace webrtc - -#endif // MODULES_AUDIO_CODING_CODECS_ISAC_AUDIO_DECODER_ISAC_T_H_ diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/audio_decoder_isac_t_impl.h b/third_party/libwebrtc/modules/audio_coding/codecs/isac/audio_decoder_isac_t_impl.h deleted file mode 100644 index 9aa498866baa..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/audio_decoder_isac_t_impl.h +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_AUDIO_DECODER_ISAC_T_IMPL_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_AUDIO_DECODER_ISAC_T_IMPL_H_ - -#include "modules/audio_coding/codecs/isac/audio_decoder_isac_t.h" -#include "rtc_base/checks.h" - -namespace webrtc { - -template -bool AudioDecoderIsacT::Config::IsOk() const { - return (sample_rate_hz == 16000 || sample_rate_hz == 32000); -} - -template -AudioDecoderIsacT::AudioDecoderIsacT(const Config& config) - : sample_rate_hz_(config.sample_rate_hz) { - RTC_CHECK(config.IsOk()) << "Unsupported sample rate " - << config.sample_rate_hz; - RTC_CHECK_EQ(0, T::Create(&isac_state_)); - T::DecoderInit(isac_state_); - RTC_CHECK_EQ(0, T::SetDecSampRate(isac_state_, sample_rate_hz_)); -} - -template -AudioDecoderIsacT::~AudioDecoderIsacT() { - RTC_CHECK_EQ(0, T::Free(isac_state_)); -} - -template -int AudioDecoderIsacT::DecodeInternal(const uint8_t* encoded, - size_t encoded_len, - int sample_rate_hz, - int16_t* decoded, - SpeechType* speech_type) { - RTC_CHECK_EQ(sample_rate_hz_, sample_rate_hz); - int16_t temp_type = 1; // Default is speech. - int ret = - T::DecodeInternal(isac_state_, encoded, encoded_len, decoded, &temp_type); - *speech_type = ConvertSpeechType(temp_type); - return ret; -} - -template -bool AudioDecoderIsacT::HasDecodePlc() const { - return false; -} - -template -size_t AudioDecoderIsacT::DecodePlc(size_t num_frames, int16_t* decoded) { - return T::DecodePlc(isac_state_, decoded, num_frames); -} - -template -void AudioDecoderIsacT::Reset() { - T::DecoderInit(isac_state_); -} - -template -int AudioDecoderIsacT::ErrorCode() { - return T::GetErrorCode(isac_state_); -} - -template -int AudioDecoderIsacT::SampleRateHz() const { - return sample_rate_hz_; -} - -template -size_t AudioDecoderIsacT::Channels() const { - return 1; -} - -} // namespace webrtc - -#endif // MODULES_AUDIO_CODING_CODECS_ISAC_AUDIO_DECODER_ISAC_T_IMPL_H_ diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t.h b/third_party/libwebrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t.h deleted file mode 100644 index c382ea076ee1..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t.h +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_AUDIO_ENCODER_ISAC_T_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_AUDIO_ENCODER_ISAC_T_H_ - -#include -#include - -#include "absl/types/optional.h" -#include "api/audio_codecs/audio_encoder.h" -#include "api/scoped_refptr.h" -#include "api/units/time_delta.h" -#include "system_wrappers/include/field_trial.h" - -namespace webrtc { - -template -class AudioEncoderIsacT final : public AudioEncoder { - public: - // Allowed combinations of sample rate, frame size, and bit rate are - // - 16000 Hz, 30 ms, 10000-32000 bps - // - 16000 Hz, 60 ms, 10000-32000 bps - // - 32000 Hz, 30 ms, 10000-56000 bps (if T has super-wideband support) - struct Config { - bool IsOk() const; - int payload_type = 103; - int sample_rate_hz = 16000; - int frame_size_ms = 30; - int bit_rate = kDefaultBitRate; // Limit on the short-term average bit - // rate, in bits/s. - int max_payload_size_bytes = -1; - int max_bit_rate = -1; - }; - - explicit AudioEncoderIsacT(const Config& config); - ~AudioEncoderIsacT() override; - - AudioEncoderIsacT(const AudioEncoderIsacT&) = delete; - AudioEncoderIsacT& operator=(const AudioEncoderIsacT&) = delete; - - int SampleRateHz() const override; - size_t NumChannels() const override; - size_t Num10MsFramesInNextPacket() const override; - size_t Max10MsFramesInAPacket() const override; - int GetTargetBitrate() const override; - void SetTargetBitrate(int target_bps) override; - void OnReceivedTargetAudioBitrate(int target_bps) override; - void OnReceivedUplinkBandwidth( - int target_audio_bitrate_bps, - absl::optional bwe_period_ms) override; - void OnReceivedUplinkAllocation(BitrateAllocationUpdate update) override; - void OnReceivedOverhead(size_t overhead_bytes_per_packet) override; - EncodedInfo EncodeImpl(uint32_t rtp_timestamp, - rtc::ArrayView audio, - rtc::Buffer* encoded) override; - void Reset() override; - absl::optional> GetFrameLengthRange() - const override; - - private: - // This value is taken from STREAM_SIZE_MAX_60 for iSAC float (60 ms) and - // STREAM_MAXW16_60MS for iSAC fix (60 ms). - static const size_t kSufficientEncodeBufferSizeBytes = 400; - - static constexpr int kDefaultBitRate = 32000; - static constexpr int kMinBitrateBps = 10000; - static constexpr int MaxBitrateBps(int sample_rate_hz) { - return sample_rate_hz == 32000 ? 56000 : 32000; - } - - void SetTargetBitrate(int target_bps, bool subtract_per_packet_overhead); - - // Recreate the iSAC encoder instance with the given settings, and save them. - void RecreateEncoderInstance(const Config& config); - - Config config_; - typename T::instance_type* isac_state_ = nullptr; - - // Have we accepted input but not yet emitted it in a packet? - bool packet_in_progress_ = false; - - // Timestamp of the first input of the currently in-progress packet. - uint32_t packet_timestamp_; - - // Timestamp of the previously encoded packet. - uint32_t last_encoded_timestamp_; - - // Cache the value of the "WebRTC-SendSideBwe-WithOverhead" field trial. - const bool send_side_bwe_with_overhead_ = - !field_trial::IsDisabled("WebRTC-SendSideBwe-WithOverhead"); - - // When we send a packet, expect this many bytes of headers to be added to it. - // Start out with a reasonable default that we can use until we receive a real - // value. 
- DataSize overhead_per_packet_ = DataSize::Bytes(28); -}; - -} // namespace webrtc - -#endif // MODULES_AUDIO_CODING_CODECS_ISAC_AUDIO_ENCODER_ISAC_T_H_ diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t_impl.h b/third_party/libwebrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t_impl.h deleted file mode 100644 index 1bd27cf80d75..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t_impl.h +++ /dev/null @@ -1,225 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_AUDIO_ENCODER_ISAC_T_IMPL_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_AUDIO_ENCODER_ISAC_T_IMPL_H_ - -#include "modules/audio_coding/codecs/isac/audio_encoder_isac_t.h" -#include "rtc_base/checks.h" -#include "rtc_base/numerics/safe_minmax.h" - -namespace webrtc { - -template -bool AudioEncoderIsacT::Config::IsOk() const { - if (max_bit_rate < 32000 && max_bit_rate != -1) - return false; - if (max_payload_size_bytes < 120 && max_payload_size_bytes != -1) - return false; - - switch (sample_rate_hz) { - case 16000: - if (max_bit_rate > 53400) - return false; - if (max_payload_size_bytes > 400) - return false; - return (frame_size_ms == 30 || frame_size_ms == 60) && - (bit_rate == 0 || (bit_rate >= 10000 && bit_rate <= 32000)); - case 32000: - if (max_bit_rate > 160000) - return false; - if (max_payload_size_bytes > 600) - return false; - return T::has_swb && - (frame_size_ms == 30 && - (bit_rate == 0 || (bit_rate >= 10000 && bit_rate <= 56000))); - default: - return false; - } -} - -template -AudioEncoderIsacT::AudioEncoderIsacT(const Config& config) { - RecreateEncoderInstance(config); -} - -template -AudioEncoderIsacT::~AudioEncoderIsacT() { - RTC_CHECK_EQ(0, T::Free(isac_state_)); -} - -template -int AudioEncoderIsacT::SampleRateHz() const { - return T::EncSampRate(isac_state_); -} - -template -size_t AudioEncoderIsacT::NumChannels() const { - return 1; -} - -template -size_t AudioEncoderIsacT::Num10MsFramesInNextPacket() const { - const int samples_in_next_packet = T::GetNewFrameLen(isac_state_); - return static_cast(rtc::CheckedDivExact( - samples_in_next_packet, rtc::CheckedDivExact(SampleRateHz(), 100))); -} - -template -size_t AudioEncoderIsacT::Max10MsFramesInAPacket() const { - return 6; // iSAC puts at most 60 ms in a packet. -} - -template -int AudioEncoderIsacT::GetTargetBitrate() const { - return config_.bit_rate == 0 ? kDefaultBitRate : config_.bit_rate; -} - -template -void AudioEncoderIsacT::SetTargetBitrate(int target_bps) { - // Set target bitrate directly without subtracting per-packet overhead, - // because that's what AudioEncoderOpus does. - SetTargetBitrate(target_bps, - /*subtract_per_packet_overhead=*/false); -} - -template -void AudioEncoderIsacT::OnReceivedTargetAudioBitrate(int target_bps) { - // Set target bitrate directly without subtracting per-packet overhead, - // because that's what AudioEncoderOpus does. 
- SetTargetBitrate(target_bps, - /*subtract_per_packet_overhead=*/false); -} - -template -void AudioEncoderIsacT::OnReceivedUplinkBandwidth( - int target_audio_bitrate_bps, - absl::optional /*bwe_period_ms*/) { - // Set target bitrate, subtracting the per-packet overhead if - // WebRTC-SendSideBwe-WithOverhead is enabled, because that's what - // AudioEncoderOpus does. - SetTargetBitrate( - target_audio_bitrate_bps, - /*subtract_per_packet_overhead=*/send_side_bwe_with_overhead_); -} - -template -void AudioEncoderIsacT::OnReceivedUplinkAllocation( - BitrateAllocationUpdate update) { - // Set target bitrate, subtracting the per-packet overhead if - // WebRTC-SendSideBwe-WithOverhead is enabled, because that's what - // AudioEncoderOpus does. - SetTargetBitrate( - update.target_bitrate.bps(), - /*subtract_per_packet_overhead=*/send_side_bwe_with_overhead_); -} - -template -void AudioEncoderIsacT::OnReceivedOverhead( - size_t overhead_bytes_per_packet) { - overhead_per_packet_ = DataSize::Bytes(overhead_bytes_per_packet); -} - -template -AudioEncoder::EncodedInfo AudioEncoderIsacT::EncodeImpl( - uint32_t rtp_timestamp, - rtc::ArrayView audio, - rtc::Buffer* encoded) { - if (!packet_in_progress_) { - // Starting a new packet; remember the timestamp for later. - packet_in_progress_ = true; - packet_timestamp_ = rtp_timestamp; - } - size_t encoded_bytes = encoded->AppendData( - kSufficientEncodeBufferSizeBytes, [&](rtc::ArrayView encoded) { - int r = T::Encode(isac_state_, audio.data(), encoded.data()); - - if (T::GetErrorCode(isac_state_) == 6450) { - // Isac is not able to effectively compress all types of signals. This - // is a limitation of the codec that cannot be easily fixed. - r = 0; - } - RTC_CHECK_GE(r, 0) << "Encode failed (error code " - << T::GetErrorCode(isac_state_) << ")"; - - return static_cast(r); - }); - - if (encoded_bytes == 0) - return EncodedInfo(); - - // Got enough input to produce a packet. Return the saved timestamp from - // the first chunk of input that went into the packet. - packet_in_progress_ = false; - EncodedInfo info; - info.encoded_bytes = encoded_bytes; - info.encoded_timestamp = packet_timestamp_; - info.payload_type = config_.payload_type; - info.encoder_type = CodecType::kIsac; - return info; -} - -template -void AudioEncoderIsacT::Reset() { - RecreateEncoderInstance(config_); -} - -template -absl::optional> -AudioEncoderIsacT::GetFrameLengthRange() const { - return {{TimeDelta::Millis(config_.frame_size_ms), - TimeDelta::Millis(config_.frame_size_ms)}}; -} - -template -void AudioEncoderIsacT::SetTargetBitrate(int target_bps, - bool subtract_per_packet_overhead) { - if (subtract_per_packet_overhead) { - const DataRate overhead_rate = - overhead_per_packet_ / TimeDelta::Millis(config_.frame_size_ms); - target_bps -= overhead_rate.bps(); - } - target_bps = rtc::SafeClamp(target_bps, kMinBitrateBps, - MaxBitrateBps(config_.sample_rate_hz)); - int result = T::Control(isac_state_, target_bps, config_.frame_size_ms); - RTC_DCHECK_EQ(result, 0); - config_.bit_rate = target_bps; -} - -template -void AudioEncoderIsacT::RecreateEncoderInstance(const Config& config) { - RTC_CHECK(config.IsOk()); - packet_in_progress_ = false; - if (isac_state_) - RTC_CHECK_EQ(0, T::Free(isac_state_)); - RTC_CHECK_EQ(0, T::Create(&isac_state_)); - RTC_CHECK_EQ(0, T::EncoderInit(isac_state_, /*coding_mode=*/1)); - RTC_CHECK_EQ(0, T::SetEncSampRate(isac_state_, config.sample_rate_hz)); - const int bit_rate = config.bit_rate == 0 ? 
kDefaultBitRate : config.bit_rate; - RTC_CHECK_EQ(0, T::Control(isac_state_, bit_rate, config.frame_size_ms)); - - if (config.max_payload_size_bytes != -1) - RTC_CHECK_EQ( - 0, T::SetMaxPayloadSize(isac_state_, config.max_payload_size_bytes)); - if (config.max_bit_rate != -1) - RTC_CHECK_EQ(0, T::SetMaxRate(isac_state_, config.max_bit_rate)); - - // Set the decoder sample rate even though we just use the encoder. This - // doesn't appear to be necessary to produce a valid encoding, but without it - // we get an encoding that isn't bit-for-bit identical with what a combined - // encoder+decoder object produces. - RTC_CHECK_EQ(0, T::SetDecSampRate(isac_state_, config.sample_rate_hz)); - - config_ = config; -} - -} // namespace webrtc - -#endif // MODULES_AUDIO_CODING_CODECS_ISAC_AUDIO_ENCODER_ISAC_T_IMPL_H_ diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/empty.cc b/third_party/libwebrtc/modules/audio_coding/codecs/isac/empty.cc deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/include/audio_decoder_isacfix.h b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/include/audio_decoder_isacfix.h deleted file mode 100644 index 0b4eadd448f6..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/include/audio_decoder_isacfix.h +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_FIX_INCLUDE_AUDIO_DECODER_ISACFIX_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_FIX_INCLUDE_AUDIO_DECODER_ISACFIX_H_ - -#include "modules/audio_coding/codecs/isac/audio_decoder_isac_t.h" -#include "modules/audio_coding/codecs/isac/fix/source/isac_fix_type.h" - -namespace webrtc { - -using AudioDecoderIsacFixImpl = AudioDecoderIsacT; - -} // namespace webrtc -#endif // MODULES_AUDIO_CODING_CODECS_ISAC_FIX_INCLUDE_AUDIO_DECODER_ISACFIX_H_ diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/include/audio_encoder_isacfix.h b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/include/audio_encoder_isacfix.h deleted file mode 100644 index f0cc03832840..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/include/audio_encoder_isacfix.h +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_FIX_INCLUDE_AUDIO_ENCODER_ISACFIX_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_FIX_INCLUDE_AUDIO_ENCODER_ISACFIX_H_ - -#include "modules/audio_coding/codecs/isac/audio_encoder_isac_t.h" -#include "modules/audio_coding/codecs/isac/fix/source/isac_fix_type.h" - -namespace webrtc { - -using AudioEncoderIsacFixImpl = AudioEncoderIsacT; - -} // namespace webrtc -#endif // MODULES_AUDIO_CODING_CODECS_ISAC_FIX_INCLUDE_AUDIO_ENCODER_ISACFIX_H_ diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/include/isacfix.h b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/include/isacfix.h deleted file mode 100644 index dcc7b0991d68..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/include/isacfix.h +++ /dev/null @@ -1,486 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_FIX_INCLUDE_ISACFIX_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_FIX_INCLUDE_ISACFIX_H_ - -#include - -#include "modules/audio_coding/codecs/isac/bandwidth_info.h" - -typedef struct { - void* dummy; -} ISACFIX_MainStruct; - -#if defined(__cplusplus) -extern "C" { -#endif - -/**************************************************************************** - * WebRtcIsacfix_Create(...) - * - * This function creates an ISAC instance, which will contain the state - * information for one coding/decoding channel. - * - * Input: - * - *ISAC_main_inst : a pointer to the coder instance. - * - * Return value : 0 - Ok - * -1 - Error - */ - -int16_t WebRtcIsacfix_Create(ISACFIX_MainStruct** ISAC_main_inst); - -/**************************************************************************** - * WebRtcIsacfix_Free(...) - * - * This function frees the ISAC instance created at the beginning. - * - * Input: - * - ISAC_main_inst : a ISAC instance. - * - * Return value : 0 - Ok - * -1 - Error - */ - -int16_t WebRtcIsacfix_Free(ISACFIX_MainStruct* ISAC_main_inst); - -/**************************************************************************** - * WebRtcIsacfix_EncoderInit(...) - * - * This function initializes an ISAC instance prior to the encoder calls. - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - CodingMode : 0 - Bit rate and frame length are automatically - * adjusted to available bandwidth on - * transmission channel. - * 1 - User sets a frame length and a target bit - * rate which is taken as the maximum short-term - * average bit rate. - * - * Return value : 0 - Ok - * -1 - Error - */ - -int16_t WebRtcIsacfix_EncoderInit(ISACFIX_MainStruct* ISAC_main_inst, - int16_t CodingMode); - -/**************************************************************************** - * WebRtcIsacfix_Encode(...) - * - * This function encodes 10ms frame(s) and inserts it into a package. - * Input speech length has to be 160 samples (10ms). The encoder buffers those - * 10ms frames until it reaches the chosen Framesize (480 or 960 samples - * corresponding to 30 or 60 ms frames), and then proceeds to the encoding. - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - speechIn : input speech vector. 
- * - * Output: - * - encoded : the encoded data vector - * - * Return value : >0 - Length (in bytes) of coded data - * 0 - The buffer didn't reach the chosen framesize - * so it keeps buffering speech samples. - * -1 - Error - */ - -int WebRtcIsacfix_Encode(ISACFIX_MainStruct* ISAC_main_inst, - const int16_t* speechIn, - uint8_t* encoded); - -/**************************************************************************** - * WebRtcIsacfix_DecoderInit(...) - * - * This function initializes an ISAC instance prior to the decoder calls. - * - * Input: - * - ISAC_main_inst : ISAC instance. - */ - -void WebRtcIsacfix_DecoderInit(ISACFIX_MainStruct* ISAC_main_inst); - -/**************************************************************************** - * WebRtcIsacfix_UpdateBwEstimate1(...) - * - * This function updates the estimate of the bandwidth. - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - encoded : encoded ISAC frame(s). - * - packet_size : size of the packet in bytes. - * - rtp_seq_number : the RTP number of the packet. - * - arr_ts : the arrival time of the packet (from NetEq) - * in samples. - * - * Return value : 0 - Ok - * -1 - Error - */ - -int16_t WebRtcIsacfix_UpdateBwEstimate1(ISACFIX_MainStruct* ISAC_main_inst, - const uint8_t* encoded, - size_t packet_size, - uint16_t rtp_seq_number, - uint32_t arr_ts); - -/**************************************************************************** - * WebRtcIsacfix_UpdateBwEstimate(...) - * - * This function updates the estimate of the bandwidth. - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - encoded : encoded ISAC frame(s). - * - packet_size : size of the packet in bytes. - * - rtp_seq_number : the RTP number of the packet. - * - send_ts : the send time of the packet from RTP header, - * in samples. - * - arr_ts : the arrival time of the packet (from NetEq) - * in samples. - * - * Return value : 0 - Ok - * -1 - Error - */ - -int16_t WebRtcIsacfix_UpdateBwEstimate(ISACFIX_MainStruct* ISAC_main_inst, - const uint8_t* encoded, - size_t packet_size, - uint16_t rtp_seq_number, - uint32_t send_ts, - uint32_t arr_ts); - -/**************************************************************************** - * WebRtcIsacfix_Decode(...) - * - * This function decodes an ISAC frame. Output speech length - * will be a multiple of 480 samples: 480 or 960 samples, - * depending on the framesize (30 or 60 ms). - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - encoded : encoded ISAC frame(s) - * - len : bytes in encoded vector - * - * Output: - * - decoded : The decoded vector - * - * Return value : >0 - number of samples in decoded vector - * -1 - Error - */ - -int WebRtcIsacfix_Decode(ISACFIX_MainStruct* ISAC_main_inst, - const uint8_t* encoded, - size_t len, - int16_t* decoded, - int16_t* speechType); - -/**************************************************************************** - * WebRtcIsacfix_DecodePlc(...) - * - * This function conducts PLC for ISAC frame(s) in wide-band (16kHz sampling). - * Output speech length will be "480*noOfLostFrames" samples - * that is equevalent of "30*noOfLostFrames" millisecond. - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - noOfLostFrames : Number of PLC frames (480sample = 30ms) - * to produce - * NOTE! 
Maximum number is 2 (960 samples = 60ms) - * - * Output: - * - decoded : The decoded vector - * - * Return value : Number of samples in decoded PLC vector - */ - -size_t WebRtcIsacfix_DecodePlc(ISACFIX_MainStruct* ISAC_main_inst, - int16_t* decoded, - size_t noOfLostFrames); - -/**************************************************************************** - * WebRtcIsacfix_ReadFrameLen(...) - * - * This function returns the length of the frame represented in the packet. - * - * Input: - * - encoded : Encoded bitstream - * - encoded_len_bytes : Length of the bitstream in bytes. - * - * Output: - * - frameLength : Length of frame in packet (in samples) - * - */ - -int16_t WebRtcIsacfix_ReadFrameLen(const uint8_t* encoded, - size_t encoded_len_bytes, - size_t* frameLength); - -/**************************************************************************** - * WebRtcIsacfix_Control(...) - * - * This function sets the limit on the short-term average bit rate and the - * frame length. Should be used only in Instantaneous mode. - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - rate : limit on the short-term average bit rate, - * in bits/second (between 10000 and 32000) - * - framesize : number of milliseconds per frame (30 or 60) - * - * Return value : 0 - ok - * -1 - Error - */ - -int16_t WebRtcIsacfix_Control(ISACFIX_MainStruct* ISAC_main_inst, - int16_t rate, - int framesize); - -void WebRtcIsacfix_SetInitialBweBottleneck(ISACFIX_MainStruct* ISAC_main_inst, - int bottleneck_bits_per_second); - -/**************************************************************************** - * WebRtcIsacfix_ControlBwe(...) - * - * This function sets the initial values of bottleneck and frame-size if - * iSAC is used in channel-adaptive mode. Through this API, users can - * enforce a frame-size for all values of bottleneck. Then iSAC will not - * automatically change the frame-size. - * - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - rateBPS : initial value of bottleneck in bits/second - * 10000 <= rateBPS <= 32000 is accepted - * - frameSizeMs : number of milliseconds per frame (30 or 60) - * - enforceFrameSize : 1 to enforce the given frame-size through out - * the adaptation process, 0 to let iSAC change - * the frame-size if required. - * - * Return value : 0 - ok - * -1 - Error - */ - -int16_t WebRtcIsacfix_ControlBwe(ISACFIX_MainStruct* ISAC_main_inst, - int16_t rateBPS, - int frameSizeMs, - int16_t enforceFrameSize); - -/**************************************************************************** - * WebRtcIsacfix_version(...) - * - * This function returns the version number. - * - * Output: - * - version : Pointer to character string - * - */ - -void WebRtcIsacfix_version(char* version); - -/**************************************************************************** - * WebRtcIsacfix_GetErrorCode(...) - * - * This function can be used to check the error code of an iSAC instance. When - * a function returns -1 a error code will be set for that instance. The - * function below extract the code of the last error that occured in the - * specified instance. - * - * Input: - * - ISAC_main_inst : ISAC instance - * - * Return value : Error code - */ - -int16_t WebRtcIsacfix_GetErrorCode(ISACFIX_MainStruct* ISAC_main_inst); - -/**************************************************************************** - * WebRtcIsacfix_GetUplinkBw(...) 
- * - * This function return iSAC send bitrate - * - * Input: - * - ISAC_main_inst : iSAC instance - * - * Return value : <0 Error code - * else bitrate - */ - -int32_t WebRtcIsacfix_GetUplinkBw(ISACFIX_MainStruct* ISAC_main_inst); - -/**************************************************************************** - * WebRtcIsacfix_SetMaxPayloadSize(...) - * - * This function sets a limit for the maximum payload size of iSAC. The same - * value is used both for 30 and 60 msec packets. - * The absolute max will be valid until next time the function is called. - * NOTE! This function may override the function WebRtcIsacfix_SetMaxRate() - * - * Input: - * - ISAC_main_inst : iSAC instance - * - maxPayloadBytes : maximum size of the payload in bytes - * valid values are between 100 and 400 bytes - * - * - * Return value : 0 if sucessful - * -1 if error happens - */ - -int16_t WebRtcIsacfix_SetMaxPayloadSize(ISACFIX_MainStruct* ISAC_main_inst, - int16_t maxPayloadBytes); - -/**************************************************************************** - * WebRtcIsacfix_SetMaxRate(...) - * - * This function sets the maximum rate which the codec may not exceed for a - * singel packet. The maximum rate is set in bits per second. - * The codec has an absolute maximum rate of 53400 bits per second (200 bytes - * per 30 msec). - * It is possible to set a maximum rate between 32000 and 53400 bits per second. - * - * The rate limit is valid until next time the function is called. - * - * NOTE! Packet size will never go above the value set if calling - * WebRtcIsacfix_SetMaxPayloadSize() (default max packet size is 400 bytes). - * - * Input: - * - ISAC_main_inst : iSAC instance - * - maxRateInBytes : maximum rate in bits per second, - * valid values are 32000 to 53400 bits - * - * Return value : 0 if sucessful - * -1 if error happens - */ - -int16_t WebRtcIsacfix_SetMaxRate(ISACFIX_MainStruct* ISAC_main_inst, - int32_t maxRate); - -/**************************************************************************** - * WebRtcIsacfix_CreateInternal(...) - * - * This function creates the memory that is used to store data in the encoder - * - * Input: - * - *ISAC_main_inst : a pointer to the coder instance. - * - * Return value : 0 - Ok - * -1 - Error - */ - -int16_t WebRtcIsacfix_CreateInternal(ISACFIX_MainStruct* ISAC_main_inst); - -/**************************************************************************** - * WebRtcIsacfix_FreeInternal(...) - * - * This function frees the internal memory for storing encoder data. - * - * Input: - * - ISAC_main_inst : an ISAC instance. - * - * Return value : 0 - Ok - * -1 - Error - */ - -int16_t WebRtcIsacfix_FreeInternal(ISACFIX_MainStruct* ISAC_main_inst); - -/**************************************************************************** - * WebRtcIsacfix_GetNewBitStream(...) - * - * This function returns encoded data, with the received bwe-index in the - * stream. It should always return a complete packet, i.e. only called once - * even for 60 msec frames - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - bweIndex : index of bandwidth estimate to put in new - * bitstream - scale : factor for rate change (0.4 ~=> half the - * rate, 1 no change). 
- * - * Output: - * - encoded : the encoded data vector - * - * Return value : >0 - Length (in bytes) of coded data - * -1 - Error - */ - -int16_t WebRtcIsacfix_GetNewBitStream(ISACFIX_MainStruct* ISAC_main_inst, - int16_t bweIndex, - float scale, - uint8_t* encoded); - -/**************************************************************************** - * WebRtcIsacfix_GetDownLinkBwIndex(...) - * - * This function returns index representing the Bandwidth estimate from - * other side to this side. - * - * Input: - * - ISAC_main_inst : iSAC struct - * - * Output: - * - rateIndex : Bandwidth estimate to transmit to other side. - * - */ - -int16_t WebRtcIsacfix_GetDownLinkBwIndex(ISACFIX_MainStruct* ISAC_main_inst, - int16_t* rateIndex); - -/**************************************************************************** - * WebRtcIsacfix_UpdateUplinkBw(...) - * - * This function takes an index representing the Bandwidth estimate from - * this side to other side and updates BWE. - * - * Input: - * - ISAC_main_inst : iSAC struct - * - rateIndex : Bandwidth estimate from other side. - * - */ - -int16_t WebRtcIsacfix_UpdateUplinkBw(ISACFIX_MainStruct* ISAC_main_inst, - int16_t rateIndex); - -/**************************************************************************** - * WebRtcIsacfix_ReadBwIndex(...) - * - * This function returns the index of the Bandwidth estimate from the bitstream. - * - * Input: - * - encoded : Encoded bitstream - * - encoded_len_bytes : Length of the bitstream in bytes. - * - * Output: - * - rateIndex : Bandwidth estimate in bitstream - * - */ - -int16_t WebRtcIsacfix_ReadBwIndex(const uint8_t* encoded, - size_t encoded_len_bytes, - int16_t* rateIndex); - -/**************************************************************************** - * WebRtcIsacfix_GetNewFrameLen(...) - * - * This function return the next frame length (in samples) of iSAC. - * - * Input: - * -ISAC_main_inst : iSAC instance - * - * Return value : frame lenght in samples - */ - -int16_t WebRtcIsacfix_GetNewFrameLen(ISACFIX_MainStruct* ISAC_main_inst); - -#if defined(__cplusplus) -} -#endif - -#endif /* MODULES_AUDIO_CODING_CODECS_ISAC_FIX_INCLUDE_ISACFIX_H_ */ diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/arith_routines.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/arith_routines.c deleted file mode 100644 index eaeef50f0474..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/arith_routines.c +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * arith_routins.c - * - * This C file contains a function for finalizing the bitstream - * after arithmetic coding. - * - */ - -#include "modules/audio_coding/codecs/isac/fix/source/arith_routins.h" - - -/**************************************************************************** - * WebRtcIsacfix_EncTerminate(...) - * - * Final call to the arithmetic coder for an encoder call. This function - * terminates and return byte stream. 
- * - * Input: - * - streamData : in-/output struct containing bitstream - * - * Return value : number of bytes in the stream - */ -int16_t WebRtcIsacfix_EncTerminate(Bitstr_enc *streamData) -{ - uint16_t *streamPtr; - uint16_t negCarry; - - /* point to the right place in the stream buffer */ - streamPtr = streamData->stream + streamData->stream_index; - - /* find minimum length (determined by current interval width) */ - if ( streamData->W_upper > 0x01FFFFFF ) - { - streamData->streamval += 0x01000000; - - /* if result is less than the added value we must take care of the carry */ - if (streamData->streamval < 0x01000000) - { - /* propagate carry */ - if (streamData->full == 0) { - /* Add value to current value */ - negCarry = *streamPtr; - negCarry += 0x0100; - *streamPtr = negCarry; - - /* if value is too big, propagate carry to next byte, and so on */ - while (!(negCarry)) - { - negCarry = *--streamPtr; - negCarry++; - *streamPtr = negCarry; - } - } else { - /* propagate carry by adding one to the previous byte in the - * stream if that byte is 0xFFFF we need to propagate the carry - * furhter back in the stream */ - while ( !(++(*--streamPtr)) ); - } - - /* put pointer back to the old value */ - streamPtr = streamData->stream + streamData->stream_index; - } - /* write remaining data to bitstream, if "full == 0" first byte has data */ - if (streamData->full == 0) { - *streamPtr++ += (uint16_t)(streamData->streamval >> 24); - streamData->full = 1; - } else { - *streamPtr = (uint16_t)((streamData->streamval >> 24) << 8); - streamData->full = 0; - } - } - else - { - streamData->streamval += 0x00010000; - - /* if result is less than the added value we must take care of the carry */ - if (streamData->streamval < 0x00010000) - { - /* propagate carry */ - if (streamData->full == 0) { - /* Add value to current value */ - negCarry = *streamPtr; - negCarry += 0x0100; - *streamPtr = negCarry; - - /* if value to big, propagate carry to next byte, and so on */ - while (!(negCarry)) - { - negCarry = *--streamPtr; - negCarry++; - *streamPtr = negCarry; - } - } else { - /* Add carry to previous byte */ - while ( !(++(*--streamPtr)) ); - } - - /* put pointer back to the old value */ - streamPtr = streamData->stream + streamData->stream_index; - } - /* write remaining data (2 bytes) to bitstream */ - if (streamData->full) { - *streamPtr++ = (uint16_t)(streamData->streamval >> 16); - } else { - *streamPtr++ |= (uint16_t)(streamData->streamval >> 24); - *streamPtr = (uint16_t)(streamData->streamval >> 8) & 0xFF00; - } - } - - /* calculate stream length in bytes */ - return (((streamPtr - streamData->stream)<<1) + !(streamData->full)); -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/arith_routines_hist.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/arith_routines_hist.c deleted file mode 100644 index cad3056b3766..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/arith_routines_hist.c +++ /dev/null @@ -1,401 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * arith_routinshist.c - * - * This C file contains arithmetic encoding and decoding. 
- * - */ - -#include "modules/audio_coding/codecs/isac/fix/source/arith_routins.h" - - -/**************************************************************************** - * WebRtcIsacfix_EncHistMulti(...) - * - * Encode the histogram interval - * - * Input: - * - streamData : in-/output struct containing bitstream - * - data : data vector - * - cdf : array of cdf arrays - * - lenData : data vector length - * - * Return value : 0 if ok - * <0 if error detected - */ -int WebRtcIsacfix_EncHistMulti(Bitstr_enc *streamData, - const int16_t *data, - const uint16_t *const *cdf, - const int16_t lenData) -{ - uint32_t W_lower; - uint32_t W_upper; - uint32_t W_upper_LSB; - uint32_t W_upper_MSB; - uint16_t *streamPtr; - uint16_t negCarry; - uint16_t *maxStreamPtr; - uint16_t *streamPtrCarry; - uint32_t cdfLo; - uint32_t cdfHi; - int k; - - - /* point to beginning of stream buffer - * and set maximum streamPtr value */ - streamPtr = streamData->stream + streamData->stream_index; - maxStreamPtr = streamData->stream + STREAM_MAXW16_60MS - 1; - - W_upper = streamData->W_upper; - - for (k = lenData; k > 0; k--) - { - /* fetch cdf_lower and cdf_upper from cdf tables */ - cdfLo = (uint32_t) *(*cdf + (uint32_t)*data); - cdfHi = (uint32_t) *(*cdf++ + (uint32_t)*data++ + 1); - - /* update interval */ - W_upper_LSB = W_upper & 0x0000FFFF; - W_upper_MSB = W_upper >> 16; - W_lower = WEBRTC_SPL_UMUL(W_upper_MSB, cdfLo); - W_lower += ((W_upper_LSB * cdfLo) >> 16); - W_upper = WEBRTC_SPL_UMUL(W_upper_MSB, cdfHi); - W_upper += ((W_upper_LSB * cdfHi) >> 16); - - /* shift interval such that it begins at zero */ - W_upper -= ++W_lower; - - /* add integer to bitstream */ - streamData->streamval += W_lower; - - /* handle carry */ - if (streamData->streamval < W_lower) - { - /* propagate carry */ - streamPtrCarry = streamPtr; - if (streamData->full == 0) { - negCarry = *streamPtrCarry; - negCarry += 0x0100; - *streamPtrCarry = negCarry; - while (!(negCarry)) - { - negCarry = *--streamPtrCarry; - negCarry++; - *streamPtrCarry = negCarry; - } - } else { - while ( !(++(*--streamPtrCarry)) ); - } - } - - /* renormalize interval, store most significant byte of streamval and update streamval - * W_upper < 2^24 */ - while ( !(W_upper & 0xFF000000) ) - { - W_upper <<= 8; - if (streamData->full == 0) { - *streamPtr++ += (uint16_t)(streamData->streamval >> 24); - streamData->full = 1; - } else { - *streamPtr = (uint16_t)((streamData->streamval >> 24) << 8); - streamData->full = 0; - } - - if( streamPtr > maxStreamPtr ) { - return -ISAC_DISALLOWED_BITSTREAM_LENGTH; - } - streamData->streamval <<= 8; - } - } - - /* calculate new stream_index */ - streamData->stream_index = streamPtr - streamData->stream; - streamData->W_upper = W_upper; - - return 0; -} - - -/**************************************************************************** - * WebRtcIsacfix_DecHistBisectMulti(...) 
- * - * Function to decode more symbols from the arithmetic bytestream, using - * method of bisection cdf tables should be of size 2^k-1 (which corresponds - * to an alphabet size of 2^k-2) - * - * Input: - * - streamData : in-/output struct containing bitstream - * - cdf : array of cdf arrays - * - cdfSize : array of cdf table sizes+1 (power of two: 2^k) - * - lenData : data vector length - * - * Output: - * - data : data vector - * - * Return value : number of bytes in the stream - * <0 if error detected - */ -int16_t WebRtcIsacfix_DecHistBisectMulti(int16_t *data, - Bitstr_dec *streamData, - const uint16_t *const *cdf, - const uint16_t *cdfSize, - const int16_t lenData) -{ - uint32_t W_lower = 0; - uint32_t W_upper; - uint32_t W_tmp; - uint32_t W_upper_LSB; - uint32_t W_upper_MSB; - uint32_t streamval; - const uint16_t *streamPtr; - const uint16_t *cdfPtr; - int16_t sizeTmp; - int k; - - - streamPtr = streamData->stream + streamData->stream_index; - W_upper = streamData->W_upper; - - /* Error check: should not be possible in normal operation */ - if (W_upper == 0) { - return -2; - } - - /* first time decoder is called for this stream */ - if (streamData->stream_index == 0) - { - /* read first word from bytestream */ - streamval = (uint32_t)*streamPtr++ << 16; - streamval |= *streamPtr++; - } else { - streamval = streamData->streamval; - } - - for (k = lenData; k > 0; k--) - { - /* find the integer *data for which streamval lies in [W_lower+1, W_upper] */ - W_upper_LSB = W_upper & 0x0000FFFF; - W_upper_MSB = W_upper >> 16; - - /* start halfway the cdf range */ - sizeTmp = *cdfSize++ / 2; - cdfPtr = *cdf + (sizeTmp - 1); - - /* method of bisection */ - for ( ;; ) - { - W_tmp = WEBRTC_SPL_UMUL_32_16(W_upper_MSB, *cdfPtr); - W_tmp += (W_upper_LSB * (*cdfPtr)) >> 16; - sizeTmp /= 2; - if (sizeTmp == 0) { - break; - } - - if (streamval > W_tmp) - { - W_lower = W_tmp; - cdfPtr += sizeTmp; - } else { - W_upper = W_tmp; - cdfPtr -= sizeTmp; - } - } - if (streamval > W_tmp) - { - W_lower = W_tmp; - *data++ = cdfPtr - *cdf++; - } else { - W_upper = W_tmp; - *data++ = cdfPtr - *cdf++ - 1; - } - - /* shift interval to start at zero */ - W_upper -= ++W_lower; - - /* add integer to bitstream */ - streamval -= W_lower; - - /* renormalize interval and update streamval */ - /* W_upper < 2^24 */ - while ( !(W_upper & 0xFF000000) ) - { - /* read next byte from stream */ - if (streamData->full == 0) { - streamval = (streamval << 8) | (*streamPtr++ & 0x00FF); - streamData->full = 1; - } else { - streamval = (streamval << 8) | (*streamPtr >> 8); - streamData->full = 0; - } - W_upper <<= 8; - } - - - /* Error check: should not be possible in normal operation */ - if (W_upper == 0) { - return -2; - } - - } - - streamData->stream_index = streamPtr - streamData->stream; - streamData->W_upper = W_upper; - streamData->streamval = streamval; - - if ( W_upper > 0x01FFFFFF ) { - return (streamData->stream_index*2 - 3 + !streamData->full); - } else { - return (streamData->stream_index*2 - 2 + !streamData->full); - } -} - - -/**************************************************************************** - * WebRtcIsacfix_DecHistOneStepMulti(...) - * - * Function to decode more symbols from the arithmetic bytestream, taking - * single step up or down at a time. - * cdf tables can be of arbitrary size, but large tables may take a lot of - * iterations. 
- * - * Input: - * - streamData : in-/output struct containing bitstream - * - cdf : array of cdf arrays - * - initIndex : vector of initial cdf table search entries - * - lenData : data vector length - * - * Output: - * - data : data vector - * - * Return value : number of bytes in original stream - * <0 if error detected - */ -int16_t WebRtcIsacfix_DecHistOneStepMulti(int16_t *data, - Bitstr_dec *streamData, - const uint16_t *const *cdf, - const uint16_t *initIndex, - const int16_t lenData) -{ - uint32_t W_lower; - uint32_t W_upper; - uint32_t W_tmp; - uint32_t W_upper_LSB; - uint32_t W_upper_MSB; - uint32_t streamval; - const uint16_t *streamPtr; - const uint16_t *cdfPtr; - int k; - - - streamPtr = streamData->stream + streamData->stream_index; - W_upper = streamData->W_upper; - /* Error check: Should not be possible in normal operation */ - if (W_upper == 0) { - return -2; - } - - /* Check if it is the first time decoder is called for this stream */ - if (streamData->stream_index == 0) - { - /* read first word from bytestream */ - streamval = (uint32_t)(*streamPtr++) << 16; - streamval |= *streamPtr++; - } else { - streamval = streamData->streamval; - } - - for (k = lenData; k > 0; k--) - { - /* find the integer *data for which streamval lies in [W_lower+1, W_upper] */ - W_upper_LSB = W_upper & 0x0000FFFF; - W_upper_MSB = WEBRTC_SPL_RSHIFT_U32(W_upper, 16); - - /* start at the specified table entry */ - cdfPtr = *cdf + (*initIndex++); - W_tmp = WEBRTC_SPL_UMUL_32_16(W_upper_MSB, *cdfPtr); - W_tmp += (W_upper_LSB * (*cdfPtr)) >> 16; - - if (streamval > W_tmp) - { - for ( ;; ) - { - W_lower = W_tmp; - - /* range check */ - if (cdfPtr[0] == 65535) { - return -3; - } - - W_tmp = WEBRTC_SPL_UMUL_32_16(W_upper_MSB, *++cdfPtr); - W_tmp += (W_upper_LSB * (*cdfPtr)) >> 16; - - if (streamval <= W_tmp) { - break; - } - } - W_upper = W_tmp; - *data++ = cdfPtr - *cdf++ - 1; - } else { - for ( ;; ) - { - W_upper = W_tmp; - --cdfPtr; - - /* range check */ - if (cdfPtr < *cdf) { - return -3; - } - - W_tmp = WEBRTC_SPL_UMUL_32_16(W_upper_MSB, *cdfPtr); - W_tmp += (W_upper_LSB * (*cdfPtr)) >> 16; - - if (streamval > W_tmp) { - break; - } - } - W_lower = W_tmp; - *data++ = cdfPtr - *cdf++; - } - - /* shift interval to start at zero */ - W_upper -= ++W_lower; - - /* add integer to bitstream */ - streamval -= W_lower; - - /* renormalize interval and update streamval */ - /* W_upper < 2^24 */ - while ( !(W_upper & 0xFF000000) ) - { - /* read next byte from stream */ - if (streamData->full == 0) { - streamval = (streamval << 8) | (*streamPtr++ & 0x00FF); - streamData->full = 1; - } else { - streamval = (streamval << 8) | (*streamPtr >> 8); - streamData->full = 0; - } - W_upper <<= 8; - } - } - - streamData->stream_index = streamPtr - streamData->stream; - streamData->W_upper = W_upper; - streamData->streamval = streamval; - - /* find number of bytes in original stream (determined by current interval width) */ - if ( W_upper > 0x01FFFFFF ) { - return (streamData->stream_index*2 - 3 + !streamData->full); - } else { - return (streamData->stream_index*2 - 2 + !streamData->full); - } -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/arith_routines_logist.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/arith_routines_logist.c deleted file mode 100644 index 8e979604613a..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/arith_routines_logist.c +++ /dev/null @@ -1,413 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project 
authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * arith_routinslogist.c - * - * This C file contains arithmetic encode and decode logistic - * - */ - -#include "modules/audio_coding/codecs/isac/fix/source/arith_routins.h" - -/* Tables for piecewise linear cdf functions: y = k*x */ - -/* x Points for function piecewise() in Q15 */ -static const int32_t kHistEdges[51] = { - -327680, -314573, -301466, -288359, -275252, -262144, -249037, -235930, -222823, -209716, - -196608, -183501, -170394, -157287, -144180, -131072, -117965, -104858, -91751, -78644, - -65536, -52429, -39322, -26215, -13108, 0, 13107, 26214, 39321, 52428, - 65536, 78643, 91750, 104857, 117964, 131072, 144179, 157286, 170393, 183500, - 196608, 209715, 222822, 235929, 249036, 262144, 275251, 288358, 301465, 314572, - 327680 -}; - - -/* k Points for function piecewise() in Q0 */ -static const uint16_t kCdfSlope[51] = { - 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, - 5, 5, 13, 23, 47, 87, 154, 315, 700, 1088, - 2471, 6064, 14221, 21463, 36634, 36924, 19750, 13270, 5806, 2312, - 1095, 660, 316, 145, 86, 41, 32, 5, 5, 5, - 5, 5, 5, 5, 5, 5, 5, 5, 5, 2, - 0 -}; - -/* y Points for function piecewise() in Q0 */ -static const uint16_t kCdfLogistic[51] = { - 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, - 20, 22, 24, 29, 38, 57, 92, 153, 279, 559, - 994, 1983, 4408, 10097, 18682, 33336, 48105, 56005, 61313, 63636, - 64560, 64998, 65262, 65389, 65447, 65481, 65497, 65510, 65512, 65514, - 65516, 65518, 65520, 65522, 65524, 65526, 65528, 65530, 65532, 65534, - 65535 -}; - - -/**************************************************************************** - * WebRtcIsacfix_Piecewise(...) - * - * Piecewise linear function - * - * Input: - * - xinQ15 : input value x in Q15 - * - * Return value : korresponding y-value in Q0 - */ - - -static __inline uint16_t WebRtcIsacfix_Piecewise(int32_t xinQ15) { - int32_t ind; - int32_t qtmp1; - uint16_t qtmp2; - - /* Find index for x-value */ - qtmp1 = WEBRTC_SPL_SAT(kHistEdges[50],xinQ15,kHistEdges[0]); - ind = WEBRTC_SPL_MUL(5, qtmp1 - kHistEdges[0]); - ind >>= 16; - - /* Calculate corresponding y-value ans return*/ - qtmp1 = qtmp1 - kHistEdges[ind]; - qtmp2 = (uint16_t)WEBRTC_SPL_RSHIFT_U32( - WEBRTC_SPL_UMUL_32_16(qtmp1,kCdfSlope[ind]), 15); - return (kCdfLogistic[ind] + qtmp2); -} - -/**************************************************************************** - * WebRtcIsacfix_EncLogisticMulti2(...) - * - * Arithmetic coding of spectrum. - * - * Input: - * - streamData : in-/output struct containing bitstream - * - dataQ7 : data vector in Q7 - * - envQ8 : side info vector defining the width of the pdf - * in Q8 - * - lenData : data vector length - * - * Return value : 0 if ok, - * <0 otherwise. 
- */ -int WebRtcIsacfix_EncLogisticMulti2(Bitstr_enc *streamData, - int16_t *dataQ7, - const uint16_t *envQ8, - const int16_t lenData) -{ - uint32_t W_lower; - uint32_t W_upper; - uint16_t W_upper_LSB; - uint16_t W_upper_MSB; - uint16_t *streamPtr; - uint16_t *maxStreamPtr; - uint16_t *streamPtrCarry; - uint16_t negcarry; - uint32_t cdfLo; - uint32_t cdfHi; - int k; - - /* point to beginning of stream buffer - * and set maximum streamPtr value */ - streamPtr = streamData->stream + streamData->stream_index; - maxStreamPtr = streamData->stream + STREAM_MAXW16_60MS - 1; - W_upper = streamData->W_upper; - - for (k = 0; k < lenData; k++) - { - /* compute cdf_lower and cdf_upper by evaluating the - * WebRtcIsacfix_Piecewise linear cdf */ - cdfLo = WebRtcIsacfix_Piecewise(WEBRTC_SPL_MUL_16_U16(*dataQ7 - 64, *envQ8)); - cdfHi = WebRtcIsacfix_Piecewise(WEBRTC_SPL_MUL_16_U16(*dataQ7 + 64, *envQ8)); - - /* test and clip if probability gets too small */ - while ((cdfLo + 1) >= cdfHi) { - /* clip */ - if (*dataQ7 > 0) { - *dataQ7 -= 128; - cdfHi = cdfLo; - cdfLo = WebRtcIsacfix_Piecewise( - WEBRTC_SPL_MUL_16_U16(*dataQ7 - 64, *envQ8)); - } else { - *dataQ7 += 128; - cdfLo = cdfHi; - cdfHi = WebRtcIsacfix_Piecewise( - WEBRTC_SPL_MUL_16_U16(*dataQ7 + 64, *envQ8)); - } - } - - dataQ7++; - /* increment only once per 4 iterations */ - envQ8 += (k & 1) & (k >> 1); - - - /* update interval */ - W_upper_LSB = (uint16_t)W_upper; - W_upper_MSB = (uint16_t)WEBRTC_SPL_RSHIFT_U32(W_upper, 16); - W_lower = WEBRTC_SPL_UMUL_32_16(cdfLo, W_upper_MSB); - W_lower += (cdfLo * W_upper_LSB) >> 16; - W_upper = WEBRTC_SPL_UMUL_32_16(cdfHi, W_upper_MSB); - W_upper += (cdfHi * W_upper_LSB) >> 16; - - /* shift interval such that it begins at zero */ - W_upper -= ++W_lower; - - /* add integer to bitstream */ - streamData->streamval += W_lower; - - /* handle carry */ - if (streamData->streamval < W_lower) - { - /* propagate carry */ - streamPtrCarry = streamPtr; - if (streamData->full == 0) { - negcarry = *streamPtrCarry; - negcarry += 0x0100; - *streamPtrCarry = negcarry; - while (!(negcarry)) - { - negcarry = *--streamPtrCarry; - negcarry++; - *streamPtrCarry = negcarry; - } - } else { - while (!(++(*--streamPtrCarry))); - } - } - - /* renormalize interval, store most significant byte of streamval and update streamval - * W_upper < 2^24 */ - while ( !(W_upper & 0xFF000000) ) - { - W_upper <<= 8; - if (streamData->full == 0) { - *streamPtr++ += (uint16_t) WEBRTC_SPL_RSHIFT_U32( - streamData->streamval, 24); - streamData->full = 1; - } else { - *streamPtr = (uint16_t)((streamData->streamval >> 24) << 8); - streamData->full = 0; - } - - if( streamPtr > maxStreamPtr ) - return -ISAC_DISALLOWED_BITSTREAM_LENGTH; - - streamData->streamval <<= 8; - } - } - - /* calculate new stream_index */ - streamData->stream_index = streamPtr - streamData->stream; - streamData->W_upper = W_upper; - - return 0; -} - - -/**************************************************************************** - * WebRtcIsacfix_DecLogisticMulti2(...) - * - * Arithmetic decoding of spectrum. 
- * - * Input: - * - streamData : in-/output struct containing bitstream - * - envQ8 : side info vector defining the width of the pdf - * in Q8 - * - lenData : data vector length - * - * Input/Output: - * - dataQ7 : input: dither vector, output: data vector - * - * Return value : number of bytes in the stream so far - * -1 if error detected - */ -int WebRtcIsacfix_DecLogisticMulti2(int16_t *dataQ7, - Bitstr_dec *streamData, - const int32_t *envQ8, - const int16_t lenData) -{ - uint32_t W_lower; - uint32_t W_upper; - uint32_t W_tmp; - uint16_t W_upper_LSB; - uint16_t W_upper_MSB; - uint32_t streamVal; - uint16_t cdfTmp; - int32_t res; - int32_t inSqrt; - int32_t newRes; - const uint16_t *streamPtr; - int16_t candQ7; - int16_t envCount; - uint16_t tmpARSpecQ8 = 0; - int k, i; - int offset = 0; - - /* point to beginning of stream buffer */ - streamPtr = streamData->stream + streamData->stream_index; - W_upper = streamData->W_upper; - - /* Check if it is first time decoder is called for this stream */ - if (streamData->stream_index == 0) - { - /* read first word from bytestream */ - streamVal = (uint32_t)(*streamPtr++) << 16; - streamVal |= *streamPtr++; - - } else { - streamVal = streamData->streamval; - } - - - res = 1 << (WebRtcSpl_GetSizeInBits(envQ8[0]) >> 1); - envCount = 0; - - /* code assumes lenData%4 == 0 */ - for (k = 0; k < lenData; k += 4) - { - int k4; - - /* convert to magnitude spectrum, by doing square-roots (modified from SPLIB) */ - inSqrt = envQ8[envCount]; - i = 10; - - /* For safty reasons */ - if (inSqrt < 0) - inSqrt=-inSqrt; - - newRes = (inSqrt / res + res) >> 1; - do - { - res = newRes; - newRes = (inSqrt / res + res) >> 1; - } while (newRes != res && i-- > 0); - - tmpARSpecQ8 = (uint16_t)newRes; - - for(k4 = 0; k4 < 4; k4++) - { - /* find the integer *data for which streamVal lies in [W_lower+1, W_upper] */ - W_upper_LSB = (uint16_t) (W_upper & 0x0000FFFF); - W_upper_MSB = (uint16_t) WEBRTC_SPL_RSHIFT_U32(W_upper, 16); - - /* find first candidate by inverting the logistic cdf - * Input dither value collected from io-stream */ - candQ7 = - *dataQ7 + 64; - cdfTmp = WebRtcIsacfix_Piecewise(WEBRTC_SPL_MUL_16_U16(candQ7, tmpARSpecQ8)); - - W_tmp = (uint32_t)cdfTmp * W_upper_MSB; - W_tmp += ((uint32_t)cdfTmp * (uint32_t)W_upper_LSB) >> 16; - - if (streamVal > W_tmp) - { - W_lower = W_tmp; - candQ7 += 128; - cdfTmp = WebRtcIsacfix_Piecewise(WEBRTC_SPL_MUL_16_U16(candQ7, tmpARSpecQ8)); - - W_tmp = (uint32_t)cdfTmp * W_upper_MSB; - W_tmp += ((uint32_t)cdfTmp * (uint32_t)W_upper_LSB) >> 16; - - while (streamVal > W_tmp) - { - W_lower = W_tmp; - candQ7 += 128; - cdfTmp = WebRtcIsacfix_Piecewise( - WEBRTC_SPL_MUL_16_U16(candQ7, tmpARSpecQ8)); - - W_tmp = (uint32_t)cdfTmp * W_upper_MSB; - W_tmp += ((uint32_t)cdfTmp * (uint32_t)W_upper_LSB) >> 16; - - /* error check */ - if (W_lower == W_tmp) { - return -1; - } - } - W_upper = W_tmp; - - /* Output value put in dataQ7: another sample decoded */ - *dataQ7 = candQ7 - 64; - } - else - { - W_upper = W_tmp; - candQ7 -= 128; - cdfTmp = WebRtcIsacfix_Piecewise(WEBRTC_SPL_MUL_16_U16(candQ7, tmpARSpecQ8)); - - W_tmp = (uint32_t)cdfTmp * W_upper_MSB; - W_tmp += ((uint32_t)cdfTmp * (uint32_t)W_upper_LSB) >> 16; - - while ( !(streamVal > W_tmp) ) - { - W_upper = W_tmp; - candQ7 -= 128; - cdfTmp = WebRtcIsacfix_Piecewise( - WEBRTC_SPL_MUL_16_U16(candQ7, tmpARSpecQ8)); - - W_tmp = (uint32_t)cdfTmp * W_upper_MSB; - W_tmp += ((uint32_t)cdfTmp * (uint32_t)W_upper_LSB) >> 16; - - /* error check */ - if (W_upper == W_tmp){ - return -1; - } - } - 
W_lower = W_tmp; - - /* Output value put in dataQ7: another sample decoded */ - *dataQ7 = candQ7 + 64; - } - - dataQ7++; - - /* shift interval to start at zero */ - W_upper -= ++W_lower; - - /* add integer to bitstream */ - streamVal -= W_lower; - - /* renormalize interval and update streamVal - * W_upper < 2^24 */ - while ( !(W_upper & 0xFF000000) ) - { - if (streamPtr < streamData->stream + streamData->stream_size) { - /* read next byte from stream */ - if (streamData->full == 0) { - streamVal = (streamVal << 8) | (*streamPtr++ & 0x00FF); - streamData->full = 1; - } else { - streamVal = (streamVal << 8) | (*streamPtr >> 8); - streamData->full = 0; - } - } else { - /* Intending to read outside the stream. This can happen for the last - * two or three bytes. It is how the algorithm is implemented. Do - * not read from the bit stream and insert zeros instead. */ - streamVal <<= 8; - if (streamData->full == 0) { - offset++; // We would have incremented the pointer in this case. - streamData->full = 1; - } else { - streamData->full = 0; - } - } - W_upper <<= 8; - } - } - envCount++; - } - - streamData->stream_index = streamPtr + offset - streamData->stream; - streamData->W_upper = W_upper; - streamData->streamval = streamVal; - - /* find number of bytes in original stream (determined by current interval width) */ - if ( W_upper > 0x01FFFFFF ) - return (streamData->stream_index*2 - 3 + !streamData->full); - else - return (streamData->stream_index*2 - 2 + !streamData->full); -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/arith_routins.h b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/arith_routins.h deleted file mode 100644 index d112bfe7f27a..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/arith_routins.h +++ /dev/null @@ -1,149 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * arith_routins.h - * - * Functions for arithmetic coding. - * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_ARITH_ROUTINS_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_ARITH_ROUTINS_H_ - -#include "modules/audio_coding/codecs/isac/fix/source/structs.h" - -/**************************************************************************** - * WebRtcIsacfix_EncLogisticMulti2(...) - * - * Arithmetic coding of spectrum. - * - * Input: - * - streamData : in-/output struct containing bitstream - * - dataQ7 : data vector in Q7 - * - envQ8 : side info vector defining the width of the pdf - * in Q8 - * - lenData : data vector length - * - * Return value : 0 if ok, - * <0 otherwise. - */ -int WebRtcIsacfix_EncLogisticMulti2(Bitstr_enc* streamData, - int16_t* dataQ7, - const uint16_t* env, - int16_t lenData); - -/**************************************************************************** - * WebRtcIsacfix_EncTerminate(...) - * - * Final call to the arithmetic coder for an encoder call. This function - * terminates and return byte stream. 
- * - * Input: - * - streamData : in-/output struct containing bitstream - * - * Return value : number of bytes in the stream - */ -int16_t WebRtcIsacfix_EncTerminate(Bitstr_enc* streamData); - -/**************************************************************************** - * WebRtcIsacfix_DecLogisticMulti2(...) - * - * Arithmetic decoding of spectrum. - * - * Input: - * - streamData : in-/output struct containing bitstream - * - envQ8 : side info vector defining the width of the pdf - * in Q8 - * - lenData : data vector length - * - * Input/Output: - * - dataQ7 : input: dither vector, output: data vector, in Q7 - * - * Return value : number of bytes in the stream so far - * <0 if error detected - */ -int WebRtcIsacfix_DecLogisticMulti2(int16_t* data, - Bitstr_dec* streamData, - const int32_t* env, - int16_t lenData); - -/**************************************************************************** - * WebRtcIsacfix_EncHistMulti(...) - * - * Encode the histogram interval - * - * Input: - * - streamData : in-/output struct containing bitstream - * - data : data vector - * - cdf : array of cdf arrays - * - lenData : data vector length - * - * Return value : 0 if ok - * <0 if error detected - */ -int WebRtcIsacfix_EncHistMulti(Bitstr_enc* streamData, - const int16_t* data, - const uint16_t* const* cdf, - int16_t lenData); - -/**************************************************************************** - * WebRtcIsacfix_DecHistBisectMulti(...) - * - * Function to decode more symbols from the arithmetic bytestream, using - * method of bisection. - * C df tables should be of size 2^k-1 (which corresponds to an - * alphabet size of 2^k-2) - * - * Input: - * - streamData : in-/output struct containing bitstream - * - cdf : array of cdf arrays - * - cdfSize : array of cdf table sizes+1 (power of two: 2^k) - * - lenData : data vector length - * - * Output: - * - data : data vector - * - * Return value : number of bytes in the stream - * <0 if error detected - */ -int16_t WebRtcIsacfix_DecHistBisectMulti(int16_t* data, - Bitstr_dec* streamData, - const uint16_t* const* cdf, - const uint16_t* cdfSize, - int16_t lenData); - -/**************************************************************************** - * WebRtcIsacfix_DecHistOneStepMulti(...) - * - * Function to decode more symbols from the arithmetic bytestream, taking - * single step up or down at a time. - * cdf tables can be of arbitrary size, but large tables may take a lot of - * iterations. - * - * Input: - * - streamData : in-/output struct containing bitstream - * - cdf : array of cdf arrays - * - initIndex : vector of initial cdf table search entries - * - lenData : data vector length - * - * Output: - * - data : data vector - * - * Return value : number of bytes in original stream - * <0 if error detected - */ -int16_t WebRtcIsacfix_DecHistOneStepMulti(int16_t* data, - Bitstr_dec* streamData, - const uint16_t* const* cdf, - const uint16_t* initIndex, - int16_t lenData); - -#endif /* MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_ARITH_ROUTINS_H_ */ diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/bandwidth_estimator.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/bandwidth_estimator.c deleted file mode 100644 index 8845357d5913..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/bandwidth_estimator.c +++ /dev/null @@ -1,1021 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * bandwidth_estimator.c - * - * This file contains the code for the Bandwidth Estimator designed - * for iSAC. - * - * NOTE! Castings needed for C55, do not remove! - * - */ - -#include "modules/audio_coding/codecs/isac/fix/source/bandwidth_estimator.h" - -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" -#include "rtc_base/checks.h" - -/* array of quantization levels for bottle neck info; Matlab code: */ -/* sprintf('%4.1ff, ', logspace(log10(5000), log10(40000), 12)) */ -static const int16_t kQRateTable[12] = { - 10000, 11115, 12355, 13733, 15265, 16967, - 18860, 20963, 23301, 25900, 28789, 32000 -}; - -/* 0.1 times the values in the table kQRateTable */ -/* values are in Q16 */ -static const int32_t KQRate01[12] = { - 65536000, 72843264, 80969728, 90000589, 100040704, 111194931, - 123600896, 137383117, 152705434, 169738240, 188671590, 209715200 -}; - -/* Bits per Bytes Seconds - * 8 bits/byte * 1000 msec/sec * 1/framelength (in msec)->bits/byte*sec - * frame length will either be 30 or 60 msec. 8738 is 1/60 in Q19 and 1/30 in Q18 - * The following number is either in Q15 or Q14 depending on the current frame length */ -static const int32_t kBitsByteSec = 4369000; - -/* Received header rate. First value is for 30 ms packets and second for 60 ms */ -static const int16_t kRecHeaderRate[2] = { - 9333, 4666 -}; - -/* Inverted minimum and maximum bandwidth in Q30. - minBwInv 30 ms, maxBwInv 30 ms, - minBwInv 60 ms, maxBwInv 69 ms -*/ -static const int32_t kInvBandwidth[4] = { - 55539, 25978, - 73213, 29284 -}; - -/* Number of samples in 25 msec */ -static const int32_t kSamplesIn25msec = 400; - - -/**************************************************************************** - * WebRtcIsacfix_InitBandwidthEstimator(...) - * - * This function initializes the struct for the bandwidth estimator - * - * Input/Output: - * - bweStr : Struct containing bandwidth information. 
- * - * Return value : 0 - */ -int32_t WebRtcIsacfix_InitBandwidthEstimator(BwEstimatorstr *bweStr) -{ - bweStr->prevFrameSizeMs = INIT_FRAME_LEN; - bweStr->prevRtpNumber = 0; - bweStr->prevSendTime = 0; - bweStr->prevArrivalTime = 0; - bweStr->prevRtpRate = 1; - bweStr->lastUpdate = 0; - bweStr->lastReduction = 0; - bweStr->countUpdates = -9; - - /* INIT_BN_EST = 20000 - * INIT_BN_EST_Q7 = 2560000 - * INIT_HDR_RATE = 4666 - * INIT_REC_BN_EST_Q5 = 789312 - * - * recBwInv = 1/(INIT_BN_EST + INIT_HDR_RATE) in Q30 - * recBwAvg = INIT_BN_EST + INIT_HDR_RATE in Q5 - */ - bweStr->recBwInv = 43531; - bweStr->recBw = INIT_BN_EST; - bweStr->recBwAvgQ = INIT_BN_EST_Q7; - bweStr->recBwAvg = INIT_REC_BN_EST_Q5; - bweStr->recJitter = (int32_t) 327680; /* 10 in Q15 */ - bweStr->recJitterShortTerm = 0; - bweStr->recJitterShortTermAbs = (int32_t) 40960; /* 5 in Q13 */ - bweStr->recMaxDelay = (int32_t) 10; - bweStr->recMaxDelayAvgQ = (int32_t) 5120; /* 10 in Q9 */ - bweStr->recHeaderRate = INIT_HDR_RATE; - bweStr->countRecPkts = 0; - bweStr->sendBwAvg = INIT_BN_EST_Q7; - bweStr->sendMaxDelayAvg = (int32_t) 5120; /* 10 in Q9 */ - - bweStr->countHighSpeedRec = 0; - bweStr->highSpeedRec = 0; - bweStr->countHighSpeedSent = 0; - bweStr->highSpeedSend = 0; - bweStr->inWaitPeriod = 0; - - /* Find the inverse of the max bw and min bw in Q30 - * (1 / (MAX_ISAC_BW + INIT_HDR_RATE) in Q30 - * (1 / (MIN_ISAC_BW + INIT_HDR_RATE) in Q30 - */ - bweStr->maxBwInv = kInvBandwidth[3]; - bweStr->minBwInv = kInvBandwidth[2]; - - bweStr->external_bw_info.in_use = 0; - - return 0; -} - -/**************************************************************************** - * WebRtcIsacfix_UpdateUplinkBwImpl(...) - * - * This function updates bottle neck rate received from other side in payload - * and calculates a new bottle neck to send to the other side. - * - * Input/Output: - * - bweStr : struct containing bandwidth information. 
- * - rtpNumber : value from RTP packet, from NetEq - * - frameSize : length of signal frame in ms, from iSAC decoder - * - sendTime : value in RTP header giving send time in samples - * - arrivalTime : value given by timeGetTime() time of arrival in - * samples of packet from NetEq - * - pksize : size of packet in bytes, from NetEq - * - Index : integer (range 0...23) indicating bottle neck & - * jitter as estimated by other side - * - * Return value : 0 if everything went fine, - * -1 otherwise - */ -int32_t WebRtcIsacfix_UpdateUplinkBwImpl(BwEstimatorstr *bweStr, - const uint16_t rtpNumber, - const int16_t frameSize, - const uint32_t sendTime, - const uint32_t arrivalTime, - const size_t pksize, - const uint16_t Index) -{ - uint16_t weight = 0; - uint32_t currBwInv = 0; - uint16_t recRtpRate; - uint32_t arrTimeProj; - int32_t arrTimeDiff; - int32_t arrTimeNoise; - int32_t arrTimeNoiseAbs; - int32_t sendTimeDiff; - - int32_t delayCorrFactor = DELAY_CORRECTION_MED; - int32_t lateDiff = 0; - int16_t immediateSet = 0; - int32_t frameSizeSampl; - - int32_t temp; - int32_t msec; - uint32_t exponent; - uint32_t reductionFactor; - uint32_t numBytesInv; - int32_t sign; - - uint32_t byteSecondsPerBit; - uint32_t tempLower; - uint32_t tempUpper; - int32_t recBwAvgInv; - int32_t numPktsExpected; - - int16_t errCode; - - RTC_DCHECK(!bweStr->external_bw_info.in_use); - - /* UPDATE ESTIMATES FROM OTHER SIDE */ - - /* The function also checks if Index has a valid value */ - errCode = WebRtcIsacfix_UpdateUplinkBwRec(bweStr, Index); - if (errCode <0) { - return(errCode); - } - - - /* UPDATE ESTIMATES ON THIS SIDE */ - - /* Bits per second per byte * 1/30 or 1/60 */ - if (frameSize == 60) { - /* If frameSize changed since last call, from 30 to 60, recalculate some values */ - if ( (frameSize != bweStr->prevFrameSizeMs) && (bweStr->countUpdates > 0)) { - bweStr->countUpdates = 10; - bweStr->recHeaderRate = kRecHeaderRate[1]; - - bweStr->maxBwInv = kInvBandwidth[3]; - bweStr->minBwInv = kInvBandwidth[2]; - bweStr->recBwInv = 1073741824 / (bweStr->recBw + bweStr->recHeaderRate); - } - - /* kBitsByteSec is in Q15 */ - recRtpRate = (int16_t)((kBitsByteSec * pksize) >> 15) + - bweStr->recHeaderRate; - - } else { - /* If frameSize changed since last call, from 60 to 30, recalculate some values */ - if ( (frameSize != bweStr->prevFrameSizeMs) && (bweStr->countUpdates > 0)) { - bweStr->countUpdates = 10; - bweStr->recHeaderRate = kRecHeaderRate[0]; - - bweStr->maxBwInv = kInvBandwidth[1]; - bweStr->minBwInv = kInvBandwidth[0]; - bweStr->recBwInv = 1073741824 / (bweStr->recBw + bweStr->recHeaderRate); - } - - /* kBitsByteSec is in Q14 */ - recRtpRate = (uint16_t)((kBitsByteSec * pksize) >> 14) + - bweStr->recHeaderRate; - } - - - /* Check for timer wrap-around */ - if (arrivalTime < bweStr->prevArrivalTime) { - bweStr->prevArrivalTime = arrivalTime; - bweStr->lastUpdate = arrivalTime; - bweStr->lastReduction = arrivalTime + FS3; - - bweStr->countRecPkts = 0; - - /* store frame size */ - bweStr->prevFrameSizeMs = frameSize; - - /* store far-side transmission rate */ - bweStr->prevRtpRate = recRtpRate; - - /* store far-side RTP time stamp */ - bweStr->prevRtpNumber = rtpNumber; - - return 0; - } - - bweStr->countRecPkts++; - - /* Calculate framesize in msec */ - frameSizeSampl = SAMPLES_PER_MSEC * frameSize; - - /* Check that it's not one of the first 9 packets */ - if ( bweStr->countUpdates > 0 ) { - - /* Stay in Wait Period for 1.5 seconds (no updates in wait period) */ - if(bweStr->inWaitPeriod) { - if 
((arrivalTime - bweStr->startWaitPeriod)> FS_1_HALF) { - bweStr->inWaitPeriod = 0; - } - } - - /* If not been updated for a long time, reduce the BN estimate */ - - /* Check send time difference between this packet and previous received */ - sendTimeDiff = sendTime - bweStr->prevSendTime; - if (sendTimeDiff <= frameSizeSampl * 2) { - - /* Only update if 3 seconds has past since last update */ - if ((arrivalTime - bweStr->lastUpdate) > FS3) { - - /* Calculate expected number of received packets since last update */ - numPktsExpected = (arrivalTime - bweStr->lastUpdate) / frameSizeSampl; - - /* If received number of packets is more than 90% of expected (922 = 0.9 in Q10): */ - /* do the update, else not */ - if ((int32_t)bweStr->countRecPkts << 10 > 922 * numPktsExpected) { - /* Q4 chosen to approx dividing by 16 */ - msec = (arrivalTime - bweStr->lastReduction); - - /* the number below represents 13 seconds, highly unlikely - but to insure no overflow when reduction factor is multiplied by recBw inverse */ - if (msec > 208000) { - msec = 208000; - } - - /* Q20 2^(negative number: - 76/1048576) = .99995 - product is Q24 */ - exponent = WEBRTC_SPL_UMUL(0x0000004C, msec); - - /* do the approx with positive exponent so that value is actually rf^-1 - and multiply by bw inverse */ - reductionFactor = WEBRTC_SPL_RSHIFT_U32(0x01000000 | (exponent & 0x00FFFFFF), - WEBRTC_SPL_RSHIFT_U32(exponent, 24)); - - /* reductionFactor in Q13 */ - reductionFactor = WEBRTC_SPL_RSHIFT_U32(reductionFactor, 11); - - if ( reductionFactor != 0 ) { - bweStr->recBwInv = WEBRTC_SPL_MUL((int32_t)bweStr->recBwInv, (int32_t)reductionFactor); - bweStr->recBwInv = (int32_t)bweStr->recBwInv >> 13; - - } else { - static const uint32_t kInitRate = INIT_BN_EST + INIT_HDR_RATE; - /* recBwInv = 1 / kInitRate in Q26 (Q30??)*/ - bweStr->recBwInv = (1073741824 + kInitRate / 2) / kInitRate; - } - - /* reset time-since-update counter */ - bweStr->lastReduction = arrivalTime; - } else { - /* Delay last reduction with 3 seconds */ - bweStr->lastReduction = arrivalTime + FS3; - bweStr->lastUpdate = arrivalTime; - bweStr->countRecPkts = 0; - } - } - } else { - bweStr->lastReduction = arrivalTime + FS3; - bweStr->lastUpdate = arrivalTime; - bweStr->countRecPkts = 0; - } - - - /* update only if previous packet was not lost */ - if ( rtpNumber == bweStr->prevRtpNumber + 1 ) { - arrTimeDiff = arrivalTime - bweStr->prevArrivalTime; - - if (!(bweStr->highSpeedSend && bweStr->highSpeedRec)) { - if (arrTimeDiff > frameSizeSampl) { - if (sendTimeDiff > 0) { - lateDiff = arrTimeDiff - sendTimeDiff - frameSizeSampl * 2; - } else { - lateDiff = arrTimeDiff - frameSizeSampl; - } - - /* 8000 is 1/2 second (in samples at FS) */ - if (lateDiff > 8000) { - delayCorrFactor = (int32_t) DELAY_CORRECTION_MAX; - bweStr->inWaitPeriod = 1; - bweStr->startWaitPeriod = arrivalTime; - immediateSet = 1; - } else if (lateDiff > 5120) { - delayCorrFactor = (int32_t) DELAY_CORRECTION_MED; - immediateSet = 1; - bweStr->inWaitPeriod = 1; - bweStr->startWaitPeriod = arrivalTime; - } - } - } - - if ((bweStr->prevRtpRate > (int32_t)bweStr->recBwAvg >> 5) && - (recRtpRate > (int32_t)bweStr->recBwAvg >> 5) && - !bweStr->inWaitPeriod) { - - /* test if still in initiation period and increment counter */ - if (bweStr->countUpdates++ > 99) { - /* constant weight after initiation part, 0.01 in Q13 */ - weight = (uint16_t) 82; - } else { - /* weight decreases with number of updates, 1/countUpdates in Q13 */ - weight = (uint16_t) WebRtcSpl_DivW32W16( - 8192 + (bweStr->countUpdates >> 
1), - (int16_t)bweStr->countUpdates); - } - - /* Bottle Neck Estimation */ - - /* limit outliers, if more than 25 ms too much */ - if (arrTimeDiff > frameSizeSampl + kSamplesIn25msec) { - arrTimeDiff = frameSizeSampl + kSamplesIn25msec; - } - - /* don't allow it to be less than frame rate - 10 ms */ - if (arrTimeDiff < frameSizeSampl - FRAMESAMPLES_10ms) { - arrTimeDiff = frameSizeSampl - FRAMESAMPLES_10ms; - } - - /* compute inverse receiving rate for last packet, in Q19 */ - numBytesInv = (uint16_t) WebRtcSpl_DivW32W16( - (int32_t)(524288 + ((pksize + HEADER_SIZE) >> 1)), - (int16_t)(pksize + HEADER_SIZE)); - - /* 8389 is ~ 1/128000 in Q30 */ - byteSecondsPerBit = (uint32_t)(arrTimeDiff * 8389); - - /* get upper N bits */ - tempUpper = WEBRTC_SPL_RSHIFT_U32(byteSecondsPerBit, 15); - - /* get lower 15 bits */ - tempLower = byteSecondsPerBit & 0x00007FFF; - - tempUpper = WEBRTC_SPL_MUL(tempUpper, numBytesInv); - tempLower = WEBRTC_SPL_MUL(tempLower, numBytesInv); - tempLower = WEBRTC_SPL_RSHIFT_U32(tempLower, 15); - - currBwInv = tempUpper + tempLower; - currBwInv = WEBRTC_SPL_RSHIFT_U32(currBwInv, 4); - - /* Limit inv rate. Note that minBwInv > maxBwInv! */ - if(currBwInv < bweStr->maxBwInv) { - currBwInv = bweStr->maxBwInv; - } else if(currBwInv > bweStr->minBwInv) { - currBwInv = bweStr->minBwInv; - } - - /* update bottle neck rate estimate */ - bweStr->recBwInv = WEBRTC_SPL_UMUL(weight, currBwInv) + - WEBRTC_SPL_UMUL((uint32_t) 8192 - weight, bweStr->recBwInv); - - /* Shift back to Q30 from Q40 (actual used bits shouldn't be more than 27 based on minBwInv) - up to 30 bits used with Q13 weight */ - bweStr->recBwInv = WEBRTC_SPL_RSHIFT_U32(bweStr->recBwInv, 13); - - /* reset time-since-update counter */ - bweStr->lastUpdate = arrivalTime; - bweStr->lastReduction = arrivalTime + FS3; - bweStr->countRecPkts = 0; - - /* to save resolution compute the inverse of recBwAvg in Q26 by left shifting numerator to 2^31 - and NOT right shifting recBwAvg 5 bits to an integer - At max 13 bits are used - shift to Q5 */ - recBwAvgInv = (0x80000000 + bweStr->recBwAvg / 2) / bweStr->recBwAvg; - - /* Calculate Projected arrival time difference */ - - /* The numerator of the quotient can be 22 bits so right shift inv by 4 to avoid overflow - result in Q22 */ - arrTimeProj = WEBRTC_SPL_MUL((int32_t)8000, recBwAvgInv); - /* shift to Q22 */ - arrTimeProj = WEBRTC_SPL_RSHIFT_U32(arrTimeProj, 4); - /* complete calulation */ - arrTimeProj = WEBRTC_SPL_MUL(((int32_t)pksize + HEADER_SIZE), arrTimeProj); - /* shift to Q10 */ - arrTimeProj = WEBRTC_SPL_RSHIFT_U32(arrTimeProj, 12); - - /* difference between projected and actual arrival time differences */ - /* Q9 (only shift arrTimeDiff by 5 to simulate divide by 16 (need to revisit if change sampling rate) DH */ - if ((arrTimeDiff << 6) > (int32_t)arrTimeProj) { - arrTimeNoise = (arrTimeDiff << 6) - arrTimeProj; - sign = 1; - } else { - arrTimeNoise = arrTimeProj - (arrTimeDiff << 6); - sign = -1; - } - - /* Q9 */ - arrTimeNoiseAbs = arrTimeNoise; - - /* long term averaged absolute jitter, Q15 */ - weight >>= 3; - bweStr->recJitter = weight * (arrTimeNoiseAbs << 5) + - (1024 - weight) * bweStr->recJitter; - - /* remove the fractional portion */ - bweStr->recJitter >>= 10; - - /* Maximum jitter is 10 msec in Q15 */ - if (bweStr->recJitter > (int32_t)327680) { - bweStr->recJitter = (int32_t)327680; - } - - /* short term averaged absolute jitter */ - /* Calculation in Q13 products in Q23 */ - bweStr->recJitterShortTermAbs = 51 * (arrTimeNoiseAbs << 3) + - 
WEBRTC_SPL_MUL(973, bweStr->recJitterShortTermAbs); - bweStr->recJitterShortTermAbs >>= 10; - - /* short term averaged jitter */ - /* Calculation in Q13 products in Q23 */ - bweStr->recJitterShortTerm = 205 * (arrTimeNoise << 3) * sign + - WEBRTC_SPL_MUL(3891, bweStr->recJitterShortTerm); - - if (bweStr->recJitterShortTerm < 0) { - temp = -bweStr->recJitterShortTerm; - temp >>= 12; - bweStr->recJitterShortTerm = -temp; - } else { - bweStr->recJitterShortTerm >>= 12; - } - } - } - } else { - /* reset time-since-update counter when receiving the first 9 packets */ - bweStr->lastUpdate = arrivalTime; - bweStr->lastReduction = arrivalTime + FS3; - bweStr->countRecPkts = 0; - bweStr->countUpdates++; - } - - /* Limit to minimum or maximum bottle neck rate (in Q30) */ - if (bweStr->recBwInv > bweStr->minBwInv) { - bweStr->recBwInv = bweStr->minBwInv; - } else if (bweStr->recBwInv < bweStr->maxBwInv) { - bweStr->recBwInv = bweStr->maxBwInv; - } - - - /* store frame length */ - bweStr->prevFrameSizeMs = frameSize; - - /* store far-side transmission rate */ - bweStr->prevRtpRate = recRtpRate; - - /* store far-side RTP time stamp */ - bweStr->prevRtpNumber = rtpNumber; - - /* Replace bweStr->recMaxDelay by the new value (atomic operation) */ - if (bweStr->prevArrivalTime != 0xffffffff) { - bweStr->recMaxDelay = WEBRTC_SPL_MUL(3, bweStr->recJitter); - } - - /* store arrival time stamp */ - bweStr->prevArrivalTime = arrivalTime; - bweStr->prevSendTime = sendTime; - - /* Replace bweStr->recBw by the new value */ - bweStr->recBw = 1073741824 / bweStr->recBwInv - bweStr->recHeaderRate; - - if (immediateSet) { - /* delay correction factor is in Q10 */ - bweStr->recBw = WEBRTC_SPL_UMUL(delayCorrFactor, bweStr->recBw); - bweStr->recBw = WEBRTC_SPL_RSHIFT_U32(bweStr->recBw, 10); - - if (bweStr->recBw < (int32_t) MIN_ISAC_BW) { - bweStr->recBw = (int32_t) MIN_ISAC_BW; - } - - bweStr->recBwAvg = (bweStr->recBw + bweStr->recHeaderRate) << 5; - - bweStr->recBwAvgQ = bweStr->recBw << 7; - - bweStr->recJitterShortTerm = 0; - - bweStr->recBwInv = 1073741824 / (bweStr->recBw + bweStr->recHeaderRate); - - immediateSet = 0; - } - - - return 0; -} - -/* This function updates the send bottle neck rate */ -/* Index - integer (range 0...23) indicating bottle neck & jitter as estimated by other side */ -/* returns 0 if everything went fine, -1 otherwise */ -int16_t WebRtcIsacfix_UpdateUplinkBwRec(BwEstimatorstr *bweStr, - const int16_t Index) -{ - uint16_t RateInd; - - RTC_DCHECK(!bweStr->external_bw_info.in_use); - - if ( (Index < 0) || (Index > 23) ) { - return -ISAC_RANGE_ERROR_BW_ESTIMATOR; - } - - /* UPDATE ESTIMATES FROM OTHER SIDE */ - - if ( Index > 11 ) { - RateInd = Index - 12; - /* compute the jitter estimate as decoded on the other side in Q9 */ - /* sendMaxDelayAvg = 0.9 * sendMaxDelayAvg + 0.1 * MAX_ISAC_MD */ - bweStr->sendMaxDelayAvg = WEBRTC_SPL_MUL(461, bweStr->sendMaxDelayAvg) + - 51 * (MAX_ISAC_MD << 9); - bweStr->sendMaxDelayAvg >>= 9; - - } else { - RateInd = Index; - /* compute the jitter estimate as decoded on the other side in Q9 */ - /* sendMaxDelayAvg = 0.9 * sendMaxDelayAvg + 0.1 * MIN_ISAC_MD */ - bweStr->sendMaxDelayAvg = WEBRTC_SPL_MUL(461, bweStr->sendMaxDelayAvg) + - 51 * (MIN_ISAC_MD << 9); - bweStr->sendMaxDelayAvg >>= 9; - - } - - - /* compute the BN estimate as decoded on the other side */ - /* sendBwAvg = 0.9 * sendBwAvg + 0.1 * kQRateTable[RateInd]; */ - bweStr->sendBwAvg = 461 * bweStr->sendBwAvg + - 51 * ((uint32_t)kQRateTable[RateInd] << 7); - bweStr->sendBwAvg = 
WEBRTC_SPL_RSHIFT_U32(bweStr->sendBwAvg, 9); - - - if (WEBRTC_SPL_RSHIFT_U32(bweStr->sendBwAvg, 7) > 28000 && !bweStr->highSpeedSend) { - bweStr->countHighSpeedSent++; - - /* approx 2 seconds with 30ms frames */ - if (bweStr->countHighSpeedSent >= 66) { - bweStr->highSpeedSend = 1; - } - } else if (!bweStr->highSpeedSend) { - bweStr->countHighSpeedSent = 0; - } - - return 0; -} - -/**************************************************************************** - * WebRtcIsacfix_GetDownlinkBwIndexImpl(...) - * - * This function calculates and returns the bandwidth/jitter estimation code - * (integer 0...23) to put in the sending iSAC payload. - * - * Input: - * - bweStr : BWE struct - * - * Return: - * bandwith and jitter index (0..23) - */ -uint16_t WebRtcIsacfix_GetDownlinkBwIndexImpl(BwEstimatorstr *bweStr) -{ - int32_t rate; - int32_t maxDelay; - uint16_t rateInd; - uint16_t maxDelayBit; - int32_t tempTerm1; - int32_t tempTerm2; - int32_t tempTermX; - int32_t tempTermY; - int32_t tempMin; - int32_t tempMax; - - if (bweStr->external_bw_info.in_use) - return bweStr->external_bw_info.bottleneck_idx; - - /* Get Rate Index */ - - /* Get unquantized rate. Always returns 10000 <= rate <= 32000 */ - rate = WebRtcIsacfix_GetDownlinkBandwidth(bweStr); - - /* Compute the averaged BN estimate on this side */ - - /* recBwAvg = 0.9 * recBwAvg + 0.1 * (rate + bweStr->recHeaderRate), 0.9 and 0.1 in Q9 */ - bweStr->recBwAvg = 922 * bweStr->recBwAvg + - 102 * (((uint32_t)rate + bweStr->recHeaderRate) << 5); - bweStr->recBwAvg = WEBRTC_SPL_RSHIFT_U32(bweStr->recBwAvg, 10); - - /* Find quantization index that gives the closest rate after averaging. - * Note that we don't need to check the last value, rate <= kQRateTable[11], - * because we will use rateInd = 11 even if rate > kQRateTable[11]. 
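/*
 * [Editor's sketch -- not part of the diff.] The running averages updated in
 * the deleted code all use the same fixed-point exponential-smoothing idiom:
 * integer weights that sum to a power of two (461 + 51 = 512 for ~0.9/0.1 in
 * Q9; 922 + 102 = 1024 in Q10), a multiply per term, and a shift back down.
 * A stand-alone version of the pattern; function and parameter names are the
 * editor's, not the removed API:
 */
#include <stdint.h>

/* `avg` and `sample` must already be in the same Q-format; wOld + wNew must
 * equal 1 << qShift, and both products must fit in 32 bits. */
static uint32_t ema_fixed(uint32_t avg, uint32_t sample,
                          uint32_t wOld, uint32_t wNew, unsigned qShift) {
  return (wOld * avg + wNew * sample) >> qShift;
}

/* Example, mirroring the 0.9/0.1 update of sendBwAvg (Q7 values, Q9 weights):
 *   sendBwAvg = ema_fixed(sendBwAvg, rate << 7, 461, 51, 9);
 */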
*/ - for (rateInd = 1; rateInd < 11; rateInd++) { - if (rate <= kQRateTable[rateInd]){ - break; - } - } - - /* find closest quantization index, and update quantized average by taking: */ - /* 0.9*recBwAvgQ + 0.1*kQRateTable[rateInd] */ - - /* 0.9 times recBwAvgQ in Q16 */ - /* 461/512 - 25/65536 =0.900009 */ - tempTerm1 = WEBRTC_SPL_MUL(bweStr->recBwAvgQ, 25); - tempTerm1 >>= 7; - tempTermX = WEBRTC_SPL_UMUL(461, bweStr->recBwAvgQ) - tempTerm1; - - /* rate in Q16 */ - tempTermY = rate << 16; - - /* 0.1 * kQRateTable[rateInd] = KQRate01[rateInd] */ - tempTerm1 = tempTermX + KQRate01[rateInd] - tempTermY; - tempTerm2 = tempTermY - tempTermX - KQRate01[rateInd-1]; - - /* Compare (0.9 * recBwAvgQ + 0.1 * kQRateTable[rateInd] - rate) > - (rate - 0.9 * recBwAvgQ - 0.1 * kQRateTable[rateInd-1]) */ - if (tempTerm1 > tempTerm2) { - rateInd--; - } - - /* Update quantized average by taking: */ - /* 0.9*recBwAvgQ + 0.1*kQRateTable[rateInd] */ - - /* Add 0.1 times kQRateTable[rateInd], in Q16 */ - tempTermX += KQRate01[rateInd]; - - /* Shift back to Q7 */ - bweStr->recBwAvgQ = tempTermX >> 9; - - /* Count consecutive received bandwidth above 28000 kbps (28000 in Q7 = 3584000) */ - /* If 66 high estimates in a row, set highSpeedRec to one */ - /* 66 corresponds to ~2 seconds in 30 msec mode */ - if ((bweStr->recBwAvgQ > 3584000) && !bweStr->highSpeedRec) { - bweStr->countHighSpeedRec++; - if (bweStr->countHighSpeedRec >= 66) { - bweStr->highSpeedRec = 1; - } - } else if (!bweStr->highSpeedRec) { - bweStr->countHighSpeedRec = 0; - } - - /* Get Max Delay Bit */ - - /* get unquantized max delay */ - maxDelay = WebRtcIsacfix_GetDownlinkMaxDelay(bweStr); - - /* Update quantized max delay average */ - tempMax = 652800; /* MAX_ISAC_MD * 0.1 in Q18 */ - tempMin = 130560; /* MIN_ISAC_MD * 0.1 in Q18 */ - tempTermX = WEBRTC_SPL_MUL((int32_t)bweStr->recMaxDelayAvgQ, (int32_t)461); - tempTermY = maxDelay << 18; - - tempTerm1 = tempTermX + tempMax - tempTermY; - tempTerm2 = tempTermY - tempTermX - tempMin; - - if ( tempTerm1 > tempTerm2) { - maxDelayBit = 0; - tempTerm1 = tempTermX + tempMin; - - /* update quantized average, shift back to Q9 */ - bweStr->recMaxDelayAvgQ = tempTerm1 >> 9; - } else { - maxDelayBit = 12; - tempTerm1 = tempTermX + tempMax; - - /* update quantized average, shift back to Q9 */ - bweStr->recMaxDelayAvgQ = tempTerm1 >> 9; - } - - /* Return bandwitdh and jitter index (0..23) */ - return (uint16_t)(rateInd + maxDelayBit); -} - -/* get the bottle neck rate from far side to here, as estimated on this side */ -uint16_t WebRtcIsacfix_GetDownlinkBandwidth(const BwEstimatorstr *bweStr) -{ - uint32_t recBw; - int32_t jitter_sign; /* Q8 */ - int32_t bw_adjust; /* Q16 */ - int32_t rec_jitter_short_term_abs_inv; /* Q18 */ - int32_t temp; - - RTC_DCHECK(!bweStr->external_bw_info.in_use); - - /* Q18 rec jitter short term abs is in Q13, multiply it by 2^13 to save precision - 2^18 then needs to be shifted 13 bits to 2^31 */ - rec_jitter_short_term_abs_inv = 0x80000000u / bweStr->recJitterShortTermAbs; - - /* Q27 = 9 + 18 */ - jitter_sign = (bweStr->recJitterShortTerm >> 4) * - rec_jitter_short_term_abs_inv; - - if (jitter_sign < 0) { - temp = -jitter_sign; - temp >>= 19; - jitter_sign = -temp; - } else { - jitter_sign >>= 19; - } - - /* adjust bw proportionally to negative average jitter sign */ - //bw_adjust = 1.0f - jitter_sign * (0.15f + 0.15f * jitter_sign * jitter_sign); - //Q8 -> Q16 .15 +.15 * jitter^2 first term is .15 in Q16 latter term is Q8*Q8*Q8 - //38 in Q8 ~.15 9830 in Q16 ~.15 - temp = 9830 
+ ((38 * jitter_sign * jitter_sign) >> 8); - - if (jitter_sign < 0) { - temp = WEBRTC_SPL_MUL(jitter_sign, temp); - temp = -temp; - temp >>= 8; - bw_adjust = (uint32_t)65536 + temp; /* (1 << 16) + temp; */ - } else { - /* (1 << 16) - ((jitter_sign * temp) >> 8); */ - bw_adjust = 65536 - ((jitter_sign * temp) >> 8); - } - - //make sure following multiplication won't overflow - //bw adjust now Q14 - bw_adjust >>= 2; // See if good resolution is maintained. - - /* adjust Rate if jitter sign is mostly constant */ - recBw = WEBRTC_SPL_UMUL(bweStr->recBw, bw_adjust); - - recBw >>= 14; - - /* limit range of bottle neck rate */ - if (recBw < MIN_ISAC_BW) { - recBw = MIN_ISAC_BW; - } else if (recBw > MAX_ISAC_BW) { - recBw = MAX_ISAC_BW; - } - - return (uint16_t) recBw; -} - -/* Returns the mmax delay (in ms) */ -int16_t WebRtcIsacfix_GetDownlinkMaxDelay(const BwEstimatorstr *bweStr) -{ - int16_t recMaxDelay = (int16_t)(bweStr->recMaxDelay >> 15); - - RTC_DCHECK(!bweStr->external_bw_info.in_use); - - /* limit range of jitter estimate */ - if (recMaxDelay < MIN_ISAC_MD) { - recMaxDelay = MIN_ISAC_MD; - } else if (recMaxDelay > MAX_ISAC_MD) { - recMaxDelay = MAX_ISAC_MD; - } - - return recMaxDelay; -} - -/* Clamp val to the closed interval [min,max]. */ -static int16_t clamp(int16_t val, int16_t min, int16_t max) { - RTC_DCHECK_LE(min, max); - return val < min ? min : (val > max ? max : val); -} - -int16_t WebRtcIsacfix_GetUplinkBandwidth(const BwEstimatorstr* bweStr) { - return bweStr->external_bw_info.in_use - ? bweStr->external_bw_info.send_bw_avg - : clamp(bweStr->sendBwAvg >> 7, MIN_ISAC_BW, MAX_ISAC_BW); -} - -int16_t WebRtcIsacfix_GetUplinkMaxDelay(const BwEstimatorstr* bweStr) { - return bweStr->external_bw_info.in_use - ? bweStr->external_bw_info.send_max_delay_avg - : clamp(bweStr->sendMaxDelayAvg >> 9, MIN_ISAC_MD, MAX_ISAC_MD); -} - -/* - * update long-term average bitrate and amount of data in buffer - * returns minimum payload size (bytes) - */ -uint16_t WebRtcIsacfix_GetMinBytes(RateModel *State, - int16_t StreamSize, /* bytes in bitstream */ - const int16_t FrameSamples, /* samples per frame */ - const int16_t BottleNeck, /* bottle neck rate; excl headers (bps) */ - const int16_t DelayBuildUp) /* max delay from bottle neck buffering (ms) */ -{ - int32_t MinRate = 0; - uint16_t MinBytes; - int16_t TransmissionTime; - int32_t inv_Q12; - int32_t den; - - - /* first 10 packets @ low rate, then INIT_BURST_LEN packets @ fixed rate of INIT_RATE bps */ - if (State->InitCounter > 0) { - if (State->InitCounter-- <= INIT_BURST_LEN) { - MinRate = INIT_RATE; - } else { - MinRate = 0; - } - } else { - /* handle burst */ - if (State->BurstCounter) { - if (State->StillBuffered < - (((512 - 512 / BURST_LEN) * DelayBuildUp) >> 9)) { - /* max bps derived from BottleNeck and DelayBuildUp values */ - inv_Q12 = 4096 / (BURST_LEN * FrameSamples); - MinRate = (512 + SAMPLES_PER_MSEC * ((DelayBuildUp * inv_Q12) >> 3)) * - BottleNeck; - } else { - /* max bps derived from StillBuffered and DelayBuildUp values */ - inv_Q12 = 4096 / FrameSamples; - if (DelayBuildUp > State->StillBuffered) { - MinRate = (512 + SAMPLES_PER_MSEC * (((DelayBuildUp - - State->StillBuffered) * inv_Q12) >> 3)) * BottleNeck; - } else if ((den = WEBRTC_SPL_MUL(SAMPLES_PER_MSEC, (State->StillBuffered - DelayBuildUp))) >= FrameSamples) { - /* MinRate will be negative here */ - MinRate = 0; - } else { - MinRate = (512 - ((den * inv_Q12) >> 3)) * BottleNeck; - } - //if (MinRate < 1.04 * BottleNeck) - // MinRate = 1.04 * BottleNeck; - //Q9 - 
if (MinRate < WEBRTC_SPL_MUL(532, BottleNeck)) { - MinRate += WEBRTC_SPL_MUL(22, BottleNeck); - } - } - - State->BurstCounter--; - } - } - - - /* convert rate from bits/second to bytes/packet */ - //round and shift before conversion - MinRate += 256; - MinRate >>= 9; - MinBytes = MinRate * FrameSamples / FS8; - - /* StreamSize will be adjusted if less than MinBytes */ - if (StreamSize < MinBytes) { - StreamSize = MinBytes; - } - - /* keep track of when bottle neck was last exceeded by at least 1% */ - //517/512 ~ 1.01 - if ((StreamSize * (int32_t)FS8) / FrameSamples > (517 * BottleNeck) >> 9) { - if (State->PrevExceed) { - /* bottle_neck exceded twice in a row, decrease ExceedAgo */ - State->ExceedAgo -= BURST_INTERVAL / (BURST_LEN - 1); - if (State->ExceedAgo < 0) { - State->ExceedAgo = 0; - } - } else { - State->ExceedAgo += FrameSamples / SAMPLES_PER_MSEC; /* ms */ - State->PrevExceed = 1; - } - } else { - State->PrevExceed = 0; - State->ExceedAgo += FrameSamples / SAMPLES_PER_MSEC; /* ms */ - } - - /* set burst flag if bottle neck not exceeded for long time */ - if ((State->ExceedAgo > BURST_INTERVAL) && (State->BurstCounter == 0)) { - if (State->PrevExceed) { - State->BurstCounter = BURST_LEN - 1; - } else { - State->BurstCounter = BURST_LEN; - } - } - - - /* Update buffer delay */ - TransmissionTime = (StreamSize * 8000) / BottleNeck; /* ms */ - State->StillBuffered += TransmissionTime; - State->StillBuffered -= FrameSamples / SAMPLES_PER_MSEC; /* ms */ - if (State->StillBuffered < 0) { - State->StillBuffered = 0; - } - - if (State->StillBuffered > 2000) { - State->StillBuffered = 2000; - } - - return MinBytes; -} - - -/* - * update long-term average bitrate and amount of data in buffer - */ -void WebRtcIsacfix_UpdateRateModel(RateModel *State, - int16_t StreamSize, /* bytes in bitstream */ - const int16_t FrameSamples, /* samples per frame */ - const int16_t BottleNeck) /* bottle neck rate; excl headers (bps) */ -{ - const int16_t TransmissionTime = (StreamSize * 8000) / BottleNeck; /* ms */ - - /* avoid the initial "high-rate" burst */ - State->InitCounter = 0; - - /* Update buffer delay */ - State->StillBuffered += TransmissionTime; - State->StillBuffered -= FrameSamples >> 4; /* ms */ - if (State->StillBuffered < 0) { - State->StillBuffered = 0; - } - -} - - -void WebRtcIsacfix_InitRateModel(RateModel *State) -{ - State->PrevExceed = 0; /* boolean */ - State->ExceedAgo = 0; /* ms */ - State->BurstCounter = 0; /* packets */ - State->InitCounter = INIT_BURST_LEN + 10; /* packets */ - State->StillBuffered = 1; /* ms */ -} - - - - - -int16_t WebRtcIsacfix_GetNewFrameLength(int16_t bottle_neck, int16_t current_framesamples) -{ - int16_t new_framesamples; - - new_framesamples = current_framesamples; - - /* find new framelength */ - switch(current_framesamples) { - case 480: - if (bottle_neck < Thld_30_60) { - new_framesamples = 960; - } - break; - case 960: - if (bottle_neck >= Thld_60_30) { - new_framesamples = 480; - } - break; - default: - new_framesamples = -1; /* Error */ - } - - return new_framesamples; -} - -int16_t WebRtcIsacfix_GetSnr(int16_t bottle_neck, int16_t framesamples) -{ - int16_t s2nr = 0; - - /* find new SNR value */ - //consider BottleNeck to be in Q10 ( * 1 in Q10) - switch(framesamples) { - // TODO(bjornv): The comments below confuses me. 
I don't know if there is a - // difference between frame lengths (in which case the implementation is - // wrong), or if it is frame length independent in which case we should - // correct the comment and simplify the implementation. - case 480: - /*s2nr = -1*(a_30 << 10) + ((b_30 * bottle_neck) >> 10);*/ - s2nr = -22500 + (int16_t)(500 * bottle_neck >> 10); - break; - case 960: - /*s2nr = -1*(a_60 << 10) + ((b_60 * bottle_neck) >> 10);*/ - s2nr = -22500 + (int16_t)(500 * bottle_neck >> 10); - break; - default: - s2nr = -1; /* Error */ - } - - return s2nr; //return in Q10 - -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/bandwidth_estimator.h b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/bandwidth_estimator.h deleted file mode 100644 index f106746f145f..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/bandwidth_estimator.h +++ /dev/null @@ -1,128 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * bandwidth_estimator.h - * - * This header file contains the API for the Bandwidth Estimator - * designed for iSAC. - * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_BANDWIDTH_ESTIMATOR_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_BANDWIDTH_ESTIMATOR_H_ - -#include "modules/audio_coding/codecs/isac/fix/source/structs.h" - -/**************************************************************************** - * WebRtcIsacfix_InitBandwidthEstimator(...) - * - * This function initializes the struct for the bandwidth estimator - * - * Input/Output: - * - bwest_str : Struct containing bandwidth information. - * - * Return value : 0 - */ - -int32_t WebRtcIsacfix_InitBandwidthEstimator(BwEstimatorstr* bwest_str); - -/**************************************************************************** - * WebRtcIsacfix_UpdateUplinkBwImpl(...) - * - * This function updates bottle neck rate received from other side in payload - * and calculates a new bottle neck to send to the other side. - * - * Input/Output: - * - bweStr : struct containing bandwidth information. - * - rtpNumber : value from RTP packet, from NetEq - * - frameSize : length of signal frame in ms, from iSAC decoder - * - sendTime : value in RTP header giving send time in samples - * - arrivalTime : value given by timeGetTime() time of arrival in - * samples of packet from NetEq - * - pksize : size of packet in bytes, from NetEq - * - Index : integer (range 0...23) indicating bottle neck & - * jitter as estimated by other side - * - * Return value : 0 if everything went fine, - * -1 otherwise - */ - -int32_t WebRtcIsacfix_UpdateUplinkBwImpl(BwEstimatorstr* bwest_str, - uint16_t rtp_number, - int16_t frameSize, - uint32_t send_ts, - uint32_t arr_ts, - size_t pksize, - uint16_t Index); - -/* Update receiving estimates. Used when we only receive BWE index, no iSAC data - * packet. */ -int16_t WebRtcIsacfix_UpdateUplinkBwRec(BwEstimatorstr* bwest_str, - int16_t Index); - -/**************************************************************************** - * WebRtcIsacfix_GetDownlinkBwIndexImpl(...) 
- * - * This function calculates and returns the bandwidth/jitter estimation code - * (integer 0...23) to put in the sending iSAC payload. - * - * Input: - * - bweStr : BWE struct - * - * Return: - * bandwith and jitter index (0..23) - */ -uint16_t WebRtcIsacfix_GetDownlinkBwIndexImpl(BwEstimatorstr* bwest_str); - -/* Returns the bandwidth estimation (in bps) */ -uint16_t WebRtcIsacfix_GetDownlinkBandwidth(const BwEstimatorstr* bwest_str); - -/* Returns the bandwidth that iSAC should send with in bps */ -int16_t WebRtcIsacfix_GetUplinkBandwidth(const BwEstimatorstr* bwest_str); - -/* Returns the max delay (in ms) */ -int16_t WebRtcIsacfix_GetDownlinkMaxDelay(const BwEstimatorstr* bwest_str); - -/* Returns the max delay value from the other side in ms */ -int16_t WebRtcIsacfix_GetUplinkMaxDelay(const BwEstimatorstr* bwest_str); - -/* - * update amount of data in bottle neck buffer and burst handling - * returns minimum payload size (bytes) - */ -uint16_t WebRtcIsacfix_GetMinBytes( - RateModel* State, - int16_t StreamSize, /* bytes in bitstream */ - int16_t FrameLen, /* ms per frame */ - int16_t BottleNeck, /* bottle neck rate; excl headers (bps) */ - int16_t DelayBuildUp); /* max delay from bottle neck buffering (ms) */ - -/* - * update long-term average bitrate and amount of data in buffer - */ -void WebRtcIsacfix_UpdateRateModel( - RateModel* State, - int16_t StreamSize, /* bytes in bitstream */ - int16_t FrameSamples, /* samples per frame */ - int16_t BottleNeck); /* bottle neck rate; excl headers (bps) */ - -void WebRtcIsacfix_InitRateModel(RateModel* State); - -/* Returns the new framelength value (input argument: bottle_neck) */ -int16_t WebRtcIsacfix_GetNewFrameLength(int16_t bottle_neck, - int16_t current_framelength); - -/* Returns the new SNR value (input argument: bottle_neck) */ -// returns snr in Q10 -int16_t WebRtcIsacfix_GetSnr(int16_t bottle_neck, int16_t framesamples); - -#endif /* MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_BANDWIDTH_ESTIMATOR_H_ \ - */ diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/codec.h b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/codec.h deleted file mode 100644 index 01d6fb907e80..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/codec.h +++ /dev/null @@ -1,212 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * codec.h - * - * This header file contains the calls to the internal encoder - * and decoder functions. 
- * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_CODEC_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_CODEC_H_ - -#include "modules/audio_coding/codecs/isac/fix/source/structs.h" - -#ifdef __cplusplus -extern "C" { -#endif - -int WebRtcIsacfix_EstimateBandwidth(BwEstimatorstr* bwest_str, - Bitstr_dec* streamdata, - size_t packet_size, - uint16_t rtp_seq_number, - uint32_t send_ts, - uint32_t arr_ts); - -int WebRtcIsacfix_DecodeImpl(int16_t* signal_out16, - IsacFixDecoderInstance* ISACdec_obj, - size_t* current_framesamples); - -void WebRtcIsacfix_DecodePlcImpl(int16_t* decoded, - IsacFixDecoderInstance* ISACdec_obj, - size_t* current_framesample); - -int WebRtcIsacfix_EncodeImpl(int16_t* in, - IsacFixEncoderInstance* ISACenc_obj, - BwEstimatorstr* bw_estimatordata, - int16_t CodingMode); - -int WebRtcIsacfix_EncodeStoredData(IsacFixEncoderInstance* ISACenc_obj, - int BWnumber, - float scale); - -/* initialization functions */ - -void WebRtcIsacfix_InitMaskingEnc(MaskFiltstr_enc* maskdata); -void WebRtcIsacfix_InitMaskingDec(MaskFiltstr_dec* maskdata); - -void WebRtcIsacfix_InitPreFilterbank(PreFiltBankstr* prefiltdata); - -void WebRtcIsacfix_InitPostFilterbank(PostFiltBankstr* postfiltdata); - -void WebRtcIsacfix_InitPitchFilter(PitchFiltstr* pitchfiltdata); - -void WebRtcIsacfix_InitPitchAnalysis(PitchAnalysisStruct* State); - -void WebRtcIsacfix_InitPlc(PLCstr* State); - -/* transform functions */ - -void WebRtcIsacfix_InitTransform(void); - -typedef void (*Time2Spec)(int16_t* inre1Q9, - int16_t* inre2Q9, - int16_t* outre, - int16_t* outim); -typedef void (*Spec2Time)(int16_t* inreQ7, - int16_t* inimQ7, - int32_t* outre1Q16, - int32_t* outre2Q16); - -extern Time2Spec WebRtcIsacfix_Time2Spec; -extern Spec2Time WebRtcIsacfix_Spec2Time; - -void WebRtcIsacfix_Time2SpecC(int16_t* inre1Q9, - int16_t* inre2Q9, - int16_t* outre, - int16_t* outim); -void WebRtcIsacfix_Spec2TimeC(int16_t* inreQ7, - int16_t* inimQ7, - int32_t* outre1Q16, - int32_t* outre2Q16); - -#if defined(WEBRTC_HAS_NEON) -void WebRtcIsacfix_Time2SpecNeon(int16_t* inre1Q9, - int16_t* inre2Q9, - int16_t* outre, - int16_t* outim); -void WebRtcIsacfix_Spec2TimeNeon(int16_t* inreQ7, - int16_t* inimQ7, - int32_t* outre1Q16, - int32_t* outre2Q16); -#endif - -#if defined(MIPS32_LE) -void WebRtcIsacfix_Time2SpecMIPS(int16_t* inre1Q9, - int16_t* inre2Q9, - int16_t* outre, - int16_t* outim); -void WebRtcIsacfix_Spec2TimeMIPS(int16_t* inreQ7, - int16_t* inimQ7, - int32_t* outre1Q16, - int32_t* outre2Q16); -#endif - -/* filterbank functions */ - -void WebRtcIsacfix_SplitAndFilter1(int16_t* in, - int16_t* LP16, - int16_t* HP16, - PreFiltBankstr* prefiltdata); - -void WebRtcIsacfix_FilterAndCombine1(int16_t* tempin_ch1, - int16_t* tempin_ch2, - int16_t* out16, - PostFiltBankstr* postfiltdata); - -/* normalized lattice filters */ - -void WebRtcIsacfix_NormLatticeFilterMa(size_t orderCoef, - int32_t* stateGQ15, - int16_t* lat_inQ0, - int16_t* filt_coefQ15, - int32_t* gain_lo_hiQ17, - int16_t lo_hi, - int16_t* lat_outQ9); - -void WebRtcIsacfix_NormLatticeFilterAr(size_t orderCoef, - int16_t* stateGQ0, - int32_t* lat_inQ25, - int16_t* filt_coefQ15, - int32_t* gain_lo_hiQ17, - int16_t lo_hi, - int16_t* lat_outQ0); - -/* TODO(kma): Remove the following functions into individual header files. 
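/*
 * [Editor's sketch -- not part of the diff.] The header being removed exposes
 * its DSP kernels through extern function pointers (WebRtcIsacfix_Time2Spec,
 * WebRtcIsacfix_AutocorrFix, ...) with plain-C, NEON and MIPS candidates, so
 * that an init call such as WebRtcIsacfix_InitTransform() can presumably bind
 * the fastest implementation for the target CPU once at start-up. A stripped-
 * down illustration of that dispatch pattern; all names below are hypothetical:
 */
#include <stddef.h>
#include <stdint.h>

typedef int32_t (*DotProductFn)(const int16_t* a, const int16_t* b, size_t n);

/* Portable reference kernel. */
static int32_t DotProductC(const int16_t* a, const int16_t* b, size_t n) {
  int32_t acc = 0;
  for (size_t i = 0; i < n; i++) acc += (int32_t)a[i] * b[i];
  return acc;
}

#if defined(HAVE_FAST_DOT_PRODUCT) /* hypothetical platform guard */
int32_t DotProductFast(const int16_t* a, const int16_t* b, size_t n);
#endif

/* Global dispatch pointer, defaulting to the portable kernel. */
DotProductFn g_dot_product = DotProductC;

/* Called once at start-up, in the spirit of WebRtcIsacfix_InitTransform(). */
void InitDotProduct(void) {
#if defined(HAVE_FAST_DOT_PRODUCT)
  g_dot_product = DotProductFast;
#else
  g_dot_product = DotProductC;
#endif
}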
*/ - -/* Internal functions in both C and ARM Neon versions */ - -int WebRtcIsacfix_AutocorrC(int32_t* __restrict r, - const int16_t* __restrict x, - int16_t N, - int16_t order, - int16_t* __restrict scale); - -void WebRtcIsacfix_FilterMaLoopC(int16_t input0, - int16_t input1, - int32_t input2, - int32_t* ptr0, - int32_t* ptr1, - int32_t* ptr2); - -#if defined(WEBRTC_HAS_NEON) -int WebRtcIsacfix_AutocorrNeon(int32_t* __restrict r, - const int16_t* __restrict x, - int16_t N, - int16_t order, - int16_t* __restrict scale); - -void WebRtcIsacfix_FilterMaLoopNeon(int16_t input0, - int16_t input1, - int32_t input2, - int32_t* ptr0, - int32_t* ptr1, - int32_t* ptr2); -#endif - -#if defined(MIPS32_LE) -int WebRtcIsacfix_AutocorrMIPS(int32_t* __restrict r, - const int16_t* __restrict x, - int16_t N, - int16_t order, - int16_t* __restrict scale); - -void WebRtcIsacfix_FilterMaLoopMIPS(int16_t input0, - int16_t input1, - int32_t input2, - int32_t* ptr0, - int32_t* ptr1, - int32_t* ptr2); -#endif - -/* Function pointers associated with the above functions. */ - -typedef int (*AutocorrFix)(int32_t* __restrict r, - const int16_t* __restrict x, - int16_t N, - int16_t order, - int16_t* __restrict scale); -extern AutocorrFix WebRtcIsacfix_AutocorrFix; - -typedef void (*FilterMaLoopFix)(int16_t input0, - int16_t input1, - int32_t input2, - int32_t* ptr0, - int32_t* ptr1, - int32_t* ptr2); -extern FilterMaLoopFix WebRtcIsacfix_FilterMaLoopFix; - -#ifdef __cplusplus -} // extern "C" -#endif - -#endif /* MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_CODEC_H_ */ diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/decode.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/decode.c deleted file mode 100644 index 144208818a58..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/decode.c +++ /dev/null @@ -1,221 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * decode.c - * - * This C file contains the internal decoding function. - * - */ - -#include - -#include "modules/audio_coding/codecs/isac/fix/source/bandwidth_estimator.h" -#include "modules/audio_coding/codecs/isac/fix/source/codec.h" -#include "modules/audio_coding/codecs/isac/fix/source/entropy_coding.h" -#include "modules/audio_coding/codecs/isac/fix/source/pitch_estimator.h" -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" -#include "modules/audio_coding/codecs/isac/fix/source/structs.h" - - - - -int WebRtcIsacfix_DecodeImpl(int16_t* signal_out16, - IsacFixDecoderInstance* ISACdec_obj, - size_t* current_framesamples) -{ - int k; - int err; - int16_t BWno; - int len = 0; - - int16_t model; - - - int16_t Vector_Word16_1[FRAMESAMPLES/2]; - int16_t Vector_Word16_2[FRAMESAMPLES/2]; - - int32_t Vector_Word32_1[FRAMESAMPLES/2]; - int32_t Vector_Word32_2[FRAMESAMPLES/2]; - - int16_t lofilt_coefQ15[ORDERLO*SUBFRAMES]; //refl. coeffs - int16_t hifilt_coefQ15[ORDERHI*SUBFRAMES]; //refl. 
coeffs - int32_t gain_lo_hiQ17[2*SUBFRAMES]; - - int16_t PitchLags_Q7[PITCH_SUBFRAMES]; - int16_t PitchGains_Q12[PITCH_SUBFRAMES]; - int16_t AvgPitchGain_Q12; - - int16_t tmp_1, tmp_2; - int32_t tmp32a; - int16_t gainQ13; - - - size_t frame_nb; /* counter */ - size_t frame_mode; /* 0 for 30ms, 1 for 60ms */ - static const size_t kProcessedSamples = 480; /* 480 (for both 30, 60 ms) */ - - /* PLC */ - int16_t overlapWin[ 240 ]; - - (ISACdec_obj->bitstr_obj).W_upper = 0xFFFFFFFF; - (ISACdec_obj->bitstr_obj).streamval = 0; - (ISACdec_obj->bitstr_obj).stream_index = 0; - (ISACdec_obj->bitstr_obj).full = 1; - - - /* decode framelength and BW estimation - not used, only for stream pointer*/ - err = WebRtcIsacfix_DecodeFrameLen(&ISACdec_obj->bitstr_obj, current_framesamples); - if (err<0) // error check - return err; - - frame_mode = *current_framesamples / MAX_FRAMESAMPLES; /* 0, or 1 */ - - err = WebRtcIsacfix_DecodeSendBandwidth(&ISACdec_obj->bitstr_obj, &BWno); - if (err<0) // error check - return err; - - /* one loop if it's one frame (30ms), two loops if two frames bundled together - * (60ms) */ - for (frame_nb = 0; frame_nb <= frame_mode; frame_nb++) { - - /* decode & dequantize pitch parameters */ - err = WebRtcIsacfix_DecodePitchGain(&(ISACdec_obj->bitstr_obj), PitchGains_Q12); - if (err<0) // error check - return err; - - err = WebRtcIsacfix_DecodePitchLag(&ISACdec_obj->bitstr_obj, PitchGains_Q12, PitchLags_Q7); - if (err<0) // error check - return err; - - AvgPitchGain_Q12 = (int16_t)(((int32_t)PitchGains_Q12[0] + PitchGains_Q12[1] + PitchGains_Q12[2] + PitchGains_Q12[3])>>2); - - /* decode & dequantize FiltCoef */ - err = WebRtcIsacfix_DecodeLpc(gain_lo_hiQ17, lofilt_coefQ15, hifilt_coefQ15, - &ISACdec_obj->bitstr_obj, &model); - - if (err<0) // error check - return err; - - /* decode & dequantize spectrum */ - len = WebRtcIsacfix_DecodeSpec(&ISACdec_obj->bitstr_obj, Vector_Word16_1, Vector_Word16_2, AvgPitchGain_Q12); - if (len < 0) // error check - return len; - - // Why does this need Q16 in and out? /JS - WebRtcIsacfix_Spec2Time(Vector_Word16_1, Vector_Word16_2, Vector_Word32_1, Vector_Word32_2); - - for (k=0; k Q9. 
- Vector_Word16_1[k] = (int16_t)((Vector_Word32_1[k] + 64) >> 7); - } - - /* ---- If this is recovery frame ---- */ - if( (ISACdec_obj->plcstr_obj).used == PLC_WAS_USED ) - { - (ISACdec_obj->plcstr_obj).used = PLC_NOT_USED; - if( (ISACdec_obj->plcstr_obj).B < 1000 ) - { - (ISACdec_obj->plcstr_obj).decayCoeffPriodic = 4000; - } - - ISACdec_obj->plcstr_obj.decayCoeffPriodic = WEBRTC_SPL_WORD16_MAX; /* DECAY_RATE is in Q15 */ - ISACdec_obj->plcstr_obj.decayCoeffNoise = WEBRTC_SPL_WORD16_MAX; /* DECAY_RATE is in Q15 */ - ISACdec_obj->plcstr_obj.pitchCycles = 0; - - PitchGains_Q12[0] = (int16_t)(PitchGains_Q12[0] * 700 >> 10); - - /* ---- Add-overlap ---- */ - WebRtcSpl_GetHanningWindow( overlapWin, RECOVERY_OVERLAP ); - for( k = 0; k < RECOVERY_OVERLAP; k++ ) - Vector_Word16_1[k] = WebRtcSpl_AddSatW16( - (int16_t)(ISACdec_obj->plcstr_obj.overlapLP[k] * - overlapWin[RECOVERY_OVERLAP - k - 1] >> 14), - (int16_t)(Vector_Word16_1[k] * overlapWin[k] >> 14)); - - - - } - - /* --- Store side info --- */ - if( frame_nb == frame_mode ) - { - /* --- LPC info */ - WEBRTC_SPL_MEMCPY_W16( (ISACdec_obj->plcstr_obj).lofilt_coefQ15, &lofilt_coefQ15[(SUBFRAMES-1)*ORDERLO], ORDERLO ); - WEBRTC_SPL_MEMCPY_W16( (ISACdec_obj->plcstr_obj).hifilt_coefQ15, &hifilt_coefQ15[(SUBFRAMES-1)*ORDERHI], ORDERHI ); - (ISACdec_obj->plcstr_obj).gain_lo_hiQ17[0] = gain_lo_hiQ17[(SUBFRAMES-1) * 2]; - (ISACdec_obj->plcstr_obj).gain_lo_hiQ17[1] = gain_lo_hiQ17[(SUBFRAMES-1) * 2 + 1]; - - /* --- LTP info */ - (ISACdec_obj->plcstr_obj).AvgPitchGain_Q12 = PitchGains_Q12[3]; - (ISACdec_obj->plcstr_obj).lastPitchGain_Q12 = PitchGains_Q12[3]; - (ISACdec_obj->plcstr_obj).lastPitchLag_Q7 = PitchLags_Q7[3]; - - if( PitchLags_Q7[3] < 3000 ) - (ISACdec_obj->plcstr_obj).lastPitchLag_Q7 += PitchLags_Q7[3]; - - WEBRTC_SPL_MEMCPY_W16( (ISACdec_obj->plcstr_obj).prevPitchInvIn, Vector_Word16_1, FRAMESAMPLES/2 ); - - } - /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ - - /* inverse pitch filter */ - WebRtcIsacfix_PitchFilter(Vector_Word16_1, Vector_Word16_2, &ISACdec_obj->pitchfiltstr_obj, PitchLags_Q7, PitchGains_Q12, 4); - - if( frame_nb == frame_mode ) - { - WEBRTC_SPL_MEMCPY_W16( (ISACdec_obj->plcstr_obj).prevPitchInvOut, &(Vector_Word16_2[FRAMESAMPLES/2 - (PITCH_MAX_LAG + 10)]), PITCH_MAX_LAG ); - } - - - /* reduce gain to compensate for pitch enhancer */ - /* gain = 1.0f - 0.45f * AvgPitchGain; */ - tmp32a = AvgPitchGain_Q12 * 29; // Q18 - gainQ13 = (int16_t)((262144 - tmp32a) >> 5); // Q18 -> Q13. 
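/*
 * [Editor's sketch -- not part of the diff.] The gain reduction just above is
 * the fixed-point form of the commented formula `gain = 1.0f - 0.45f *
 * AvgPitchGain`: 0.45 is rounded to 29/64 (Q6), the Q12 pitch gain times that
 * Q6 constant lands in Q18, 1.0 in Q18 is 262144, and the final shift by 5
 * brings the result to Q13. A small side-by-side check (names are the editor's):
 */
#include <stdint.h>
#include <stdio.h>

int main(void) {
  int16_t avgPitchGainQ12 = 2048; /* 0.5 in Q12 */
  float gainFloat = 1.0f - 0.45f * (avgPitchGainQ12 / 4096.0f);

  int32_t tmpQ18 = avgPitchGainQ12 * 29;               /* 0.45 ~= 29/64 (Q6) */
  int16_t gainQ13 = (int16_t)((262144 - tmpQ18) >> 5); /* Q18 -> Q13 */

  printf("float: %f  fixed: %f\n", gainFloat, gainQ13 / 8192.0f);
  /* 0.775000 vs 0.773438: equal up to the Q6 rounding of the 0.45 constant. */
  return 0;
}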
- - for (k = 0; k < FRAMESAMPLES/2; k++) - { - Vector_Word32_1[k] = (Vector_Word16_2[k] * gainQ13) * (1 << 3); // Q25 - } - - - /* perceptual post-filtering (using normalized lattice filter) */ - WebRtcIsacfix_NormLatticeFilterAr(ORDERLO, (ISACdec_obj->maskfiltstr_obj).PostStateLoGQ0, - Vector_Word32_1, lofilt_coefQ15, gain_lo_hiQ17, 0, Vector_Word16_1); - - /* --- Store Highpass Residual --- */ - for (k = 0; k < FRAMESAMPLES/2; k++) - Vector_Word32_1[k] = Vector_Word32_2[k] * (1 << 9); // Q16 -> Q25 - - for( k = 0; k < PITCH_MAX_LAG + 10; k++ ) - (ISACdec_obj->plcstr_obj).prevHP[k] = Vector_Word32_1[FRAMESAMPLES/2 - (PITCH_MAX_LAG + 10) + k]; - - - WebRtcIsacfix_NormLatticeFilterAr(ORDERHI, (ISACdec_obj->maskfiltstr_obj).PostStateHiGQ0, - Vector_Word32_1, hifilt_coefQ15, gain_lo_hiQ17, 1, Vector_Word16_2); - - /* recombine the 2 bands */ - - /* Form the polyphase signals, and compensate for DC offset */ - for (k=0;kpostfiltbankstr_obj); - - } - return len; -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/decode_bwe.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/decode_bwe.c deleted file mode 100644 index 99676504cdd7..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/decode_bwe.c +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * decode_bwe.c - * - * This C file contains the internal decode bandwidth estimate function. - * - */ - - -#include "modules/audio_coding/codecs/isac/fix/source/bandwidth_estimator.h" -#include "modules/audio_coding/codecs/isac/fix/source/codec.h" -#include "modules/audio_coding/codecs/isac/fix/source/entropy_coding.h" -#include "modules/audio_coding/codecs/isac/fix/source/structs.h" - - - - -int WebRtcIsacfix_EstimateBandwidth(BwEstimatorstr *bwest_str, - Bitstr_dec *streamdata, - size_t packet_size, - uint16_t rtp_seq_number, - uint32_t send_ts, - uint32_t arr_ts) -{ - int16_t index; - size_t frame_samples; - int err; - - /* decode framelength */ - err = WebRtcIsacfix_DecodeFrameLen(streamdata, &frame_samples); - /* error check */ - if (err<0) { - return err; - } - - /* decode BW estimation */ - err = WebRtcIsacfix_DecodeSendBandwidth(streamdata, &index); - /* error check */ - if (err<0) { - return err; - } - - /* Update BWE with received data */ - err = WebRtcIsacfix_UpdateUplinkBwImpl( - bwest_str, - rtp_seq_number, - (int16_t)(frame_samples * 1000 / FS), - send_ts, - arr_ts, - packet_size, /* in bytes */ - index); - - /* error check */ - if (err<0) { - return err; - } - - /* Succesful */ - return 0; -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/decode_plc.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/decode_plc.c deleted file mode 100644 index 873cf951ba3d..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/decode_plc.c +++ /dev/null @@ -1,805 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. 
An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * decode_plc.c - * - * Packet Loss Concealment. - * - */ - -#include - -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" -#include "modules/audio_coding/codecs/isac/fix/source/entropy_coding.h" -#include "modules/audio_coding/codecs/isac/fix/source/pitch_estimator.h" -#include "modules/audio_coding/codecs/isac/fix/source/bandwidth_estimator.h" -#include "modules/audio_coding/codecs/isac/fix/source/structs.h" -#include "modules/audio_coding/codecs/isac/fix/source/codec.h" - - -#define NO_OF_PRIMES 8 -#define NOISE_FILTER_LEN 30 - -/* - * function to decode the bitstream - * returns the total number of bytes in the stream - */ - -static int16_t plc_filterma_Fast( - int16_t *In, /* (i) Vector to be filtered. InOut[-orderCoef+1] - to InOut[-1] contains state */ - int16_t *Out, /* (o) Filtered vector */ - int16_t *B, /* (i) The filter coefficients (in Q0) */ - int16_t Blen, /* (i) Number of B coefficients */ - int16_t len, /* (i) Number of samples to be filtered */ - int16_t reduceDecay, - int16_t decay, - int16_t rshift ) -{ - int i, j; - int32_t o; - int32_t lim = (1 << (15 + rshift)) - 1; - - for (i = 0; i < len; i++) - { - const int16_t *b_ptr = &B[0]; - const int16_t *x_ptr = &In[i]; - - o = (int32_t)0; - - for (j = 0;j < Blen; j++) - { - o = WebRtcSpl_AddSatW32(o, *b_ptr * *x_ptr); - b_ptr++; - x_ptr--; - } - - /* to round off correctly */ - o = WebRtcSpl_AddSatW32(o, 1 << (rshift - 1)); - - /* saturate according to the domain of the filter coefficients */ - o = WEBRTC_SPL_SAT((int32_t)lim, o, (int32_t)-lim); - - /* o should be in the range of int16_t */ - o >>= rshift; - - /* decay the output signal; this is specific to plc */ - *Out++ = (int16_t)((int16_t)o * decay >> 15); - - /* change the decay */ - decay -= reduceDecay; - if( decay < 0 ) - decay = 0; - } - return( decay ); -} - - - - - - - - -static __inline int32_t log2_Q8_T( uint32_t x ) { - - int32_t zeros; - int16_t frac; - - zeros=WebRtcSpl_NormU32(x); - frac = (int16_t)(((x << zeros) & 0x7FFFFFFF) >> 23); - - /* log2(magn(i)) */ - return ((31 - zeros) << 8) + frac; -} - -static __inline int16_t exp2_Q10_T(int16_t x) { // Both in and out in Q10 - - int16_t tmp16_1, tmp16_2; - - tmp16_2=(int16_t)(0x0400|(x&0x03FF)); - tmp16_1 = -(x >> 10); - if(tmp16_1>0) - return tmp16_2 >> tmp16_1; - else - return tmp16_2 << -tmp16_1; - -} - - -/* - This is a fixed-point version of the above code with limLow = 700 and limHigh = 5000, - hard-coded. The values 700 and 5000 were experimentally obtained. - - The function implements membership values for two sets. The mebership functions are - of second orders corresponding to half-bell-shapped pulses. -*/ -static void MemshipValQ15( int16_t in, int16_t *A, int16_t *B ) -{ - int16_t x; - - in -= 700; /* translate the lowLim to 0, limHigh = 5000 - 700, M = 2150 */ - - if( in <= 2150 ) - { - if( in > 0 ) - { - /* b = in^2 / (2 * M^2), a = 1 - b in Q0. 
- We have to compute in Q15 */ - - /* x = in / 2150 {in Q15} = x * 15.2409 {in Q15} = - x*15 + (x*983)/(2^12); note that 983/2^12 = 0.23999 */ - - /* we are sure that x is in the range of int16_t */ - x = (int16_t)(in * 15 + (in * 983 >> 12)); - /* b = x^2 / 2 {in Q15} so a shift of 16 is required to - be in correct domain and one more for the division by 2 */ - *B = (int16_t)((x * x + 0x00010000) >> 17); - *A = WEBRTC_SPL_WORD16_MAX - *B; - } - else - { - *B = 0; - *A = WEBRTC_SPL_WORD16_MAX; - } - } - else - { - if( in < 4300 ) - { - /* This is a mirror case of the above */ - in = 4300 - in; - x = (int16_t)(in * 15 + (in * 983 >> 12)); - /* b = x^2 / 2 {in Q15} so a shift of 16 is required to - be in correct domain and one more for the division by 2 */ - *A = (int16_t)((x * x + 0x00010000) >> 17); - *B = WEBRTC_SPL_WORD16_MAX - *A; - - } - else - { - *A = 0; - *B = WEBRTC_SPL_WORD16_MAX; - } - } -} - - - - -static void LinearResampler(int16_t* in, - int16_t* out, - size_t lenIn, - size_t lenOut) -{ - size_t n = (lenIn - 1) * RESAMP_RES; - int16_t resOut, relativePos, diff; /* */ - size_t i, j; - uint16_t udiff; - - if( lenIn == lenOut ) - { - WEBRTC_SPL_MEMCPY_W16( out, in, lenIn ); - return; - } - - resOut = WebRtcSpl_DivW32W16ResW16( (int32_t)n, (int16_t)(lenOut-1) ); - - out[0] = in[0]; - for( i = 1, j = 0, relativePos = 0; i < lenOut; i++ ) - { - - relativePos += resOut; - while( relativePos > RESAMP_RES ) - { - j++; - relativePos -= RESAMP_RES; - } - - - /* an overflow may happen and the differce in sample values may - * require more than 16 bits. We like to avoid 32 bit arithmatic - * as much as possible */ - - if( (in[ j ] > 0) && (in[j + 1] < 0) ) - { - udiff = (uint16_t)(in[ j ] - in[j + 1]); - out[ i ] = in[ j ] - (uint16_t)( ((int32_t)( udiff * relativePos )) >> RESAMP_RES_BIT); - } - else - { - if( (in[j] < 0) && (in[j+1] > 0) ) - { - udiff = (uint16_t)( in[j + 1] - in[ j ] ); - out[ i ] = in[ j ] + (uint16_t)( ((int32_t)( udiff * relativePos )) >> RESAMP_RES_BIT); - } - else - { - diff = in[ j + 1 ] - in[ j ]; - out[i] = in[j] + (int16_t)(diff * relativePos >> RESAMP_RES_BIT); - } - } - } -} - - - - - -void WebRtcIsacfix_DecodePlcImpl(int16_t *signal_out16, - IsacFixDecoderInstance *ISACdec_obj, - size_t *current_framesamples ) -{ - int subframecnt; - - int16_t* Vector_Word16_1; - int16_t Vector_Word16_Extended_1[FRAMESAMPLES_HALF + NOISE_FILTER_LEN]; - int16_t* Vector_Word16_2; - int16_t Vector_Word16_Extended_2[FRAMESAMPLES_HALF + NOISE_FILTER_LEN]; - - int32_t Vector_Word32_1[FRAMESAMPLES_HALF]; - int32_t Vector_Word32_2[FRAMESAMPLES_HALF]; - - int16_t lofilt_coefQ15[ORDERLO*SUBFRAMES]; //refl. coeffs - int16_t hifilt_coefQ15[ORDERHI*SUBFRAMES]; //refl. 
coeffs - - int16_t pitchLags_Q7[PITCH_SUBFRAMES]; - int16_t pitchGains_Q12[PITCH_SUBFRAMES]; - - int16_t tmp_1, tmp_2; - int32_t tmp32a, tmp32b; - int16_t gainQ13; - - int16_t myDecayRate; - - /* ---------- PLC variables ------------ */ - size_t lag0, i, k; - int16_t noiseIndex; - int16_t stretchPitchLP[PITCH_MAX_LAG + 10], stretchPitchLP1[PITCH_MAX_LAG + 10]; - - int32_t gain_lo_hiQ17[2*SUBFRAMES]; - - int16_t nLP, pLP, wNoisyLP, wPriodicLP, tmp16; - size_t minIdx; - int32_t nHP, pHP, wNoisyHP, wPriodicHP, corr, minCorr, maxCoeff; - int16_t noise1, rshift; - - - int16_t ltpGain, pitchGain, myVoiceIndicator, myAbs, maxAbs; - int32_t varIn, varOut, logVarIn, logVarOut, Q, logMaxAbs; - int rightShiftIn, rightShiftOut; - - - /* ------------------------------------- */ - - - myDecayRate = (DECAY_RATE); - Vector_Word16_1 = &Vector_Word16_Extended_1[NOISE_FILTER_LEN]; - Vector_Word16_2 = &Vector_Word16_Extended_2[NOISE_FILTER_LEN]; - - - /* ----- Simply Copy Previous LPC parameters ------ */ - for( subframecnt = 0; subframecnt < SUBFRAMES; subframecnt++ ) - { - /* lower Band */ - WEBRTC_SPL_MEMCPY_W16(&lofilt_coefQ15[ subframecnt * ORDERLO ], - (ISACdec_obj->plcstr_obj).lofilt_coefQ15, ORDERLO); - gain_lo_hiQ17[2*subframecnt] = (ISACdec_obj->plcstr_obj).gain_lo_hiQ17[0]; - - /* Upper Band */ - WEBRTC_SPL_MEMCPY_W16(&hifilt_coefQ15[ subframecnt * ORDERHI ], - (ISACdec_obj->plcstr_obj).hifilt_coefQ15, ORDERHI); - gain_lo_hiQ17[2*subframecnt + 1] = (ISACdec_obj->plcstr_obj).gain_lo_hiQ17[1]; - } - - - - - lag0 = (size_t)(((ISACdec_obj->plcstr_obj.lastPitchLag_Q7 + 64) >> 7) + 1); - - - if( (ISACdec_obj->plcstr_obj).used != PLC_WAS_USED ) - { - (ISACdec_obj->plcstr_obj).pitchCycles = 0; - - (ISACdec_obj->plcstr_obj).lastPitchLP = - &((ISACdec_obj->plcstr_obj).prevPitchInvIn[FRAMESAMPLES_HALF - lag0]); - minCorr = WEBRTC_SPL_WORD32_MAX; - - if ((FRAMESAMPLES_HALF - 10) > 2 * lag0) - { - minIdx = 11; - for( i = 0; i < 21; i++ ) - { - corr = 0; - for( k = 0; k < lag0; k++ ) - { - corr = WebRtcSpl_AddSatW32(corr, WEBRTC_SPL_ABS_W32( - WebRtcSpl_SubSatW16( - (ISACdec_obj->plcstr_obj).lastPitchLP[k], - (ISACdec_obj->plcstr_obj).prevPitchInvIn[ - FRAMESAMPLES_HALF - 2*lag0 - 10 + i + k ] ) ) ); - } - if( corr < minCorr ) - { - minCorr = corr; - minIdx = i; - } - } - (ISACdec_obj->plcstr_obj).prevPitchLP = - &( (ISACdec_obj->plcstr_obj).prevPitchInvIn[ - FRAMESAMPLES_HALF - lag0*2 - 10 + minIdx] ); - } - else - { - (ISACdec_obj->plcstr_obj).prevPitchLP = - (ISACdec_obj->plcstr_obj).lastPitchLP; - } - pitchGain = (ISACdec_obj->plcstr_obj).lastPitchGain_Q12; - - WebRtcSpl_AutoCorrelation( - &(ISACdec_obj->plcstr_obj).prevPitchInvIn[FRAMESAMPLES_HALF - lag0], - lag0, 0, &varIn, &rightShiftIn); - WebRtcSpl_AutoCorrelation( - &(ISACdec_obj->plcstr_obj).prevPitchInvOut[PITCH_MAX_LAG + 10 - lag0], - lag0, 0, &varOut, &rightShiftOut); - - maxAbs = 0; - for( i = 0; i< lag0; i++) - { - myAbs = WEBRTC_SPL_ABS_W16( - (ISACdec_obj->plcstr_obj).prevPitchInvOut[ - PITCH_MAX_LAG + 10 - lag0 + i] ); - maxAbs = (myAbs > maxAbs)? myAbs:maxAbs; - } - logVarIn = log2_Q8_T( (uint32_t)( varIn ) ) + - (int32_t)(rightShiftIn << 8); - logVarOut = log2_Q8_T( (uint32_t)( varOut ) ) + - (int32_t)(rightShiftOut << 8); - logMaxAbs = log2_Q8_T( (uint32_t)( maxAbs ) ); - - ltpGain = (int16_t)(logVarOut - logVarIn); - Q = 2 * logMaxAbs - ( logVarOut - 1512 ); - - /* - * --- - * We are computing sqrt( (VarIn/lag0) / var( noise ) ) - * var( noise ) is almost 256. 
we have already computed log2( VarIn ) in Q8 - * so we actually compute 2^( 0.5*(log2( VarIn ) - log2( lag0 ) - log2( var(noise ) ) ). - * Note that put log function is in Q8 but the exponential function is in Q10. - * -- - */ - - logVarIn -= log2_Q8_T( (uint32_t)( lag0 ) ); - tmp16 = (int16_t)((logVarIn<<1) - (4<<10) ); - rightShiftIn = 0; - if( tmp16 > 4096 ) - { - tmp16 -= 4096; - tmp16 = exp2_Q10_T( tmp16 ); - tmp16 >>= 6; - } - else - tmp16 = exp2_Q10_T( tmp16 )>>10; - - (ISACdec_obj->plcstr_obj).std = tmp16 - 4; - - if( (ltpGain < 110) || (ltpGain > 230) ) - { - if( ltpGain < 100 && (pitchGain < 1800) ) - { - (ISACdec_obj->plcstr_obj).A = WEBRTC_SPL_WORD16_MAX; - } - else - { - (ISACdec_obj->plcstr_obj).A = ((ltpGain < 110) && (Q < 800) - )? WEBRTC_SPL_WORD16_MAX:0; - } - (ISACdec_obj->plcstr_obj).B = WEBRTC_SPL_WORD16_MAX - - (ISACdec_obj->plcstr_obj).A; - } - else - { - if( (pitchGain < 450) || (pitchGain > 1600) ) - { - (ISACdec_obj->plcstr_obj).A = ((pitchGain < 450) - )? WEBRTC_SPL_WORD16_MAX:0; - (ISACdec_obj->plcstr_obj).B = WEBRTC_SPL_WORD16_MAX - - (ISACdec_obj->plcstr_obj).A; - } - else - { - myVoiceIndicator = ltpGain * 2 + pitchGain; - MemshipValQ15( myVoiceIndicator, - &(ISACdec_obj->plcstr_obj).A, &(ISACdec_obj->plcstr_obj).B ); - } - } - - - - myVoiceIndicator = ltpGain * 16 + pitchGain * 2 + (pitchGain >> 8); - MemshipValQ15( myVoiceIndicator, - &(ISACdec_obj->plcstr_obj).A, &(ISACdec_obj->plcstr_obj).B ); - - - - (ISACdec_obj->plcstr_obj).stretchLag = lag0; - (ISACdec_obj->plcstr_obj).pitchIndex = 0; - - } - else - { - myDecayRate = (DECAY_RATE<<2); - } - - if( (ISACdec_obj->plcstr_obj).B < 1000 ) - { - myDecayRate += (DECAY_RATE<<3); - } - - /* ------------ reconstructing the residual signal ------------------ */ - - LinearResampler( (ISACdec_obj->plcstr_obj).lastPitchLP, - stretchPitchLP, lag0, (ISACdec_obj->plcstr_obj).stretchLag ); - /* inverse pitch filter */ - - pitchLags_Q7[0] = pitchLags_Q7[1] = pitchLags_Q7[2] = pitchLags_Q7[3] = - (int16_t)((ISACdec_obj->plcstr_obj).stretchLag<<7); - pitchGains_Q12[3] = ( (ISACdec_obj->plcstr_obj).lastPitchGain_Q12); - pitchGains_Q12[2] = (int16_t)(pitchGains_Q12[3] * 1010 >> 10); - pitchGains_Q12[1] = (int16_t)(pitchGains_Q12[2] * 1010 >> 10); - pitchGains_Q12[0] = (int16_t)(pitchGains_Q12[1] * 1010 >> 10); - - - /* most of the time either B or A are zero so seperating */ - if( (ISACdec_obj->plcstr_obj).B == 0 ) - { - for( i = 0; i < FRAMESAMPLES_HALF; i++ ) - { - /* --- Low Pass */ - (ISACdec_obj->plcstr_obj).seed = WEBRTC_SPL_RAND( - (ISACdec_obj->plcstr_obj).seed ); - Vector_Word16_1[i] = (ISACdec_obj->plcstr_obj.seed >> 10) - 16; - - /* --- Highpass */ - (ISACdec_obj->plcstr_obj).seed = WEBRTC_SPL_RAND( - (ISACdec_obj->plcstr_obj).seed ); - Vector_Word16_2[i] = (ISACdec_obj->plcstr_obj.seed >> 10) - 16; - - } - for( i = 1; i < NOISE_FILTER_LEN; i++ ) - { - (ISACdec_obj->plcstr_obj).seed = WEBRTC_SPL_RAND( - (ISACdec_obj->plcstr_obj).seed ); - Vector_Word16_Extended_1[i] = (ISACdec_obj->plcstr_obj.seed >> 10) - 16; - - (ISACdec_obj->plcstr_obj).seed = WEBRTC_SPL_RAND( - (ISACdec_obj->plcstr_obj).seed ); - Vector_Word16_Extended_2[i] = (ISACdec_obj->plcstr_obj.seed >> 10) - 16; - } - plc_filterma_Fast(Vector_Word16_1, Vector_Word16_Extended_1, - &(ISACdec_obj->plcstr_obj).prevPitchInvIn[FRAMESAMPLES_HALF - - NOISE_FILTER_LEN], (int16_t) NOISE_FILTER_LEN, - (int16_t) FRAMESAMPLES_HALF, (int16_t)(5), - (ISACdec_obj->plcstr_obj).decayCoeffNoise, (int16_t)(6)); - - maxCoeff = WebRtcSpl_MaxAbsValueW32( - 
&(ISACdec_obj->plcstr_obj).prevHP[ - PITCH_MAX_LAG + 10 - NOISE_FILTER_LEN], NOISE_FILTER_LEN ); - - rshift = 0; - while( maxCoeff > WEBRTC_SPL_WORD16_MAX ) - { - maxCoeff >>= 1; - rshift++; - } - for( i = 0; i < NOISE_FILTER_LEN; i++ ) { - Vector_Word16_1[FRAMESAMPLES_HALF - NOISE_FILTER_LEN + i] =(int16_t)( - ISACdec_obj->plcstr_obj.prevHP[PITCH_MAX_LAG + 10 - NOISE_FILTER_LEN + - i] >> rshift); - } - (ISACdec_obj->plcstr_obj).decayCoeffNoise = plc_filterma_Fast( - Vector_Word16_2, - Vector_Word16_Extended_2, - &Vector_Word16_1[FRAMESAMPLES_HALF - NOISE_FILTER_LEN], - (int16_t) NOISE_FILTER_LEN, - (int16_t) FRAMESAMPLES_HALF, - (int16_t) (5), - (ISACdec_obj->plcstr_obj).decayCoeffNoise, - (int16_t) (7) ); - - for( i = 0; i < FRAMESAMPLES_HALF; i++ ) - Vector_Word32_2[i] = Vector_Word16_Extended_2[i] << rshift; - - Vector_Word16_1 = Vector_Word16_Extended_1; - } - else - { - if( (ISACdec_obj->plcstr_obj).A == 0 ) - { - /* ------ Periodic Vector --- */ - for( i = 0, noiseIndex = 0; i < FRAMESAMPLES_HALF; i++, noiseIndex++ ) - { - /* --- Lowpass */ - pLP = (int16_t)(stretchPitchLP[ISACdec_obj->plcstr_obj.pitchIndex] * - ISACdec_obj->plcstr_obj.decayCoeffPriodic >> 15); - - /* --- Highpass */ - pHP = (int32_t)WEBRTC_SPL_MUL_16_32_RSFT15( - (ISACdec_obj->plcstr_obj).decayCoeffPriodic, - (ISACdec_obj->plcstr_obj).prevHP[PITCH_MAX_LAG + 10 - - (ISACdec_obj->plcstr_obj).stretchLag + - (ISACdec_obj->plcstr_obj).pitchIndex] ); - - /* --- lower the muliplier (more decay at next sample) --- */ - (ISACdec_obj->plcstr_obj).decayCoeffPriodic -= (myDecayRate); - if( (ISACdec_obj->plcstr_obj).decayCoeffPriodic < 0 ) - (ISACdec_obj->plcstr_obj).decayCoeffPriodic = 0; - - (ISACdec_obj->plcstr_obj).pitchIndex++; - - if( (ISACdec_obj->plcstr_obj).pitchIndex == - (ISACdec_obj->plcstr_obj).stretchLag ) - { - (ISACdec_obj->plcstr_obj).pitchIndex = 0; - (ISACdec_obj->plcstr_obj).pitchCycles++; - - if( (ISACdec_obj->plcstr_obj).stretchLag != (lag0 + 1) ) - { - (ISACdec_obj->plcstr_obj).stretchLag = lag0 + 1; - } - else - { - (ISACdec_obj->plcstr_obj).stretchLag = lag0; - } - - (ISACdec_obj->plcstr_obj).stretchLag = ( - (ISACdec_obj->plcstr_obj).stretchLag > PITCH_MAX_LAG - )? 
(PITCH_MAX_LAG):(ISACdec_obj->plcstr_obj).stretchLag; - - LinearResampler( (ISACdec_obj->plcstr_obj).lastPitchLP, - stretchPitchLP, lag0, (ISACdec_obj->plcstr_obj).stretchLag ); - - LinearResampler( (ISACdec_obj->plcstr_obj).prevPitchLP, - stretchPitchLP1, lag0, (ISACdec_obj->plcstr_obj).stretchLag ); - - switch( (ISACdec_obj->plcstr_obj).pitchCycles ) - { - case 1: - { - for( k=0; k<(ISACdec_obj->plcstr_obj).stretchLag; k++ ) - { - stretchPitchLP[k] = (int16_t)(( - (int32_t)stretchPitchLP[k]* 3 + - (int32_t)stretchPitchLP1[k])>>2); - } - break; - } - case 2: - { - for( k=0; k<(ISACdec_obj->plcstr_obj).stretchLag; k++ ) - { - stretchPitchLP[k] = (int16_t)(( - (int32_t)stretchPitchLP[k] + - (int32_t)stretchPitchLP1[k] )>>1); - } - break; - } - case 3: - { - for( k=0; k<(ISACdec_obj->plcstr_obj).stretchLag; k++ ) - { - stretchPitchLP[k] = (int16_t)((stretchPitchLP[k] + - (int32_t)stretchPitchLP1[k]*3 )>>2); - } - break; - } - } - - if( (ISACdec_obj->plcstr_obj).pitchCycles == 3 ) - { - myDecayRate += 35; //(myDecayRate>>1); - (ISACdec_obj->plcstr_obj).pitchCycles = 0; - } - - } - - /* ------ Sum the noisy and periodic signals ------ */ - Vector_Word16_1[i] = pLP; - Vector_Word32_2[i] = pHP; - } - } - else - { - for( i = 0, noiseIndex = 0; i < FRAMESAMPLES_HALF; i++, noiseIndex++ ) - { - - (ISACdec_obj->plcstr_obj).seed = WEBRTC_SPL_RAND( - (ISACdec_obj->plcstr_obj).seed ); - - noise1 = (ISACdec_obj->plcstr_obj.seed >> 10) - 16; - - nLP = (int16_t)((int16_t)(noise1 * ISACdec_obj->plcstr_obj.std) * - ISACdec_obj->plcstr_obj.decayCoeffNoise >> 15); - - /* --- Highpass */ - (ISACdec_obj->plcstr_obj).seed = WEBRTC_SPL_RAND( - (ISACdec_obj->plcstr_obj).seed ); - noise1 = (ISACdec_obj->plcstr_obj.seed >> 11) - 8; - - nHP = (int32_t)WEBRTC_SPL_MUL_16_32_RSFT15( - (ISACdec_obj->plcstr_obj).decayCoeffNoise, - (int32_t)(noise1*(ISACdec_obj->plcstr_obj).std) ); - - /* --- lower the muliplier (more decay at next sample) --- */ - (ISACdec_obj->plcstr_obj).decayCoeffNoise -= (myDecayRate); - if( (ISACdec_obj->plcstr_obj).decayCoeffNoise < 0 ) - (ISACdec_obj->plcstr_obj).decayCoeffNoise = 0; - - /* ------ Periodic Vector --- */ - /* --- Lowpass */ - pLP = (int16_t)(stretchPitchLP[ISACdec_obj->plcstr_obj.pitchIndex] * - ISACdec_obj->plcstr_obj.decayCoeffPriodic >> 15); - - /* --- Highpass */ - pHP = (int32_t)WEBRTC_SPL_MUL_16_32_RSFT15( - (ISACdec_obj->plcstr_obj).decayCoeffPriodic, - (ISACdec_obj->plcstr_obj).prevHP[PITCH_MAX_LAG + 10 - - (ISACdec_obj->plcstr_obj).stretchLag + - (ISACdec_obj->plcstr_obj).pitchIndex] ); - - /* --- lower the muliplier (more decay at next sample) --- */ - (ISACdec_obj->plcstr_obj).decayCoeffPriodic -= (myDecayRate); - if( (ISACdec_obj->plcstr_obj).decayCoeffPriodic < 0 ) - { - (ISACdec_obj->plcstr_obj).decayCoeffPriodic = 0; - } - - /* ------ Weighting the noisy and periodic vectors ------- */ - wNoisyLP = (int16_t)(ISACdec_obj->plcstr_obj.A * nLP >> 15); - wNoisyHP = (int32_t)(WEBRTC_SPL_MUL_16_32_RSFT15( - (ISACdec_obj->plcstr_obj).A, (nHP) ) ); - - wPriodicLP = (int16_t)(ISACdec_obj->plcstr_obj.B * pLP >> 15); - wPriodicHP = (int32_t)(WEBRTC_SPL_MUL_16_32_RSFT15( - (ISACdec_obj->plcstr_obj).B, pHP)); - - (ISACdec_obj->plcstr_obj).pitchIndex++; - - if((ISACdec_obj->plcstr_obj).pitchIndex == - (ISACdec_obj->plcstr_obj).stretchLag) - { - (ISACdec_obj->plcstr_obj).pitchIndex = 0; - (ISACdec_obj->plcstr_obj).pitchCycles++; - - if( (ISACdec_obj->plcstr_obj).stretchLag != (lag0 + 1) ) - (ISACdec_obj->plcstr_obj).stretchLag = lag0 + 1; - else - (ISACdec_obj->plcstr_obj).stretchLag 
= lag0; - - (ISACdec_obj->plcstr_obj).stretchLag = ( - (ISACdec_obj->plcstr_obj).stretchLag > PITCH_MAX_LAG - )? (PITCH_MAX_LAG):(ISACdec_obj->plcstr_obj).stretchLag; - LinearResampler( - (ISACdec_obj->plcstr_obj).lastPitchLP, - stretchPitchLP, lag0, (ISACdec_obj->plcstr_obj).stretchLag ); - - LinearResampler((ISACdec_obj->plcstr_obj).prevPitchLP, - stretchPitchLP1, lag0, (ISACdec_obj->plcstr_obj).stretchLag ); - - switch((ISACdec_obj->plcstr_obj).pitchCycles) - { - case 1: - { - for( k=0; k<(ISACdec_obj->plcstr_obj).stretchLag; k++ ) - { - stretchPitchLP[k] = (int16_t)(( - (int32_t)stretchPitchLP[k]* 3 + - (int32_t)stretchPitchLP1[k] )>>2); - } - break; - } - case 2: - { - for( k=0; k<(ISACdec_obj->plcstr_obj).stretchLag; k++ ) - { - stretchPitchLP[k] = (int16_t)(( - (int32_t)stretchPitchLP[k] + - (int32_t)stretchPitchLP1[k])>>1); - } - break; - } - case 3: - { - for( k=0; k<(ISACdec_obj->plcstr_obj).stretchLag; k++ ) - { - stretchPitchLP[k] = (int16_t)( - (stretchPitchLP[k] + - (int32_t)stretchPitchLP1[k]*3 )>>2); - } - break; - } - } - - if( (ISACdec_obj->plcstr_obj).pitchCycles == 3 ) - { - myDecayRate += 55; //(myDecayRate>>1); - (ISACdec_obj->plcstr_obj).pitchCycles = 0; - } - } - - /* ------ Sum the noisy and periodic signals ------ */ - Vector_Word16_1[i] = WebRtcSpl_AddSatW16(wNoisyLP, wPriodicLP); - Vector_Word32_2[i] = WebRtcSpl_AddSatW32(wNoisyHP, wPriodicHP); - } - } - } - /* ----------------- residual signal is reconstructed ------------------ */ - - k = (ISACdec_obj->plcstr_obj).pitchIndex; - /* --- Write one pitch cycle for recovery block --- */ - - for( i = 0; i < RECOVERY_OVERLAP; i++ ) - { - ISACdec_obj->plcstr_obj.overlapLP[i] = (int16_t)( - stretchPitchLP[k] * ISACdec_obj->plcstr_obj.decayCoeffPriodic >> 15); - k = ( k < ((ISACdec_obj->plcstr_obj).stretchLag - 1) )? (k+1):0; - } - - (ISACdec_obj->plcstr_obj).lastPitchLag_Q7 = - (int16_t)((ISACdec_obj->plcstr_obj).stretchLag << 7); - - - /* --- Inverse Pitch Filter --- */ - WebRtcIsacfix_PitchFilter(Vector_Word16_1, Vector_Word16_2, - &ISACdec_obj->pitchfiltstr_obj, pitchLags_Q7, pitchGains_Q12, 4); - - /* reduce gain to compensate for pitch enhancer */ - /* gain = 1.0f - 0.45f * AvgPitchGain; */ - tmp32a = ISACdec_obj->plcstr_obj.AvgPitchGain_Q12 * 29; // Q18 - tmp32b = 262144 - tmp32a; // Q18 - gainQ13 = (int16_t) (tmp32b >> 5); // Q13 - - /* perceptual post-filtering (using normalized lattice filter) */ - for (k = 0; k < FRAMESAMPLES_HALF; k++) - Vector_Word32_1[k] = (Vector_Word16_2[k] * gainQ13) << 3; // Q25 - - - WebRtcIsacfix_NormLatticeFilterAr(ORDERLO, - (ISACdec_obj->maskfiltstr_obj).PostStateLoGQ0, - Vector_Word32_1, lofilt_coefQ15, gain_lo_hiQ17, 0, Vector_Word16_1); - - WebRtcIsacfix_NormLatticeFilterAr(ORDERHI, - (ISACdec_obj->maskfiltstr_obj).PostStateHiGQ0, - Vector_Word32_2, hifilt_coefQ15, gain_lo_hiQ17, 1, Vector_Word16_2); - - /* recombine the 2 bands */ - - /* Form the polyphase signals, and compensate for DC offset */ - for (k=0;kpostfiltbankstr_obj); - - (ISACdec_obj->plcstr_obj).used = PLC_WAS_USED; - *current_framesamples = 480; -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/encode.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/encode.c deleted file mode 100644 index ef3e320e2c41..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/encode.c +++ /dev/null @@ -1,635 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * encode.c - * - * Encoding function for the iSAC coder. - * - */ - -#include "rtc_base/checks.h" -#include "modules/audio_coding/codecs/isac/fix/source/codec.h" - -#include - -#include "modules/audio_coding/codecs/isac/fix/source/arith_routins.h" -#include "modules/audio_coding/codecs/isac/fix/source/bandwidth_estimator.h" -#include "modules/audio_coding/codecs/isac/fix/source/entropy_coding.h" -#include "modules/audio_coding/codecs/isac/fix/source/lpc_masking_model.h" -#include "modules/audio_coding/codecs/isac/fix/source/lpc_tables.h" -#include "modules/audio_coding/codecs/isac/fix/source/pitch_estimator.h" -#include "modules/audio_coding/codecs/isac/fix/source/pitch_gain_tables.h" -#include "modules/audio_coding/codecs/isac/fix/source/pitch_lag_tables.h" -#include "modules/audio_coding/codecs/isac/fix/source/structs.h" - - -int WebRtcIsacfix_EncodeImpl(int16_t *in, - IsacFixEncoderInstance *ISACenc_obj, - BwEstimatorstr *bw_estimatordata, - int16_t CodingMode) -{ - int16_t stream_length = 0; - int16_t usefulstr_len = 0; - int k; - int16_t BWno; - - int16_t lofilt_coefQ15[(ORDERLO)*SUBFRAMES]; - int16_t hifilt_coefQ15[(ORDERHI)*SUBFRAMES]; - int32_t gain_lo_hiQ17[2*SUBFRAMES]; - - int16_t LPandHP[FRAMESAMPLES/2 + QLOOKAHEAD]; - int16_t LP16a[FRAMESAMPLES/2 + QLOOKAHEAD]; - int16_t HP16a[FRAMESAMPLES/2 + QLOOKAHEAD]; - - int16_t PitchLags_Q7[PITCH_SUBFRAMES]; - int16_t PitchGains_Q12[PITCH_SUBFRAMES]; - int16_t AvgPitchGain_Q12; - - int16_t frame_mode; /* 0 for 30ms, 1 for 60ms */ - int16_t processed_samples; - int status; - - int32_t bits_gainsQ11; - int16_t MinBytes; - int16_t bmodel; - - transcode_obj transcodingParam; - int16_t payloadLimitBytes; - int16_t arithLenBeforeEncodingDFT; - int16_t iterCntr; - - /* copy new frame length and bottle neck rate only for the first 10 ms data */ - if (ISACenc_obj->buffer_index == 0) { - /* set the framelength for the next packet */ - ISACenc_obj->current_framesamples = ISACenc_obj->new_framelength; - } - - frame_mode = ISACenc_obj->current_framesamples/MAX_FRAMESAMPLES; /* 0 (30 ms) or 1 (60 ms) */ - processed_samples = ISACenc_obj->current_framesamples/(frame_mode+1); /* 480 (30, 60 ms) */ - - /* buffer speech samples (by 10ms packet) until the framelength is reached (30 or 60 ms) */ - /**************************************************************************************/ - /* fill the buffer with 10ms input data */ - for(k=0; kdata_buffer_fix[k + ISACenc_obj->buffer_index] = in[k]; - } - /* if buffersize is not equal to current framesize, and end of file is not reached yet, */ - /* increase index and go back to main to get more speech samples */ - if (ISACenc_obj->buffer_index + FRAMESAMPLES_10ms != processed_samples) { - ISACenc_obj->buffer_index = ISACenc_obj->buffer_index + FRAMESAMPLES_10ms; - return 0; - } - /* if buffer reached the right size, reset index and continue with encoding the frame */ - ISACenc_obj->buffer_index = 0; - - /* end of buffer function */ - /**************************/ - - /* encoding */ - /************/ - - if (frame_mode == 0 || ISACenc_obj->frame_nb == 0 ) - { - /* reset bitstream */ - ISACenc_obj->bitstr_obj.W_upper = 0xFFFFFFFF; - ISACenc_obj->bitstr_obj.streamval = 0; - 
ISACenc_obj->bitstr_obj.stream_index = 0; - ISACenc_obj->bitstr_obj.full = 1; - - if (CodingMode == 0) { - ISACenc_obj->BottleNeck = WebRtcIsacfix_GetUplinkBandwidth(bw_estimatordata); - ISACenc_obj->MaxDelay = WebRtcIsacfix_GetUplinkMaxDelay(bw_estimatordata); - } - if (CodingMode == 0 && frame_mode == 0 && (ISACenc_obj->enforceFrameSize == 0)) { - ISACenc_obj->new_framelength = WebRtcIsacfix_GetNewFrameLength(ISACenc_obj->BottleNeck, - ISACenc_obj->current_framesamples); - } - - // multiply the bottleneck by 0.88 before computing SNR, 0.88 is tuned by experimenting on TIMIT - // 901/1024 is 0.87988281250000 - ISACenc_obj->s2nr = WebRtcIsacfix_GetSnr( - (int16_t)(ISACenc_obj->BottleNeck * 901 >> 10), - ISACenc_obj->current_framesamples); - - /* encode frame length */ - status = WebRtcIsacfix_EncodeFrameLen(ISACenc_obj->current_framesamples, &ISACenc_obj->bitstr_obj); - if (status < 0) - { - /* Wrong frame size */ - if (frame_mode == 1 && ISACenc_obj->frame_nb == 1) - { - // If this is the second 30ms of a 60ms frame reset this such that in the next call - // encoder starts fresh. - ISACenc_obj->frame_nb = 0; - } - return status; - } - - /* Save framelength for multiple packets memory */ - if (ISACenc_obj->SaveEnc_ptr != NULL) { - (ISACenc_obj->SaveEnc_ptr)->framelength=ISACenc_obj->current_framesamples; - } - - /* bandwidth estimation and coding */ - BWno = WebRtcIsacfix_GetDownlinkBwIndexImpl(bw_estimatordata); - status = WebRtcIsacfix_EncodeReceiveBandwidth(&BWno, &ISACenc_obj->bitstr_obj); - if (status < 0) - { - if (frame_mode == 1 && ISACenc_obj->frame_nb == 1) - { - // If this is the second 30ms of a 60ms frame reset this such that in the next call - // encoder starts fresh. - ISACenc_obj->frame_nb = 0; - } - return status; - } - } - - /* split signal in two bands */ - WebRtcIsacfix_SplitAndFilter1(ISACenc_obj->data_buffer_fix, LP16a, HP16a, &ISACenc_obj->prefiltbankstr_obj ); - - /* estimate pitch parameters and pitch-filter lookahead signal */ - WebRtcIsacfix_PitchAnalysis(LP16a+QLOOKAHEAD, LPandHP, - &ISACenc_obj->pitchanalysisstr_obj, PitchLags_Q7, PitchGains_Q12); /* LPandHP = LP_lookahead_pfQ0, */ - - /* Set where to store data in multiple packets memory */ - if (ISACenc_obj->SaveEnc_ptr != NULL) { - if (frame_mode == 0 || ISACenc_obj->frame_nb == 0) - { - (ISACenc_obj->SaveEnc_ptr)->startIdx = 0; - } - else - { - (ISACenc_obj->SaveEnc_ptr)->startIdx = 1; - } - } - - /* quantize & encode pitch parameters */ - status = WebRtcIsacfix_EncodePitchGain(PitchGains_Q12, &ISACenc_obj->bitstr_obj, ISACenc_obj->SaveEnc_ptr); - if (status < 0) - { - if (frame_mode == 1 && ISACenc_obj->frame_nb == 1) - { - // If this is the second 30ms of a 60ms frame reset this such that in the next call - // encoder starts fresh. - ISACenc_obj->frame_nb = 0; - } - return status; - } - status = WebRtcIsacfix_EncodePitchLag(PitchLags_Q7 , PitchGains_Q12, &ISACenc_obj->bitstr_obj, ISACenc_obj->SaveEnc_ptr); - if (status < 0) - { - if (frame_mode == 1 && ISACenc_obj->frame_nb == 1) - { - // If this is the second 30ms of a 60ms frame reset this such that in the next call - // encoder starts fresh. 
- ISACenc_obj->frame_nb = 0; - } - return status; - } - AvgPitchGain_Q12 = (PitchGains_Q12[0] + PitchGains_Q12[1] + - PitchGains_Q12[2] + PitchGains_Q12[3]) >> 2; - - /* find coefficients for perceptual pre-filters */ - WebRtcIsacfix_GetLpcCoef(LPandHP, HP16a+QLOOKAHEAD, &ISACenc_obj->maskfiltstr_obj, - ISACenc_obj->s2nr, PitchGains_Q12, - gain_lo_hiQ17, lofilt_coefQ15, hifilt_coefQ15); /*LPandHP = LP_lookahead_pfQ0*/ - - // record LPC Gains for possible bit-rate reduction - for(k = 0; k < KLT_ORDER_GAIN; k++) - { - transcodingParam.lpcGains[k] = gain_lo_hiQ17[k]; - } - - /* code LPC model and shape - gains not quantized yet */ - status = WebRtcIsacfix_EncodeLpc(gain_lo_hiQ17, lofilt_coefQ15, hifilt_coefQ15, - &bmodel, &bits_gainsQ11, &ISACenc_obj->bitstr_obj, ISACenc_obj->SaveEnc_ptr, &transcodingParam); - if (status < 0) - { - if (frame_mode == 1 && ISACenc_obj->frame_nb == 1) - { - // If this is the second 30ms of a 60ms frame reset this such that in the next call - // encoder starts fresh. - ISACenc_obj->frame_nb = 0; - } - return status; - } - arithLenBeforeEncodingDFT = (ISACenc_obj->bitstr_obj.stream_index << 1) + (1-ISACenc_obj->bitstr_obj.full); - - /* low-band filtering */ - WebRtcIsacfix_NormLatticeFilterMa(ORDERLO, ISACenc_obj->maskfiltstr_obj.PreStateLoGQ15, - LP16a, lofilt_coefQ15, gain_lo_hiQ17, 0, LPandHP);/* LPandHP = LP16b */ - - /* pitch filter */ - WebRtcIsacfix_PitchFilter(LPandHP, LP16a, &ISACenc_obj->pitchfiltstr_obj, PitchLags_Q7, PitchGains_Q12, 1);/* LPandHP = LP16b */ - - /* high-band filtering */ - WebRtcIsacfix_NormLatticeFilterMa(ORDERHI, ISACenc_obj->maskfiltstr_obj.PreStateHiGQ15, - HP16a, hifilt_coefQ15, gain_lo_hiQ17, 1, LPandHP);/*LPandHP = HP16b*/ - - /* transform */ - WebRtcIsacfix_Time2Spec(LP16a, LPandHP, LP16a, LPandHP); /*LPandHP = HP16b*/ - - /* Save data for multiple packets memory */ - if (ISACenc_obj->SaveEnc_ptr != NULL) { - for (k = 0; k < FRAMESAMPLES_HALF; k++) { - (ISACenc_obj->SaveEnc_ptr)->fre[k + (ISACenc_obj->SaveEnc_ptr)->startIdx*FRAMESAMPLES_HALF] = LP16a[k]; - (ISACenc_obj->SaveEnc_ptr)->fim[k + (ISACenc_obj->SaveEnc_ptr)->startIdx*FRAMESAMPLES_HALF] = LPandHP[k]; - } - (ISACenc_obj->SaveEnc_ptr)->AvgPitchGain[(ISACenc_obj->SaveEnc_ptr)->startIdx] = AvgPitchGain_Q12; - } - - /* quantization and lossless coding */ - status = WebRtcIsacfix_EncodeSpec(LP16a, LPandHP, &ISACenc_obj->bitstr_obj, AvgPitchGain_Q12); - if((status <= -1) && (status != -ISAC_DISALLOWED_BITSTREAM_LENGTH)) /*LPandHP = HP16b*/ - { - if (frame_mode == 1 && ISACenc_obj->frame_nb == 1) - { - // If this is the second 30ms of a 60ms frame reset this such that in the next call - // encoder starts fresh. - ISACenc_obj->frame_nb = 0; - } - return status; - } - - if((frame_mode == 1) && (ISACenc_obj->frame_nb == 0)) - { - // it is a 60ms and we are in the first 30ms - // then the limit at this point should be half of the assigned value - payloadLimitBytes = ISACenc_obj->payloadLimitBytes60 >> 1; - } - else if (frame_mode == 0) - { - // it is a 30ms frame - payloadLimitBytes = (ISACenc_obj->payloadLimitBytes30) - 3; - } - else - { - // this is the second half of a 60ms frame. 
- payloadLimitBytes = ISACenc_obj->payloadLimitBytes60 - 3; // subract 3 because termination process may add 3 bytes - } - - iterCntr = 0; - while((((ISACenc_obj->bitstr_obj.stream_index) << 1) > payloadLimitBytes) || - (status == -ISAC_DISALLOWED_BITSTREAM_LENGTH)) - { - int16_t arithLenDFTByte; - int16_t bytesLeftQ5; - int16_t ratioQ5[8] = {0, 6, 9, 12, 16, 19, 22, 25}; - - // According to experiments on TIMIT the following is proper for audio, but it is not agressive enough for tonal inputs - // such as DTMF, sweep-sine, ... - // - // (0.55 - (0.8 - ratio[i]/32) * 5 / 6) * 2^14 - // int16_t scaleQ14[8] = {0, 648, 1928, 3208, 4915, 6195, 7475, 8755}; - - - // This is a supper-agressive scaling passed the tests (tonal inputs) tone with one iteration for payload limit - // of 120 (32kbps bottleneck), number of frames needed a rate-reduction was 58403 - // - int16_t scaleQ14[8] = {0, 348, 828, 1408, 2015, 3195, 3500, 3500}; - int16_t idx; - - if(iterCntr >= MAX_PAYLOAD_LIMIT_ITERATION) - { - // We were not able to limit the payload size - - if((frame_mode == 1) && (ISACenc_obj->frame_nb == 0)) - { - // This was the first 30ms of a 60ms frame. Although the payload is larger than it - // should be but we let the second 30ms be encoded. Maybe togetehr we won't exceed - // the limit. - ISACenc_obj->frame_nb = 1; - return 0; - } - else if((frame_mode == 1) && (ISACenc_obj->frame_nb == 1)) - { - ISACenc_obj->frame_nb = 0; - } - - if(status != -ISAC_DISALLOWED_BITSTREAM_LENGTH) - { - return -ISAC_PAYLOAD_LARGER_THAN_LIMIT; - } - else - { - return status; - } - } - if(status != -ISAC_DISALLOWED_BITSTREAM_LENGTH) - { - arithLenDFTByte = (ISACenc_obj->bitstr_obj.stream_index << 1) + (1-ISACenc_obj->bitstr_obj.full) - arithLenBeforeEncodingDFT; - bytesLeftQ5 = (payloadLimitBytes - arithLenBeforeEncodingDFT) << 5; - - // bytesLeft / arithLenDFTBytes indicates how much scaling is required a rough estimate (agressive) - // scale = 0.55 - (0.8 - bytesLeft / arithLenDFTBytes) * 5 / 6 - // bytesLeft / arithLenDFTBytes below 0.2 will have a scale of zero and above 0.8 are treated as 0.8 - // to avoid division we do more simplification. - // - // values of (bytesLeft / arithLenDFTBytes)*32 between ratioQ5[i] and ratioQ5[i+1] are rounded to ratioQ5[i] - // and the corresponding scale is chosen - - // we compare bytesLeftQ5 with ratioQ5[]*arithLenDFTByte; - idx = 4; - idx += (bytesLeftQ5 >= ratioQ5[idx] * arithLenDFTByte) ? 2 : -2; - idx += (bytesLeftQ5 >= ratioQ5[idx] * arithLenDFTByte) ? 1 : -1; - idx += (bytesLeftQ5 >= ratioQ5[idx] * arithLenDFTByte) ? 0 : -1; - } - else - { - // we are here because the bit-stream did not fit into the buffer, in this case, the stream_index is not - // trustable, especially if the is the first 30ms of a packet. Thereforem, we will go for the most agressive - // case. 
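          // (Illustrative note, not from the original iSAC sources: the three
          // conditional updates on `idx` a few lines above implement a small
          // binary search over the eight ratioQ5[] buckets. Starting from
          // idx = 4 they step by +/-2, then +/-1, then +0/-1, leaving idx in
          // the range 0..7. For instance, a value of
          // bytesLeftQ5 / arithLenDFTByte between 12 and 15 -- i.e. roughly
          // 0.375..0.5 of the DFT bits still available -- ends at idx = 3 and
          // a scale of scaleQ14[3] = 1408, about 0.086 in Q14.)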
- idx = 0; - } - // scale FFT coefficients to reduce the bit-rate - for(k = 0; k < FRAMESAMPLES_HALF; k++) - { - LP16a[k] = (int16_t)(LP16a[k] * scaleQ14[idx] >> 14); - LPandHP[k] = (int16_t)(LPandHP[k] * scaleQ14[idx] >> 14); - } - - // Save data for multiple packets memory - if (ISACenc_obj->SaveEnc_ptr != NULL) - { - for(k = 0; k < FRAMESAMPLES_HALF; k++) - { - (ISACenc_obj->SaveEnc_ptr)->fre[k + (ISACenc_obj->SaveEnc_ptr)->startIdx*FRAMESAMPLES_HALF] = LP16a[k]; - (ISACenc_obj->SaveEnc_ptr)->fim[k + (ISACenc_obj->SaveEnc_ptr)->startIdx*FRAMESAMPLES_HALF] = LPandHP[k]; - } - } - - // scale the unquantized LPC gains and save the scaled version for the future use - for(k = 0; k < KLT_ORDER_GAIN; k++) - { - gain_lo_hiQ17[k] = WEBRTC_SPL_MUL_16_32_RSFT14(scaleQ14[idx], transcodingParam.lpcGains[k]);//transcodingParam.lpcGains[k]; // - transcodingParam.lpcGains[k] = gain_lo_hiQ17[k]; - } - - // reset the bit-stream object to the state which it had before encoding LPC Gains - ISACenc_obj->bitstr_obj.full = transcodingParam.full; - ISACenc_obj->bitstr_obj.stream_index = transcodingParam.stream_index; - ISACenc_obj->bitstr_obj.streamval = transcodingParam.streamval; - ISACenc_obj->bitstr_obj.W_upper = transcodingParam.W_upper; - ISACenc_obj->bitstr_obj.stream[transcodingParam.stream_index-1] = transcodingParam.beforeLastWord; - ISACenc_obj->bitstr_obj.stream[transcodingParam.stream_index] = transcodingParam.lastWord; - - - // quantize and encode LPC gain - WebRtcIsacfix_EstCodeLpcGain(gain_lo_hiQ17, &ISACenc_obj->bitstr_obj, ISACenc_obj->SaveEnc_ptr); - arithLenBeforeEncodingDFT = (ISACenc_obj->bitstr_obj.stream_index << 1) + (1-ISACenc_obj->bitstr_obj.full); - status = WebRtcIsacfix_EncodeSpec(LP16a, LPandHP, &ISACenc_obj->bitstr_obj, AvgPitchGain_Q12); - if((status <= -1) && (status != -ISAC_DISALLOWED_BITSTREAM_LENGTH)) /*LPandHP = HP16b*/ - { - if (frame_mode == 1 && ISACenc_obj->frame_nb == 1) - { - // If this is the second 30ms of a 60ms frame reset this such that in the next call - // encoder starts fresh. - ISACenc_obj->frame_nb = 0; - } - return status; - } - iterCntr++; - } - - if (frame_mode == 1 && ISACenc_obj->frame_nb == 0) - /* i.e. 60 ms framesize and just processed the first 30ms, */ - /* go back to main function to buffer the other 30ms speech frame */ - { - ISACenc_obj->frame_nb = 1; - return 0; - } - else if (frame_mode == 1 && ISACenc_obj->frame_nb == 1) - { - ISACenc_obj->frame_nb = 0; - /* also update the framelength for next packet, in Adaptive mode only */ - if (CodingMode == 0 && (ISACenc_obj->enforceFrameSize == 0)) { - ISACenc_obj->new_framelength = WebRtcIsacfix_GetNewFrameLength(ISACenc_obj->BottleNeck, - ISACenc_obj->current_framesamples); - } - } - - - /* complete arithmetic coding */ - stream_length = WebRtcIsacfix_EncTerminate(&ISACenc_obj->bitstr_obj); - /* can this be negative? 
*/ - - if(CodingMode == 0) - { - - /* update rate model and get minimum number of bytes in this packet */ - MinBytes = WebRtcIsacfix_GetMinBytes(&ISACenc_obj->rate_data_obj, (int16_t) stream_length, - ISACenc_obj->current_framesamples, ISACenc_obj->BottleNeck, ISACenc_obj->MaxDelay); - - /* if bitstream is too short, add garbage at the end */ - - /* Store length of coded data */ - usefulstr_len = stream_length; - - /* Make sure MinBytes does not exceed packet size limit */ - if ((ISACenc_obj->frame_nb == 0) && (MinBytes > ISACenc_obj->payloadLimitBytes30)) { - MinBytes = ISACenc_obj->payloadLimitBytes30; - } else if ((ISACenc_obj->frame_nb == 1) && (MinBytes > ISACenc_obj->payloadLimitBytes60)) { - MinBytes = ISACenc_obj->payloadLimitBytes60; - } - - /* Make sure we don't allow more than 255 bytes of garbage data. - We store the length of the garbage data in 8 bits in the bitstream, - 255 is the max garbage lenght we can signal using 8 bits. */ - if( MinBytes > usefulstr_len + 255 ) { - MinBytes = usefulstr_len + 255; - } - - /* Save data for creation of multiple bitstreams */ - if (ISACenc_obj->SaveEnc_ptr != NULL) { - (ISACenc_obj->SaveEnc_ptr)->minBytes = MinBytes; - } - - while (stream_length < MinBytes) - { - RTC_DCHECK_GE(stream_length, 0); - if (stream_length & 0x0001){ - ISACenc_obj->bitstr_seed = WEBRTC_SPL_RAND( ISACenc_obj->bitstr_seed ); - ISACenc_obj->bitstr_obj.stream[stream_length / 2] |= - (uint16_t)(ISACenc_obj->bitstr_seed & 0xFF); - } else { - ISACenc_obj->bitstr_seed = WEBRTC_SPL_RAND( ISACenc_obj->bitstr_seed ); - ISACenc_obj->bitstr_obj.stream[stream_length / 2] = - ((uint16_t)ISACenc_obj->bitstr_seed << 8); - } - stream_length++; - } - - /* to get the real stream_length, without garbage */ - if (usefulstr_len & 0x0001) { - ISACenc_obj->bitstr_obj.stream[usefulstr_len>>1] &= 0xFF00; - ISACenc_obj->bitstr_obj.stream[usefulstr_len>>1] += (MinBytes - usefulstr_len) & 0x00FF; - } - else { - ISACenc_obj->bitstr_obj.stream[usefulstr_len>>1] &= 0x00FF; - ISACenc_obj->bitstr_obj.stream[usefulstr_len >> 1] += - ((uint16_t)((MinBytes - usefulstr_len) & 0x00FF) << 8); - } - } - else - { - /* update rate model */ - WebRtcIsacfix_UpdateRateModel(&ISACenc_obj->rate_data_obj, (int16_t) stream_length, - ISACenc_obj->current_framesamples, ISACenc_obj->BottleNeck); - } - return stream_length; -} - -/* This function is used to create a new bitstream with new BWE. - The same data as previously encoded with the fucntion WebRtcIsacfix_EncodeImpl() - is used. The data needed is taken from the struct, where it was stored - when calling the encoder. 
*/ -int WebRtcIsacfix_EncodeStoredData(IsacFixEncoderInstance *ISACenc_obj, - int BWnumber, - float scale) -{ - int ii; - int status; - int16_t BWno = (int16_t)BWnumber; - int stream_length = 0; - - int16_t model; - const uint16_t *Q_PitchGain_cdf_ptr[1]; - const uint16_t **cdf; - const IsacSaveEncoderData *SaveEnc_str; - int32_t tmpLPCcoeffs_g[KLT_ORDER_GAIN<<1]; - int16_t tmpLPCindex_g[KLT_ORDER_GAIN<<1]; - int16_t tmp_fre[FRAMESAMPLES]; - int16_t tmp_fim[FRAMESAMPLES]; - - SaveEnc_str = ISACenc_obj->SaveEnc_ptr; - - /* Check if SaveEnc memory exists */ - if (SaveEnc_str == NULL) { - return (-1); - } - - /* Sanity Check - possible values for BWnumber is 0 - 23 */ - if ((BWnumber < 0) || (BWnumber > 23)) { - return -ISAC_RANGE_ERROR_BW_ESTIMATOR; - } - - /* reset bitstream */ - ISACenc_obj->bitstr_obj.W_upper = 0xFFFFFFFF; - ISACenc_obj->bitstr_obj.streamval = 0; - ISACenc_obj->bitstr_obj.stream_index = 0; - ISACenc_obj->bitstr_obj.full = 1; - - /* encode frame length */ - status = WebRtcIsacfix_EncodeFrameLen(SaveEnc_str->framelength, &ISACenc_obj->bitstr_obj); - if (status < 0) { - /* Wrong frame size */ - return status; - } - - /* encode bandwidth estimate */ - status = WebRtcIsacfix_EncodeReceiveBandwidth(&BWno, &ISACenc_obj->bitstr_obj); - if (status < 0) { - return status; - } - - /* Transcoding */ - /* If scale < 1, rescale data to produce lower bitrate signal */ - if ((0.0 < scale) && (scale < 1.0)) { - /* Compensate LPC gain */ - for (ii = 0; ii < (KLT_ORDER_GAIN*(1+SaveEnc_str->startIdx)); ii++) { - tmpLPCcoeffs_g[ii] = (int32_t) ((scale) * (float) SaveEnc_str->LPCcoeffs_g[ii]); - } - - /* Scale DFT */ - for (ii = 0; ii < (FRAMESAMPLES_HALF*(1+SaveEnc_str->startIdx)); ii++) { - tmp_fre[ii] = (int16_t) ((scale) * (float) SaveEnc_str->fre[ii]) ; - tmp_fim[ii] = (int16_t) ((scale) * (float) SaveEnc_str->fim[ii]) ; - } - } else { - for (ii = 0; ii < (KLT_ORDER_GAIN*(1+SaveEnc_str->startIdx)); ii++) { - tmpLPCindex_g[ii] = SaveEnc_str->LPCindex_g[ii]; - } - - for (ii = 0; ii < (FRAMESAMPLES_HALF*(1+SaveEnc_str->startIdx)); ii++) { - tmp_fre[ii] = SaveEnc_str->fre[ii]; - tmp_fim[ii] = SaveEnc_str->fim[ii]; - } - } - - /* Loop over number of 30 msec */ - for (ii = 0; ii <= SaveEnc_str->startIdx; ii++) - { - - /* encode pitch gains */ - *Q_PitchGain_cdf_ptr = WebRtcIsacfix_kPitchGainCdf; - status = WebRtcIsacfix_EncHistMulti(&ISACenc_obj->bitstr_obj, &SaveEnc_str->pitchGain_index[ii], - Q_PitchGain_cdf_ptr, 1); - if (status < 0) { - return status; - } - - /* entropy coding of quantization pitch lags */ - /* voicing classificiation */ - if (SaveEnc_str->meanGain[ii] <= 819) { - cdf = WebRtcIsacfix_kPitchLagPtrLo; - } else if (SaveEnc_str->meanGain[ii] <= 1638) { - cdf = WebRtcIsacfix_kPitchLagPtrMid; - } else { - cdf = WebRtcIsacfix_kPitchLagPtrHi; - } - status = WebRtcIsacfix_EncHistMulti(&ISACenc_obj->bitstr_obj, - &SaveEnc_str->pitchIndex[PITCH_SUBFRAMES*ii], cdf, PITCH_SUBFRAMES); - if (status < 0) { - return status; - } - - /* LPC */ - /* entropy coding of model number */ - model = 0; - status = WebRtcIsacfix_EncHistMulti(&ISACenc_obj->bitstr_obj, &model, - WebRtcIsacfix_kModelCdfPtr, 1); - if (status < 0) { - return status; - } - - /* entropy coding of quantization indices - LPC shape only */ - status = WebRtcIsacfix_EncHistMulti(&ISACenc_obj->bitstr_obj, &SaveEnc_str->LPCindex_s[KLT_ORDER_SHAPE*ii], - WebRtcIsacfix_kCdfShapePtr[0], KLT_ORDER_SHAPE); - if (status < 0) { - return status; - } - - /* If transcoding, get new LPC gain indices */ - if (scale < 1.0) { - 
WebRtcIsacfix_TranscodeLpcCoef(&tmpLPCcoeffs_g[KLT_ORDER_GAIN*ii], &tmpLPCindex_g[KLT_ORDER_GAIN*ii]); - } - - /* entropy coding of quantization indices - LPC gain */ - status = WebRtcIsacfix_EncHistMulti(&ISACenc_obj->bitstr_obj, &tmpLPCindex_g[KLT_ORDER_GAIN*ii], - WebRtcIsacfix_kCdfGainPtr[0], KLT_ORDER_GAIN); - if (status < 0) { - return status; - } - - /* quantization and lossless coding */ - status = WebRtcIsacfix_EncodeSpec(&tmp_fre[ii*FRAMESAMPLES_HALF], &tmp_fim[ii*FRAMESAMPLES_HALF], - &ISACenc_obj->bitstr_obj, SaveEnc_str->AvgPitchGain[ii]); - if (status < 0) { - return status; - } - } - - /* complete arithmetic coding */ - stream_length = WebRtcIsacfix_EncTerminate(&ISACenc_obj->bitstr_obj); - - return stream_length; -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/entropy_coding.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/entropy_coding.c deleted file mode 100644 index 842e77f47e19..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/entropy_coding.c +++ /dev/null @@ -1,2056 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * entropy_coding.c - * - * This file contains all functions used to arithmetically - * encode the iSAC bistream. - * - */ - -#include - -#include "common_audio/signal_processing/include/signal_processing_library.h" -#include "modules/audio_coding/codecs/isac/fix/source/arith_routins.h" -#include "modules/audio_coding/codecs/isac/fix/source/entropy_coding.h" -#include "modules/audio_coding/codecs/isac/fix/source/lpc_tables.h" -#include "modules/audio_coding/codecs/isac/fix/source/pitch_gain_tables.h" -#include "modules/audio_coding/codecs/isac/fix/source/pitch_lag_tables.h" -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" -#include "modules/audio_coding/codecs/isac/fix/source/spectrum_ar_model_tables.h" -#include "rtc_base/sanitizer.h" - -/* - * Eenumerations for arguments to functions WebRtcIsacfix_MatrixProduct1() - * and WebRtcIsacfix_MatrixProduct2(). -*/ - -enum matrix_index_factor { - kTIndexFactor1 = 1, - kTIndexFactor2 = 2, - kTIndexFactor3 = SUBFRAMES, - kTIndexFactor4 = LPC_SHAPE_ORDER -}; - -enum matrix_index_step { - kTIndexStep1 = 1, - kTIndexStep2 = SUBFRAMES, - kTIndexStep3 = LPC_SHAPE_ORDER -}; - -enum matrixprod_loop_count { - kTLoopCount1 = SUBFRAMES, - kTLoopCount2 = 2, - kTLoopCount3 = LPC_SHAPE_ORDER -}; - -enum matrix1_shift_value { - kTMatrix1_shift0 = 0, - kTMatrix1_shift1 = 1, - kTMatrix1_shift5 = 5 -}; - -enum matrixprod_init_case { - kTInitCase0 = 0, - kTInitCase1 = 1 -}; - -/* - This function implements the fix-point correspondant function to lrint. 
- - FLP: (int32_t)floor(flt+.499999999999) - FIP: (fixVal+roundVal)>>qDomain - - where roundVal = 2^(qDomain-1) = 1<<(qDomain-1) - -*/ -static __inline int32_t CalcLrIntQ(int32_t fixVal, int16_t qDomain) { - return (fixVal + (1 << (qDomain - 1))) >> qDomain; -} - -/* - __inline uint32_t stepwise(int32_t dinQ10) { - - int32_t ind, diQ10, dtQ10; - - diQ10 = dinQ10; - if (diQ10 < DPMIN_Q10) - diQ10 = DPMIN_Q10; - if (diQ10 >= DPMAX_Q10) - diQ10 = DPMAX_Q10 - 1; - - dtQ10 = diQ10 - DPMIN_Q10;*/ /* Q10 + Q10 = Q10 */ -/* ind = (dtQ10 * 5) >> 10; */ /* 2^10 / 5 = 0.2 in Q10 */ -/* Q10 -> Q0 */ - -/* return rpointsFIX_Q10[ind]; - - } -*/ - -/* logN(x) = logN(2)*log2(x) = 0.6931*log2(x). Output in Q8. */ -/* The input argument X to logN(X) is 2^17 times higher than the - input floating point argument Y to log(Y), since the X value - is a Q17 value. This can be compensated for after the call, by - subraction a value Z for each Q-step. One Q-step means that - X gets 2 thimes higher, i.e. Z = logN(2)*256 = 0.693147180559*256 = - 177.445678 should be subtracted (since logN() returns a Q8 value). - For a X value in Q17, the value 177.445678*17 = 3017 should be - subtracted */ -static int16_t CalcLogN(int32_t arg) { - int16_t zeros, log2, frac, logN; - - zeros=WebRtcSpl_NormU32(arg); - frac = (int16_t)((uint32_t)((arg << zeros) & 0x7FFFFFFF) >> 23); - log2 = (int16_t)(((31 - zeros) << 8) + frac); // log2(x) in Q8 - logN = (int16_t)(log2 * 22713 >> 15); // log(2) = 0.693147 = 22713 in Q15 - logN=logN+11; //Scalar compensation which minimizes the (log(x)-logN(x))^2 error over all x. - - return logN; -} - - -/* - expN(x) = 2^(a*x), where a = log2(e) ~= 1.442695 - - Input: Q8 (int16_t) - Output: Q17 (int32_t) - - a = log2(e) = log2(exp(1)) ~= 1.442695 ==> a = 23637 in Q14 (1.442688) - To this value, 700 is added or subtracted in order to get an average error - nearer zero, instead of always same-sign. -*/ - -static int32_t CalcExpN(int16_t x) { - int16_t axINT, axFRAC; - int16_t exp16; - int32_t exp; - int16_t ax = (int16_t)(x * 23637 >> 14); // Q8 - - if (x>=0) { - axINT = ax >> 8; //Q0 - axFRAC = ax&0x00FF; - exp16 = 1 << axINT; // Q0 - axFRAC = axFRAC+256; //Q8 - exp = exp16 * axFRAC; // Q0*Q8 = Q8 - exp <<= 9; // Q17 - } else { - ax = -ax; - axINT = 1 + (ax >> 8); //Q0 - axFRAC = 0x00FF - (ax&0x00FF); - exp16 = (int16_t)(32768 >> axINT); // Q15 - axFRAC = axFRAC+256; //Q8 - exp = exp16 * axFRAC; // Q15*Q8 = Q23 - exp >>= 6; // Q17 - } - - return exp; -} - - -/* compute correlation from power spectrum */ -static void CalcCorrelation(int32_t *PSpecQ12, int32_t *CorrQ7) -{ - int32_t summ[FRAMESAMPLES/8]; - int32_t diff[FRAMESAMPLES/8]; - int32_t sum; - int k, n; - - for (k = 0; k < FRAMESAMPLES/8; k++) { - summ[k] = (PSpecQ12[k] + PSpecQ12[FRAMESAMPLES / 4 - 1 - k] + 16) >> 5; - diff[k] = (PSpecQ12[k] - PSpecQ12[FRAMESAMPLES / 4 - 1 - k] + 16) >> 5; - } - - sum = 2; - for (n = 0; n < FRAMESAMPLES/8; n++) - sum += summ[n]; - CorrQ7[0] = sum; - - for (k = 0; k < AR_ORDER; k += 2) { - sum = 0; - for (n = 0; n < FRAMESAMPLES/8; n++) - sum += (WebRtcIsacfix_kCos[k][n] * diff[n] + 256) >> 9; - CorrQ7[k+1] = sum; - } - - for (k=1; k> 9; - CorrQ7[k+1] = sum; - } -} - -// Some arithmetic operations that are allowed to overflow. (It's still -// undefined behavior, so not a good idea; this just makes UBSan ignore the -// violations, so that our old code can continue to do what it's always been -// doing.) 
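// (Illustrative note, not from the original iSAC sources: these wrappers are
// used further down, e.g. in CalcInvArSpec(), where the cosine-weighted
// accumulation of WebRtcIsacfix_kCos[k][n] * CorrQ11[k + 1] is one of the
// operations allowed to wrap. A minimal usage sketch, with `cosRowQ9` and
// `corrQ11` as hypothetical stand-ins for those operands:
//
//   int32_t acc = 0;
//   for (int n = 0; n < FRAMESAMPLES / 8; ++n) {
//     acc += (OverflowingMulS16S32ToS32(cosRowQ9[n], corrQ11) + 2) >> 2;
//   }
//
// i.e. the helper performs the same `a * b`, but with UBSan's
// signed-integer-overflow check suppressed for that call site via the
// RTC_NO_SANITIZE annotation from rtc_base/sanitizer.h.)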
-static inline int32_t RTC_NO_SANITIZE("signed-integer-overflow") - OverflowingMulS16S32ToS32(int16_t a, int32_t b) { - return a * b; -} -static inline int32_t RTC_NO_SANITIZE("signed-integer-overflow") - OverflowingAddS32S32ToS32(int32_t a, int32_t b) { - return a + b; -} -static inline int32_t RTC_NO_SANITIZE("signed-integer-overflow") - OverflowingSubS32S32ToS32(int32_t a, int32_t b) { - return a - b; -} - -/* compute inverse AR power spectrum */ -static void CalcInvArSpec(const int16_t *ARCoefQ12, - const int32_t gainQ10, - int32_t *CurveQ16) -{ - int32_t CorrQ11[AR_ORDER+1]; - int32_t sum, tmpGain; - int32_t diffQ16[FRAMESAMPLES/8]; - const int16_t *CS_ptrQ9; - int k, n; - int16_t round, shftVal = 0, sh; - - sum = 0; - for (n = 0; n < AR_ORDER+1; n++) - sum += WEBRTC_SPL_MUL(ARCoefQ12[n], ARCoefQ12[n]); /* Q24 */ - sum = ((sum >> 6) * 65 + 32768) >> 16; /* Result in Q8. */ - CorrQ11[0] = (sum * gainQ10 + 256) >> 9; - - /* To avoid overflow, we shift down gainQ10 if it is large. We will not lose any precision */ - if(gainQ10>400000){ - tmpGain = gainQ10 >> 3; - round = 32; - shftVal = 6; - } else { - tmpGain = gainQ10; - round = 256; - shftVal = 9; - } - - for (k = 1; k < AR_ORDER+1; k++) { - sum = 16384; - for (n = k; n < AR_ORDER+1; n++) - sum += WEBRTC_SPL_MUL(ARCoefQ12[n-k], ARCoefQ12[n]); /* Q24 */ - sum >>= 15; - CorrQ11[k] = (sum * tmpGain + round) >> shftVal; - } - sum = CorrQ11[0] << 7; - for (n = 0; n < FRAMESAMPLES/8; n++) - CurveQ16[n] = sum; - - for (k = 1; k < AR_ORDER; k += 2) { - for (n = 0; n < FRAMESAMPLES/8; n++) - CurveQ16[n] += - (OverflowingMulS16S32ToS32(WebRtcIsacfix_kCos[k][n], CorrQ11[k + 1]) + - 2) >> - 2; - } - - CS_ptrQ9 = WebRtcIsacfix_kCos[0]; - - /* If CorrQ11[1] too large we avoid getting overflow in the calculation by shifting */ - sh=WebRtcSpl_NormW32(CorrQ11[1]); - if (CorrQ11[1]==0) /* Use next correlation */ - sh=WebRtcSpl_NormW32(CorrQ11[2]); - - if (sh<9) - shftVal = 9 - sh; - else - shftVal = 0; - - for (n = 0; n < FRAMESAMPLES/8; n++) - diffQ16[n] = (CS_ptrQ9[n] * (CorrQ11[1] >> shftVal) + 2) >> 2; - for (k = 2; k < AR_ORDER; k += 2) { - CS_ptrQ9 = WebRtcIsacfix_kCos[k]; - for (n = 0; n < FRAMESAMPLES/8; n++) - diffQ16[n] += (CS_ptrQ9[n] * (CorrQ11[k + 1] >> shftVal) + 2) >> 2; - } - - for (k=0; k> 6) * 65 + 32768) >> 16; /* Result in Q8. */ - CorrQ11[0] = (sum * gainQ10 + 256) >> 9; - - /* To avoid overflow, we shift down gainQ10 if it is large. 
We will not lose any precision */ - if(gainQ10>400000){ - tmpGain = gainQ10 >> 3; - round = 32; - shftVal = 6; - } else { - tmpGain = gainQ10; - round = 256; - shftVal = 9; - } - - for (k = 1; k < AR_ORDER+1; k++) { - sum = 16384; - for (n = k; n < AR_ORDER+1; n++) - sum += WEBRTC_SPL_MUL(ARCoefQ12[n-k], ARCoefQ12[n]); /* Q24 */ - sum >>= 15; - CorrQ11[k] = (sum * tmpGain + round) >> shftVal; - } - sum = CorrQ11[0] << 7; - for (n = 0; n < FRAMESAMPLES/8; n++) - summQ16[n] = sum; - - for (k = 1; k < (AR_ORDER); k += 2) { - for (n = 0; n < FRAMESAMPLES/8; n++) - summQ16[n] += ((CorrQ11[k + 1] * WebRtcIsacfix_kCos[k][n]) + 2) >> 2; - } - - CS_ptrQ9 = WebRtcIsacfix_kCos[0]; - - /* If CorrQ11[1] too large we avoid getting overflow in the calculation by shifting */ - sh=WebRtcSpl_NormW32(CorrQ11[1]); - if (CorrQ11[1]==0) /* Use next correlation */ - sh=WebRtcSpl_NormW32(CorrQ11[2]); - - if (sh<9) - shftVal = 9 - sh; - else - shftVal = 0; - - for (n = 0; n < FRAMESAMPLES/8; n++) - diffQ16[n] = (CS_ptrQ9[n] * (CorrQ11[1] >> shftVal) + 2) >> 2; - for (k = 2; k < AR_ORDER; k += 2) { - CS_ptrQ9 = WebRtcIsacfix_kCos[k]; - for (n = 0; n < FRAMESAMPLES/8; n++) - diffQ16[n] += (CS_ptrQ9[n] * (CorrQ11[k + 1] >> shftVal) + 2) >> 2; - } - - in_sqrt = summQ16[0] + (diffQ16[0] << shftVal); - - /* convert to magnitude spectrum, by doing square-roots (modified from SPLIB) */ - res = 1 << (WebRtcSpl_GetSizeInBits(in_sqrt) >> 1); - - for (k = 0; k < FRAMESAMPLES/8; k++) - { - in_sqrt = summQ16[k] + (diffQ16[k] << shftVal); - i = 10; - - /* make in_sqrt positive to prohibit sqrt of negative values */ - if(in_sqrt<0) - in_sqrt=-in_sqrt; - - newRes = (in_sqrt / res + res) >> 1; - do - { - res = newRes; - newRes = (in_sqrt / res + res) >> 1; - } while (newRes != res && i-- > 0); - - CurveQ8[k] = (int16_t)newRes; - } - for (k = FRAMESAMPLES/8; k < FRAMESAMPLES/4; k++) { - - in_sqrt = summQ16[FRAMESAMPLES / 4 - 1 - k] - - (diffQ16[FRAMESAMPLES / 4 - 1 - k] << shftVal); - i = 10; - - /* make in_sqrt positive to prohibit sqrt of negative values */ - if(in_sqrt<0) - in_sqrt=-in_sqrt; - - newRes = (in_sqrt / res + res) >> 1; - do - { - res = newRes; - newRes = (in_sqrt / res + res) >> 1; - } while (newRes != res && i-- > 0); - - CurveQ8[k] = (int16_t)newRes; - } - -} - - - -/* generate array of dither samples in Q7 */ -static void GenerateDitherQ7(int16_t *bufQ7, - uint32_t seed, - int16_t length, - int16_t AvgPitchGain_Q12) -{ - int k; - int16_t dither1_Q7, dither2_Q7, dither_gain_Q14, shft; - - if (AvgPitchGain_Q12 < 614) /* this threshold should be equal to that in decode_spec() */ - { - for (k = 0; k < length-2; k += 3) - { - /* new random unsigned int32_t */ - seed = WEBRTC_SPL_UMUL(seed, 196314165) + 907633515; - - /* fixed-point dither sample between -64 and 64 (Q7) */ - dither1_Q7 = (int16_t)(((int32_t)(seed + 16777216)) >> 25); - - /* new random unsigned int32_t */ - seed = WEBRTC_SPL_UMUL(seed, 196314165) + 907633515; - - /* fixed-point dither sample between -64 and 64 */ - dither2_Q7 = (int16_t)(((int32_t)(seed + 16777216)) >> 25); - - shft = (int16_t)(WEBRTC_SPL_RSHIFT_U32(seed, 25) & 15); - if (shft < 5) - { - bufQ7[k] = dither1_Q7; - bufQ7[k+1] = dither2_Q7; - bufQ7[k+2] = 0; - } - else if (shft < 10) - { - bufQ7[k] = dither1_Q7; - bufQ7[k+1] = 0; - bufQ7[k+2] = dither2_Q7; - } - else - { - bufQ7[k] = 0; - bufQ7[k+1] = dither1_Q7; - bufQ7[k+2] = dither2_Q7; - } - } - } - else - { - dither_gain_Q14 = (int16_t)(22528 - WEBRTC_SPL_MUL(10, AvgPitchGain_Q12)); - - /* dither on half of the coefficients */ - for (k 
= 0; k < length-1; k += 2) - { - /* new random unsigned int32_t */ - seed = WEBRTC_SPL_UMUL(seed, 196314165) + 907633515; - - /* fixed-point dither sample between -64 and 64 */ - dither1_Q7 = (int16_t)(((int32_t)(seed + 16777216)) >> 25); - - /* dither sample is placed in either even or odd index */ - shft = (int16_t)(WEBRTC_SPL_RSHIFT_U32(seed, 25) & 1); /* either 0 or 1 */ - - bufQ7[k + shft] = (int16_t)((dither_gain_Q14 * dither1_Q7 + 8192) >> 14); - bufQ7[k + 1 - shft] = 0; - } - } -} - - - - -/* - * function to decode the complex spectrum from the bitstream - * returns the total number of bytes in the stream - */ -int WebRtcIsacfix_DecodeSpec(Bitstr_dec *streamdata, - int16_t *frQ7, - int16_t *fiQ7, - int16_t AvgPitchGain_Q12) -{ - int16_t data[FRAMESAMPLES]; - int32_t invARSpec2_Q16[FRAMESAMPLES/4]; - int16_t ARCoefQ12[AR_ORDER+1]; - int16_t RCQ15[AR_ORDER]; - int16_t gainQ10; - int32_t gain2_Q10; - int len; - int k; - - /* create dither signal */ - GenerateDitherQ7(data, streamdata->W_upper, FRAMESAMPLES, AvgPitchGain_Q12); /* Dither is output in vector 'Data' */ - - /* decode model parameters */ - if (WebRtcIsacfix_DecodeRcCoef(streamdata, RCQ15) < 0) - return -ISAC_RANGE_ERROR_DECODE_SPECTRUM; - - - WebRtcSpl_ReflCoefToLpc(RCQ15, AR_ORDER, ARCoefQ12); - - if (WebRtcIsacfix_DecodeGain2(streamdata, &gain2_Q10) < 0) - return -ISAC_RANGE_ERROR_DECODE_SPECTRUM; - - /* compute inverse AR power spectrum */ - CalcInvArSpec(ARCoefQ12, gain2_Q10, invARSpec2_Q16); - - /* arithmetic decoding of spectrum */ - /* 'data' input and output. Input = Dither */ - len = WebRtcIsacfix_DecLogisticMulti2(data, streamdata, invARSpec2_Q16, (int16_t)FRAMESAMPLES); - - if (len<1) - return -ISAC_RANGE_ERROR_DECODE_SPECTRUM; - - /* subtract dither and scale down spectral samples with low SNR */ - if (AvgPitchGain_Q12 <= 614) - { - for (k = 0; k < FRAMESAMPLES; k += 4) - { - gainQ10 = WebRtcSpl_DivW32W16ResW16(30 << 10, - (int16_t)((uint32_t)(invARSpec2_Q16[k >> 2] + 2195456) >> 16)); - *frQ7++ = (int16_t)((data[k] * gainQ10 + 512) >> 10); - *fiQ7++ = (int16_t)((data[k + 1] * gainQ10 + 512) >> 10); - *frQ7++ = (int16_t)((data[k + 2] * gainQ10 + 512) >> 10); - *fiQ7++ = (int16_t)((data[k + 3] * gainQ10 + 512) >> 10); - } - } - else - { - for (k = 0; k < FRAMESAMPLES; k += 4) - { - gainQ10 = WebRtcSpl_DivW32W16ResW16(36 << 10, - (int16_t)((uint32_t)(invARSpec2_Q16[k >> 2] + 2654208) >> 16)); - *frQ7++ = (int16_t)((data[k] * gainQ10 + 512) >> 10); - *fiQ7++ = (int16_t)((data[k + 1] * gainQ10 + 512) >> 10); - *frQ7++ = (int16_t)((data[k + 2] * gainQ10 + 512) >> 10); - *fiQ7++ = (int16_t)((data[k + 3] * gainQ10 + 512) >> 10); - } - } - - return len; -} - - -int WebRtcIsacfix_EncodeSpec(const int16_t *fr, - const int16_t *fi, - Bitstr_enc *streamdata, - int16_t AvgPitchGain_Q12) -{ - int16_t dataQ7[FRAMESAMPLES]; - int32_t PSpec[FRAMESAMPLES/4]; - uint16_t invARSpecQ8[FRAMESAMPLES/4]; - int32_t CorrQ7[AR_ORDER+1]; - int32_t CorrQ7_norm[AR_ORDER+1]; - int16_t RCQ15[AR_ORDER]; - int16_t ARCoefQ12[AR_ORDER+1]; - int32_t gain2_Q10; - int16_t val; - int32_t nrg; - uint32_t sum; - int16_t lft_shft; - int16_t status; - int k, n, j; - - - /* create dither_float signal */ - GenerateDitherQ7(dataQ7, streamdata->W_upper, FRAMESAMPLES, AvgPitchGain_Q12); - - /* add dither and quantize, and compute power spectrum */ - /* Vector dataQ7 contains Dither in Q7 */ - for (k = 0; k < FRAMESAMPLES; k += 4) - { - val = ((*fr++ + dataQ7[k] + 64) & 0xFF80) - dataQ7[k]; /* Data = Dither */ - dataQ7[k] = val; /* New value in Data */ - sum = 
WEBRTC_SPL_UMUL(val, val); - - val = ((*fi++ + dataQ7[k+1] + 64) & 0xFF80) - dataQ7[k+1]; /* Data = Dither */ - dataQ7[k+1] = val; /* New value in Data */ - sum += WEBRTC_SPL_UMUL(val, val); - - val = ((*fr++ + dataQ7[k+2] + 64) & 0xFF80) - dataQ7[k+2]; /* Data = Dither */ - dataQ7[k+2] = val; /* New value in Data */ - sum += WEBRTC_SPL_UMUL(val, val); - - val = ((*fi++ + dataQ7[k+3] + 64) & 0xFF80) - dataQ7[k+3]; /* Data = Dither */ - dataQ7[k+3] = val; /* New value in Data */ - sum += WEBRTC_SPL_UMUL(val, val); - - PSpec[k>>2] = WEBRTC_SPL_RSHIFT_U32(sum, 2); - } - - /* compute correlation from power spectrum */ - CalcCorrelation(PSpec, CorrQ7); - - - /* find AR coefficients */ - /* number of bit shifts to 14-bit normalize CorrQ7[0] (leaving room for sign) */ - lft_shft = WebRtcSpl_NormW32(CorrQ7[0]) - 18; - - if (lft_shft > 0) { - for (k=0; k> -lft_shft; - } - - /* find RC coefficients */ - WebRtcSpl_AutoCorrToReflCoef(CorrQ7_norm, AR_ORDER, RCQ15); - - /* quantize & code RC Coef */ - status = WebRtcIsacfix_EncodeRcCoef(RCQ15, streamdata); - if (status < 0) { - return status; - } - - /* RC -> AR coefficients */ - WebRtcSpl_ReflCoefToLpc(RCQ15, AR_ORDER, ARCoefQ12); - - /* compute ARCoef' * Corr * ARCoef in Q19 */ - nrg = 0; - for (j = 0; j <= AR_ORDER; j++) { - for (n = 0; n <= j; n++) - nrg += (ARCoefQ12[j] * ((CorrQ7_norm[j - n] * ARCoefQ12[n] + 256) >> 9) + - 4) >> 3; - for (n = j+1; n <= AR_ORDER; n++) - nrg += (ARCoefQ12[j] * ((CorrQ7_norm[n - j] * ARCoefQ12[n] + 256) >> 9) + - 4) >> 3; - } - - if (lft_shft > 0) - nrg >>= lft_shft; - else - nrg <<= -lft_shft; - - if(nrg>131072) - gain2_Q10 = WebRtcSpl_DivResultInQ31(FRAMESAMPLES >> 2, nrg); /* also shifts 31 bits to the left! */ - else - gain2_Q10 = FRAMESAMPLES >> 2; - - /* quantize & code gain2_Q10 */ - if (WebRtcIsacfix_EncodeGain2(&gain2_Q10, streamdata)) - return -1; - - /* compute inverse AR magnitude spectrum */ - CalcRootInvArSpec(ARCoefQ12, gain2_Q10, invARSpecQ8); - - - /* arithmetic coding of spectrum */ - status = WebRtcIsacfix_EncLogisticMulti2(streamdata, dataQ7, invARSpecQ8, (int16_t)FRAMESAMPLES); - if ( status ) - return( status ); - - return 0; -} - - -/* Matlab's LAR definition */ -static void Rc2LarFix(const int16_t *rcQ15, int32_t *larQ17, int16_t order) { - - /* - - This is a piece-wise implemenetation of a rc2lar-function (all values in the comment - are Q15 values and are based on [0 24956/32768 30000/32768 32500/32768], i.e. 
- [0.76159667968750 0.91552734375000 0.99182128906250] - - x0 x1 a k x0(again) b - ================================================================================== - 0.00 0.76: 0 2.625997508581 0 0 - 0.76 0.91: 2.000012018559 7.284502668663 0.761596679688 -3.547841027073 - 0.91 0.99: 3.121320351712 31.115835041229 0.915527343750 -25.366077452148 - 0.99 1.00: 5.495270168700 686.663805654056 0.991821289063 -675.552510708011 - - The implementation is y(x)= a + (x-x0)*k, but this can be simplified to - - y(x) = a-x0*k + x*k = b + x*k, where b = a-x0*k - - akx=[0 2.625997508581 0 - 2.000012018559 7.284502668663 0.761596679688 - 3.121320351712 31.115835041229 0.915527343750 - 5.495270168700 686.663805654056 0.991821289063]; - - b = akx(:,1) - akx(:,3).*akx(:,2) - - [ 0.0 - -3.547841027073 - -25.366077452148 - -675.552510708011] - - */ - - int k; - int16_t rc; - int32_t larAbsQ17; - - for (k = 0; k < order; k++) { - - rc = WEBRTC_SPL_ABS_W16(rcQ15[k]); //Q15 - - /* Calculate larAbsQ17 in Q17 from rc in Q15 */ - - if (rc<24956) { //0.7615966 in Q15 - // (Q15*Q13)>>11 = Q17 - larAbsQ17 = rc * 21512 >> 11; - } else if (rc<30000) { //0.91552734375 in Q15 - // Q17 + (Q15*Q12)>>10 = Q17 - larAbsQ17 = -465024 + (rc * 29837 >> 10); - } else if (rc<32500) { //0.99182128906250 in Q15 - // Q17 + (Q15*Q10)>>8 = Q17 - larAbsQ17 = -3324784 + (rc * 31863 >> 8); - } else { - // Q17 + (Q15*Q5)>>3 = Q17 - larAbsQ17 = -88546020 + (rc * 21973 >> 3); - } - - if (rcQ15[k]>0) { - larQ17[k] = larAbsQ17; - } else { - larQ17[k] = -larAbsQ17; - } - } -} - - -static void Lar2RcFix(const int32_t *larQ17, int16_t *rcQ15, int16_t order) { - - /* - This is a piece-wise implemenetation of a lar2rc-function - See comment in Rc2LarFix() about details. - */ - - int k; - int16_t larAbsQ11; - int32_t rc; - - for (k = 0; k < order; k++) { - - larAbsQ11 = (int16_t)WEBRTC_SPL_ABS_W32((larQ17[k] + 32) >> 6); // Q11 - - if (larAbsQ11<4097) { //2.000012018559 in Q11 - // Q11*Q16>>12 = Q15 - rc = larAbsQ11 * 24957 >> 12; - } else if (larAbsQ11<6393) { //3.121320351712 in Q11 - // (Q11*Q17 + Q13)>>13 = Q15 - rc = (larAbsQ11 * 17993 + 130738688) >> 13; - } else if (larAbsQ11<11255) { //5.495270168700 in Q11 - // (Q11*Q19 + Q30)>>15 = Q15 - rc = (larAbsQ11 * 16850 + 875329820) >> 15; - } else { - // (Q11*Q24>>16 + Q19)>>4 = Q15 - rc = (((larAbsQ11 * 24433) >> 16) + 515804) >> 4; - } - - if (larQ17[k]<=0) { - rc = -rc; - } - - rcQ15[k] = (int16_t) rc; // Q15 - } -} - -static void Poly2LarFix(int16_t *lowbandQ15, - int16_t orderLo, - int16_t *hibandQ15, - int16_t orderHi, - int16_t Nsub, - int32_t *larsQ17) { - - int k, n; - int32_t *outpQ17; - int16_t orderTot; - int32_t larQ17[MAX_ORDER]; // Size 7+6 is enough - - orderTot = (orderLo + orderHi); - outpQ17 = larsQ17; - for (k = 0; k < Nsub; k++) { - - Rc2LarFix(lowbandQ15, larQ17, orderLo); - - for (n = 0; n < orderLo; n++) - outpQ17[n] = larQ17[n]; //Q17 - - Rc2LarFix(hibandQ15, larQ17, orderHi); - - for (n = 0; n < orderHi; n++) - outpQ17[n + orderLo] = larQ17[n]; //Q17; - - outpQ17 += orderTot; - lowbandQ15 += orderLo; - hibandQ15 += orderHi; - } -} - - -static void Lar2polyFix(int32_t *larsQ17, - int16_t *lowbandQ15, - int16_t orderLo, - int16_t *hibandQ15, - int16_t orderHi, - int16_t Nsub) { - - int k, n; - int16_t orderTot; - int16_t *outplQ15, *outphQ15; - int32_t *inpQ17; - int16_t rcQ15[7+6]; - - orderTot = (orderLo + orderHi); - outplQ15 = lowbandQ15; - outphQ15 = hibandQ15; - inpQ17 = larsQ17; - for (k = 0; k < Nsub; k++) { - - /* gains not handled here as in the FLP version */ - 
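    /* Illustrative cross-check, not from the original iSAC sources: the
       fixed-point segments in Rc2LarFix() above can be verified against the
       floating-point table quoted in its comment, lar = a + (rc - x0) * k.
       For example, rc = 26214 in Q15 (~0.7999878) falls in the second
       segment, so

         larAbsQ17 = -465024 + (26214 * 29837 >> 10) = 298791   (~2.27960 in Q17)

       while 2.000012018559 + (26214/32768 - 0.761596679688) * 7.284502668663
       gives ~2.27967, i.e. the piecewise fixed-point mapping agrees with the
       reference formula to within about 1e-4. */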
- /* Low band */ - Lar2RcFix(&inpQ17[0], rcQ15, orderLo); - for (n = 0; n < orderLo; n++) - outplQ15[n] = rcQ15[n]; // Refl. coeffs - - /* High band */ - Lar2RcFix(&inpQ17[orderLo], rcQ15, orderHi); - for (n = 0; n < orderHi; n++) - outphQ15[n] = rcQ15[n]; // Refl. coeffs - - inpQ17 += orderTot; - outplQ15 += orderLo; - outphQ15 += orderHi; - } -} - -/* -Function WebRtcIsacfix_MatrixProduct1C() does one form of matrix multiplication. -It first shifts input data of one matrix, determines the right indexes for the -two matrixes, multiply them, and write the results into an output buffer. - -Note that two factors (or, multipliers) determine the initialization values of -the variable `matrix1_index` in the code. The relationship is -`matrix1_index` = `matrix1_index_factor1` * `matrix1_index_factor2`, where -`matrix1_index_factor1` is given by the argument while `matrix1_index_factor2` -is determined by the value of argument `matrix1_index_init_case`; -`matrix1_index_factor2` is the value of the outmost loop counter j (when -`matrix1_index_init_case` is 0), or the value of the middle loop counter k (when -`matrix1_index_init_case` is non-zero). - -`matrix0_index` is determined the same way. - -Arguments: - matrix0[]: matrix0 data in Q15 domain. - matrix1[]: matrix1 data. - matrix_product[]: output data (matrix product). - matrix1_index_factor1: The first of two factors determining the - initialization value of matrix1_index. - matrix0_index_factor1: The first of two factors determining the - initialization value of matrix0_index. - matrix1_index_init_case: Case number for selecting the second of two - factors determining the initialization value - of matrix1_index and matrix0_index. - matrix1_index_step: Incremental step for matrix1_index. - matrix0_index_step: Incremental step for matrix0_index. - inner_loop_count: Maximum count of the inner loop. - mid_loop_count: Maximum count of the intermediate loop. - shift: Left shift value for matrix1. -*/ -void WebRtcIsacfix_MatrixProduct1C(const int16_t matrix0[], - const int32_t matrix1[], - int32_t matrix_product[], - const int matrix1_index_factor1, - const int matrix0_index_factor1, - const int matrix1_index_init_case, - const int matrix1_index_step, - const int matrix0_index_step, - const int inner_loop_count, - const int mid_loop_count, - const int shift) { - int j = 0, k = 0, n = 0; - int matrix0_index = 0, matrix1_index = 0, matrix_prod_index = 0; - int* matrix0_index_factor2 = &k; - int* matrix1_index_factor2 = &j; - if (matrix1_index_init_case != 0) { - matrix0_index_factor2 = &j; - matrix1_index_factor2 = &k; - } - - for (j = 0; j < SUBFRAMES; j++) { - matrix_prod_index = mid_loop_count * j; - for (k = 0; k < mid_loop_count; k++) { - int32_t sum32 = 0; - matrix0_index = matrix0_index_factor1 * (*matrix0_index_factor2); - matrix1_index = matrix1_index_factor1 * (*matrix1_index_factor2); - for (n = 0; n < inner_loop_count; n++) { - sum32 += WEBRTC_SPL_MUL_16_32_RSFT16( - matrix0[matrix0_index], matrix1[matrix1_index] * (1 << shift)); - matrix0_index += matrix0_index_step; - matrix1_index += matrix1_index_step; - } - matrix_product[matrix_prod_index] = sum32; - matrix_prod_index++; - } - } -} - -/* -Function WebRtcIsacfix_MatrixProduct2C() returns the product of two matrixes, -one of which has two columns. It first has to determine the correct index of -the first matrix before doing the actual element multiplication. - -Arguments: - matrix0[]: A matrix in Q15 domain. - matrix1[]: A matrix in Q21 domain. 
- matrix_product[]: Output data in Q17 domain. - matrix0_index_factor: A factor determining the initialization value - of matrix0_index. - matrix0_index_step: Incremental step for matrix0_index. -*/ -void WebRtcIsacfix_MatrixProduct2C(const int16_t matrix0[], - const int32_t matrix1[], - int32_t matrix_product[], - const int matrix0_index_factor, - const int matrix0_index_step) { - int j = 0, n = 0; - int matrix1_index = 0, matrix0_index = 0, matrix_prod_index = 0; - for (j = 0; j < SUBFRAMES; j++) { - int32_t sum32 = 0, sum32_2 = 0; - matrix1_index = 0; - matrix0_index = matrix0_index_factor * j; - for (n = SUBFRAMES; n > 0; n--) { - sum32 += (WEBRTC_SPL_MUL_16_32_RSFT16(matrix0[matrix0_index], - matrix1[matrix1_index])); - sum32_2 += (WEBRTC_SPL_MUL_16_32_RSFT16(matrix0[matrix0_index], - matrix1[matrix1_index + 1])); - matrix1_index += 2; - matrix0_index += matrix0_index_step; - } - matrix_product[matrix_prod_index] = sum32 >> 3; - matrix_product[matrix_prod_index + 1] = sum32_2 >> 3; - matrix_prod_index += 2; - } -} - -int WebRtcIsacfix_DecodeLpc(int32_t *gain_lo_hiQ17, - int16_t *LPCCoef_loQ15, - int16_t *LPCCoef_hiQ15, - Bitstr_dec *streamdata, - int16_t *outmodel) { - - int32_t larsQ17[KLT_ORDER_SHAPE]; // KLT_ORDER_GAIN+KLT_ORDER_SHAPE == (ORDERLO+ORDERHI)*SUBFRAMES - int err; - - err = WebRtcIsacfix_DecodeLpcCoef(streamdata, larsQ17, gain_lo_hiQ17, outmodel); - if (err<0) // error check - return -ISAC_RANGE_ERROR_DECODE_LPC; - - Lar2polyFix(larsQ17, LPCCoef_loQ15, ORDERLO, LPCCoef_hiQ15, ORDERHI, SUBFRAMES); - - return 0; -} - -/* decode & dequantize LPC Coef */ -int WebRtcIsacfix_DecodeLpcCoef(Bitstr_dec *streamdata, - int32_t *LPCCoefQ17, - int32_t *gain_lo_hiQ17, - int16_t *outmodel) -{ - int j, k, n; - int err; - int16_t pos, pos2, posg, poss; - int16_t gainpos; - int16_t model; - int16_t index_QQ[KLT_ORDER_SHAPE]; - int32_t tmpcoeffs_gQ17[KLT_ORDER_GAIN]; - int32_t tmpcoeffs2_gQ21[KLT_ORDER_GAIN]; - int16_t tmpcoeffs_sQ10[KLT_ORDER_SHAPE]; - int32_t tmpcoeffs_sQ17[KLT_ORDER_SHAPE]; - int32_t tmpcoeffs2_sQ18[KLT_ORDER_SHAPE]; - int32_t sumQQ; - int16_t sumQQ16; - int32_t tmp32; - - - - /* entropy decoding of model number */ - err = WebRtcIsacfix_DecHistOneStepMulti(&model, streamdata, WebRtcIsacfix_kModelCdfPtr, WebRtcIsacfix_kModelInitIndex, 1); - if (err<0) // error check - return err; - - /* entropy decoding of quantization indices */ - err = WebRtcIsacfix_DecHistOneStepMulti(index_QQ, streamdata, WebRtcIsacfix_kCdfShapePtr[model], WebRtcIsacfix_kInitIndexShape[model], KLT_ORDER_SHAPE); - if (err<0) // error check - return err; - /* find quantization levels for coefficients */ - for (k=0; k> 7; // (Q10*Q15)>>7 = Q18 - pos++; - pos2++; - } - tmpcoeffs2_sQ18[poss] = sumQQ; //Q18 - poss++; - } - } - - /* right transform */ // Transpose matrix - WebRtcIsacfix_MatrixProduct2(WebRtcIsacfix_kT2GainQ15[0], tmpcoeffs2_gQ21, - tmpcoeffs_gQ17, kTIndexFactor1, kTIndexStep2); - WebRtcIsacfix_MatrixProduct1(WebRtcIsacfix_kT2ShapeQ15[model], - tmpcoeffs2_sQ18, tmpcoeffs_sQ17, kTIndexFactor1, kTIndexFactor1, - kTInitCase1, kTIndexStep3, kTIndexStep2, kTLoopCount1, kTLoopCount3, - kTMatrix1_shift0); - - /* scaling, mean addition, and gain restoration */ - gainpos = 0; - posg = 0;poss = 0;pos=0; - for (k=0; k> 11); - sumQQ16 += WebRtcIsacfix_kMeansGainQ8[model][posg]; - sumQQ = CalcExpN(sumQQ16); // Q8 in and Q17 out - gain_lo_hiQ17[gainpos] = sumQQ; //Q17 - gainpos++; - posg++; - - // Divide by 4 and get Q17 to Q8, i.e. shift 2+9. 
- sumQQ16 = (int16_t)(tmpcoeffs_gQ17[posg] >> 11); - sumQQ16 += WebRtcIsacfix_kMeansGainQ8[model][posg]; - sumQQ = CalcExpN(sumQQ16); // Q8 in and Q17 out - gain_lo_hiQ17[gainpos] = sumQQ; //Q17 - gainpos++; - posg++; - - /* lo band LAR coeffs */ - for (n=0; n>16 = Q17, with 1/2.1 = 0.47619047619 ~= 31208 in Q16 - tmp32 = tmp32 + WebRtcIsacfix_kMeansShapeQ17[model][poss]; // Q17+Q17 = Q17 - LPCCoefQ17[pos] = tmp32; - } - - /* hi band LAR coeffs */ - for (n=0; n>16)<<3 = Q17, with 1/0.45 = 2.222222222222 ~= 18204 in Q13 - tmp32 = - WEBRTC_SPL_MUL_16_32_RSFT16(18204, tmpcoeffs_sQ17[poss]) * (1 << 3); - tmp32 = tmp32 + WebRtcIsacfix_kMeansShapeQ17[model][poss]; // Q17+Q17 = Q17 - LPCCoefQ17[pos] = tmp32; - } - } - - - *outmodel=model; - - return 0; -} - -/* estimate codel length of LPC Coef */ -static int EstCodeLpcCoef(int32_t *LPCCoefQ17, - int32_t *gain_lo_hiQ17, - int16_t *model, - int32_t *sizeQ11, - Bitstr_enc *streamdata, - IsacSaveEncoderData* encData, - transcode_obj *transcodingParam) { - int j, k, n; - int16_t posQQ, pos2QQ, gainpos; - int16_t pos, poss, posg, offsg; - int16_t index_gQQ[KLT_ORDER_GAIN], index_sQQ[KLT_ORDER_SHAPE]; - int16_t index_ovr_gQQ[KLT_ORDER_GAIN], index_ovr_sQQ[KLT_ORDER_SHAPE]; - int32_t BitsQQ; - - int16_t tmpcoeffs_gQ6[KLT_ORDER_GAIN]; - int32_t tmpcoeffs_gQ17[KLT_ORDER_GAIN]; - int32_t tmpcoeffs_sQ17[KLT_ORDER_SHAPE]; - int32_t tmpcoeffs2_gQ21[KLT_ORDER_GAIN]; - int32_t tmpcoeffs2_sQ17[KLT_ORDER_SHAPE]; - int32_t sumQQ; - int32_t tmp32; - int16_t sumQQ16; - int status = 0; - - /* write LAR coefficients to statistics file */ - /* Save data for creation of multiple bitstreams (and transcoding) */ - if (encData != NULL) { - for (k=0; kLPCcoeffs_g[KLT_ORDER_GAIN*encData->startIdx + k] = gain_lo_hiQ17[k]; - } - } - - /* log gains, mean removal and scaling */ - posg = 0;poss = 0;pos=0; gainpos=0; - - for (k=0; k WebRtcIsacfix_kMaxIndGain[k]) { - index_gQQ[k] = WebRtcIsacfix_kMaxIndGain[k]; - } - index_ovr_gQQ[k] = WebRtcIsacfix_kOffsetGain[0][k]+index_gQQ[k]; - posQQ = WebRtcIsacfix_kOfLevelsGain[0] + index_ovr_gQQ[k]; - - /* Save data for creation of multiple bitstreams */ - if (encData != NULL) { - encData->LPCindex_g[KLT_ORDER_GAIN*encData->startIdx + k] = index_gQQ[k]; - } - - /* determine number of bits */ - sumQQ = WebRtcIsacfix_kCodeLenGainQ11[posQQ]; //Q11 - BitsQQ += sumQQ; - } - - for (k=0; k WebRtcIsacfix_kMaxIndShape[k]) - index_sQQ[k] = WebRtcIsacfix_kMaxIndShape[k]; - index_ovr_sQQ[k] = WebRtcIsacfix_kOffsetShape[0][k]+index_sQQ[k]; - - posQQ = WebRtcIsacfix_kOfLevelsShape[0] + index_ovr_sQQ[k]; - sumQQ = WebRtcIsacfix_kCodeLenShapeQ11[posQQ]; //Q11 - BitsQQ += sumQQ; - } - - - - *model = 0; - *sizeQ11=BitsQQ; - - /* entropy coding of model number */ - status = WebRtcIsacfix_EncHistMulti(streamdata, model, WebRtcIsacfix_kModelCdfPtr, 1); - if (status < 0) { - return status; - } - - /* entropy coding of quantization indices - shape only */ - status = WebRtcIsacfix_EncHistMulti(streamdata, index_sQQ, WebRtcIsacfix_kCdfShapePtr[0], KLT_ORDER_SHAPE); - if (status < 0) { - return status; - } - - /* Save data for creation of multiple bitstreams */ - if (encData != NULL) { - for (k=0; kLPCindex_s[KLT_ORDER_SHAPE*encData->startIdx + k] = index_sQQ[k]; - } - } - /* save the state of the bitstream object 'streamdata' for the possible bit-rate reduction */ - transcodingParam->full = streamdata->full; - transcodingParam->stream_index = streamdata->stream_index; - transcodingParam->streamval = streamdata->streamval; - transcodingParam->W_upper = 
streamdata->W_upper; - transcodingParam->beforeLastWord = streamdata->stream[streamdata->stream_index-1]; - transcodingParam->lastWord = streamdata->stream[streamdata->stream_index]; - - /* entropy coding of index */ - status = WebRtcIsacfix_EncHistMulti(streamdata, index_gQQ, WebRtcIsacfix_kCdfGainPtr[0], KLT_ORDER_GAIN); - if (status < 0) { - return status; - } - - /* find quantization levels for shape coefficients */ - for (k=0; k>16 = Q17, with 1/2.1 = 0.47619047619 ~= 31208 in Q16 - tmp32 = tmp32 + WebRtcIsacfix_kMeansShapeQ17[0][poss]; // Q17+Q17 = Q17 - LPCCoefQ17[pos] = tmp32; - } - - /* hi band LAR coeffs */ - for (n=0; n>16)<<3 = Q17, with 1/0.45 = 2.222222222222 ~= 18204 in Q13 - tmp32 = WEBRTC_SPL_MUL_16_32_RSFT16(18204, tmpcoeffs_sQ17[poss]) << 3; - tmp32 = tmp32 + WebRtcIsacfix_kMeansShapeQ17[0][poss]; // Q17+Q17 = Q17 - LPCCoefQ17[pos] = tmp32; - } - - } - - //to update tmpcoeffs_gQ17 to the proper state - for (k=0; k> (16 - 1) = Q17; Q17 << 4 = Q21. - sumQQ = (WEBRTC_SPL_MUL_16_32_RSFT16(WebRtcIsacfix_kT1GainQ15[0][0], - tmpcoeffs_gQ17[offsg]) << 1); - sumQQ += (WEBRTC_SPL_MUL_16_32_RSFT16(WebRtcIsacfix_kT1GainQ15[0][1], - tmpcoeffs_gQ17[offsg + 1]) << 1); - tmpcoeffs2_gQ21[posg] = sumQQ << 4; - posg++; - - sumQQ = (WEBRTC_SPL_MUL_16_32_RSFT16(WebRtcIsacfix_kT1GainQ15[0][2], - tmpcoeffs_gQ17[offsg]) << 1); - sumQQ += (WEBRTC_SPL_MUL_16_32_RSFT16(WebRtcIsacfix_kT1GainQ15[0][3], - tmpcoeffs_gQ17[offsg + 1]) << 1); - tmpcoeffs2_gQ21[posg] = sumQQ << 4; - posg++; - offsg += 2; - } - - /* right transform */ // Transpose matrix - WebRtcIsacfix_MatrixProduct2(WebRtcIsacfix_kT2GainQ15[0], tmpcoeffs2_gQ21, - tmpcoeffs_gQ17, kTIndexFactor1, kTIndexStep2); - - /* scaling, mean addition, and gain restoration */ - posg = 0; - gainpos = 0; - for (k=0; k<2*SUBFRAMES; k++) { - - // Divide by 4 and get Q17 to Q8, i.e. shift 2+9. 
- sumQQ16 = (int16_t)(tmpcoeffs_gQ17[posg] >> 11); - sumQQ16 += WebRtcIsacfix_kMeansGainQ8[0][posg]; - sumQQ = CalcExpN(sumQQ16); // Q8 in and Q17 out - gain_lo_hiQ17[gainpos] = sumQQ; //Q17 - - gainpos++; - pos++;posg++; - } - - return 0; -} - -int WebRtcIsacfix_EstCodeLpcGain(int32_t *gain_lo_hiQ17, - Bitstr_enc *streamdata, - IsacSaveEncoderData* encData) { - int j, k; - int16_t posQQ, pos2QQ, gainpos; - int16_t posg; - int16_t index_gQQ[KLT_ORDER_GAIN]; - - int16_t tmpcoeffs_gQ6[KLT_ORDER_GAIN]; - int32_t tmpcoeffs_gQ17[KLT_ORDER_GAIN]; - int32_t tmpcoeffs2_gQ21[KLT_ORDER_GAIN]; - int32_t sumQQ; - int status = 0; - - /* write LAR coefficients to statistics file */ - /* Save data for creation of multiple bitstreams (and transcoding) */ - if (encData != NULL) { - for (k=0; kLPCcoeffs_g[KLT_ORDER_GAIN*encData->startIdx + k] = gain_lo_hiQ17[k]; - } - } - - /* log gains, mean removal and scaling */ - posg = 0; gainpos = 0; - - for (k=0; k WebRtcIsacfix_kMaxIndGain[k]) { - index_gQQ[k] = WebRtcIsacfix_kMaxIndGain[k]; - } - - /* Save data for creation of multiple bitstreams */ - if (encData != NULL) { - encData->LPCindex_g[KLT_ORDER_GAIN*encData->startIdx + k] = index_gQQ[k]; - } - } - - /* entropy coding of index */ - status = WebRtcIsacfix_EncHistMulti(streamdata, index_gQQ, WebRtcIsacfix_kCdfGainPtr[0], KLT_ORDER_GAIN); - if (status < 0) { - return status; - } - - return 0; -} - - -int WebRtcIsacfix_EncodeLpc(int32_t *gain_lo_hiQ17, - int16_t *LPCCoef_loQ15, - int16_t *LPCCoef_hiQ15, - int16_t *model, - int32_t *sizeQ11, - Bitstr_enc *streamdata, - IsacSaveEncoderData* encData, - transcode_obj *transcodeParam) -{ - int status = 0; - int32_t larsQ17[KLT_ORDER_SHAPE]; // KLT_ORDER_SHAPE == (ORDERLO+ORDERHI)*SUBFRAMES - // = (6+12)*6 == 108 - - Poly2LarFix(LPCCoef_loQ15, ORDERLO, LPCCoef_hiQ15, ORDERHI, SUBFRAMES, larsQ17); - - status = EstCodeLpcCoef(larsQ17, gain_lo_hiQ17, model, sizeQ11, - streamdata, encData, transcodeParam); - if (status < 0) { - return (status); - } - - Lar2polyFix(larsQ17, LPCCoef_loQ15, ORDERLO, LPCCoef_hiQ15, ORDERHI, SUBFRAMES); - - return 0; -} - - -/* decode & dequantize RC */ -int WebRtcIsacfix_DecodeRcCoef(Bitstr_dec *streamdata, int16_t *RCQ15) -{ - int k, err; - int16_t index[AR_ORDER]; - - /* entropy decoding of quantization indices */ - err = WebRtcIsacfix_DecHistOneStepMulti(index, streamdata, WebRtcIsacfix_kRcCdfPtr, WebRtcIsacfix_kRcInitInd, AR_ORDER); - if (err<0) // error check - return err; - - /* find quantization levels for reflection coefficients */ - for (k=0; k WebRtcIsacfix_kRcBound[index[k]]) - { - while (RCQ15[k] > WebRtcIsacfix_kRcBound[index[k] + 1]) - index[k]++; - } - else - { - while (RCQ15[k] < WebRtcIsacfix_kRcBound[--index[k]]) ; - } - - RCQ15[k] = *(WebRtcIsacfix_kRcLevPtr[k] + index[k]); - } - - - /* entropy coding of quantization indices */ - status = WebRtcIsacfix_EncHistMulti(streamdata, index, WebRtcIsacfix_kRcCdfPtr, AR_ORDER); - - /* If error in WebRtcIsacfix_EncHistMulti(), status will be negative, otherwise 0 */ - return status; -} - - -/* decode & dequantize squared Gain */ -int WebRtcIsacfix_DecodeGain2(Bitstr_dec *streamdata, int32_t *gainQ10) -{ - int err; - int16_t index; - - /* entropy decoding of quantization index */ - err = WebRtcIsacfix_DecHistOneStepMulti( - &index, - streamdata, - WebRtcIsacfix_kGainPtr, - WebRtcIsacfix_kGainInitInd, - 1); - /* error check */ - if (err<0) { - return err; - } - - /* find quantization level */ - *gainQ10 = WebRtcIsacfix_kGain2Lev[index]; - - return 0; -} - - - -/* quantize & code 
squared Gain */ -int WebRtcIsacfix_EncodeGain2(int32_t *gainQ10, Bitstr_enc *streamdata) -{ - int16_t index; - int status = 0; - - /* find quantization index */ - index = WebRtcIsacfix_kGainInitInd[0]; - if (*gainQ10 > WebRtcIsacfix_kGain2Bound[index]) - { - while (*gainQ10 > WebRtcIsacfix_kGain2Bound[index + 1]) - index++; - } - else - { - while (*gainQ10 < WebRtcIsacfix_kGain2Bound[--index]) ; - } - - /* dequantize */ - *gainQ10 = WebRtcIsacfix_kGain2Lev[index]; - - /* entropy coding of quantization index */ - status = WebRtcIsacfix_EncHistMulti(streamdata, &index, WebRtcIsacfix_kGainPtr, 1); - - /* If error in WebRtcIsacfix_EncHistMulti(), status will be negative, otherwise 0 */ - return status; -} - - -/* code and decode Pitch Gains and Lags functions */ - -/* decode & dequantize Pitch Gains */ -int WebRtcIsacfix_DecodePitchGain(Bitstr_dec *streamdata, int16_t *PitchGains_Q12) -{ - int err; - int16_t index_comb; - const uint16_t *pitch_gain_cdf_ptr[1]; - - /* entropy decoding of quantization indices */ - *pitch_gain_cdf_ptr = WebRtcIsacfix_kPitchGainCdf; - err = WebRtcIsacfix_DecHistBisectMulti(&index_comb, streamdata, pitch_gain_cdf_ptr, WebRtcIsacfix_kCdfTableSizeGain, 1); - /* error check, Q_mean_Gain.. tables are of size 144 */ - if ((err < 0) || (index_comb < 0) || (index_comb >= 144)) - return -ISAC_RANGE_ERROR_DECODE_PITCH_GAIN; - - /* unquantize back to pitch gains by table look-up */ - PitchGains_Q12[0] = WebRtcIsacfix_kPitchGain1[index_comb]; - PitchGains_Q12[1] = WebRtcIsacfix_kPitchGain2[index_comb]; - PitchGains_Q12[2] = WebRtcIsacfix_kPitchGain3[index_comb]; - PitchGains_Q12[3] = WebRtcIsacfix_kPitchGain4[index_comb]; - - return 0; -} - - -/* quantize & code Pitch Gains */ -int WebRtcIsacfix_EncodePitchGain(int16_t* PitchGains_Q12, - Bitstr_enc* streamdata, - IsacSaveEncoderData* encData) { - int k,j; - int16_t SQ15[PITCH_SUBFRAMES]; - int16_t index[3]; - int16_t index_comb; - const uint16_t *pitch_gain_cdf_ptr[1]; - int32_t CQ17; - int status = 0; - - - /* get the approximate arcsine (almost linear)*/ - for (k=0; k> 2); // Q15 - - - /* find quantization index; only for the first three transform coefficients */ - for (k=0; k<3; k++) - { - /* transform */ - CQ17=0; - for (j=0; j> 10; // Q17 - } - - index[k] = (int16_t)((CQ17 + 8192)>>14); // Rounding and scaling with stepsize (=1/0.125=8) - - /* check that the index is not outside the boundaries of the table */ - if (index[k] < WebRtcIsacfix_kLowerlimiGain[k]) index[k] = WebRtcIsacfix_kLowerlimiGain[k]; - else if (index[k] > WebRtcIsacfix_kUpperlimitGain[k]) index[k] = WebRtcIsacfix_kUpperlimitGain[k]; - index[k] -= WebRtcIsacfix_kLowerlimiGain[k]; - } - - /* calculate unique overall index */ - index_comb = (int16_t)(WEBRTC_SPL_MUL(WebRtcIsacfix_kMultsGain[0], index[0]) + - WEBRTC_SPL_MUL(WebRtcIsacfix_kMultsGain[1], index[1]) + index[2]); - - /* unquantize back to pitch gains by table look-up */ - // (Y) - PitchGains_Q12[0] = WebRtcIsacfix_kPitchGain1[index_comb]; - PitchGains_Q12[1] = WebRtcIsacfix_kPitchGain2[index_comb]; - PitchGains_Q12[2] = WebRtcIsacfix_kPitchGain3[index_comb]; - PitchGains_Q12[3] = WebRtcIsacfix_kPitchGain4[index_comb]; - - - /* entropy coding of quantization pitch gains */ - *pitch_gain_cdf_ptr = WebRtcIsacfix_kPitchGainCdf; - status = WebRtcIsacfix_EncHistMulti(streamdata, &index_comb, pitch_gain_cdf_ptr, 1); - if (status < 0) { - return status; - } - - /* Save data for creation of multiple bitstreams */ - if (encData != NULL) { - encData->pitchGain_index[encData->startIdx] = index_comb; - } - - 
return 0; -} - - - -/* Pitch LAG */ - - -/* decode & dequantize Pitch Lags */ -int WebRtcIsacfix_DecodePitchLag(Bitstr_dec *streamdata, - int16_t *PitchGain_Q12, - int16_t *PitchLags_Q7) -{ - int k, err; - int16_t index[PITCH_SUBFRAMES]; - const int16_t *mean_val2Q10, *mean_val4Q10; - - const int16_t *lower_limit; - const uint16_t *init_index; - const uint16_t *cdf_size; - const uint16_t **cdf; - - int32_t meangainQ12; - int32_t CQ11, CQ10,tmp32a,tmp32b; - int16_t shft; - - meangainQ12=0; - for (k = 0; k < 4; k++) - meangainQ12 += PitchGain_Q12[k]; - - meangainQ12 >>= 2; // Get average. - - /* voicing classificiation */ - if (meangainQ12 <= 819) { // mean_gain < 0.2 - shft = -1; // StepSize=2.0; - cdf = WebRtcIsacfix_kPitchLagPtrLo; - cdf_size = WebRtcIsacfix_kPitchLagSizeLo; - mean_val2Q10 = WebRtcIsacfix_kMeanLag2Lo; - mean_val4Q10 = WebRtcIsacfix_kMeanLag4Lo; - lower_limit = WebRtcIsacfix_kLowerLimitLo; - init_index = WebRtcIsacfix_kInitIndLo; - } else if (meangainQ12 <= 1638) { // mean_gain < 0.4 - shft = 0; // StepSize=1.0; - cdf = WebRtcIsacfix_kPitchLagPtrMid; - cdf_size = WebRtcIsacfix_kPitchLagSizeMid; - mean_val2Q10 = WebRtcIsacfix_kMeanLag2Mid; - mean_val4Q10 = WebRtcIsacfix_kMeanLag4Mid; - lower_limit = WebRtcIsacfix_kLowerLimitMid; - init_index = WebRtcIsacfix_kInitIndMid; - } else { - shft = 1; // StepSize=0.5; - cdf = WebRtcIsacfix_kPitchLagPtrHi; - cdf_size = WebRtcIsacfix_kPitchLagSizeHi; - mean_val2Q10 = WebRtcIsacfix_kMeanLag2Hi; - mean_val4Q10 = WebRtcIsacfix_kMeanLag4Hi; - lower_limit = WebRtcIsacfix_kLowerLimitHi; - init_index = WebRtcIsacfix_kInitIndHi; - } - - /* entropy decoding of quantization indices */ - err = WebRtcIsacfix_DecHistBisectMulti(index, streamdata, cdf, cdf_size, 1); - if ((err<0) || (index[0]<0)) // error check - return -ISAC_RANGE_ERROR_DECODE_PITCH_LAG; - - err = WebRtcIsacfix_DecHistOneStepMulti(index+1, streamdata, cdf+1, init_index, 3); - if (err<0) // error check - return -ISAC_RANGE_ERROR_DECODE_PITCH_LAG; - - - /* unquantize back to transform coefficients and do the inverse transform: S = T'*C */ - CQ11 = ((int32_t)index[0] + lower_limit[0]); // Q0 - CQ11 = WEBRTC_SPL_SHIFT_W32(CQ11,11-shft); // Scale with StepSize, Q11 - for (k=0; k> 5); - } - - CQ10 = mean_val2Q10[index[1]]; - for (k=0; k> 10; - PitchLags_Q7[k] += (int16_t)(tmp32b >> 5); - } - - CQ10 = mean_val4Q10[index[3]]; - for (k=0; k> 10; - PitchLags_Q7[k] += (int16_t)(tmp32b >> 5); - } - - return 0; -} - - - -/* quantize & code Pitch Lags */ -int WebRtcIsacfix_EncodePitchLag(int16_t* PitchLagsQ7, - int16_t* PitchGain_Q12, - Bitstr_enc* streamdata, - IsacSaveEncoderData* encData) { - int k, j; - int16_t index[PITCH_SUBFRAMES]; - int32_t meangainQ12, CQ17; - int32_t CQ11, CQ10,tmp32a; - - const int16_t *mean_val2Q10,*mean_val4Q10; - const int16_t *lower_limit, *upper_limit; - const uint16_t **cdf; - int16_t shft, tmp16b; - int32_t tmp32b; - int status = 0; - - /* compute mean pitch gain */ - meangainQ12=0; - for (k = 0; k < 4; k++) - meangainQ12 += PitchGain_Q12[k]; - - meangainQ12 >>= 2; - - /* Save data for creation of multiple bitstreams */ - if (encData != NULL) { - encData->meanGain[encData->startIdx] = meangainQ12; - } - - /* voicing classificiation */ - if (meangainQ12 <= 819) { // mean_gain < 0.2 - shft = -1; // StepSize=2.0; - cdf = WebRtcIsacfix_kPitchLagPtrLo; - mean_val2Q10 = WebRtcIsacfix_kMeanLag2Lo; - mean_val4Q10 = WebRtcIsacfix_kMeanLag4Lo; - lower_limit = WebRtcIsacfix_kLowerLimitLo; - upper_limit = WebRtcIsacfix_kUpperLimitLo; - } else if (meangainQ12 <= 1638) { // 
mean_gain < 0.4 - shft = 0; // StepSize=1.0; - cdf = WebRtcIsacfix_kPitchLagPtrMid; - mean_val2Q10 = WebRtcIsacfix_kMeanLag2Mid; - mean_val4Q10 = WebRtcIsacfix_kMeanLag4Mid; - lower_limit = WebRtcIsacfix_kLowerLimitMid; - upper_limit = WebRtcIsacfix_kUpperLimitMid; - } else { - shft = 1; // StepSize=0.5; - cdf = WebRtcIsacfix_kPitchLagPtrHi; - mean_val2Q10 = WebRtcIsacfix_kMeanLag2Hi; - mean_val4Q10 = WebRtcIsacfix_kMeanLag4Hi; - lower_limit = WebRtcIsacfix_kLowerLimitHi; - upper_limit = WebRtcIsacfix_kUpperLimitHi; - } - - /* find quantization index */ - for (k=0; k<4; k++) - { - /* transform */ - CQ17=0; - for (j=0; j> 2; // Q17 - - CQ17 = WEBRTC_SPL_SHIFT_W32(CQ17,shft); // Scale with StepSize - - /* quantize */ - tmp16b = (int16_t)((CQ17 + 65536) >> 17); - index[k] = tmp16b; - - /* check that the index is not outside the boundaries of the table */ - if (index[k] < lower_limit[k]) index[k] = lower_limit[k]; - else if (index[k] > upper_limit[k]) index[k] = upper_limit[k]; - index[k] -= lower_limit[k]; - - /* Save data for creation of multiple bitstreams */ - if(encData != NULL) { - encData->pitchIndex[PITCH_SUBFRAMES*encData->startIdx + k] = index[k]; - } - } - - /* unquantize back to transform coefficients and do the inverse transform: S = T'*C */ - CQ11 = (index[0] + lower_limit[0]); // Q0 - CQ11 = WEBRTC_SPL_SHIFT_W32(CQ11,11-shft); // Scale with StepSize, Q11 - - for (k=0; k> 5); // Q7. - } - - CQ10 = mean_val2Q10[index[1]]; - for (k=0; k> 10; - PitchLagsQ7[k] += (int16_t)(tmp32b >> 5); // Q7. - } - - CQ10 = mean_val4Q10[index[3]]; - for (k=0; k> 10; - PitchLagsQ7[k] += (int16_t)(tmp32b >> 5); // Q7. - } - - /* entropy coding of quantization pitch lags */ - status = WebRtcIsacfix_EncHistMulti(streamdata, index, cdf, PITCH_SUBFRAMES); - - /* If error in WebRtcIsacfix_EncHistMulti(), status will be negative, otherwise 0 */ - return status; -} - - - -/* Routines for inband signaling of bandwitdh estimation */ -/* Histograms based on uniform distribution of indices */ -/* Move global variables later! 
*/ - - -/* cdf array for frame length indicator */ -const uint16_t kFrameLenCdf[4] = { - 0, 21845, 43690, 65535}; - -/* pointer to cdf array for frame length indicator */ -const uint16_t * const kFrameLenCdfPtr[1] = {kFrameLenCdf}; - -/* initial cdf index for decoder of frame length indicator */ -const uint16_t kFrameLenInitIndex[1] = {1}; - - -int WebRtcIsacfix_DecodeFrameLen(Bitstr_dec *streamdata, - size_t *framesamples) -{ - - int err; - int16_t frame_mode; - - err = 0; - /* entropy decoding of frame length [1:30ms,2:60ms] */ - err = WebRtcIsacfix_DecHistOneStepMulti(&frame_mode, streamdata, kFrameLenCdfPtr, kFrameLenInitIndex, 1); - if (err<0) // error check - return -ISAC_RANGE_ERROR_DECODE_FRAME_LENGTH; - - switch(frame_mode) { - case 1: - *framesamples = 480; /* 30ms */ - break; - case 2: - *framesamples = 960; /* 60ms */ - break; - default: - err = -ISAC_DISALLOWED_FRAME_MODE_DECODER; - } - - return err; -} - - -int WebRtcIsacfix_EncodeFrameLen(int16_t framesamples, Bitstr_enc *streamdata) { - - int status; - int16_t frame_mode; - - status = 0; - frame_mode = 0; - /* entropy coding of frame length [1:480 samples,2:960 samples] */ - switch(framesamples) { - case 480: - frame_mode = 1; - break; - case 960: - frame_mode = 2; - break; - default: - status = - ISAC_DISALLOWED_FRAME_MODE_ENCODER; - } - - if (status < 0) - return status; - - status = WebRtcIsacfix_EncHistMulti(streamdata, &frame_mode, kFrameLenCdfPtr, 1); - - return status; -} - -/* cdf array for estimated bandwidth */ -const uint16_t kBwCdf[25] = { - 0, 2731, 5461, 8192, 10923, 13653, 16384, 19114, 21845, 24576, 27306, 30037, - 32768, 35498, 38229, 40959, 43690, 46421, 49151, 51882, 54613, 57343, 60074, - 62804, 65535}; - -/* pointer to cdf array for estimated bandwidth */ -const uint16_t * const kBwCdfPtr[1] = {kBwCdf}; - -/* initial cdf index for decoder of estimated bandwidth*/ -const uint16_t kBwInitIndex[1] = {7}; - - -int WebRtcIsacfix_DecodeSendBandwidth(Bitstr_dec *streamdata, int16_t *BWno) { - - int err; - int16_t BWno32; - - /* entropy decoding of sender's BW estimation [0..23] */ - err = WebRtcIsacfix_DecHistOneStepMulti(&BWno32, streamdata, kBwCdfPtr, kBwInitIndex, 1); - if (err<0) // error check - return -ISAC_RANGE_ERROR_DECODE_BANDWIDTH; - *BWno = (int16_t)BWno32; - return err; - -} - - -int WebRtcIsacfix_EncodeReceiveBandwidth(int16_t *BWno, Bitstr_enc *streamdata) -{ - int status = 0; - /* entropy encoding of receiver's BW estimation [0..23] */ - status = WebRtcIsacfix_EncHistMulti(streamdata, BWno, kBwCdfPtr, 1); - - return status; -} - -/* estimate codel length of LPC Coef */ -void WebRtcIsacfix_TranscodeLpcCoef(int32_t *gain_lo_hiQ17, - int16_t *index_gQQ) { - int j, k; - int16_t posQQ, pos2QQ; - int16_t posg, offsg, gainpos; - int32_t tmpcoeffs_gQ6[KLT_ORDER_GAIN]; - int32_t tmpcoeffs_gQ17[KLT_ORDER_GAIN]; - int32_t tmpcoeffs2_gQ21[KLT_ORDER_GAIN]; - int32_t sumQQ; - - - /* log gains, mean removal and scaling */ - posg = 0; gainpos=0; - - for (k=0; k WebRtcIsacfix_kMaxIndGain[k]) { - index_gQQ[k] = WebRtcIsacfix_kMaxIndGain[k]; - } - } -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/entropy_coding.h b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/entropy_coding.h deleted file mode 100644 index ae11394f7c00..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/entropy_coding.h +++ /dev/null @@ -1,177 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * entropy_coding.h - * - * This header file contains all of the functions used to arithmetically - * encode the iSAC bistream - * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_ENTROPY_CODING_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_ENTROPY_CODING_H_ - -#include "modules/audio_coding/codecs/isac/fix/source/structs.h" - -/* decode complex spectrum (return number of bytes in stream) */ -int WebRtcIsacfix_DecodeSpec(Bitstr_dec* streamdata, - int16_t* frQ7, - int16_t* fiQ7, - int16_t AvgPitchGain_Q12); - -/* encode complex spectrum */ -int WebRtcIsacfix_EncodeSpec(const int16_t* fr, - const int16_t* fi, - Bitstr_enc* streamdata, - int16_t AvgPitchGain_Q12); - -/* decode & dequantize LPC Coef */ -int WebRtcIsacfix_DecodeLpcCoef(Bitstr_dec* streamdata, - int32_t* LPCCoefQ17, - int32_t* gain_lo_hiQ17, - int16_t* outmodel); - -int WebRtcIsacfix_DecodeLpc(int32_t* gain_lo_hiQ17, - int16_t* LPCCoef_loQ15, - int16_t* LPCCoef_hiQ15, - Bitstr_dec* streamdata, - int16_t* outmodel); - -/* quantize & code LPC Coef */ -int WebRtcIsacfix_EncodeLpc(int32_t* gain_lo_hiQ17, - int16_t* LPCCoef_loQ15, - int16_t* LPCCoef_hiQ15, - int16_t* model, - int32_t* sizeQ11, - Bitstr_enc* streamdata, - IsacSaveEncoderData* encData, - transcode_obj* transcodeParam); - -int WebRtcIsacfix_EstCodeLpcGain(int32_t* gain_lo_hiQ17, - Bitstr_enc* streamdata, - IsacSaveEncoderData* encData); -/* decode & dequantize RC */ -int WebRtcIsacfix_DecodeRcCoef(Bitstr_dec* streamdata, int16_t* RCQ15); - -/* quantize & code RC */ -int WebRtcIsacfix_EncodeRcCoef(int16_t* RCQ15, Bitstr_enc* streamdata); - -/* decode & dequantize squared Gain */ -int WebRtcIsacfix_DecodeGain2(Bitstr_dec* streamdata, int32_t* Gain2); - -/* quantize & code squared Gain (input is squared gain) */ -int WebRtcIsacfix_EncodeGain2(int32_t* gain2, Bitstr_enc* streamdata); - -int WebRtcIsacfix_EncodePitchGain(int16_t* PitchGains_Q12, - Bitstr_enc* streamdata, - IsacSaveEncoderData* encData); - -int WebRtcIsacfix_EncodePitchLag(int16_t* PitchLagQ7, - int16_t* PitchGain_Q12, - Bitstr_enc* streamdata, - IsacSaveEncoderData* encData); - -int WebRtcIsacfix_DecodePitchGain(Bitstr_dec* streamdata, - int16_t* PitchGain_Q12); - -int WebRtcIsacfix_DecodePitchLag(Bitstr_dec* streamdata, - int16_t* PitchGain_Q12, - int16_t* PitchLagQ7); - -int WebRtcIsacfix_DecodeFrameLen(Bitstr_dec* streamdata, size_t* framelength); - -int WebRtcIsacfix_EncodeFrameLen(int16_t framelength, Bitstr_enc* streamdata); - -int WebRtcIsacfix_DecodeSendBandwidth(Bitstr_dec* streamdata, int16_t* BWno); - -int WebRtcIsacfix_EncodeReceiveBandwidth(int16_t* BWno, Bitstr_enc* streamdata); - -void WebRtcIsacfix_TranscodeLpcCoef(int32_t* tmpcoeffs_gQ6, int16_t* index_gQQ); - -// Pointer functions for LPC transforms. 
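/* The typedefs and extern pointers below provide run-time dispatch between
   the portable C matrix-product kernels and the optional NEON / MIPS
   kernels declared further down in this header. A minimal sketch of how
   such pointers are typically assigned during codec initialization follows;
   the wrapper name InitMatrixProductPointers is only illustrative, and the
   real assignment lives elsewhere in the codec sources:

     static void InitMatrixProductPointers(void) {
       // Start from the portable C implementations.
       WebRtcIsacfix_MatrixProduct1 = WebRtcIsacfix_MatrixProduct1C;
       WebRtcIsacfix_MatrixProduct2 = WebRtcIsacfix_MatrixProduct2C;
     #if defined(WEBRTC_HAS_NEON)
       // Prefer the NEON kernels when built for a NEON-capable CPU.
       WebRtcIsacfix_MatrixProduct1 = WebRtcIsacfix_MatrixProduct1Neon;
       WebRtcIsacfix_MatrixProduct2 = WebRtcIsacfix_MatrixProduct2Neon;
     #endif
     }
*/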
- -typedef void (*MatrixProduct1)(const int16_t matrix0[], - const int32_t matrix1[], - int32_t matrix_product[], - int matrix1_index_factor1, - int matrix0_index_factor1, - int matrix1_index_init_case, - int matrix1_index_step, - int matrix0_index_step, - int inner_loop_count, - int mid_loop_count, - int shift); -typedef void (*MatrixProduct2)(const int16_t matrix0[], - const int32_t matrix1[], - int32_t matrix_product[], - int matrix0_index_factor, - int matrix0_index_step); - -extern MatrixProduct1 WebRtcIsacfix_MatrixProduct1; -extern MatrixProduct2 WebRtcIsacfix_MatrixProduct2; - -void WebRtcIsacfix_MatrixProduct1C(const int16_t matrix0[], - const int32_t matrix1[], - int32_t matrix_product[], - int matrix1_index_factor1, - int matrix0_index_factor1, - int matrix1_index_init_case, - int matrix1_index_step, - int matrix0_index_step, - int inner_loop_count, - int mid_loop_count, - int shift); -void WebRtcIsacfix_MatrixProduct2C(const int16_t matrix0[], - const int32_t matrix1[], - int32_t matrix_product[], - int matrix0_index_factor, - int matrix0_index_step); - -#if defined(WEBRTC_HAS_NEON) -void WebRtcIsacfix_MatrixProduct1Neon(const int16_t matrix0[], - const int32_t matrix1[], - int32_t matrix_product[], - int matrix1_index_factor1, - int matrix0_index_factor1, - int matrix1_index_init_case, - int matrix1_index_step, - int matrix0_index_step, - int inner_loop_count, - int mid_loop_count, - int shift); -void WebRtcIsacfix_MatrixProduct2Neon(const int16_t matrix0[], - const int32_t matrix1[], - int32_t matrix_product[], - int matrix0_index_factor, - int matrix0_index_step); -#endif - -#if defined(MIPS32_LE) -void WebRtcIsacfix_MatrixProduct1MIPS(const int16_t matrix0[], - const int32_t matrix1[], - int32_t matrix_product[], - int matrix1_index_factor1, - int matrix0_index_factor1, - int matrix1_index_init_case, - int matrix1_index_step, - int matrix0_index_step, - int inner_loop_count, - int mid_loop_count, - int shift); - -void WebRtcIsacfix_MatrixProduct2MIPS(const int16_t matrix0[], - const int32_t matrix1[], - int32_t matrix_product[], - int matrix0_index_factor, - int matrix0_index_step); -#endif - -#endif // MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_ENTROPY_CODING_H_ diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/entropy_coding_mips.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/entropy_coding_mips.c deleted file mode 100644 index a66a43ef9948..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/entropy_coding_mips.c +++ /dev/null @@ -1,249 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_coding/codecs/isac/fix/source/entropy_coding.h" -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" - -// MIPS optimization of the function WebRtcIsacfix_MatrixProduct1. -// Bit-exact with the function WebRtcIsacfix_MatrixProduct1C from -// entropy_coding.c file. 
-void WebRtcIsacfix_MatrixProduct1MIPS(const int16_t matrix0[], - const int32_t matrix1[], - int32_t matrix_product[], - const int matrix1_index_factor1, - const int matrix0_index_factor1, - const int matrix1_index_init_case, - const int matrix1_index_step, - const int matrix0_index_step, - const int inner_loop_count, - const int mid_loop_count, - const int shift) { - if (matrix1_index_init_case != 0) { - int j = SUBFRAMES, k = 0, n = 0; - int32_t r0, r1, r2, sum32; - int32_t* product_start = matrix_product; - int32_t* product_ptr; - const uint32_t product_step = 4 * mid_loop_count; - const uint32_t matrix0_step = 2 * matrix0_index_step; - const uint32_t matrix1_step = 4 * matrix1_index_step; - const uint32_t matrix0_step2 = 2 * matrix0_index_factor1; - const uint32_t matrix1_step2 = 4 * matrix1_index_factor1; - const int16_t* matrix0_start = matrix0; - const int32_t* matrix1_start = matrix1; - int16_t* matrix0_ptr; - int32_t* matrix1_ptr; - - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "1: \n\t" - "addu %[product_ptr], %[product_start], $0 \n\t" - "addu %[k], %[product_step], $0 \n\t" - "addiu %[j], %[j], -1 \n\t" - "addu %[matrix1_start], %[matrix1], $0 \n\t" - "2: \n\t" - "addu %[matrix1_ptr], %[matrix1_start], $0 \n\t" - "addu %[matrix0_ptr], %[matrix0_start], $0 \n\t" - "addu %[n], %[inner_loop_count], $0 \n\t" - "mul %[sum32], $0, $0 \n\t" - "3: \n\t" - "lw %[r0], 0(%[matrix1_ptr]) \n\t" - "lh %[r1], 0(%[matrix0_ptr]) \n\t" - "addu %[matrix1_ptr], %[matrix1_ptr], %[matrix1_step] \n\t" - "sllv %[r0], %[r0], %[shift] \n\t" - "andi %[r2], %[r0], 0xffff \n\t" - "sra %[r2], %[r2], 1 \n\t" - "mul %[r2], %[r2], %[r1] \n\t" - "sra %[r0], %[r0], 16 \n\t" - "mul %[r0], %[r0], %[r1] \n\t" - "addu %[matrix0_ptr], %[matrix0_ptr], %[matrix0_step] \n\t" - "addiu %[n], %[n], -1 \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r2], %[r2], 15 \n\t" -#else - "addiu %[r2], %[r2], 0x4000 \n\t" - "sra %[r2], %[r2], 15 \n\t" -#endif - "addu %[sum32], %[sum32], %[r2] \n\t" - "bgtz %[n], 3b \n\t" - " addu %[sum32], %[sum32], %[r0] \n\t" - "addiu %[k], %[k], -4 \n\t" - "addu %[matrix1_start], %[matrix1_start], %[matrix1_step2] \n\t" - "sw %[sum32], 0(%[product_ptr]) \n\t" - "bgtz %[k], 2b \n\t" - " addiu %[product_ptr], %[product_ptr], 4 \n\t" - "addu %[matrix0_start], %[matrix0_start], %[matrix0_step2] \n\t" - "bgtz %[j], 1b \n\t" - " addu %[product_start], %[product_start], %[product_step] \n\t" - ".set pop \n\t" - : [product_ptr] "=&r" (product_ptr), [product_start] "+r" (product_start), - [k] "=&r" (k), [j] "+r" (j), [matrix1_start] "=&r"(matrix1_start), - [matrix1_ptr] "=&r" (matrix1_ptr), [matrix0_ptr] "=&r" (matrix0_ptr), - [matrix0_start] "+r" (matrix0_start), [n] "=&r" (n), [r0] "=&r" (r0), - [sum32] "=&r" (sum32), [r1] "=&r" (r1),[r2] "=&r" (r2) - : [product_step] "r" (product_step), [matrix1] "r" (matrix1), - [inner_loop_count] "r" (inner_loop_count), - [matrix1_step] "r" (matrix1_step), [shift] "r" (shift), - [matrix0_step] "r" (matrix0_step), [matrix1_step2] "r" (matrix1_step2), - [matrix0_step2] "r" (matrix0_step2) - : "hi", "lo", "memory" - ); - } else { - int j = SUBFRAMES, k = 0, n = 0; - int32_t r0, r1, r2, sum32; - int32_t* product_start = matrix_product; - int32_t* product_ptr; - const uint32_t product_step = 4 * mid_loop_count; - const uint32_t matrix0_step = 2 * matrix0_index_step; - const uint32_t matrix1_step = 4 * matrix1_index_step; - const uint32_t matrix0_step2 = 2 * matrix0_index_factor1; - const uint32_t matrix1_step2 = 4 * matrix1_index_factor1; - const 
int16_t* matrix0_start = matrix0; - const int32_t* matrix1_start = matrix1; - int16_t* matrix0_ptr; - int32_t* matrix1_ptr; - - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "1: \n\t" - "addu %[product_ptr], %[product_start], $0 \n\t" - "addu %[k], %[product_step], $0 \n\t" - "addiu %[j], %[j], -1 \n\t" - "addu %[matrix0_start], %[matrix0], $0 \n\t" - "2: \n\t" - "addu %[matrix1_ptr], %[matrix1_start], $0 \n\t" - "addu %[matrix0_ptr], %[matrix0_start], $0 \n\t" - "addu %[n], %[inner_loop_count], $0 \n\t" - "mul %[sum32], $0, $0 \n\t" - "3: \n\t" - "lw %[r0], 0(%[matrix1_ptr]) \n\t" - "lh %[r1], 0(%[matrix0_ptr]) \n\t" - "addu %[matrix1_ptr], %[matrix1_ptr], %[matrix1_step] \n\t" - "sllv %[r0], %[r0], %[shift] \n\t" - "andi %[r2], %[r0], 0xffff \n\t" - "sra %[r2], %[r2], 1 \n\t" - "mul %[r2], %[r2], %[r1] \n\t" - "sra %[r0], %[r0], 16 \n\t" - "mul %[r0], %[r0], %[r1] \n\t" - "addu %[matrix0_ptr], %[matrix0_ptr], %[matrix0_step] \n\t" - "addiu %[n], %[n], -1 \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r2], %[r2], 15 \n\t" -#else - "addiu %[r2], %[r2], 0x4000 \n\t" - "sra %[r2], %[r2], 15 \n\t" -#endif - "addu %[sum32], %[sum32], %[r2] \n\t" - "bgtz %[n], 3b \n\t" - " addu %[sum32], %[sum32], %[r0] \n\t" - "addiu %[k], %[k], -4 \n\t" - "addu %[matrix0_start], %[matrix0_start], %[matrix0_step2] \n\t" - "sw %[sum32], 0(%[product_ptr]) \n\t" - "bgtz %[k], 2b \n\t" - " addiu %[product_ptr], %[product_ptr], 4 \n\t" - "addu %[matrix1_start], %[matrix1_start], %[matrix1_step2] \n\t" - "bgtz %[j], 1b \n\t" - " addu %[product_start], %[product_start], %[product_step] \n\t" - ".set pop \n\t" - : [product_ptr] "=&r" (product_ptr), [product_start] "+r" (product_start), - [k] "=&r" (k), [j] "+r" (j), [matrix1_start] "+r"(matrix1_start), - [matrix1_ptr] "=&r" (matrix1_ptr), [matrix0_ptr] "=&r" (matrix0_ptr), - [matrix0_start] "=&r" (matrix0_start), [n] "=&r" (n), [r0] "=&r" (r0), - [sum32] "=&r" (sum32), [r1] "=&r" (r1),[r2] "=&r" (r2) - : [product_step] "r" (product_step), [matrix0] "r" (matrix0), - [inner_loop_count] "r" (inner_loop_count), - [matrix1_step] "r" (matrix1_step), [shift] "r" (shift), - [matrix0_step] "r" (matrix0_step), [matrix1_step2] "r" (matrix1_step2), - [matrix0_step2] "r" (matrix0_step2) - : "hi", "lo", "memory" - ); - } -} - -// MIPS optimization of the function WebRtcIsacfix_MatrixProduct2. -// Bit-exact with the function WebRtcIsacfix_MatrixProduct2C from -// entropy_coding.c file. 
-void WebRtcIsacfix_MatrixProduct2MIPS(const int16_t matrix0[], - const int32_t matrix1[], - int32_t matrix_product[], - const int matrix0_index_factor, - const int matrix0_index_step) { - int j = 0, n = 0; - int loop_count = SUBFRAMES; - const int16_t* matrix0_ptr; - const int32_t* matrix1_ptr; - const int16_t* matrix0_start = matrix0; - const int matrix0_step = 2 * matrix0_index_step; - const int matrix0_step2 = 2 * matrix0_index_factor; - int32_t r0, r1, r2, r3, r4, sum32, sum32_2; - - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "addu %[j], %[loop_count], $0 \n\t" - "addu %[matrix0_start], %[matrix0], $0 \n\t" - "1: \n\t" - "addu %[matrix1_ptr], %[matrix1], $0 \n\t" - "addu %[matrix0_ptr], %[matrix0_start], $0 \n\t" - "addu %[n], %[loop_count], $0 \n\t" - "mul %[sum32], $0, $0 \n\t" - "mul %[sum32_2], $0, $0 \n\t" - "2: \n\t" - "lw %[r0], 0(%[matrix1_ptr]) \n\t" - "lw %[r1], 4(%[matrix1_ptr]) \n\t" - "lh %[r2], 0(%[matrix0_ptr]) \n\t" - "andi %[r3], %[r0], 0xffff \n\t" - "sra %[r3], %[r3], 1 \n\t" - "mul %[r3], %[r3], %[r2] \n\t" - "andi %[r4], %[r1], 0xffff \n\t" - "sra %[r4], %[r4], 1 \n\t" - "mul %[r4], %[r4], %[r2] \n\t" - "sra %[r0], %[r0], 16 \n\t" - "mul %[r0], %[r0], %[r2] \n\t" - "sra %[r1], %[r1], 16 \n\t" - "mul %[r1], %[r1], %[r2] \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r3], %[r3], 15 \n\t" - "shra_r.w %[r4], %[r4], 15 \n\t" -#else - "addiu %[r3], %[r3], 0x4000 \n\t" - "sra %[r3], %[r3], 15 \n\t" - "addiu %[r4], %[r4], 0x4000 \n\t" - "sra %[r4], %[r4], 15 \n\t" -#endif - "addiu %[matrix1_ptr], %[matrix1_ptr], 8 \n\t" - "addu %[matrix0_ptr], %[matrix0_ptr], %[matrix0_step] \n\t" - "addiu %[n], %[n], -1 \n\t" - "addu %[sum32], %[sum32], %[r3] \n\t" - "addu %[sum32_2], %[sum32_2], %[r4] \n\t" - "addu %[sum32], %[sum32], %[r0] \n\t" - "bgtz %[n], 2b \n\t" - " addu %[sum32_2], %[sum32_2], %[r1] \n\t" - "sra %[sum32], %[sum32], 3 \n\t" - "sra %[sum32_2], %[sum32_2], 3 \n\t" - "addiu %[j], %[j], -1 \n\t" - "addu %[matrix0_start], %[matrix0_start], %[matrix0_step2] \n\t" - "sw %[sum32], 0(%[matrix_product]) \n\t" - "sw %[sum32_2], 4(%[matrix_product]) \n\t" - "bgtz %[j], 1b \n\t" - " addiu %[matrix_product], %[matrix_product], 8 \n\t" - ".set pop \n\t" - : [j] "=&r" (j), [matrix0_start] "=&r" (matrix0_start), - [matrix1_ptr] "=&r" (matrix1_ptr), [matrix0_ptr] "=&r" (matrix0_ptr), - [n] "=&r" (n), [sum32] "=&r" (sum32), [sum32_2] "=&r" (sum32_2), - [r0] "=&r" (r0), [r1] "=&r" (r1), [r2] "=&r" (r2), [r3] "=&r" (r3), - [r4] "=&r" (r4), [matrix_product] "+r" (matrix_product) - : [loop_count] "r" (loop_count), [matrix0] "r" (matrix0), - [matrix1] "r" (matrix1), [matrix0_step] "r" (matrix0_step), - [matrix0_step2] "r" (matrix0_step2) - : "hi", "lo", "memory" - ); -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/entropy_coding_neon.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/entropy_coding_neon.c deleted file mode 100644 index 02005678800a..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/entropy_coding_neon.c +++ /dev/null @@ -1,217 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/* This file contains WebRtcIsacfix_MatrixProduct1Neon() and - * WebRtcIsacfix_MatrixProduct2Neon() for ARM Neon platform. API's are in - * entropy_coding.c. Results are bit exact with the c code for - * generic platforms. - */ - -#include -#include - -#include "modules/audio_coding/codecs/isac/fix/source/entropy_coding.h" -#include "common_audio/signal_processing/include/signal_processing_library.h" -#include "rtc_base/checks.h" - -void WebRtcIsacfix_MatrixProduct1Neon(const int16_t matrix0[], - const int32_t matrix1[], - int32_t matrix_product[], - const int matrix1_index_factor1, - const int matrix0_index_factor1, - const int matrix1_index_init_case, - const int matrix1_index_step, - const int matrix0_index_step, - const int inner_loop_count, - const int mid_loop_count, - const int shift) { - int j = 0, k = 0, n = 0; - int matrix1_index = 0, matrix0_index = 0, matrix_prod_index = 0; - int* matrix1_index_factor2 = &j; - int* matrix0_index_factor2 = &k; - if (matrix1_index_init_case != 0) { - matrix1_index_factor2 = &k; - matrix0_index_factor2 = &j; - } - int32x4_t shift32x4 = vdupq_n_s32(shift); - int32x2_t shift32x2 = vdup_n_s32(shift); - int32x4_t sum_32x4 = vdupq_n_s32(0); - int32x2_t sum_32x2 = vdup_n_s32(0); - - RTC_DCHECK_EQ(0, inner_loop_count % 2); - RTC_DCHECK_EQ(0, mid_loop_count % 2); - - if (matrix1_index_init_case != 0 && matrix1_index_factor1 == 1) { - for (j = 0; j < SUBFRAMES; j++) { - matrix_prod_index = mid_loop_count * j; - for (k = 0; k < (mid_loop_count >> 2) << 2; k += 4) { - sum_32x4 = veorq_s32(sum_32x4, sum_32x4); // Initialize to zeros. - matrix1_index = k; - matrix0_index = matrix0_index_factor1 * j; - for (n = 0; n < inner_loop_count; n++) { - int32x4_t matrix0_32x4 = - vdupq_n_s32((int32_t)(matrix0[matrix0_index]) << 15); - int32x4_t matrix1_32x4 = - vshlq_s32(vld1q_s32(&matrix1[matrix1_index]), shift32x4); - int32x4_t multi_32x4 = vqdmulhq_s32(matrix0_32x4, matrix1_32x4); - sum_32x4 = vqaddq_s32(sum_32x4, multi_32x4); - matrix1_index += matrix1_index_step; - matrix0_index += matrix0_index_step; - } - vst1q_s32(&matrix_product[matrix_prod_index], sum_32x4); - matrix_prod_index += 4; - } - if (mid_loop_count % 4 > 1) { - sum_32x2 = veor_s32(sum_32x2, sum_32x2); // Initialize to zeros. - matrix1_index = k; - k += 2; - matrix0_index = matrix0_index_factor1 * j; - for (n = 0; n < inner_loop_count; n++) { - int32x2_t matrix0_32x2 = - vdup_n_s32((int32_t)(matrix0[matrix0_index]) << 15); - int32x2_t matrix1_32x2 = - vshl_s32(vld1_s32(&matrix1[matrix1_index]), shift32x2); - int32x2_t multi_32x2 = vqdmulh_s32(matrix0_32x2, matrix1_32x2); - sum_32x2 = vqadd_s32(sum_32x2, multi_32x2); - matrix1_index += matrix1_index_step; - matrix0_index += matrix0_index_step; - } - vst1_s32(&matrix_product[matrix_prod_index], sum_32x2); - matrix_prod_index += 2; - } - } - } - else if (matrix1_index_init_case == 0 && matrix0_index_factor1 == 1) { - int32x2_t multi_32x2 = vdup_n_s32(0); - int32x2_t matrix0_32x2 = vdup_n_s32(0); - for (j = 0; j < SUBFRAMES; j++) { - matrix_prod_index = mid_loop_count * j; - for (k = 0; k < (mid_loop_count >> 2) << 2; k += 4) { - sum_32x4 = veorq_s32(sum_32x4, sum_32x4); // Initialize to zeros. 
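        /* The vector loop that follows emulates the scalar
           WEBRTC_SPL_MUL_16_32_RSFT16() kernel: each 16-bit matrix0 value
           is widened and shifted up by 15 bits, and vqdmulhq_s32(a, b)
           returns (2 * a * b) >> 32, so (m0 << 15) multiplied this way with
           the pre-shifted matrix1 word yields (m0 * m1) >> 16, which the
           file header above notes is bit exact with the C reference. */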
- matrix1_index = matrix1_index_factor1 * j; - matrix0_index = k; - for (n = 0; n < inner_loop_count; n++) { - int32x4_t matrix1_32x4 = vdupq_n_s32(matrix1[matrix1_index] << shift); - int32x4_t matrix0_32x4 = - vshll_n_s16(vld1_s16(&matrix0[matrix0_index]), 15); - int32x4_t multi_32x4 = vqdmulhq_s32(matrix0_32x4, matrix1_32x4); - sum_32x4 = vqaddq_s32(sum_32x4, multi_32x4); - matrix1_index += matrix1_index_step; - matrix0_index += matrix0_index_step; - } - vst1q_s32(&matrix_product[matrix_prod_index], sum_32x4); - matrix_prod_index += 4; - } - if (mid_loop_count % 4 > 1) { - sum_32x2 = veor_s32(sum_32x2, sum_32x2); // Initialize to zeros. - matrix1_index = matrix1_index_factor1 * j; - matrix0_index = k; - for (n = 0; n < inner_loop_count; n++) { - int32x2_t matrix1_32x2 = vdup_n_s32(matrix1[matrix1_index] << shift); - matrix0_32x2 = - vset_lane_s32((int32_t)matrix0[matrix0_index], matrix0_32x2, 0); - matrix0_32x2 = vset_lane_s32((int32_t)matrix0[matrix0_index + 1], - matrix0_32x2, 1); - matrix0_32x2 = vshl_n_s32(matrix0_32x2, 15); - multi_32x2 = vqdmulh_s32(matrix1_32x2, matrix0_32x2); - sum_32x2 = vqadd_s32(sum_32x2, multi_32x2); - matrix1_index += matrix1_index_step; - matrix0_index += matrix0_index_step; - } - vst1_s32(&matrix_product[matrix_prod_index], sum_32x2); - matrix_prod_index += 2; - } - } - } - else if (matrix1_index_init_case == 0 && - matrix1_index_step == 1 && - matrix0_index_step == 1) { - int32x2_t multi_32x2 = vdup_n_s32(0); - int32x2_t matrix0_32x2 = vdup_n_s32(0); - for (j = 0; j < SUBFRAMES; j++) { - matrix_prod_index = mid_loop_count * j; - for (k = 0; k < mid_loop_count; k++) { - sum_32x4 = veorq_s32(sum_32x4, sum_32x4); // Initialize to zeros. - matrix1_index = matrix1_index_factor1 * j; - matrix0_index = matrix0_index_factor1 * k; - for (n = 0; n < (inner_loop_count >> 2) << 2; n += 4) { - int32x4_t matrix1_32x4 = - vshlq_s32(vld1q_s32(&matrix1[matrix1_index]), shift32x4); - int32x4_t matrix0_32x4 = - vshll_n_s16(vld1_s16(&matrix0[matrix0_index]), 15); - int32x4_t multi_32x4 = vqdmulhq_s32(matrix0_32x4, matrix1_32x4); - sum_32x4 = vqaddq_s32(sum_32x4, multi_32x4); - matrix1_index += 4; - matrix0_index += 4; - } - sum_32x2 = vqadd_s32(vget_low_s32(sum_32x4), vget_high_s32(sum_32x4)); - if (inner_loop_count % 4 > 1) { - int32x2_t matrix1_32x2 = - vshl_s32(vld1_s32(&matrix1[matrix1_index]), shift32x2); - matrix0_32x2 = - vset_lane_s32((int32_t)matrix0[matrix0_index], matrix0_32x2, 0); - matrix0_32x2 = vset_lane_s32((int32_t)matrix0[matrix0_index + 1], - matrix0_32x2, 1); - matrix0_32x2 = vshl_n_s32(matrix0_32x2, 15); - multi_32x2 = vqdmulh_s32(matrix1_32x2, matrix0_32x2); - sum_32x2 = vqadd_s32(sum_32x2, multi_32x2); - } - sum_32x2 = vpadd_s32(sum_32x2, sum_32x2); - vst1_lane_s32(&matrix_product[matrix_prod_index], sum_32x2, 0); - matrix_prod_index++; - } - } - } - else { - for (j = 0; j < SUBFRAMES; j++) { - matrix_prod_index = mid_loop_count * j; - for (k=0; k < mid_loop_count; k++) { - int32_t sum32 = 0; - matrix1_index = matrix1_index_factor1 * (*matrix1_index_factor2); - matrix0_index = matrix0_index_factor1 * (*matrix0_index_factor2); - for (n = 0; n < inner_loop_count; n++) { - sum32 += (WEBRTC_SPL_MUL_16_32_RSFT16(matrix0[matrix0_index], - matrix1[matrix1_index] << shift)); - matrix1_index += matrix1_index_step; - matrix0_index += matrix0_index_step; - } - matrix_product[matrix_prod_index] = sum32; - matrix_prod_index++; - } - } - } -} - -void WebRtcIsacfix_MatrixProduct2Neon(const int16_t matrix0[], - const int32_t matrix1[], - int32_t matrix_product[], - 
const int matrix0_index_factor, - const int matrix0_index_step) { - int j = 0, n = 0; - int matrix1_index = 0, matrix0_index = 0, matrix_prod_index = 0; - int32x2_t sum_32x2 = vdup_n_s32(0); - for (j = 0; j < SUBFRAMES; j++) { - sum_32x2 = veor_s32(sum_32x2, sum_32x2); // Initialize to zeros. - matrix1_index = 0; - matrix0_index = matrix0_index_factor * j; - for (n = SUBFRAMES; n > 0; n--) { - int32x2_t matrix0_32x2 = - vdup_n_s32((int32_t)(matrix0[matrix0_index]) << 15); - int32x2_t matrix1_32x2 = vld1_s32(&matrix1[matrix1_index]); - int32x2_t multi_32x2 = vqdmulh_s32(matrix0_32x2, matrix1_32x2); - sum_32x2 = vqadd_s32(sum_32x2, multi_32x2); - matrix1_index += 2; - matrix0_index += matrix0_index_step; - } - sum_32x2 = vshr_n_s32(sum_32x2, 3); - vst1_s32(&matrix_product[matrix_prod_index], sum_32x2); - matrix_prod_index += 2; - } -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/fft.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/fft.c deleted file mode 100644 index a0ed3f83ce96..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/fft.c +++ /dev/null @@ -1,415 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * fft.c - * - * Fast Fourier Transform - * - */ - - -#include "modules/audio_coding/codecs/isac/fix/source/fft.h" - -static const int16_t kSortTabFft[240] = { - 0, 60, 120, 180, 20, 80, 140, 200, 40, 100, 160, 220, - 4, 64, 124, 184, 24, 84, 144, 204, 44, 104, 164, 224, - 8, 68, 128, 188, 28, 88, 148, 208, 48, 108, 168, 228, - 12, 72, 132, 192, 32, 92, 152, 212, 52, 112, 172, 232, - 16, 76, 136, 196, 36, 96, 156, 216, 56, 116, 176, 236, - 1, 61, 121, 181, 21, 81, 141, 201, 41, 101, 161, 221, - 5, 65, 125, 185, 25, 85, 145, 205, 45, 105, 165, 225, - 9, 69, 129, 189, 29, 89, 149, 209, 49, 109, 169, 229, - 13, 73, 133, 193, 33, 93, 153, 213, 53, 113, 173, 233, - 17, 77, 137, 197, 37, 97, 157, 217, 57, 117, 177, 237, - 2, 62, 122, 182, 22, 82, 142, 202, 42, 102, 162, 222, - 6, 66, 126, 186, 26, 86, 146, 206, 46, 106, 166, 226, - 10, 70, 130, 190, 30, 90, 150, 210, 50, 110, 170, 230, - 14, 74, 134, 194, 34, 94, 154, 214, 54, 114, 174, 234, - 18, 78, 138, 198, 38, 98, 158, 218, 58, 118, 178, 238, - 3, 63, 123, 183, 23, 83, 143, 203, 43, 103, 163, 223, - 7, 67, 127, 187, 27, 87, 147, 207, 47, 107, 167, 227, - 11, 71, 131, 191, 31, 91, 151, 211, 51, 111, 171, 231, - 15, 75, 135, 195, 35, 95, 155, 215, 55, 115, 175, 235, - 19, 79, 139, 199, 39, 99, 159, 219, 59, 119, 179, 239 -}; - -/* Cosine table in Q14 */ -static const int16_t kCosTabFfftQ14[240] = { - 16384, 16378, 16362, 16333, 16294, 16244, 16182, 16110, 16026, 15931, 15826, 15709, - 15582, 15444, 15296, 15137, 14968, 14788, 14598, 14399, 14189, 13970, 13741, 13502, - 13255, 12998, 12733, 12458, 12176, 11885, 11585, 11278, 10963, 10641, 10311, 9974, - 9630, 9280, 8923, 8561, 8192, 7818, 7438, 7053, 6664, 6270, 5872, 5469, - 5063, 4653, 4240, 3825, 3406, 2986, 2563, 2139, 1713, 1285, 857, 429, - 0, -429, -857, -1285, -1713, -2139, -2563, -2986, -3406, -3825, -4240, -4653, - -5063, -5469, -5872, -6270, -6664, -7053, -7438, -7818, -8192, -8561, -8923, -9280, - -9630, -9974, -10311, 
-10641, -10963, -11278, -11585, -11885, -12176, -12458, -12733, -12998, - -13255, -13502, -13741, -13970, -14189, -14399, -14598, -14788, -14968, -15137, -15296, -15444, - -15582, -15709, -15826, -15931, -16026, -16110, -16182, -16244, -16294, -16333, -16362, -16378, - -16384, -16378, -16362, -16333, -16294, -16244, -16182, -16110, -16026, -15931, -15826, -15709, - -15582, -15444, -15296, -15137, -14968, -14788, -14598, -14399, -14189, -13970, -13741, -13502, - -13255, -12998, -12733, -12458, -12176, -11885, -11585, -11278, -10963, -10641, -10311, -9974, - -9630, -9280, -8923, -8561, -8192, -7818, -7438, -7053, -6664, -6270, -5872, -5469, - -5063, -4653, -4240, -3825, -3406, -2986, -2563, -2139, -1713, -1285, -857, -429, - 0, 429, 857, 1285, 1713, 2139, 2563, 2986, 3406, 3825, 4240, 4653, - 5063, 5469, 5872, 6270, 6664, 7053, 7438, 7818, 8192, 8561, 8923, 9280, - 9630, 9974, 10311, 10641, 10963, 11278, 11585, 11885, 12176, 12458, 12733, 12998, - 13255, 13502, 13741, 13970, 14189, 14399, 14598, 14788, 14968, 15137, 15296, 15444, - 15582, 15709, 15826, 15931, 16026, 16110, 16182, 16244, 16294, 16333, 16362, 16378 -}; - - - -/* Uses 16x16 mul, without rounding, which is faster. Uses WEBRTC_SPL_MUL_16_16_RSFT */ -int16_t WebRtcIsacfix_FftRadix16Fastest(int16_t RexQx[], int16_t ImxQx[], int16_t iSign) { - - int16_t dd, ee, ff, gg, hh, ii; - int16_t k0, k1, k2, k3, k4, kk; - int16_t tmp116, tmp216; - - int16_t ccc1Q14, ccc2Q14, ccc3Q14, sss1Q14, sss2Q14, sss3Q14; - int16_t sss60Q14, ccc72Q14, sss72Q14; - int16_t aaQx, ajQx, akQx, ajmQx, ajpQx, akmQx, akpQx; - int16_t bbQx, bjQx, bkQx, bjmQx, bjpQx, bkmQx, bkpQx; - - int16_t ReDATAQx[240], ImDATAQx[240]; - - sss60Q14 = kCosTabFfftQ14[20]; - ccc72Q14 = kCosTabFfftQ14[48]; - sss72Q14 = kCosTabFfftQ14[12]; - - if (iSign < 0) { - sss72Q14 = -sss72Q14; - sss60Q14 = -sss60Q14; - } - /* Complexity is: 10 cycles */ - - /* compute fourier transform */ - - // transform for factor of 4 - for (kk=0; kk<60; kk++) { - k0 = kk; - k1 = k0 + 60; - k2 = k1 + 60; - k3 = k2 + 60; - - akpQx = RexQx[k0] + RexQx[k2]; - akmQx = RexQx[k0] - RexQx[k2]; - ajpQx = RexQx[k1] + RexQx[k3]; - ajmQx = RexQx[k1] - RexQx[k3]; - bkpQx = ImxQx[k0] + ImxQx[k2]; - bkmQx = ImxQx[k0] - ImxQx[k2]; - bjpQx = ImxQx[k1] + ImxQx[k3]; - bjmQx = ImxQx[k1] - ImxQx[k3]; - - RexQx[k0] = akpQx + ajpQx; - ImxQx[k0] = bkpQx + bjpQx; - ajpQx = akpQx - ajpQx; - bjpQx = bkpQx - bjpQx; - if (iSign < 0) { - akpQx = akmQx + bjmQx; - bkpQx = bkmQx - ajmQx; - akmQx -= bjmQx; - bkmQx += ajmQx; - } else { - akpQx = akmQx - bjmQx; - bkpQx = bkmQx + ajmQx; - akmQx += bjmQx; - bkmQx -= ajmQx; - } - - ccc1Q14 = kCosTabFfftQ14[kk]; - ccc2Q14 = kCosTabFfftQ14[2 * kk]; - ccc3Q14 = kCosTabFfftQ14[3 * kk]; - sss1Q14 = kCosTabFfftQ14[kk + 60]; - sss2Q14 = kCosTabFfftQ14[2 * kk + 60]; - sss3Q14 = kCosTabFfftQ14[3 * kk + 60]; - if (iSign==1) { - sss1Q14 = -sss1Q14; - sss2Q14 = -sss2Q14; - sss3Q14 = -sss3Q14; - } - - //Do several multiplications like Q14*Q16>>14 = Q16 - // RexQ16[k1] = akpQ16 * ccc1Q14 - bkpQ16 * sss1Q14; - // RexQ16[k2] = ajpQ16 * ccc2Q14 - bjpQ16 * sss2Q14; - // RexQ16[k3] = akmQ16 * ccc3Q14 - bkmQ16 * sss3Q14; - // ImxQ16[k1] = akpQ16 * sss1Q14 + bkpQ16 * ccc1Q14; - // ImxQ16[k2] = ajpQ16 * sss2Q14 + bjpQ16 * ccc2Q14; - // ImxQ16[k3] = akmQ16 * sss3Q14 + bkmQ16 * ccc3Q14; - - RexQx[k1] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc1Q14, akpQx, 14) - - (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss1Q14, bkpQx, 14); // 6 non-mul + 2 mul cycles, i.e. 
8 cycles (6+2*7=20 cycles if 16x32mul) - RexQx[k2] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, ajpQx, 14) - - (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, bjpQx, 14); - RexQx[k3] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc3Q14, akmQx, 14) - - (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss3Q14, bkmQx, 14); - ImxQx[k1] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss1Q14, akpQx, 14) + - (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc1Q14, bkpQx, 14); - ImxQx[k2] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, ajpQx, 14) + - (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, bjpQx, 14); - ImxQx[k3] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss3Q14, akmQx, 14) + - (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc3Q14, bkmQx, 14); - //This mul segment needs 6*8 = 48 cycles for 16x16 muls, but 6*20 = 120 cycles for 16x32 muls - - - } - /* Complexity is: 51+48 = 99 cycles for 16x16 muls, but 51+120 = 171 cycles for 16x32 muls*/ - - // transform for factor of 3 - kk=0; - k1=20; - k2=40; - - for (hh=0; hh<4; hh++) { - for (ii=0; ii<20; ii++) { - akQx = RexQx[kk]; - bkQx = ImxQx[kk]; - ajQx = RexQx[k1] + RexQx[k2]; - bjQx = ImxQx[k1] + ImxQx[k2]; - RexQx[kk] = akQx + ajQx; - ImxQx[kk] = bkQx + bjQx; - tmp116 = ajQx >> 1; - tmp216 = bjQx >> 1; - akQx = akQx - tmp116; - bkQx = bkQx - tmp216; - tmp116 = RexQx[k1] - RexQx[k2]; - tmp216 = ImxQx[k1] - ImxQx[k2]; - - ajQx = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss60Q14, tmp116, 14); // Q14*Qx>>14 = Qx - bjQx = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss60Q14, tmp216, 14); // Q14*Qx>>14 = Qx - RexQx[k1] = akQx - bjQx; - RexQx[k2] = akQx + bjQx; - ImxQx[k1] = bkQx + ajQx; - ImxQx[k2] = bkQx - ajQx; - - kk++; - k1++; - k2++; - } - /* Complexity : (31+6)*20 = 740 cycles for 16x16 muls, but (31+18)*20 = 980 cycles for 16x32 muls*/ - kk=kk+40; - k1=k1+40; - k2=k2+40; - } - /* Complexity : 4*(740+3) = 2972 cycles for 16x16 muls, but 4*(980+3) = 3932 cycles for 16x32 muls*/ - - /* multiply by rotation factor for odd factor 3 or 5 (not for 4) - Same code (duplicated) for both ii=2 and ii=3 */ - kk = 1; - ee = 0; - ff = 0; - - for (gg=0; gg<19; gg++) { - kk += 20; - ff = ff+4; - for (hh=0; hh<2; hh++) { - ee = ff + hh * ff; - dd = ee + 60; - ccc2Q14 = kCosTabFfftQ14[ee]; - sss2Q14 = kCosTabFfftQ14[dd]; - if (iSign==1) { - sss2Q14 = -sss2Q14; - } - for (ii=0; ii<4; ii++) { - akQx = RexQx[kk]; - bkQx = ImxQx[kk]; - RexQx[kk] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, akQx, 14) - // Q14*Qx>>14 = Qx - (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, bkQx, 14); - ImxQx[kk] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, akQx, 14) + // Q14*Qx>>14 = Qx - (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, bkQx, 14); - - - kk += 60; - } - kk = kk - 220; - } - // Complexity: 2*(13+5+4*13+2) = 144 for 16x16 muls, but 2*(13+5+4*33+2) = 304 cycles for 16x32 muls - kk = kk - 59; - } - // Complexity: 19*144 = 2736 for 16x16 muls, but 19*304 = 5776 cycles for 16x32 muls - - // transform for factor of 5 - kk = 0; - ccc2Q14 = kCosTabFfftQ14[96]; - sss2Q14 = kCosTabFfftQ14[84]; - if (iSign==1) { - sss2Q14 = -sss2Q14; - } - - for (hh=0; hh<4; hh++) { - for (ii=0; ii<12; ii++) { - k1 = kk + 4; - k2 = k1 + 4; - k3 = k2 + 4; - k4 = k3 + 4; - - akpQx = RexQx[k1] + RexQx[k4]; - akmQx = RexQx[k1] - RexQx[k4]; - bkpQx = ImxQx[k1] + ImxQx[k4]; - bkmQx = ImxQx[k1] - ImxQx[k4]; - ajpQx = RexQx[k2] + RexQx[k3]; - ajmQx = RexQx[k2] - RexQx[k3]; - bjpQx = ImxQx[k2] + ImxQx[k3]; - bjmQx = ImxQx[k2] - ImxQx[k3]; - aaQx = RexQx[kk]; - bbQx = ImxQx[kk]; - RexQx[kk] = aaQx + akpQx + ajpQx; - ImxQx[kk] = bbQx + bkpQx + bjpQx; - - akQx = 
(int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc72Q14, akpQx, 14) + - (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, ajpQx, 14) + aaQx; - bkQx = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc72Q14, bkpQx, 14) + - (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, bjpQx, 14) + bbQx; - ajQx = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss72Q14, akmQx, 14) + - (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, ajmQx, 14); - bjQx = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss72Q14, bkmQx, 14) + - (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, bjmQx, 14); - // 32+4*8=64 or 32+4*20=112 - - RexQx[k1] = akQx - bjQx; - RexQx[k4] = akQx + bjQx; - ImxQx[k1] = bkQx + ajQx; - ImxQx[k4] = bkQx - ajQx; - - akQx = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, akpQx, 14) + - (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc72Q14, ajpQx, 14) + aaQx; - bkQx = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, bkpQx, 14) + - (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc72Q14, bjpQx, 14) + bbQx; - ajQx = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, akmQx, 14) - - (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss72Q14, ajmQx, 14); - bjQx = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, bkmQx, 14) - - (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss72Q14, bjmQx, 14); - // 8+4*8=40 or 8+4*20=88 - - RexQx[k2] = akQx - bjQx; - RexQx[k3] = akQx + bjQx; - ImxQx[k2] = bkQx + ajQx; - ImxQx[k3] = bkQx - ajQx; - - kk = k4 + 4; - } - // Complexity: 12*(64+40+10) = 1368 for 16x16 muls, but 12*(112+88+10) = 2520 cycles for 16x32 muls - kk -= 239; - } - // Complexity: 4*1368 = 5472 for 16x16 muls, but 4*2520 = 10080 cycles for 16x32 muls - - /* multiply by rotation factor for odd factor 3 or 5 (not for 4) - Same code (duplicated) for both ii=2 and ii=3 */ - kk = 1; - ee=0; - - for (gg=0; gg<3; gg++) { - kk += 4; - dd = 12 + 12 * gg; - ff = 0; - for (hh=0; hh<4; hh++) { - ff = ff+dd; - ee = ff+60; - for (ii=0; ii<12; ii++) { - akQx = RexQx[kk]; - bkQx = ImxQx[kk]; - - ccc2Q14 = kCosTabFfftQ14[ff]; - sss2Q14 = kCosTabFfftQ14[ee]; - - if (iSign==1) { - sss2Q14 = -sss2Q14; - } - - RexQx[kk] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, akQx, 14) - - (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, bkQx, 14); - ImxQx[kk] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, akQx, 14) + - (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, bkQx, 14); - - kk += 20; - } - kk = kk - 236; - // Complexity: 12*(12+12) = 288 for 16x16 muls, but 12*(12+32) = 528 cycles for 16x32 muls - } - kk = kk - 19; - // Complexity: 4*288+6 for 16x16 muls, but 4*528+6 cycles for 16x32 muls - } - // Complexity: 3*4*288+6 = 3462 for 16x16 muls, but 3*4*528+6 = 6342 cycles for 16x32 muls - - - // last transform for factor of 4 */ - for (kk=0; kk<240; kk=kk+4) { - k1 = kk + 1; - k2 = k1 + 1; - k3 = k2 + 1; - - akpQx = RexQx[kk] + RexQx[k2]; - akmQx = RexQx[kk] - RexQx[k2]; - ajpQx = RexQx[k1] + RexQx[k3]; - ajmQx = RexQx[k1] - RexQx[k3]; - bkpQx = ImxQx[kk] + ImxQx[k2]; - bkmQx = ImxQx[kk] - ImxQx[k2]; - bjpQx = ImxQx[k1] + ImxQx[k3]; - bjmQx = ImxQx[k1] - ImxQx[k3]; - RexQx[kk] = akpQx + ajpQx; - ImxQx[kk] = bkpQx + bjpQx; - ajpQx = akpQx - ajpQx; - bjpQx = bkpQx - bjpQx; - if (iSign < 0) { - akpQx = akmQx + bjmQx; - bkpQx = bkmQx - ajmQx; - akmQx -= bjmQx; - bkmQx += ajmQx; - } else { - akpQx = akmQx - bjmQx; - bkpQx = bkmQx + ajmQx; - akmQx += bjmQx; - bkmQx -= ajmQx; - } - RexQx[k1] = akpQx; - RexQx[k2] = ajpQx; - RexQx[k3] = akmQx; - ImxQx[k1] = bkpQx; - ImxQx[k2] = bjpQx; - ImxQx[k3] = bkmQx; - } - // Complexity: 60*45 = 2700 for 16x16 muls, but 60*45 = 2700 cycles for 16x32 muls - - /* permute the results to normal order */ - for (ii=0; ii<240; ii++) { - 
ReDATAQx[ii]=RexQx[ii]; - ImDATAQx[ii]=ImxQx[ii]; - } - // Complexity: 240*2=480 cycles - - for (ii=0; ii<240; ii++) { - RexQx[ii]=ReDATAQx[kSortTabFft[ii]]; - ImxQx[ii]=ImDATAQx[kSortTabFft[ii]]; - } - // Complexity: 240*2*2=960 cycles - - // Total complexity: - // 16x16 16x32 - // Complexity: 10 10 - // Complexity: 99 171 - // Complexity: 2972 3932 - // Complexity: 2736 5776 - // Complexity: 5472 10080 - // Complexity: 3462 6342 - // Complexity: 2700 2700 - // Complexity: 480 480 - // Complexity: 960 960 - // ======================= - // 18891 30451 - // - // If this FFT is called 2 time each frame, i.e. 67 times per second, it will correspond to - // a C54 complexity of 67*18891/1000000 = 1.27 MIPS with 16x16-muls, and 67*30451/1000000 = - // = 2.04 MIPS with 16x32-muls. Note that this routine somtimes is called 6 times during the - // encoding of a frame, i.e. the max complexity would be 7/2*1.27 = 4.4 MIPS for the 16x16 mul case. - - - return 0; -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/fft.h b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/fft.h deleted file mode 100644 index 4fe9b96be4f8..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/fft.h +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/*--------------------------------*-C-*---------------------------------* - * File: - * fft.h - * ---------------------------------------------------------------------* - * Re[]: real value array - * Im[]: imaginary value array - * nTotal: total number of complex values - * nPass: number of elements involved in this pass of transform - * nSpan: nspan/nPass = number of bytes to increment pointer - * in Re[] and Im[] - * isign: exponent: +1 = forward -1 = reverse - * scaling: normalizing constant by which the final result is *divided* - * scaling == -1, normalize by total dimension of the transform - * scaling < -1, normalize by the square-root of the total dimension - * - * ---------------------------------------------------------------------- - * See the comments in the code for correct usage! - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_FFT_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_FFT_H_ - -#include "modules/audio_coding/codecs/isac/fix/source/structs.h" - -int16_t WebRtcIsacfix_FftRadix16Fastest(int16_t RexQx[], - int16_t ImxQx[], - int16_t iSign); - -#endif /* MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_FFT_H_ */ diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/filterbank_internal.h b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/filterbank_internal.h deleted file mode 100644 index f741e6f677d2..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/filterbank_internal.h +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. 
An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_FILTERBANK_INTERNAL_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_FILTERBANK_INTERNAL_H_ - -#include - -#if defined(__cplusplus) || defined(c_plusplus) -extern "C" { -#endif - -/* Arguments: - * io: Input/output, in Q0. - * len: Input, sample length. - * coefficient: Input. - * state: Input/output, filter state, in Q4. - */ -typedef void (*HighpassFilterFixDec32)(int16_t* io, - int16_t len, - const int16_t* coefficient, - int32_t* state); -extern HighpassFilterFixDec32 WebRtcIsacfix_HighpassFilterFixDec32; - -void WebRtcIsacfix_HighpassFilterFixDec32C(int16_t* io, - int16_t len, - const int16_t* coefficient, - int32_t* state); - -#if defined(MIPS_DSP_R1_LE) -void WebRtcIsacfix_HighpassFilterFixDec32MIPS(int16_t* io, - int16_t len, - const int16_t* coefficient, - int32_t* state); -#endif - -typedef void (*AllpassFilter2FixDec16)( - int16_t* data_ch1, // Input and output in channel 1, in Q0 - int16_t* data_ch2, // Input and output in channel 2, in Q0 - const int16_t* factor_ch1, // Scaling factor for channel 1, in Q15 - const int16_t* factor_ch2, // Scaling factor for channel 2, in Q15 - int length, // Length of the data buffers - int32_t* filter_state_ch1, // Filter state for channel 1, in Q16 - int32_t* filter_state_ch2); // Filter state for channel 2, in Q16 -extern AllpassFilter2FixDec16 WebRtcIsacfix_AllpassFilter2FixDec16; - -void WebRtcIsacfix_AllpassFilter2FixDec16C(int16_t* data_ch1, - int16_t* data_ch2, - const int16_t* factor_ch1, - const int16_t* factor_ch2, - int length, - int32_t* filter_state_ch1, - int32_t* filter_state_ch2); - -#if defined(WEBRTC_HAS_NEON) -void WebRtcIsacfix_AllpassFilter2FixDec16Neon(int16_t* data_ch1, - int16_t* data_ch2, - const int16_t* factor_ch1, - const int16_t* factor_ch2, - int length, - int32_t* filter_state_ch1, - int32_t* filter_state_ch2); -#endif - -#if defined(MIPS_DSP_R1_LE) -void WebRtcIsacfix_AllpassFilter2FixDec16MIPS(int16_t* data_ch1, - int16_t* data_ch2, - const int16_t* factor_ch1, - const int16_t* factor_ch2, - int length, - int32_t* filter_state_ch1, - int32_t* filter_state_ch2); -#endif - -#if defined(__cplusplus) || defined(c_plusplus) -} -#endif - -#endif -/* WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_FILTERBANK_INTERNAL_H_ */ diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/filterbank_tables.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/filterbank_tables.c deleted file mode 100644 index f2dec79c2d0a..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/filterbank_tables.c +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/* - * filterbank_tables.c - * - * This file contains variables that are used in - * filterbanks.c - * - */ - -#include "modules/audio_coding/codecs/isac/fix/source/filterbank_tables.h" - -/* HPstcoeff_in_Q14 = {a1, a2, b1 - b0 * a1, b2 - b0 * a2}; - * In float, they are: {-1.94895953203325f, 0.94984516000000f, - * -0.05101826139794f, 0.05015484000000f}; - */ -const int16_t WebRtcIsacfix_kHpStCoeffInQ30[8] = { - 16189, -31932, /* Q30 lo/hi pair */ - 17243, 15562, /* Q30 lo/hi pair */ - -17186, -26748, /* Q35 lo/hi pair */ - -27476, 26296 /* Q35 lo/hi pair */ -}; - -/* HPstcoeff_out_1_Q14 = {a1, a2, b1 - b0 * a1, b2 - b0 * a2}; - * In float, they are: {-1.99701049409000f, 0.99714204490000f, - * 0.01701049409000f, -0.01704204490000f}; - */ -const int16_t WebRtcIsacfix_kHPStCoeffOut1Q30[8] = { - -1306, -32719, /* Q30 lo/hi pair */ - 11486, 16337, /* Q30 lo/hi pair */ - 26078, 8918, /* Q35 lo/hi pair */ - 3956, -8935 /* Q35 lo/hi pair */ -}; - -/* HPstcoeff_out_2_Q14 = {a1, a2, b1 - b0 * a1, b2 - b0 * a2}; - * In float, they are: {-1.98645294509837f, 0.98672435560000f, - * 0.00645294509837f, -0.00662435560000f}; - */ -const int16_t WebRtcIsacfix_kHPStCoeffOut2Q30[8] = { - -2953, -32546, /* Q30 lo/hi pair */ - 32233, 16166, /* Q30 lo/hi pair */ - 13217, 3383, /* Q35 lo/hi pair */ - -4597, -3473 /* Q35 lo/hi pair */ -}; - -/* The upper channel all-pass filter factors */ -const int16_t WebRtcIsacfix_kUpperApFactorsQ15[2] = { - 1137, 12537 -}; - -/* The lower channel all-pass filter factors */ -const int16_t WebRtcIsacfix_kLowerApFactorsQ15[2] = { - 5059, 24379 -}; diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/filterbank_tables.h b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/filterbank_tables.h deleted file mode 100644 index 01e5a7ba8552..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/filterbank_tables.h +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * filterbank_tables.h - * - * Header file for variables that are defined in - * filterbank_tables.c. 
- * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_FILTERBANK_TABLES_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_FILTERBANK_TABLES_H_ - -#include - -#if defined(__cplusplus) || defined(c_plusplus) -extern "C" { -#endif - -/********************* Coefficient Tables ************************/ - -/* HPstcoeff_in_Q14 = {a1, a2, b1 - b0 * a1, b2 - b0 * a2}; */ -/* [Q30lo Q30hi Q30lo Q30hi Q35lo Q35hi Q35lo Q35hi] */ -extern const int16_t WebRtcIsacfix_kHpStCoeffInQ30[8]; - -/* HPstcoeff_out_1_Q14 = {a1, a2, b1 - b0 * a1, b2 - b0 * a2}; */ -/* [Q30lo Q30hi Q30lo Q30hi Q35lo Q35hi Q35lo Q35hi] */ -extern const int16_t WebRtcIsacfix_kHPStCoeffOut1Q30[8]; - -/* HPstcoeff_out_2_Q14 = {a1, a2, b1 - b0 * a1, b2 - b0 * a2}; */ -/* [Q30lo Q30hi Q30lo Q30hi Q35lo Q35hi Q35lo Q35hi] */ -extern const int16_t WebRtcIsacfix_kHPStCoeffOut2Q30[8]; - -/* The upper channel all-pass filter factors */ -extern const int16_t WebRtcIsacfix_kUpperApFactorsQ15[2]; - -/* The lower channel all-pass filter factors */ -extern const int16_t WebRtcIsacfix_kLowerApFactorsQ15[2]; - -#if defined(__cplusplus) || defined(c_plusplus) -} -#endif - -#endif /* MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_FILTERBANK_TABLES_H_ */ diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/filterbanks.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/filterbanks.c deleted file mode 100644 index 57b3e70b89cb..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/filterbanks.c +++ /dev/null @@ -1,297 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * filterbanks.c - * - * This file contains function - * WebRtcIsacfix_SplitAndFilter, and WebRtcIsacfix_FilterAndCombine - * which implement filterbanks that produce decimated lowpass and - * highpass versions of a signal, and performs reconstruction. - * - */ - -#include "modules/audio_coding/codecs/isac/fix/source/filterbank_internal.h" - -#include "modules/audio_coding/codecs/isac/fix/source/codec.h" -#include "modules/audio_coding/codecs/isac/fix/source/filterbank_tables.h" -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" -#include "rtc_base/checks.h" - -// Declare a function pointer. -AllpassFilter2FixDec16 WebRtcIsacfix_AllpassFilter2FixDec16; - -void WebRtcIsacfix_AllpassFilter2FixDec16C( - int16_t *data_ch1, // Input and output in channel 1, in Q0 - int16_t *data_ch2, // Input and output in channel 2, in Q0 - const int16_t *factor_ch1, // Scaling factor for channel 1, in Q15 - const int16_t *factor_ch2, // Scaling factor for channel 2, in Q15 - const int length, // Length of the data buffers - int32_t *filter_state_ch1, // Filter state for channel 1, in Q16 - int32_t *filter_state_ch2) { // Filter state for channel 2, in Q16 - int n = 0; - int32_t state0_ch1 = filter_state_ch1[0], state1_ch1 = filter_state_ch1[1]; - int32_t state0_ch2 = filter_state_ch2[0], state1_ch2 = filter_state_ch2[1]; - int16_t in_out = 0; - int32_t a = 0, b = 0; - - // Assembly file assumption. 
- RTC_DCHECK_EQ(0, length % 2); - - for (n = 0; n < length; n++) { - // Process channel 1: - in_out = data_ch1[n]; - a = factor_ch1[0] * in_out; // Q15 * Q0 = Q15 - a *= 1 << 1; // Q15 -> Q16 - b = WebRtcSpl_AddSatW32(a, state0_ch1); - a = -factor_ch1[0] * (int16_t)(b >> 16); // Q15 - state0_ch1 = - WebRtcSpl_AddSatW32(a * (1 << 1), (int32_t)in_out * (1 << 16)); // Q16 - in_out = (int16_t) (b >> 16); // Save as Q0 - - a = factor_ch1[1] * in_out; // Q15 * Q0 = Q15 - a *= 1 << 1; // Q15 -> Q16 - b = WebRtcSpl_AddSatW32(a, state1_ch1); // Q16 - a = -factor_ch1[1] * (int16_t)(b >> 16); // Q15 - state1_ch1 = - WebRtcSpl_AddSatW32(a * (1 << 1), (int32_t)in_out * (1 << 16)); // Q16 - data_ch1[n] = (int16_t) (b >> 16); // Save as Q0 - - // Process channel 2: - in_out = data_ch2[n]; - a = factor_ch2[0] * in_out; // Q15 * Q0 = Q15 - a *= 1 << 1; // Q15 -> Q16 - b = WebRtcSpl_AddSatW32(a, state0_ch2); // Q16 - a = -factor_ch2[0] * (int16_t)(b >> 16); // Q15 - state0_ch2 = - WebRtcSpl_AddSatW32(a * (1 << 1), (int32_t)in_out * (1 << 16)); // Q16 - in_out = (int16_t) (b >> 16); // Save as Q0 - - a = factor_ch2[1] * in_out; // Q15 * Q0 = Q15 - a *= (1 << 1); // Q15 -> Q16 - b = WebRtcSpl_AddSatW32(a, state1_ch2); // Q16 - a = -factor_ch2[1] * (int16_t)(b >> 16); // Q15 - state1_ch2 = - WebRtcSpl_AddSatW32(a * (1 << 1), (int32_t)in_out * (1 << 16)); // Q16 - data_ch2[n] = (int16_t) (b >> 16); // Save as Q0 - } - - filter_state_ch1[0] = state0_ch1; - filter_state_ch1[1] = state1_ch1; - filter_state_ch2[0] = state0_ch2; - filter_state_ch2[1] = state1_ch2; -} - -// Declare a function pointer. -HighpassFilterFixDec32 WebRtcIsacfix_HighpassFilterFixDec32; - -void WebRtcIsacfix_HighpassFilterFixDec32C(int16_t *io, - int16_t len, - const int16_t *coefficient, - int32_t *state) -{ - int k; - int32_t a1 = 0, b1 = 0, c = 0, in = 0; - int32_t a2 = 0, b2 = 0; - int32_t state0 = state[0]; - int32_t state1 = state[1]; - - for (k=0; k Q7 */ - a1 = WEBRTC_SPL_MUL_16_32_RSFT16(coefficient[5], state0) + - (WEBRTC_SPL_MUL_16_32_RSFT16(coefficient[4], state0) >> 16); - b1 = WEBRTC_SPL_MUL_16_32_RSFT16(coefficient[7], state1) + - (WEBRTC_SPL_MUL_16_32_RSFT16(coefficient[6], state1) >> 16); - - /* Q30 * Q4 = Q34 ; shift 32 bit => Q2 */ - a2 = WEBRTC_SPL_MUL_16_32_RSFT16(coefficient[1], state0) + - (WEBRTC_SPL_MUL_16_32_RSFT16(coefficient[0], state0) >> 16); - b2 = WEBRTC_SPL_MUL_16_32_RSFT16(coefficient[3], state1) + - (WEBRTC_SPL_MUL_16_32_RSFT16(coefficient[2], state1) >> 16); -#endif - - c = in + ((a1 + b1) >> 7); // Q0. - io[k] = (int16_t)WebRtcSpl_SatW32ToW16(c); // Write output as Q0. - - c = in * (1 << 2) - a2 - b2; // In Q2. - c = (int32_t)WEBRTC_SPL_SAT(536870911, c, -536870912); - - state1 = state0; - state0 = c * (1 << 2); // Write state as Q4 - } - state[0] = state0; - state[1] = state1; -} - - -void WebRtcIsacfix_SplitAndFilter1(int16_t *pin, - int16_t *LP16, - int16_t *HP16, - PreFiltBankstr *prefiltdata) -{ - /* Function WebRtcIsacfix_SplitAndFilter */ - /* This function creates low-pass and high-pass decimated versions of part of - the input signal, and part of the signal in the input 'lookahead buffer'. 
*/ - - int k; - - int16_t tempin_ch1[FRAMESAMPLES/2 + QLOOKAHEAD]; - int16_t tempin_ch2[FRAMESAMPLES/2 + QLOOKAHEAD]; - int32_t tmpState_ch1[2 * (QORDER-1)]; /* 4 */ - int32_t tmpState_ch2[2 * (QORDER-1)]; /* 4 */ - - /* High pass filter */ - WebRtcIsacfix_HighpassFilterFixDec32(pin, FRAMESAMPLES, WebRtcIsacfix_kHpStCoeffInQ30, prefiltdata->HPstates_fix); - - - /* First Channel */ - for (k=0;kINLABUF1_fix[k]; - prefiltdata->INLABUF1_fix[k] = pin[FRAMESAMPLES + 1 - 2 * (QLOOKAHEAD - k)]; - } - - /* Second Channel. This is exactly like the first channel, except that the - even samples are now filtered instead (lower channel). */ - for (k=0;kINLABUF2_fix[k]; - prefiltdata->INLABUF2_fix[k] = pin[FRAMESAMPLES - 2 * (QLOOKAHEAD - k)]; - } - - - /*obtain polyphase components by forward all-pass filtering through each channel */ - /* The all pass filtering automatically updates the filter states which are exported in the - prefiltdata structure */ - WebRtcIsacfix_AllpassFilter2FixDec16(tempin_ch1, - tempin_ch2, - WebRtcIsacfix_kUpperApFactorsQ15, - WebRtcIsacfix_kLowerApFactorsQ15, - FRAMESAMPLES/2, - prefiltdata->INSTAT1_fix, - prefiltdata->INSTAT2_fix); - - for (k = 0; k < 2 * (QORDER - 1); k++) { - tmpState_ch1[k] = prefiltdata->INSTAT1_fix[k]; - tmpState_ch2[k] = prefiltdata->INSTAT2_fix[k]; - } - WebRtcIsacfix_AllpassFilter2FixDec16(tempin_ch1 + FRAMESAMPLES/2, - tempin_ch2 + FRAMESAMPLES/2, - WebRtcIsacfix_kUpperApFactorsQ15, - WebRtcIsacfix_kLowerApFactorsQ15, - QLOOKAHEAD, - tmpState_ch1, - tmpState_ch2); - - /* Now Construct low-pass and high-pass signals as combinations of polyphase components */ - for (k=0; k Q0 - tmp2 = (int32_t)tempin_ch2[k]; // Q0 -> Q0 - tmp3 = (tmp1 + tmp2) >> 1; /* Low pass signal. */ - LP16[k] = (int16_t)WebRtcSpl_SatW32ToW16(tmp3); /*low pass */ - tmp3 = (tmp1 - tmp2) >> 1; /* High pass signal. */ - HP16[k] = (int16_t)WebRtcSpl_SatW32ToW16(tmp3); /*high pass */ - } - -}/*end of WebRtcIsacfix_SplitAndFilter */ - - - -////////////////////////////////////////////////////////// -////////// Combining -/* Function WebRtcIsacfix_FilterAndCombine */ -/* This is a decoder function that takes the decimated - length FRAMESAMPLES/2 input low-pass and - high-pass signals and creates a reconstructed fullband - output signal of length FRAMESAMPLES. WebRtcIsacfix_FilterAndCombine - is the sibling function of WebRtcIsacfix_SplitAndFilter */ -/* INPUTS: - inLP: a length FRAMESAMPLES/2 array of input low-pass - samples. - inHP: a length FRAMESAMPLES/2 array of input high-pass - samples. - postfiltdata: input data structure containing the filterbank - states from the previous decoding iteration. - OUTPUTS: - Out: a length FRAMESAMPLES array of output reconstructed - samples (fullband) based on the input low-pass and - high-pass signals. - postfiltdata: the input data structure containing the filterbank - states is updated for the next decoding iteration */ -void WebRtcIsacfix_FilterAndCombine1(int16_t *tempin_ch1, - int16_t *tempin_ch2, - int16_t *out16, - PostFiltBankstr *postfiltdata) -{ - int k; - int16_t in[FRAMESAMPLES]; - - /* all-pass filter the new upper and lower channel signal. - For upper channel, use the all-pass filter factors that were used as a - lower channel at the encoding side. So at the decoder, the corresponding - all-pass filter factors for each channel are swapped. 
- For lower channel signal, since all-pass filter factors at the decoder are - swapped from the ones at the encoder, the 'upper' channel all-pass filter - factors (kUpperApFactors) are used to filter this new lower channel signal. - */ - WebRtcIsacfix_AllpassFilter2FixDec16(tempin_ch1, - tempin_ch2, - WebRtcIsacfix_kLowerApFactorsQ15, - WebRtcIsacfix_kUpperApFactorsQ15, - FRAMESAMPLES/2, - postfiltdata->STATE_0_UPPER_fix, - postfiltdata->STATE_0_LOWER_fix); - - /* Merge outputs to form the full length output signal.*/ - for (k=0;kHPstates1_fix); - WebRtcIsacfix_HighpassFilterFixDec32(in, FRAMESAMPLES, WebRtcIsacfix_kHPStCoeffOut2Q30, postfiltdata->HPstates2_fix); - - for (k=0;k - -#include "modules/audio_coding/codecs/isac/fix/source/filterbank_internal.h" -#include "rtc_base/checks.h" - -void WebRtcIsacfix_AllpassFilter2FixDec16Neon( - int16_t* data_ch1, // Input and output in channel 1, in Q0 - int16_t* data_ch2, // Input and output in channel 2, in Q0 - const int16_t* factor_ch1, // Scaling factor for channel 1, in Q15 - const int16_t* factor_ch2, // Scaling factor for channel 2, in Q15 - const int length, // Length of the data buffers - int32_t* filter_state_ch1, // Filter state for channel 1, in Q16 - int32_t* filter_state_ch2) { // Filter state for channel 2, in Q16 - RTC_DCHECK_EQ(0, length % 2); - int n = 0; - int16x4_t factorv; - int16x4_t datav; - int32x4_t statev; - - // Load factor_ch1 and factor_ch2. - factorv = vld1_dup_s16(factor_ch1); - factorv = vld1_lane_s16(factor_ch1 + 1, factorv, 1); - factorv = vld1_lane_s16(factor_ch2, factorv, 2); - factorv = vld1_lane_s16(factor_ch2 + 1, factorv, 3); - - // Load filter_state_ch1[0] and filter_state_ch2[0]. - statev = vld1q_dup_s32(filter_state_ch1); - statev = vld1q_lane_s32(filter_state_ch2, statev, 2); - - // Loop unrolling preprocessing. - int32x4_t a; - int16x4_t tmp1, tmp2; - - // Load data_ch1[0] and data_ch2[0]. - datav = vld1_dup_s16(data_ch1); - datav = vld1_lane_s16(data_ch2, datav, 2); - - a = vqdmlal_s16(statev, datav, factorv); - tmp1 = vshrn_n_s32(a, 16); - - // Update filter_state_ch1[0] and filter_state_ch2[0]. - statev = vqdmlsl_s16(vshll_n_s16(datav, 16), tmp1, factorv); - - // Load filter_state_ch1[1] and filter_state_ch2[1]. - statev = vld1q_lane_s32(filter_state_ch1 + 1, statev, 1); - statev = vld1q_lane_s32(filter_state_ch2 + 1, statev, 3); - - // Load data_ch1[1] and data_ch2[1]. - tmp1 = vld1_lane_s16(data_ch1 + 1, tmp1, 1); - tmp1 = vld1_lane_s16(data_ch2 + 1, tmp1, 3); - datav = vrev32_s16(tmp1); - - // Loop unrolling processing. - for (n = 0; n < length - 2; n += 2) { - a = vqdmlal_s16(statev, datav, factorv); - tmp1 = vshrn_n_s32(a, 16); - // Store data_ch1[n] and data_ch2[n]. - vst1_lane_s16(data_ch1 + n, tmp1, 1); - vst1_lane_s16(data_ch2 + n, tmp1, 3); - - // Update filter_state_ch1[0], filter_state_ch1[1] - // and filter_state_ch2[0], filter_state_ch2[1]. - statev = vqdmlsl_s16(vshll_n_s16(datav, 16), tmp1, factorv); - - // Load data_ch1[n + 2] and data_ch2[n + 2]. - tmp1 = vld1_lane_s16(data_ch1 + n + 2, tmp1, 1); - tmp1 = vld1_lane_s16(data_ch2 + n + 2, tmp1, 3); - datav = vrev32_s16(tmp1); - - a = vqdmlal_s16(statev, datav, factorv); - tmp2 = vshrn_n_s32(a, 16); - // Store data_ch1[n + 1] and data_ch2[n + 1]. - vst1_lane_s16(data_ch1 + n + 1, tmp2, 1); - vst1_lane_s16(data_ch2 + n + 1, tmp2, 3); - - // Update filter_state_ch1[0], filter_state_ch1[1] - // and filter_state_ch2[0], filter_state_ch2[1]. 
- statev = vqdmlsl_s16(vshll_n_s16(datav, 16), tmp2, factorv); - - // Load data_ch1[n + 3] and data_ch2[n + 3]. - tmp2 = vld1_lane_s16(data_ch1 + n + 3, tmp2, 1); - tmp2 = vld1_lane_s16(data_ch2 + n + 3, tmp2, 3); - datav = vrev32_s16(tmp2); - } - - // Loop unrolling post-processing. - a = vqdmlal_s16(statev, datav, factorv); - tmp1 = vshrn_n_s32(a, 16); - // Store data_ch1[n] and data_ch2[n]. - vst1_lane_s16(data_ch1 + n, tmp1, 1); - vst1_lane_s16(data_ch2 + n, tmp1, 3); - - // Update filter_state_ch1[0], filter_state_ch1[1] - // and filter_state_ch2[0], filter_state_ch2[1]. - statev = vqdmlsl_s16(vshll_n_s16(datav, 16), tmp1, factorv); - // Store filter_state_ch1[0] and filter_state_ch2[0]. - vst1q_lane_s32(filter_state_ch1, statev, 0); - vst1q_lane_s32(filter_state_ch2, statev, 2); - - datav = vrev32_s16(tmp1); - a = vqdmlal_s16(statev, datav, factorv); - tmp2 = vshrn_n_s32(a, 16); - // Store data_ch1[n + 1] and data_ch2[n + 1]. - vst1_lane_s16(data_ch1 + n + 1, tmp2, 1); - vst1_lane_s16(data_ch2 + n + 1, tmp2, 3); - - // Update filter_state_ch1[1] and filter_state_ch2[1]. - statev = vqdmlsl_s16(vshll_n_s16(datav, 16), tmp2, factorv); - // Store filter_state_ch1[1] and filter_state_ch2[1]. - vst1q_lane_s32(filter_state_ch1 + 1, statev, 1); - vst1q_lane_s32(filter_state_ch2 + 1, statev, 3); -} - -// This function is the prototype for above neon optimized function. -//void AllpassFilter2FixDec16BothChannels( -// int16_t *data_ch1, // Input and output in channel 1, in Q0 -// int16_t *data_ch2, // Input and output in channel 2, in Q0 -// const int16_t *factor_ch1, // Scaling factor for channel 1, in Q15 -// const int16_t *factor_ch2, // Scaling factor for channel 2, in Q15 -// const int length, // Length of the data buffers -// int32_t *filter_state_ch1, // Filter state for channel 1, in Q16 -// int32_t *filter_state_ch2) { // Filter state for channel 2, in Q16 -// int n = 0; -// int32_t state0_ch1 = filter_state_ch1[0], state1_ch1 = filter_state_ch1[1]; -// int32_t state0_ch2 = filter_state_ch2[0], state1_ch2 = filter_state_ch2[1]; -// int16_t sample0_ch1 = 0, sample0_ch2 = 0; -// int16_t sample1_ch1 = 0, sample1_ch2 = 0; -// int32_t a0_ch1 = 0, a0_ch2 = 0; -// int32_t b0_ch1 = 0, b0_ch2 = 0; -// -// int32_t a1_ch1 = 0, a1_ch2 = 0; -// int32_t b1_ch1 = 0, b1_ch2 = 0; -// int32_t b2_ch1 = 0, b2_ch2 = 0; -// -// // Loop unrolling preprocessing. 
-// -// sample0_ch1 = data_ch1[n]; -// sample0_ch2 = data_ch2[n]; -// -// a0_ch1 = (factor_ch1[0] * sample0_ch1) << 1; -// a0_ch2 = (factor_ch2[0] * sample0_ch2) << 1; -// -// b0_ch1 = WebRtcSpl_AddSatW32(a0_ch1, state0_ch1); -// b0_ch2 = WebRtcSpl_AddSatW32(a0_ch2, state0_ch2); //Q16+Q16=Q16 -// -// a0_ch1 = -factor_ch1[0] * (int16_t)(b0_ch1 >> 16); -// a0_ch2 = -factor_ch2[0] * (int16_t)(b0_ch2 >> 16); -// -// state0_ch1 = WebRtcSpl_AddSatW32(a0_ch1 <<1, (uint32_t)sample0_ch1 << 16); -// state0_ch2 = WebRtcSpl_AddSatW32(a0_ch2 <<1, (uint32_t)sample0_ch2 << 16); -// -// sample1_ch1 = data_ch1[n + 1]; -// sample0_ch1 = (int16_t) (b0_ch1 >> 16); //Save as Q0 -// sample1_ch2 = data_ch2[n + 1]; -// sample0_ch2 = (int16_t) (b0_ch2 >> 16); //Save as Q0 -// -// -// for (n = 0; n < length - 2; n += 2) { -// a1_ch1 = (factor_ch1[0] * sample1_ch1) << 1; -// a0_ch1 = (factor_ch1[1] * sample0_ch1) << 1; -// a1_ch2 = (factor_ch2[0] * sample1_ch2) << 1; -// a0_ch2 = (factor_ch2[1] * sample0_ch2) << 1; -// -// b1_ch1 = WebRtcSpl_AddSatW32(a1_ch1, state0_ch1); -// b0_ch1 = WebRtcSpl_AddSatW32(a0_ch1, state1_ch1); //Q16+Q16=Q16 -// b1_ch2 = WebRtcSpl_AddSatW32(a1_ch2, state0_ch2); //Q16+Q16=Q16 -// b0_ch2 = WebRtcSpl_AddSatW32(a0_ch2, state1_ch2); //Q16+Q16=Q16 -// -// a1_ch1 = -factor_ch1[0] * (int16_t)(b1_ch1 >> 16); -// a0_ch1 = -factor_ch1[1] * (int16_t)(b0_ch1 >> 16); -// a1_ch2 = -factor_ch2[0] * (int16_t)(b1_ch2 >> 16); -// a0_ch2 = -factor_ch2[1] * (int16_t)(b0_ch2 >> 16); -// -// state0_ch1 = WebRtcSpl_AddSatW32(a1_ch1<<1, (uint32_t)sample1_ch1 <<16); -// state1_ch1 = WebRtcSpl_AddSatW32(a0_ch1<<1, (uint32_t)sample0_ch1 <<16); -// state0_ch2 = WebRtcSpl_AddSatW32(a1_ch2<<1, (uint32_t)sample1_ch2 <<16); -// state1_ch2 = WebRtcSpl_AddSatW32(a0_ch2<<1, (uint32_t)sample0_ch2 <<16); -// -// sample0_ch1 = data_ch1[n + 2]; -// sample1_ch1 = (int16_t) (b1_ch1 >> 16); //Save as Q0 -// sample0_ch2 = data_ch2[n + 2]; -// sample1_ch2 = (int16_t) (b1_ch2 >> 16); //Save as Q0 -// -// a0_ch1 = (factor_ch1[0] * sample0_ch1) << 1; -// a1_ch1 = (factor_ch1[1] * sample1_ch1) << 1; -// a0_ch2 = (factor_ch2[0] * sample0_ch2) << 1; -// a1_ch2 = (factor_ch2[1] * sample1_ch2) << 1; -// -// b2_ch1 = WebRtcSpl_AddSatW32(a0_ch1, state0_ch1); -// b1_ch1 = WebRtcSpl_AddSatW32(a1_ch1, state1_ch1); //Q16+Q16=Q16 -// b2_ch2 = WebRtcSpl_AddSatW32(a0_ch2, state0_ch2); //Q16+Q16=Q16 -// b1_ch2 = WebRtcSpl_AddSatW32(a1_ch2, state1_ch2); //Q16+Q16=Q16 -// -// a0_ch1 = -factor_ch1[0] * (int16_t)(b2_ch1 >> 16); -// a1_ch1 = -factor_ch1[1] * (int16_t)(b1_ch1 >> 16); -// a0_ch2 = -factor_ch2[0] * (int16_t)(b2_ch2 >> 16); -// a1_ch2 = -factor_ch2[1] * (int16_t)(b1_ch2 >> 16); -// -// state0_ch1 = WebRtcSpl_AddSatW32(a0_ch1<<1, (uint32_t)sample0_ch1<<16); -// state1_ch1 = WebRtcSpl_AddSatW32(a1_ch1<<1, (uint32_t)sample1_ch1<<16); -// state0_ch2 = WebRtcSpl_AddSatW32(a0_ch2<<1, (uint32_t)sample0_ch2<<16); -// state1_ch2 = WebRtcSpl_AddSatW32(a1_ch2<<1, (uint32_t)sample1_ch2<<16); -// -// -// sample1_ch1 = data_ch1[n + 3]; -// sample0_ch1 = (int16_t) (b2_ch1 >> 16); //Save as Q0 -// sample1_ch2 = data_ch2[n + 3]; -// sample0_ch2 = (int16_t) (b2_ch2 >> 16); //Save as Q0 -// -// data_ch1[n] = (int16_t) (b0_ch1 >> 16); //Save as Q0 -// data_ch1[n + 1] = (int16_t) (b1_ch1 >> 16); //Save as Q0 -// data_ch2[n] = (int16_t) (b0_ch2 >> 16); -// data_ch2[n + 1] = (int16_t) (b1_ch2 >> 16); -// } -// -// // Loop unrolling post-processing. 
-// -// a1_ch1 = (factor_ch1[0] * sample1_ch1) << 1; -// a0_ch1 = (factor_ch1[1] * sample0_ch1) << 1; -// a1_ch2 = (factor_ch2[0] * sample1_ch2) << 1; -// a0_ch2 = (factor_ch2[1] * sample0_ch2) << 1; -// -// b1_ch1 = WebRtcSpl_AddSatW32(a1_ch1, state0_ch1); -// b0_ch1 = WebRtcSpl_AddSatW32(a0_ch1, state1_ch1); -// b1_ch2 = WebRtcSpl_AddSatW32(a1_ch2, state0_ch2); -// b0_ch2 = WebRtcSpl_AddSatW32(a0_ch2, state1_ch2); -// -// a1_ch1 = -factor_ch1[0] * (int16_t)(b1_ch1 >> 16); -// a0_ch1 = -factor_ch1[1] * (int16_t)(b0_ch1 >> 16); -// a1_ch2 = -factor_ch2[0] * (int16_t)(b1_ch2 >> 16); -// a0_ch2 = -factor_ch2[1] * (int16_t)(b0_ch2 >> 16); -// -// state0_ch1 = WebRtcSpl_AddSatW32(a1_ch1<<1, (uint32_t)sample1_ch1 << 16); -// state1_ch1 = WebRtcSpl_AddSatW32(a0_ch1<<1, (uint32_t)sample0_ch1 << 16); -// state0_ch2 = WebRtcSpl_AddSatW32(a1_ch2<<1, (uint32_t)sample1_ch2 << 16); -// state1_ch2 = WebRtcSpl_AddSatW32(a0_ch2<<1, (uint32_t)sample0_ch2 << 16); -// -// data_ch1[n] = (int16_t) (b0_ch1 >> 16); //Save as Q0 -// data_ch2[n] = (int16_t) (b0_ch2 >> 16); -// -// sample1_ch1 = (int16_t) (b1_ch1 >> 16); //Save as Q0 -// sample1_ch2 = (int16_t) (b1_ch2 >> 16); //Save as Q0 -// -// a1_ch1 = (factor_ch1[1] * sample1_ch1) << 1; -// a1_ch2 = (factor_ch2[1] * sample1_ch2) << 1; -// -// b1_ch1 = WebRtcSpl_AddSatW32(a1_ch1, state1_ch1); //Q16+Q16=Q16 -// b1_ch2 = WebRtcSpl_AddSatW32(a1_ch2, state1_ch2); //Q16+Q16=Q16 -// -// a1_ch1 = -factor_ch1[1] * (int16_t)(b1_ch1 >> 16); -// a1_ch2 = -factor_ch2[1] * (int16_t)(b1_ch2 >> 16); -// -// state1_ch1 = WebRtcSpl_AddSatW32(a1_ch1<<1, (uint32_t)sample1_ch1<<16); -// state1_ch2 = WebRtcSpl_AddSatW32(a1_ch2<<1, (uint32_t)sample1_ch2<<16); -// -// data_ch1[n + 1] = (int16_t) (b1_ch1 >> 16); //Save as Q0 -// data_ch2[n + 1] = (int16_t) (b1_ch2 >> 16); -// -// filter_state_ch1[0] = state0_ch1; -// filter_state_ch1[1] = state1_ch1; -// filter_state_ch2[0] = state0_ch2; -// filter_state_ch2[1] = state1_ch2; -//} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/filterbanks_unittest.cc b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/filterbanks_unittest.cc deleted file mode 100644 index 4a3db2324a13..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/filterbanks_unittest.cc +++ /dev/null @@ -1,103 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "common_audio/signal_processing/include/signal_processing_library.h" -#include "modules/audio_coding/codecs/isac/fix/source/filterbank_internal.h" -#include "modules/audio_coding/codecs/isac/fix/source/filterbank_tables.h" -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" -#include "rtc_base/sanitizer.h" -#include "system_wrappers/include/cpu_features_wrapper.h" -#include "test/gtest.h" - -class FilterBanksTest : public ::testing::Test { - protected: - // Pass a function pointer to the Tester function. 
- void RTC_NO_SANITIZE("signed-integer-overflow") // bugs.webrtc.org/5513 - CalculateResidualEnergyTester( - AllpassFilter2FixDec16 AllpassFilter2FixDec16Function) { - const int kSamples = QLOOKAHEAD; - const int kState = 2; - int16_t data_ch1[kSamples] = {0}; - int16_t data_ch2[kSamples] = {0}; - int32_t state_ch1[kState] = {0}; - int32_t state_ch2[kState] = {0}; - const int32_t out_state_ch1[kState] = {-809122714, 1645972152}; - const int32_t out_state_ch2[kState] = {428019288, 1057309936}; - const int32_t out_data_ch1[kSamples] = { - 0, 0, 347, 10618, 16718, -7089, 32767, 16913, - 27042, 8377, -22973, -28372, -27603, -14804, 398, -25332, - -11200, 18044, 25223, -6839, 1116, -23984, 32717, 7364}; - const int32_t out_data_ch2[kSamples] = { - 0, 0, 3010, 22351, 21106, 16969, -2095, -664, - 3513, -30980, 32767, -23839, 13335, 20289, -6831, 339, - -17207, 32767, 4959, 6177, 32767, 16599, -4747, 20504}; - int sign = 1; - - for (int i = 0; i < kSamples; i++) { - sign *= -1; - data_ch1[i] = sign * WEBRTC_SPL_WORD32_MAX / (i * i + 1); - data_ch2[i] = sign * WEBRTC_SPL_WORD32_MIN / (i * i + 1); - // UBSan: -1 * -2147483648 cannot be represented in type 'int' - }; - - AllpassFilter2FixDec16Function( - data_ch1, data_ch2, WebRtcIsacfix_kUpperApFactorsQ15, - WebRtcIsacfix_kLowerApFactorsQ15, kSamples, state_ch1, state_ch2); - - for (int i = 0; i < kSamples; i++) { - EXPECT_EQ(out_data_ch1[i], data_ch1[i]); - EXPECT_EQ(out_data_ch2[i], data_ch2[i]); - } - for (int i = 0; i < kState; i++) { - EXPECT_EQ(out_state_ch1[i], state_ch1[i]); - EXPECT_EQ(out_state_ch2[i], state_ch2[i]); - } - } -}; - -TEST_F(FilterBanksTest, AllpassFilter2FixDec16Test) { - CalculateResidualEnergyTester(WebRtcIsacfix_AllpassFilter2FixDec16C); -#if defined(WEBRTC_HAS_NEON) - CalculateResidualEnergyTester(WebRtcIsacfix_AllpassFilter2FixDec16Neon); -#endif -} - -TEST_F(FilterBanksTest, HighpassFilterFixDec32Test) { - const int kSamples = 20; - int16_t in[kSamples]; - int32_t state[2] = {12345, 987654}; -#ifdef WEBRTC_ARCH_ARM_V7 - int32_t out[kSamples] = {-1040, -1035, -22875, -1397, -27604, 20018, 7917, - -1279, -8552, -14494, -7558, -23537, -27258, -30554, - -32768, -3432, -32768, 25215, -27536, 22436}; -#else - int32_t out[kSamples] = {-1040, -1035, -22875, -1397, -27604, 20017, 7915, - -1280, -8554, -14496, -7561, -23541, -27263, -30560, - -32768, -3441, -32768, 25203, -27550, 22419}; -#endif - HighpassFilterFixDec32 WebRtcIsacfix_HighpassFilterFixDec32; -#if defined(MIPS_DSP_R1_LE) - WebRtcIsacfix_HighpassFilterFixDec32 = - WebRtcIsacfix_HighpassFilterFixDec32MIPS; -#else - WebRtcIsacfix_HighpassFilterFixDec32 = WebRtcIsacfix_HighpassFilterFixDec32C; -#endif - - for (int i = 0; i < kSamples; i++) { - in[i] = WEBRTC_SPL_WORD32_MAX / (i + 1); - } - - WebRtcIsacfix_HighpassFilterFixDec32(in, kSamples, - WebRtcIsacfix_kHPStCoeffOut1Q30, state); - - for (int i = 0; i < kSamples; i++) { - EXPECT_EQ(out[i], in[i]); - } -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/filters.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/filters.c deleted file mode 100644 index 838ba4b3e88f..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/filters.c +++ /dev/null @@ -1,112 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. 
An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "rtc_base/checks.h" -#include "modules/audio_coding/codecs/isac/fix/source/codec.h" - -// Autocorrelation function in fixed point. -// NOTE! Different from SPLIB-version in how it scales the signal. -int WebRtcIsacfix_AutocorrC(int32_t* __restrict r, - const int16_t* __restrict x, - int16_t N, - int16_t order, - int16_t* __restrict scale) { - int i = 0; - int j = 0; - int16_t scaling = 0; - int32_t sum = 0; - uint32_t temp = 0; - int64_t prod = 0; - - // The ARM assembly code assumptoins. - RTC_DCHECK_EQ(0, N % 4); - RTC_DCHECK_GE(N, 8); - - // Calculate r[0]. - for (i = 0; i < N; i++) { - prod += x[i] * x[i]; - } - - // Calculate scaling (the value of shifting). - temp = (uint32_t)(prod >> 31); - if(temp == 0) { - scaling = 0; - } else { - scaling = 32 - WebRtcSpl_NormU32(temp); - } - r[0] = (int32_t)(prod >> scaling); - - // Perform the actual correlation calculation. - for (i = 1; i < order + 1; i++) { - prod = 0; - for (j = 0; j < N - i; j++) { - prod += x[j] * x[i + j]; - } - sum = (int32_t)(prod >> scaling); - r[i] = sum; - } - - *scale = scaling; - - return(order + 1); -} - -static const int32_t kApUpperQ15[ALLPASSSECTIONS] = { 1137, 12537 }; -static const int32_t kApLowerQ15[ALLPASSSECTIONS] = { 5059, 24379 }; - - -static void AllpassFilterForDec32(int16_t *InOut16, //Q0 - const int32_t *APSectionFactors, //Q15 - int16_t lengthInOut, - int32_t *FilterState) //Q16 -{ - int n, j; - int32_t a, b; - - for (j=0; j Q16 - b = WebRtcSpl_AddSatW32(a, FilterState[j]); //Q16+Q16=Q16 - // `a` in Q15 (Q0*Q31=Q31 shifted 16 gives Q15). - a = WEBRTC_SPL_MUL_16_32_RSFT16(b >> 16, -APSectionFactors[j]); - // FilterState[j]: Q15<<1 + Q0<<16 = Q16 + Q16 = Q16 - FilterState[j] = WebRtcSpl_AddSatW32(a << 1, (uint32_t)InOut16[n] << 16); - InOut16[n] = (int16_t)(b >> 16); // Save as Q0. - } - } -} - - - - -void WebRtcIsacfix_DecimateAllpass32(const int16_t *in, - int32_t *state_in, /* array of size: 2*ALLPASSSECTIONS+1 */ - int16_t N, /* number of input samples */ - int16_t *out) /* array of size N/2 */ -{ - int n; - int16_t data_vec[PITCH_FRAME_LEN]; - - /* copy input */ - memcpy(data_vec + 1, in, sizeof(int16_t) * (N - 1)); - - data_vec[0] = (int16_t)(state_in[2 * ALLPASSSECTIONS] >> 16); // z^-1 state. - state_in[2 * ALLPASSSECTIONS] = (uint32_t)in[N - 1] << 16; - - - - AllpassFilterForDec32(data_vec+1, kApUpperQ15, N, state_in); - AllpassFilterForDec32(data_vec, kApLowerQ15, N, state_in+ALLPASSSECTIONS); - - for (n=0;n> 3); - int count = (int)(N & 7); - // Declare temporary variables used as registry values. - int32_t r0, r1, r2, r3; -#if !defined(MIPS_DSP_R2_LE) - // For non-DSPR2 optimizations 4 more registers are used. - int32_t r4, r5, r6, r7; -#endif - - // Calculate r[0] and scaling needed. - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "mult $0, $0 \n\t" - // Loop is unrolled 8 times, set accumulator to zero in branch delay slot. - "beqz %[loop_size], 2f \n\t" - " mult $0, $0 \n\t" - "1: \n\t" - // Load 8 samples per loop iteration. 
-#if defined(MIPS_DSP_R2_LE) - "ulw %[r0], 0(%[in]) \n\t" - "ulw %[r1], 4(%[in]) \n\t" - "ulw %[r2], 8(%[in]) \n\t" - "ulw %[r3], 12(%[in]) \n\t" -#else - "lh %[r0], 0(%[in]) \n\t" - "lh %[r1], 2(%[in]) \n\t" - "lh %[r2], 4(%[in]) \n\t" - "lh %[r3], 6(%[in]) \n\t" - "lh %[r4], 8(%[in]) \n\t" - "lh %[r5], 10(%[in]) \n\t" - "lh %[r6], 12(%[in]) \n\t" - "lh %[r7], 14(%[in]) \n\t" -#endif - "addiu %[loop_size], %[loop_size], -1 \n\t" - // Multiply and accumulate. -#if defined(MIPS_DSP_R2_LE) - "dpa.w.ph $ac0, %[r0], %[r0] \n\t" - "dpa.w.ph $ac0, %[r1], %[r1] \n\t" - "dpa.w.ph $ac0, %[r2], %[r2] \n\t" - "dpa.w.ph $ac0, %[r3], %[r3] \n\t" -#else - "madd %[r0], %[r0] \n\t" - "madd %[r1], %[r1] \n\t" - "madd %[r2], %[r2] \n\t" - "madd %[r3], %[r3] \n\t" - "madd %[r4], %[r4] \n\t" - "madd %[r5], %[r5] \n\t" - "madd %[r6], %[r6] \n\t" - "madd %[r7], %[r7] \n\t" -#endif - "bnez %[loop_size], 1b \n\t" - " addiu %[in], %[in], 16 \n\t" - "2: \n\t" - "beqz %[count], 4f \n\t" -#if defined(MIPS_DSP_R1_LE) - " extr.w %[r0], $ac0, 31 \n\t" -#else - " mfhi %[r2] \n\t" -#endif - // Process remaining samples (if any). - "3: \n\t" - "lh %[r0], 0(%[in]) \n\t" - "addiu %[count], %[count], -1 \n\t" - "madd %[r0], %[r0] \n\t" - "bnez %[count], 3b \n\t" - " addiu %[in], %[in], 2 \n\t" -#if defined(MIPS_DSP_R1_LE) - "extr.w %[r0], $ac0, 31 \n\t" -#else - "mfhi %[r2] \n\t" -#endif - "4: \n\t" -#if !defined(MIPS_DSP_R1_LE) - "mflo %[r3] \n\t" - "sll %[r0], %[r2], 1 \n\t" - "srl %[r1], %[r3], 31 \n\t" - "addu %[r0], %[r0], %[r1] \n\t" -#endif - // Calculate scaling (the value of shifting). - "clz %[r1], %[r0] \n\t" - "addiu %[r1], %[r1], -32 \n\t" - "subu %[scaling], $0, %[r1] \n\t" - "slti %[r1], %[r0], 0x1 \n\t" - "movn %[scaling], $0, %[r1] \n\t" -#if defined(MIPS_DSP_R1_LE) - "extrv.w %[r0], $ac0, %[scaling] \n\t" - "mfhi %[r2], $ac0 \n\t" -#else - "addiu %[r1], %[scaling], -32 \n\t" - "subu %[r1], $0, %[r1] \n\t" - "sllv %[r1], %[r2], %[r1] \n\t" - "srlv %[r0], %[r3], %[scaling] \n\t" - "addu %[r0], %[r0], %[r1] \n\t" -#endif - "slti %[r1], %[scaling], 32 \n\t" - "movz %[r0], %[r2], %[r1] \n\t" - ".set pop \n\t" - : [loop_size] "+r" (loop_size), [in] "+r" (in), [r0] "=&r" (r0), - [r1] "=&r" (r1), [r2] "=&r" (r2), [r3] "=&r" (r3), -#if !defined(MIPS_DSP_R2_LE) - [r4] "=&r" (r4), [r5] "=&r" (r5), [r6] "=&r" (r6), [r7] "=&r" (r7), -#endif - [count] "+r" (count), [scaling] "=r" (scaling) - : [N] "r" (N) - : "memory", "hi", "lo" - ); - r[0] = r0; - - // Correlation calculation is divided in 3 cases depending on the scaling - // value (different accumulator manipulation needed). Three slightly different - // loops are written in order to avoid branches inside the loop. - if (scaling == 0) { - // In this case, the result will be in low part of the accumulator. - for (i = 1; i < order + 1; i++) { - in = (int16_t*)x; - int16_t* in1 = (int16_t*)x + i; - count = N - i; - loop_size = (count) >> 2; - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "mult $0, $0 \n\t" - "beqz %[loop_size], 2f \n\t" - " andi %[count], %[count], 0x3 \n\t" - // Loop processing 4 pairs of samples per iteration. 
- "1: \n\t" -#if defined(MIPS_DSP_R2_LE) - "ulw %[r0], 0(%[in]) \n\t" - "ulw %[r1], 0(%[in1]) \n\t" - "ulw %[r2], 4(%[in]) \n\t" - "ulw %[r3], 4(%[in1]) \n\t" -#else - "lh %[r0], 0(%[in]) \n\t" - "lh %[r1], 0(%[in1]) \n\t" - "lh %[r2], 2(%[in]) \n\t" - "lh %[r3], 2(%[in1]) \n\t" - "lh %[r4], 4(%[in]) \n\t" - "lh %[r5], 4(%[in1]) \n\t" - "lh %[r6], 6(%[in]) \n\t" - "lh %[r7], 6(%[in1]) \n\t" -#endif - "addiu %[loop_size], %[loop_size], -1 \n\t" -#if defined(MIPS_DSP_R2_LE) - "dpa.w.ph $ac0, %[r0], %[r1] \n\t" - "dpa.w.ph $ac0, %[r2], %[r3] \n\t" -#else - "madd %[r0], %[r1] \n\t" - "madd %[r2], %[r3] \n\t" - "madd %[r4], %[r5] \n\t" - "madd %[r6], %[r7] \n\t" -#endif - "addiu %[in], %[in], 8 \n\t" - "bnez %[loop_size], 1b \n\t" - " addiu %[in1], %[in1], 8 \n\t" - "2: \n\t" - "beqz %[count], 4f \n\t" - " mflo %[r0] \n\t" - // Process remaining samples (if any). - "3: \n\t" - "lh %[r0], 0(%[in]) \n\t" - "lh %[r1], 0(%[in1]) \n\t" - "addiu %[count], %[count], -1 \n\t" - "addiu %[in], %[in], 2 \n\t" - "madd %[r0], %[r1] \n\t" - "bnez %[count], 3b \n\t" - " addiu %[in1], %[in1], 2 \n\t" - "mflo %[r0] \n\t" - "4: \n\t" - ".set pop \n\t" - : [loop_size] "+r" (loop_size), [in] "+r" (in), [in1] "+r" (in1), -#if !defined(MIPS_DSP_R2_LE) - [r4] "=&r" (r4), [r5] "=&r" (r5), [r6] "=&r" (r6), [r7] "=&r" (r7), -#endif - [r0] "=&r" (r0), [r1] "=&r" (r1), [r2] "=&r" (r2), [r3] "=&r" (r3), - [count] "+r" (count) - : - : "memory", "hi", "lo" - ); - r[i] = r0; - } - } else if (scaling == 32) { - // In this case, the result will be high part of the accumulator. - for (i = 1; i < order + 1; i++) { - in = (int16_t*)x; - int16_t* in1 = (int16_t*)x + i; - count = N - i; - loop_size = (count) >> 2; - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "mult $0, $0 \n\t" - "beqz %[loop_size], 2f \n\t" - " andi %[count], %[count], 0x3 \n\t" - // Loop processing 4 pairs of samples per iteration. - "1: \n\t" -#if defined(MIPS_DSP_R2_LE) - "ulw %[r0], 0(%[in]) \n\t" - "ulw %[r1], 0(%[in1]) \n\t" - "ulw %[r2], 4(%[in]) \n\t" - "ulw %[r3], 4(%[in1]) \n\t" -#else - "lh %[r0], 0(%[in]) \n\t" - "lh %[r1], 0(%[in1]) \n\t" - "lh %[r2], 2(%[in]) \n\t" - "lh %[r3], 2(%[in1]) \n\t" - "lh %[r4], 4(%[in]) \n\t" - "lh %[r5], 4(%[in1]) \n\t" - "lh %[r6], 6(%[in]) \n\t" - "lh %[r7], 6(%[in1]) \n\t" -#endif - "addiu %[loop_size], %[loop_size], -1 \n\t" -#if defined(MIPS_DSP_R2_LE) - "dpa.w.ph $ac0, %[r0], %[r1] \n\t" - "dpa.w.ph $ac0, %[r2], %[r3] \n\t" -#else - "madd %[r0], %[r1] \n\t" - "madd %[r2], %[r3] \n\t" - "madd %[r4], %[r5] \n\t" - "madd %[r6], %[r7] \n\t" -#endif - "addiu %[in], %[in], 8 \n\t" - "bnez %[loop_size], 1b \n\t" - " addiu %[in1], %[in1], 8 \n\t" - "2: \n\t" - "beqz %[count], 4f \n\t" - " mfhi %[r0] \n\t" - // Process remaining samples (if any). - "3: \n\t" - "lh %[r0], 0(%[in]) \n\t" - "lh %[r1], 0(%[in1]) \n\t" - "addiu %[count], %[count], -1 \n\t" - "addiu %[in], %[in], 2 \n\t" - "madd %[r0], %[r1] \n\t" - "bnez %[count], 3b \n\t" - " addiu %[in1], %[in1], 2 \n\t" - "mfhi %[r0] \n\t" - "4: \n\t" - ".set pop \n\t" - : [loop_size] "+r" (loop_size), [in] "+r" (in), [in1] "+r" (in1), -#if !defined(MIPS_DSP_R2_LE) - [r4] "=&r" (r4), [r5] "=&r" (r5), [r6] "=&r" (r6), [r7] "=&r" (r7), -#endif - [r0] "=&r" (r0), [r1] "=&r" (r1), [r2] "=&r" (r2), [r3] "=&r" (r3), - [count] "+r" (count) - : - : "memory", "hi", "lo" - ); - r[i] = r0; - } - } else { - // In this case, the result is obtained by combining low and high parts - // of the accumulator. 
-#if !defined(MIPS_DSP_R1_LE) - int32_t tmp_shift = 32 - scaling; -#endif - for (i = 1; i < order + 1; i++) { - in = (int16_t*)x; - int16_t* in1 = (int16_t*)x + i; - count = N - i; - loop_size = (count) >> 2; - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "mult $0, $0 \n\t" - "beqz %[loop_size], 2f \n\t" - " andi %[count], %[count], 0x3 \n\t" - "1: \n\t" -#if defined(MIPS_DSP_R2_LE) - "ulw %[r0], 0(%[in]) \n\t" - "ulw %[r1], 0(%[in1]) \n\t" - "ulw %[r2], 4(%[in]) \n\t" - "ulw %[r3], 4(%[in1]) \n\t" -#else - "lh %[r0], 0(%[in]) \n\t" - "lh %[r1], 0(%[in1]) \n\t" - "lh %[r2], 2(%[in]) \n\t" - "lh %[r3], 2(%[in1]) \n\t" - "lh %[r4], 4(%[in]) \n\t" - "lh %[r5], 4(%[in1]) \n\t" - "lh %[r6], 6(%[in]) \n\t" - "lh %[r7], 6(%[in1]) \n\t" -#endif - "addiu %[loop_size], %[loop_size], -1 \n\t" -#if defined(MIPS_DSP_R2_LE) - "dpa.w.ph $ac0, %[r0], %[r1] \n\t" - "dpa.w.ph $ac0, %[r2], %[r3] \n\t" -#else - "madd %[r0], %[r1] \n\t" - "madd %[r2], %[r3] \n\t" - "madd %[r4], %[r5] \n\t" - "madd %[r6], %[r7] \n\t" -#endif - "addiu %[in], %[in], 8 \n\t" - "bnez %[loop_size], 1b \n\t" - " addiu %[in1], %[in1], 8 \n\t" - "2: \n\t" - "beqz %[count], 4f \n\t" -#if defined(MIPS_DSP_R1_LE) - " extrv.w %[r0], $ac0, %[scaling] \n\t" -#else - " mfhi %[r0] \n\t" -#endif - "3: \n\t" - "lh %[r0], 0(%[in]) \n\t" - "lh %[r1], 0(%[in1]) \n\t" - "addiu %[count], %[count], -1 \n\t" - "addiu %[in], %[in], 2 \n\t" - "madd %[r0], %[r1] \n\t" - "bnez %[count], 3b \n\t" - " addiu %[in1], %[in1], 2 \n\t" -#if defined(MIPS_DSP_R1_LE) - "extrv.w %[r0], $ac0, %[scaling] \n\t" -#else - "mfhi %[r0] \n\t" -#endif - "4: \n\t" -#if !defined(MIPS_DSP_R1_LE) - "mflo %[r1] \n\t" - "sllv %[r0], %[r0], %[tmp_shift] \n\t" - "srlv %[r1], %[r1], %[scaling] \n\t" - "addu %[r0], %[r0], %[r1] \n\t" -#endif - ".set pop \n\t" - : [loop_size] "+r" (loop_size), [in] "+r" (in), [in1] "+r" (in1), -#if !defined(MIPS_DSP_R2_LE) - [r4] "=&r" (r4), [r5] "=&r" (r5), [r6] "=&r" (r6), [r7] "=&r" (r7), -#endif - [r0] "=&r" (r0), [r1] "=&r" (r1), [r2] "=&r" (r2), [r3] "=&r" (r3), - [count] "+r" (count) - : [scaling] "r" (scaling) -#if !defined(MIPS_DSP_R1_LE) - , [tmp_shift] "r" (tmp_shift) -#endif - : "memory", "hi", "lo" - ); - r[i] = r0; - } - } - *scale = scaling; - - return (order + 1); -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/filters_neon.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/filters_neon.c deleted file mode 100644 index 1734a969cb08..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/filters_neon.c +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include - -#include "rtc_base/checks.h" -#include "modules/audio_coding/codecs/isac/fix/source/codec.h" - -// Autocorrelation function in fixed point. -// NOTE! Different from SPLIB-version in how it scales the signal. 
-int WebRtcIsacfix_AutocorrNeon(int32_t* __restrict r, - const int16_t* x, - int16_t n, - int16_t order, - int16_t* __restrict scale) { - int i = 0; - int16_t scaling = 0; - uint32_t temp = 0; - int64_t prod = 0; - int64_t prod_tail = 0; - - RTC_DCHECK_EQ(0, n % 4); - RTC_DCHECK_GE(n, 8); - - // Calculate r[0]. - int16x4_t x0_v; - int32x4_t tmpa0_v; - int64x2_t tmpb_v; - - tmpb_v = vdupq_n_s64(0); - const int16_t* x_start = x; - const int16_t* x_end0 = x_start + n; - while (x_start < x_end0) { - x0_v = vld1_s16(x_start); - tmpa0_v = vmull_s16(x0_v, x0_v); - tmpb_v = vpadalq_s32(tmpb_v, tmpa0_v); - x_start += 4; - } - -#ifdef WEBRTC_ARCH_ARM64 - prod = vaddvq_s64(tmpb_v); -#else - prod = vget_lane_s64(vadd_s64(vget_low_s64(tmpb_v), vget_high_s64(tmpb_v)), - 0); -#endif - // Calculate scaling (the value of shifting). - temp = (uint32_t)(prod >> 31); - - scaling = temp ? 32 - WebRtcSpl_NormU32(temp) : 0; - r[0] = (int32_t)(prod >> scaling); - - int16x8_t x1_v; - int16x8_t y_v; - int32x4_t tmpa1_v; - // Perform the actual correlation calculation. - for (i = 1; i < order + 1; i++) { - tmpb_v = vdupq_n_s64(0); - int rest = (n - i) % 8; - x_start = x; - x_end0 = x_start + n - i - rest; - const int16_t* y_start = x_start + i; - while (x_start < x_end0) { - x1_v = vld1q_s16(x_start); - y_v = vld1q_s16(y_start); - tmpa0_v = vmull_s16(vget_low_s16(x1_v), vget_low_s16(y_v)); -#ifdef WEBRTC_ARCH_ARM64 - tmpa1_v = vmull_high_s16(x1_v, y_v); -#else - tmpa1_v = vmull_s16(vget_high_s16(x1_v), vget_high_s16(y_v)); -#endif - tmpb_v = vpadalq_s32(tmpb_v, tmpa0_v); - tmpb_v = vpadalq_s32(tmpb_v, tmpa1_v); - x_start += 8; - y_start += 8; - } - // The remaining calculation. - const int16_t* x_end1 = x + n - i; - if (rest >= 4) { - int16x4_t x2_v = vld1_s16(x_start); - int16x4_t y2_v = vld1_s16(y_start); - tmpa0_v = vmull_s16(x2_v, y2_v); - tmpb_v = vpadalq_s32(tmpb_v, tmpa0_v); - x_start += 4; - y_start += 4; - } -#ifdef WEBRTC_ARCH_ARM64 - prod = vaddvq_s64(tmpb_v); -#else - prod = vget_lane_s64(vadd_s64(vget_low_s64(tmpb_v), vget_high_s64(tmpb_v)), - 0); -#endif - - prod_tail = 0; - while (x_start < x_end1) { - prod_tail += *x_start * *y_start; - ++x_start; - ++y_start; - } - - r[i] = (int32_t)((prod + prod_tail) >> scaling); - } - - *scale = scaling; - - return order + 1; -} - diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/filters_unittest.cc b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/filters_unittest.cc deleted file mode 100644 index 192ef89f9f4d..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/filters_unittest.cc +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_coding/codecs/isac/fix/source/codec.h" -#include "system_wrappers/include/cpu_features_wrapper.h" -#include "test/gtest.h" - -class FiltersTest : public ::testing::Test { - protected: - // Pass a function pointer to the Tester function. - void FiltersTester(AutocorrFix WebRtcIsacfix_AutocorrFixFunction) { - const int kOrder = 12; - const int kBuffer = 40; - int16_t scale = 0; - int32_t r_buffer[kOrder + 2] = {0}; - - // Test an overflow case. 
- const int16_t x_buffer_0[kBuffer] = { - 0, 0, 3010, 22351, 21106, 16969, -2095, -664, - 3513, -30980, 32767, -23839, 13335, 20289, -6831, 339, - -17207, 32767, 4959, 6177, 32767, 16599, -4747, 20504, - 3513, -30980, 32767, -23839, 13335, 20289, 0, -16969, - -2095, -664, 3513, 31981, 32767, -13839, 23336, 30281}; - const int32_t r_expected_0[kOrder + 2] = { - 1872498461, -224288754, 203789985, 483400487, -208272635, - 2436500, 137785322, 266600814, -208486262, 329510080, - 137949184, -161738972, -26894267, 237630192}; - - WebRtcIsacfix_AutocorrFixFunction(r_buffer, x_buffer_0, kBuffer, kOrder + 1, - &scale); - for (int i = 0; i < kOrder + 2; i++) { - EXPECT_EQ(r_expected_0[i], r_buffer[i]); - } - EXPECT_EQ(3, scale); - - // Test a no-overflow case. - const int16_t x_buffer_1[kBuffer] = { - 0, 0, 300, 21, 206, 169, -295, -664, 3513, -300, - 327, -29, 15, 289, -6831, 339, -107, 37, 59, 6177, - 327, 169, -4747, 204, 313, -980, 767, -9, 135, 289, - 0, -6969, -2095, -664, 0, 1, 7, -39, 236, 281}; - const int32_t r_expected_1[kOrder + 2] = { - 176253864, 8126617, 1983287, -26196788, -3487363, - -42839676, -24644043, 3469813, 30559879, 31905045, - 5101567, 29328896, -55787438, -13163978}; - - WebRtcIsacfix_AutocorrFixFunction(r_buffer, x_buffer_1, kBuffer, kOrder + 1, - &scale); - for (int i = 0; i < kOrder + 2; i++) { - EXPECT_EQ(r_expected_1[i], r_buffer[i]); - } - EXPECT_EQ(0, scale); - } -}; - -TEST_F(FiltersTest, AutocorrFixTest) { - FiltersTester(WebRtcIsacfix_AutocorrC); -#if defined(WEBRTC_HAS_NEON) - FiltersTester(WebRtcIsacfix_AutocorrNeon); -#endif -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/initialize.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/initialize.c deleted file mode 100644 index 1b82958883fc..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/initialize.c +++ /dev/null @@ -1,173 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/* - * initialize.c - * - * Internal initfunctions - * - */ - -#include "modules/audio_coding/codecs/isac/fix/source/codec.h" -#include "modules/audio_coding/codecs/isac/fix/source/structs.h" -#include "modules/audio_coding/codecs/isac/fix/source/pitch_estimator.h" - - -void WebRtcIsacfix_InitMaskingEnc(MaskFiltstr_enc *maskdata) { - - int k; - - for (k = 0; k < WINLEN; k++) { - maskdata->DataBufferLoQ0[k] = (int16_t) 0; - maskdata->DataBufferHiQ0[k] = (int16_t) 0; - } - for (k = 0; k < ORDERLO+1; k++) { - maskdata->CorrBufLoQQ[k] = (int32_t) 0; - maskdata->CorrBufLoQdom[k] = 0; - - maskdata->PreStateLoGQ15[k] = 0; - - } - for (k = 0; k < ORDERHI+1; k++) { - maskdata->CorrBufHiQQ[k] = (int32_t) 0; - maskdata->CorrBufHiQdom[k] = 0; - maskdata->PreStateHiGQ15[k] = 0; - } - - maskdata->OldEnergy = 10; - - return; -} - -void WebRtcIsacfix_InitMaskingDec(MaskFiltstr_dec *maskdata) { - - int k; - - for (k = 0; k < ORDERLO+1; k++) - { - maskdata->PostStateLoGQ0[k] = 0; - } - for (k = 0; k < ORDERHI+1; k++) - { - maskdata->PostStateHiGQ0[k] = 0; - } - - maskdata->OldEnergy = 10; - - return; -} - - - - - - - -void WebRtcIsacfix_InitPreFilterbank(PreFiltBankstr *prefiltdata) -{ - int k; - - for (k = 0; k < QLOOKAHEAD; k++) { - prefiltdata->INLABUF1_fix[k] = 0; - prefiltdata->INLABUF2_fix[k] = 0; - } - for (k = 0; k < 2 * (QORDER - 1); k++) { - prefiltdata->INSTAT1_fix[k] = 0; - prefiltdata->INSTAT2_fix[k] = 0; - } - - /* High pass filter states */ - prefiltdata->HPstates_fix[0] = 0; - prefiltdata->HPstates_fix[1] = 0; - - return; -} - -void WebRtcIsacfix_InitPostFilterbank(PostFiltBankstr *postfiltdata) -{ - int k; - - for (k = 0; k < 2 * POSTQORDER; k++) { - postfiltdata->STATE_0_LOWER_fix[k] = 0; - postfiltdata->STATE_0_UPPER_fix[k] = 0; - } - - /* High pass filter states */ - - postfiltdata->HPstates1_fix[0] = 0; - postfiltdata->HPstates1_fix[1] = 0; - - postfiltdata->HPstates2_fix[0] = 0; - postfiltdata->HPstates2_fix[1] = 0; - - return; -} - - -void WebRtcIsacfix_InitPitchFilter(PitchFiltstr *pitchfiltdata) -{ - int k; - - for (k = 0; k < PITCH_BUFFSIZE; k++) - pitchfiltdata->ubufQQ[k] = 0; - for (k = 0; k < (PITCH_DAMPORDER); k++) - pitchfiltdata->ystateQQ[k] = 0; - - pitchfiltdata->oldlagQ7 = 6400; /* 50.0 in Q7 */ - pitchfiltdata->oldgainQ12 = 0; -} - -void WebRtcIsacfix_InitPitchAnalysis(PitchAnalysisStruct *State) -{ - int k; - - for (k = 0; k < PITCH_CORR_LEN2+PITCH_CORR_STEP2+PITCH_MAX_LAG/2-PITCH_FRAME_LEN/2+2; k++) { - State->dec_buffer16[k] = 0; - } - for (k = 0; k < 2 * ALLPASSSECTIONS + 1; k++) { - State->decimator_state32[k] = 0; - } - - for (k = 0; k < QLOOKAHEAD; k++) - State->inbuf[k] = 0; - - WebRtcIsacfix_InitPitchFilter(&(State->PFstr_wght)); - - WebRtcIsacfix_InitPitchFilter(&(State->PFstr)); -} - - -void WebRtcIsacfix_InitPlc( PLCstr *State ) -{ - State->decayCoeffPriodic = WEBRTC_SPL_WORD16_MAX; - State->decayCoeffNoise = WEBRTC_SPL_WORD16_MAX; - - State->used = PLC_WAS_USED; - - WebRtcSpl_ZerosArrayW16(State->overlapLP, RECOVERY_OVERLAP); - WebRtcSpl_ZerosArrayW16(State->lofilt_coefQ15, ORDERLO); - WebRtcSpl_ZerosArrayW16(State->hifilt_coefQ15, ORDERHI ); - - State->AvgPitchGain_Q12 = 0; - State->lastPitchGain_Q12 = 0; - State->lastPitchLag_Q7 = 0; - State->gain_lo_hiQ17[0]=State->gain_lo_hiQ17[1] = 0; - WebRtcSpl_ZerosArrayW16(State->prevPitchInvIn, FRAMESAMPLES/2); - WebRtcSpl_ZerosArrayW16(State->prevPitchInvOut, PITCH_MAX_LAG + 10 ); - WebRtcSpl_ZerosArrayW32(State->prevHP, PITCH_MAX_LAG + 10 ); - State->pitchCycles = 0; - State->A = 0; - State->B = 0; - 
State->pitchIndex = 0; - State->stretchLag = 240; - State->seed = 4447; - - -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/isac_fix_type.h b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/isac_fix_type.h deleted file mode 100644 index 512911a8bbae..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/isac_fix_type.h +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_ISAC_FIX_TYPE_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_ISAC_FIX_TYPE_H_ - -#include "modules/audio_coding/codecs/isac/fix/include/isacfix.h" -#include "rtc_base/checks.h" - -namespace webrtc { - -class IsacFix { - public: - using instance_type = ISACFIX_MainStruct; - static const bool has_swb = false; - static inline int16_t Control(instance_type* inst, - int32_t rate, - int framesize) { - return WebRtcIsacfix_Control(inst, rate, framesize); - } - static inline int16_t ControlBwe(instance_type* inst, - int32_t rate_bps, - int frame_size_ms, - int16_t enforce_frame_size) { - return WebRtcIsacfix_ControlBwe(inst, rate_bps, frame_size_ms, - enforce_frame_size); - } - static inline int16_t Create(instance_type** inst) { - return WebRtcIsacfix_Create(inst); - } - static inline int DecodeInternal(instance_type* inst, - const uint8_t* encoded, - size_t len, - int16_t* decoded, - int16_t* speech_type) { - return WebRtcIsacfix_Decode(inst, encoded, len, decoded, speech_type); - } - static inline size_t DecodePlc(instance_type* inst, - int16_t* decoded, - size_t num_lost_frames) { - return WebRtcIsacfix_DecodePlc(inst, decoded, num_lost_frames); - } - static inline void DecoderInit(instance_type* inst) { - WebRtcIsacfix_DecoderInit(inst); - } - static inline int Encode(instance_type* inst, - const int16_t* speech_in, - uint8_t* encoded) { - return WebRtcIsacfix_Encode(inst, speech_in, encoded); - } - static inline int16_t EncoderInit(instance_type* inst, int16_t coding_mode) { - return WebRtcIsacfix_EncoderInit(inst, coding_mode); - } - static inline uint16_t EncSampRate(instance_type* inst) { - return kFixSampleRate; - } - - static inline int16_t Free(instance_type* inst) { - return WebRtcIsacfix_Free(inst); - } - static inline int16_t GetErrorCode(instance_type* inst) { - return WebRtcIsacfix_GetErrorCode(inst); - } - - static inline int16_t GetNewFrameLen(instance_type* inst) { - return WebRtcIsacfix_GetNewFrameLen(inst); - } - static inline int16_t SetDecSampRate(instance_type* inst, - uint16_t sample_rate_hz) { - RTC_DCHECK_EQ(sample_rate_hz, kFixSampleRate); - return 0; - } - static inline int16_t SetEncSampRate(instance_type* inst, - uint16_t sample_rate_hz) { - RTC_DCHECK_EQ(sample_rate_hz, kFixSampleRate); - return 0; - } - static inline void SetEncSampRateInDecoder(instance_type* inst, - uint16_t sample_rate_hz) { - RTC_DCHECK_EQ(sample_rate_hz, kFixSampleRate); - } - static inline void SetInitialBweBottleneck(instance_type* inst, - int bottleneck_bits_per_second) { - WebRtcIsacfix_SetInitialBweBottleneck(inst, bottleneck_bits_per_second); - } - static inline int16_t 
SetMaxPayloadSize(instance_type* inst, - int16_t max_payload_size_bytes) { - return WebRtcIsacfix_SetMaxPayloadSize(inst, max_payload_size_bytes); - } - static inline int16_t SetMaxRate(instance_type* inst, int32_t max_bit_rate) { - return WebRtcIsacfix_SetMaxRate(inst, max_bit_rate); - } - - private: - enum { kFixSampleRate = 16000 }; -}; - -} // namespace webrtc -#endif // MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_ISAC_FIX_TYPE_H_ diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.c deleted file mode 100644 index a7d44e883d61..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.c +++ /dev/null @@ -1,1230 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * isacfix.c - * - * This C file contains the functions for the ISAC API - * - */ - -#include "modules/audio_coding/codecs/isac/fix/include/isacfix.h" - -#include - -#include "rtc_base/checks.h" -#include "modules/audio_coding/codecs/isac/fix/source/bandwidth_estimator.h" -#include "modules/audio_coding/codecs/isac/fix/source/codec.h" -#include "modules/audio_coding/codecs/isac/fix/source/entropy_coding.h" -#include "modules/audio_coding/codecs/isac/fix/source/filterbank_internal.h" -#include "modules/audio_coding/codecs/isac/fix/source/lpc_masking_model.h" -#include "modules/audio_coding/codecs/isac/fix/source/structs.h" - -// Declare function pointers. -FilterMaLoopFix WebRtcIsacfix_FilterMaLoopFix; -Spec2Time WebRtcIsacfix_Spec2Time; -Time2Spec WebRtcIsacfix_Time2Spec; -MatrixProduct1 WebRtcIsacfix_MatrixProduct1; -MatrixProduct2 WebRtcIsacfix_MatrixProduct2; - -/* This method assumes that `stream_size_bytes` is in valid range, - * i.e. >= 0 && <= STREAM_MAXW16_60MS - */ -static void InitializeDecoderBitstream(size_t stream_size_bytes, - Bitstr_dec* bitstream) { - bitstream->W_upper = 0xFFFFFFFF; - bitstream->streamval = 0; - bitstream->stream_index = 0; - bitstream->full = 1; - bitstream->stream_size = (stream_size_bytes + 1) >> 1; - memset(bitstream->stream, 0, sizeof(bitstream->stream)); -} - -/**************************************************************************** - * WebRtcIsacfix_Create(...) - * - * This function creates a ISAC instance, which will contain the state - * information for one coding/decoding channel. - * - * Input: - * - *ISAC_main_inst : a pointer to the coder instance. - * - * Return value : 0 - Ok - * -1 - Error - */ - -int16_t WebRtcIsacfix_Create(ISACFIX_MainStruct **ISAC_main_inst) -{ - ISACFIX_SubStruct *tempo; - tempo = malloc(1 * sizeof(ISACFIX_SubStruct)); - *ISAC_main_inst = (ISACFIX_MainStruct *)tempo; - if (*ISAC_main_inst!=NULL) { - (*(ISACFIX_SubStruct**)ISAC_main_inst)->errorcode = 0; - (*(ISACFIX_SubStruct**)ISAC_main_inst)->initflag = 0; - (*(ISACFIX_SubStruct**)ISAC_main_inst)->ISACenc_obj.SaveEnc_ptr = NULL; - WebRtcIsacfix_InitBandwidthEstimator(&tempo->bwestimator_obj); - return(0); - } else { - return(-1); - } -} - - -/**************************************************************************** - * WebRtcIsacfix_CreateInternal(...) 
- * - * This function creates the memory that is used to store data in the encoder - * - * Input: - * - *ISAC_main_inst : a pointer to the coder instance. - * - * Return value : 0 - Ok - * -1 - Error - */ - -int16_t WebRtcIsacfix_CreateInternal(ISACFIX_MainStruct *ISAC_main_inst) -{ - ISACFIX_SubStruct *ISAC_inst; - - /* typecast pointer to real structure */ - ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst; - - /* Allocate memory for storing encoder data */ - ISAC_inst->ISACenc_obj.SaveEnc_ptr = malloc(1 * sizeof(IsacSaveEncoderData)); - - if (ISAC_inst->ISACenc_obj.SaveEnc_ptr!=NULL) { - return(0); - } else { - return(-1); - } -} - - -/**************************************************************************** - * WebRtcIsacfix_Free(...) - * - * This function frees the ISAC instance created at the beginning. - * - * Input: - * - ISAC_main_inst : a ISAC instance. - * - * Return value : 0 - Ok - * -1 - Error - */ - -int16_t WebRtcIsacfix_Free(ISACFIX_MainStruct *ISAC_main_inst) -{ - free(ISAC_main_inst); - return(0); -} - -/**************************************************************************** - * WebRtcIsacfix_FreeInternal(...) - * - * This function frees the internal memory for storing encoder data. - * - * Input: - * - ISAC_main_inst : a ISAC instance. - * - * Return value : 0 - Ok - * -1 - Error - */ - -int16_t WebRtcIsacfix_FreeInternal(ISACFIX_MainStruct *ISAC_main_inst) -{ - ISACFIX_SubStruct *ISAC_inst; - - /* typecast pointer to real structure */ - ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst; - - /* Release memory */ - free(ISAC_inst->ISACenc_obj.SaveEnc_ptr); - - return(0); -} - -/**************************************************************************** - * WebRtcIsacfix_InitNeon(...) - * - * This function initializes function pointers for ARM Neon platform. - */ - -#if defined(WEBRTC_HAS_NEON) -static void WebRtcIsacfix_InitNeon(void) { - WebRtcIsacfix_AutocorrFix = WebRtcIsacfix_AutocorrNeon; - WebRtcIsacfix_FilterMaLoopFix = WebRtcIsacfix_FilterMaLoopNeon; - WebRtcIsacfix_Spec2Time = WebRtcIsacfix_Spec2TimeNeon; - WebRtcIsacfix_Time2Spec = WebRtcIsacfix_Time2SpecNeon; - WebRtcIsacfix_AllpassFilter2FixDec16 = - WebRtcIsacfix_AllpassFilter2FixDec16Neon; - WebRtcIsacfix_MatrixProduct1 = WebRtcIsacfix_MatrixProduct1Neon; - WebRtcIsacfix_MatrixProduct2 = WebRtcIsacfix_MatrixProduct2Neon; -} -#endif - -/**************************************************************************** - * WebRtcIsacfix_InitMIPS(...) - * - * This function initializes function pointers for MIPS platform. 
- */ - -#if defined(MIPS32_LE) -static void WebRtcIsacfix_InitMIPS(void) { - WebRtcIsacfix_AutocorrFix = WebRtcIsacfix_AutocorrMIPS; - WebRtcIsacfix_FilterMaLoopFix = WebRtcIsacfix_FilterMaLoopMIPS; - WebRtcIsacfix_Spec2Time = WebRtcIsacfix_Spec2TimeMIPS; - WebRtcIsacfix_Time2Spec = WebRtcIsacfix_Time2SpecMIPS; - WebRtcIsacfix_MatrixProduct1 = WebRtcIsacfix_MatrixProduct1MIPS; - WebRtcIsacfix_MatrixProduct2 = WebRtcIsacfix_MatrixProduct2MIPS; -#if defined(MIPS_DSP_R1_LE) - WebRtcIsacfix_AllpassFilter2FixDec16 = - WebRtcIsacfix_AllpassFilter2FixDec16MIPS; - WebRtcIsacfix_HighpassFilterFixDec32 = - WebRtcIsacfix_HighpassFilterFixDec32MIPS; -#endif -#if defined(MIPS_DSP_R2_LE) - WebRtcIsacfix_CalculateResidualEnergy = - WebRtcIsacfix_CalculateResidualEnergyMIPS; -#endif -} -#endif - -static void InitFunctionPointers(void) { - WebRtcIsacfix_AutocorrFix = WebRtcIsacfix_AutocorrC; - WebRtcIsacfix_FilterMaLoopFix = WebRtcIsacfix_FilterMaLoopC; - WebRtcIsacfix_CalculateResidualEnergy = - WebRtcIsacfix_CalculateResidualEnergyC; - WebRtcIsacfix_AllpassFilter2FixDec16 = WebRtcIsacfix_AllpassFilter2FixDec16C; - WebRtcIsacfix_HighpassFilterFixDec32 = WebRtcIsacfix_HighpassFilterFixDec32C; - WebRtcIsacfix_Time2Spec = WebRtcIsacfix_Time2SpecC; - WebRtcIsacfix_Spec2Time = WebRtcIsacfix_Spec2TimeC; - WebRtcIsacfix_MatrixProduct1 = WebRtcIsacfix_MatrixProduct1C; - WebRtcIsacfix_MatrixProduct2 = WebRtcIsacfix_MatrixProduct2C; - -#if defined(WEBRTC_HAS_NEON) - WebRtcIsacfix_InitNeon(); -#endif - -#if defined(MIPS32_LE) - WebRtcIsacfix_InitMIPS(); -#endif -} - -/**************************************************************************** - * WebRtcIsacfix_EncoderInit(...) - * - * This function initializes a ISAC instance prior to the encoder calls. - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - CodingMode : 0 -> Bit rate and frame length are automatically - * adjusted to available bandwidth on - * transmission channel. - * 1 -> User sets a frame length and a target bit - * rate which is taken as the maximum short-term - * average bit rate. 
- * - * Return value : 0 - Ok - * -1 - Error - */ - -int16_t WebRtcIsacfix_EncoderInit(ISACFIX_MainStruct *ISAC_main_inst, - int16_t CodingMode) -{ - int k; - int16_t statusInit; - ISACFIX_SubStruct *ISAC_inst; - - statusInit = 0; - /* typecast pointer to rela structure */ - ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst; - - /* flag encoder init */ - ISAC_inst->initflag |= 2; - - if (CodingMode == 0) - /* Adaptive mode */ - ISAC_inst->ISACenc_obj.new_framelength = INITIAL_FRAMESAMPLES; - else if (CodingMode == 1) - /* Instantaneous mode */ - ISAC_inst->ISACenc_obj.new_framelength = 480; /* default for I-mode */ - else { - ISAC_inst->errorcode = ISAC_DISALLOWED_CODING_MODE; - statusInit = -1; - } - - ISAC_inst->CodingMode = CodingMode; - - WebRtcIsacfix_InitMaskingEnc(&ISAC_inst->ISACenc_obj.maskfiltstr_obj); - WebRtcIsacfix_InitPreFilterbank(&ISAC_inst->ISACenc_obj.prefiltbankstr_obj); - WebRtcIsacfix_InitPitchFilter(&ISAC_inst->ISACenc_obj.pitchfiltstr_obj); - WebRtcIsacfix_InitPitchAnalysis(&ISAC_inst->ISACenc_obj.pitchanalysisstr_obj); - - WebRtcIsacfix_InitRateModel(&ISAC_inst->ISACenc_obj.rate_data_obj); - - - ISAC_inst->ISACenc_obj.buffer_index = 0; - ISAC_inst->ISACenc_obj.frame_nb = 0; - ISAC_inst->ISACenc_obj.BottleNeck = 32000; /* default for I-mode */ - ISAC_inst->ISACenc_obj.MaxDelay = 10; /* default for I-mode */ - ISAC_inst->ISACenc_obj.current_framesamples = 0; - ISAC_inst->ISACenc_obj.s2nr = 0; - ISAC_inst->ISACenc_obj.MaxBits = 0; - ISAC_inst->ISACenc_obj.bitstr_seed = 4447; - ISAC_inst->ISACenc_obj.payloadLimitBytes30 = STREAM_MAXW16_30MS << 1; - ISAC_inst->ISACenc_obj.payloadLimitBytes60 = STREAM_MAXW16_60MS << 1; - ISAC_inst->ISACenc_obj.maxPayloadBytes = STREAM_MAXW16_60MS << 1; - ISAC_inst->ISACenc_obj.maxRateInBytes = STREAM_MAXW16_30MS << 1; - ISAC_inst->ISACenc_obj.enforceFrameSize = 0; - - /* Init the bistream data area to zero */ - for (k=0; kISACenc_obj.bitstr_obj.stream[k] = 0; - } - - InitFunctionPointers(); - - return statusInit; -} - -/* Read the given number of bytes of big-endian 16-bit integers from `src` and - write them to `dest` in host endian. If `nbytes` is odd, the number of - output elements is rounded up, and the least significant byte of the last - element is set to 0. */ -static void read_be16(const uint8_t* src, size_t nbytes, uint16_t* dest) { - size_t i; - for (i = 0; i < nbytes / 2; ++i) - dest[i] = src[2 * i] << 8 | src[2 * i + 1]; - if (nbytes % 2 == 1) - dest[nbytes / 2] = src[nbytes - 1] << 8; -} - -/* Read the given number of bytes of host-endian 16-bit integers from `src` and - write them to `dest` in big endian. If `nbytes` is odd, the number of source - elements is rounded up (but only the most significant byte of the last - element is used), and the number of output bytes written will be - nbytes + 1. */ -static void write_be16(const uint16_t* src, size_t nbytes, uint8_t* dest) { - size_t i; - for (i = 0; i < nbytes / 2; ++i) { - dest[2 * i] = src[i] >> 8; - dest[2 * i + 1] = src[i]; - } - if (nbytes % 2 == 1) { - dest[nbytes - 1] = src[nbytes / 2] >> 8; - dest[nbytes] = 0; - } -} - -/**************************************************************************** - * WebRtcIsacfix_Encode(...) - * - * This function encodes 10ms frame(s) and inserts it into a package. - * Input speech length has to be 160 samples (10ms). The encoder buffers those - * 10ms frames until it reaches the chosen Framesize (480 or 960 samples - * corresponding to 30 or 60 ms frames), and then proceeds to the encoding. 
- * - * Input: - * - ISAC_main_inst : ISAC instance. - * - speechIn : input speech vector. - * - * Output: - * - encoded : the encoded data vector - * - * Return value: - * : >0 - Length (in bytes) of coded data - * : 0 - The buffer didn't reach the chosen framesize - * so it keeps buffering speech samples. - * : -1 - Error - */ - -int WebRtcIsacfix_Encode(ISACFIX_MainStruct *ISAC_main_inst, - const int16_t *speechIn, - uint8_t* encoded) -{ - ISACFIX_SubStruct *ISAC_inst; - int stream_len; - - /* typecast pointer to rela structure */ - ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst; - - - /* check if encoder initiated */ - if ((ISAC_inst->initflag & 2) != 2) { - ISAC_inst->errorcode = ISAC_ENCODER_NOT_INITIATED; - return (-1); - } - - stream_len = WebRtcIsacfix_EncodeImpl((int16_t*)speechIn, - &ISAC_inst->ISACenc_obj, - &ISAC_inst->bwestimator_obj, - ISAC_inst->CodingMode); - if (stream_len<0) { - ISAC_inst->errorcode = -(int16_t)stream_len; - return -1; - } - - write_be16(ISAC_inst->ISACenc_obj.bitstr_obj.stream, (size_t)stream_len, - encoded); - return stream_len; - -} - - -/**************************************************************************** - * WebRtcIsacfix_GetNewBitStream(...) - * - * This function returns encoded data, with the received bwe-index in the - * stream. It should always return a complete packet, i.e. only called once - * even for 60 msec frames - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - bweIndex : index of bandwidth estimate to put in new bitstream - * - * Output: - * - encoded : the encoded data vector - * - * Return value: - * : >0 - Length (in bytes) of coded data - * : -1 - Error - */ - -int16_t WebRtcIsacfix_GetNewBitStream(ISACFIX_MainStruct *ISAC_main_inst, - int16_t bweIndex, - float scale, - uint8_t* encoded) -{ - ISACFIX_SubStruct *ISAC_inst; - int16_t stream_len; - - /* typecast pointer to rela structure */ - ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst; - - - /* check if encoder initiated */ - if ((ISAC_inst->initflag & 2) != 2) { - ISAC_inst->errorcode = ISAC_ENCODER_NOT_INITIATED; - return (-1); - } - - stream_len = WebRtcIsacfix_EncodeStoredData(&ISAC_inst->ISACenc_obj, - bweIndex, - scale); - if (stream_len<0) { - ISAC_inst->errorcode = - stream_len; - return -1; - } - - write_be16(ISAC_inst->ISACenc_obj.bitstr_obj.stream, stream_len, encoded); - return stream_len; -} - - - -/**************************************************************************** - * WebRtcIsacfix_DecoderInit(...) - * - * This function initializes a ISAC instance prior to the decoder calls. - * - * Input: - * - ISAC_main_inst : ISAC instance. - */ - -void WebRtcIsacfix_DecoderInit(ISACFIX_MainStruct *ISAC_main_inst) -{ - ISACFIX_SubStruct *ISAC_inst; - - InitFunctionPointers(); - - /* typecast pointer to real structure */ - ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst; - - /* flag decoder init */ - ISAC_inst->initflag |= 1; - - WebRtcIsacfix_InitMaskingDec(&ISAC_inst->ISACdec_obj.maskfiltstr_obj); - WebRtcIsacfix_InitPostFilterbank(&ISAC_inst->ISACdec_obj.postfiltbankstr_obj); - WebRtcIsacfix_InitPitchFilter(&ISAC_inst->ISACdec_obj.pitchfiltstr_obj); - - /* TS */ - WebRtcIsacfix_InitPlc( &ISAC_inst->ISACdec_obj.plcstr_obj ); -} - - -/**************************************************************************** - * WebRtcIsacfix_UpdateBwEstimate1(...) - * - * This function updates the estimate of the bandwidth. - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - encoded : encoded ISAC frame(s). - * - packet_size : size of the packet. 
- * - rtp_seq_number : the RTP number of the packet. - * - arr_ts : the arrival time of the packet (from NetEq) - * in samples. - * - * Return value : 0 - Ok - * -1 - Error - */ - -int16_t WebRtcIsacfix_UpdateBwEstimate1(ISACFIX_MainStruct *ISAC_main_inst, - const uint8_t* encoded, - size_t packet_size, - uint16_t rtp_seq_number, - uint32_t arr_ts) -{ - ISACFIX_SubStruct *ISAC_inst; - Bitstr_dec streamdata; - int16_t err; - const size_t kRequiredEncodedLenBytes = 10; - - /* typecast pointer to real structure */ - ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst; - - /* Sanity check of packet length */ - if (packet_size == 0) { - /* return error code if the packet length is null or less */ - ISAC_inst->errorcode = ISAC_EMPTY_PACKET; - return -1; - } else if (packet_size > (STREAM_MAXW16<<1)) { - /* return error code if length of stream is too long */ - ISAC_inst->errorcode = ISAC_LENGTH_MISMATCH; - return -1; - } - - /* check if decoder initiated */ - if ((ISAC_inst->initflag & 1) != 1) { - ISAC_inst->errorcode = ISAC_DECODER_NOT_INITIATED; - return (-1); - } - - InitializeDecoderBitstream(packet_size, &streamdata); - - read_be16(encoded, kRequiredEncodedLenBytes, streamdata.stream); - - err = WebRtcIsacfix_EstimateBandwidth(&ISAC_inst->bwestimator_obj, - &streamdata, - packet_size, - rtp_seq_number, - 0, - arr_ts); - - - if (err < 0) - { - /* return error code if something went wrong */ - ISAC_inst->errorcode = -err; - return -1; - } - - - return 0; -} - -/**************************************************************************** - * WebRtcIsacfix_UpdateBwEstimate(...) - * - * This function updates the estimate of the bandwidth. - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - encoded : encoded ISAC frame(s). - * - packet_size : size of the packet. - * - rtp_seq_number : the RTP number of the packet. - * - send_ts : Send Time Stamp from RTP header - * - arr_ts : the arrival time of the packet (from NetEq) - * in samples. 
- * - * Return value : 0 - Ok - * -1 - Error - */ - -int16_t WebRtcIsacfix_UpdateBwEstimate(ISACFIX_MainStruct *ISAC_main_inst, - const uint8_t* encoded, - size_t packet_size, - uint16_t rtp_seq_number, - uint32_t send_ts, - uint32_t arr_ts) -{ - ISACFIX_SubStruct *ISAC_inst; - Bitstr_dec streamdata; - int16_t err; - const size_t kRequiredEncodedLenBytes = 10; - - /* typecast pointer to real structure */ - ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst; - - /* Sanity check of packet length */ - if (packet_size == 0) { - /* return error code if the packet length is null or less */ - ISAC_inst->errorcode = ISAC_EMPTY_PACKET; - return -1; - } else if (packet_size < kRequiredEncodedLenBytes) { - ISAC_inst->errorcode = ISAC_PACKET_TOO_SHORT; - return -1; - } else if (packet_size > (STREAM_MAXW16<<1)) { - /* return error code if length of stream is too long */ - ISAC_inst->errorcode = ISAC_LENGTH_MISMATCH; - return -1; - } - - /* check if decoder initiated */ - if ((ISAC_inst->initflag & 1) != 1) { - ISAC_inst->errorcode = ISAC_DECODER_NOT_INITIATED; - return (-1); - } - - InitializeDecoderBitstream(packet_size, &streamdata); - - read_be16(encoded, kRequiredEncodedLenBytes, streamdata.stream); - - err = WebRtcIsacfix_EstimateBandwidth(&ISAC_inst->bwestimator_obj, - &streamdata, - packet_size, - rtp_seq_number, - send_ts, - arr_ts); - - if (err < 0) - { - /* return error code if something went wrong */ - ISAC_inst->errorcode = -err; - return -1; - } - - - return 0; -} - -/**************************************************************************** - * WebRtcIsacfix_Decode(...) - * - * This function decodes a ISAC frame. Output speech length - * will be a multiple of 480 samples: 480 or 960 samples, - * depending on the framesize (30 or 60 ms). - * - * Input: - * - ISAC_main_inst : ISAC instance. 
- * - encoded : encoded ISAC frame(s) - * - len : bytes in encoded vector - * - * Output: - * - decoded : The decoded vector - * - * Return value : >0 - number of samples in decoded vector - * -1 - Error - */ - - -int WebRtcIsacfix_Decode(ISACFIX_MainStruct* ISAC_main_inst, - const uint8_t* encoded, - size_t len, - int16_t* decoded, - int16_t* speechType) -{ - ISACFIX_SubStruct *ISAC_inst; - /* number of samples (480 or 960), output from decoder */ - /* that were actually used in the encoder/decoder (determined on the fly) */ - size_t number_of_samples; - int declen_int = 0; - size_t declen; - - /* typecast pointer to real structure */ - ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst; - - /* check if decoder initiated */ - if ((ISAC_inst->initflag & 1) != 1) { - ISAC_inst->errorcode = ISAC_DECODER_NOT_INITIATED; - return (-1); - } - - /* Sanity check of packet length */ - if (len == 0) { - /* return error code if the packet length is null or less */ - ISAC_inst->errorcode = ISAC_EMPTY_PACKET; - return -1; - } else if (len > (STREAM_MAXW16<<1)) { - /* return error code if length of stream is too long */ - ISAC_inst->errorcode = ISAC_LENGTH_MISMATCH; - return -1; - } - - InitializeDecoderBitstream(len, &ISAC_inst->ISACdec_obj.bitstr_obj); - - read_be16(encoded, len, ISAC_inst->ISACdec_obj.bitstr_obj.stream); - - /* added for NetEq purposes (VAD/DTX related) */ - *speechType=1; - - declen_int = WebRtcIsacfix_DecodeImpl(decoded, &ISAC_inst->ISACdec_obj, - &number_of_samples); - if (declen_int < 0) { - /* Some error inside the decoder */ - ISAC_inst->errorcode = -(int16_t)declen_int; - memset(decoded, 0, sizeof(int16_t) * MAX_FRAMESAMPLES); - return -1; - } - declen = (size_t)declen_int; - - /* error check */ - - if (declen & 1) { - if (len != declen && - len != declen + - ((ISAC_inst->ISACdec_obj.bitstr_obj.stream[declen >> 1]) & 0xFF)) { - ISAC_inst->errorcode = ISAC_LENGTH_MISMATCH; - memset(decoded, 0, sizeof(int16_t) * number_of_samples); - return -1; - } - } else { - if (len != declen && - len != declen + - ((ISAC_inst->ISACdec_obj.bitstr_obj.stream[declen >> 1]) >> 8)) { - ISAC_inst->errorcode = ISAC_LENGTH_MISMATCH; - memset(decoded, 0, sizeof(int16_t) * number_of_samples); - return -1; - } - } - - return (int)number_of_samples; -} - - -/**************************************************************************** - * WebRtcIsacfix_DecodePlc(...) - * - * This function conducts PLC for ISAC frame(s) in wide-band (16kHz sampling). - * Output speech length will be "480*noOfLostFrames" samples - * that is equevalent of "30*noOfLostFrames" millisecond. - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - noOfLostFrames : Number of PLC frames (480sample = 30ms) - * to produce - * - * Output: - * - decoded : The decoded vector - * - * Return value : Number of samples in decoded PLC vector - */ - -size_t WebRtcIsacfix_DecodePlc(ISACFIX_MainStruct* ISAC_main_inst, - int16_t* decoded, - size_t noOfLostFrames) -{ - - size_t no_of_samples, declen, k; - int16_t outframe16[MAX_FRAMESAMPLES]; - - ISACFIX_SubStruct *ISAC_inst; - /* typecast pointer to real structure */ - ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst; - - /* Limit number of frames to two = 60 msec. 
Otherwise we exceed data vectors */ - if (noOfLostFrames > 2) { - noOfLostFrames = 2; - } - k = 0; - declen = 0; - while( noOfLostFrames > 0 ) - { - WebRtcIsacfix_DecodePlcImpl(&(outframe16[k*480]), &ISAC_inst->ISACdec_obj, - &no_of_samples); - declen += no_of_samples; - noOfLostFrames--; - k++; - } - - for (k=0;kCodingMode == 0) - { - /* in adaptive mode */ - ISAC_inst->errorcode = ISAC_MODE_MISMATCH; - return -1; - } - - - if (rate >= 10000 && rate <= 32000) - ISAC_inst->ISACenc_obj.BottleNeck = rate; - else { - ISAC_inst->errorcode = ISAC_DISALLOWED_BOTTLENECK; - return -1; - } - - - - if (framesize == 30 || framesize == 60) - ISAC_inst->ISACenc_obj.new_framelength = (int16_t)((FS/1000) * framesize); - else { - ISAC_inst->errorcode = ISAC_DISALLOWED_FRAME_LENGTH; - return -1; - } - - return 0; -} - -void WebRtcIsacfix_SetInitialBweBottleneck(ISACFIX_MainStruct* ISAC_main_inst, - int bottleneck_bits_per_second) { - ISACFIX_SubStruct* inst = (ISACFIX_SubStruct*)ISAC_main_inst; - RTC_DCHECK_GE(bottleneck_bits_per_second, 10000); - RTC_DCHECK_LE(bottleneck_bits_per_second, 32000); - inst->bwestimator_obj.sendBwAvg = ((uint32_t)bottleneck_bits_per_second) << 7; -} - -/**************************************************************************** - * WebRtcIsacfix_ControlBwe(...) - * - * This function sets the initial values of bottleneck and frame-size if - * iSAC is used in channel-adaptive mode. Through this API, users can - * enforce a frame-size for all values of bottleneck. Then iSAC will not - * automatically change the frame-size. - * - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - rateBPS : initial value of bottleneck in bits/second - * 10000 <= rateBPS <= 32000 is accepted - * For default bottleneck set rateBPS = 0 - * - frameSizeMs : number of milliseconds per frame (30 or 60) - * - enforceFrameSize : 1 to enforce the given frame-size through out - * the adaptation process, 0 to let iSAC change - * the frame-size if required. - * - * Return value : 0 - ok - * -1 - Error - */ - -int16_t WebRtcIsacfix_ControlBwe(ISACFIX_MainStruct *ISAC_main_inst, - int16_t rateBPS, - int frameSizeMs, - int16_t enforceFrameSize) -{ - ISACFIX_SubStruct *ISAC_inst; - /* Typecast pointer to real structure */ - ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst; - - /* check if encoder initiated */ - if ((ISAC_inst->initflag & 2) != 2) { - ISAC_inst->errorcode = ISAC_ENCODER_NOT_INITIATED; - return (-1); - } - - /* Check that we are in channel-adaptive mode, otherwise, return -1 */ - if (ISAC_inst->CodingMode != 0) { - ISAC_inst->errorcode = ISAC_MODE_MISMATCH; - return (-1); - } - - /* Set struct variable if enforceFrameSize is set. ISAC will then keep the */ - /* chosen frame size. */ - ISAC_inst->ISACenc_obj.enforceFrameSize = (enforceFrameSize != 0)? 1:0; - - /* Set initial rate, if value between 10000 and 32000, */ - /* if rateBPS is 0, keep the default initial bottleneck value (15000) */ - if ((rateBPS >= 10000) && (rateBPS <= 32000)) { - ISAC_inst->bwestimator_obj.sendBwAvg = (((uint32_t)rateBPS) << 7); - } else if (rateBPS != 0) { - ISAC_inst->errorcode = ISAC_DISALLOWED_BOTTLENECK; - return -1; - } - - /* Set initial framesize. 
If enforceFrameSize is set the frame size will not change */ - if ((frameSizeMs == 30) || (frameSizeMs == 60)) { - ISAC_inst->ISACenc_obj.new_framelength = (int16_t)((FS/1000) * frameSizeMs); - } else { - ISAC_inst->errorcode = ISAC_DISALLOWED_FRAME_LENGTH; - return -1; - } - - return 0; -} - - - - - -/**************************************************************************** - * WebRtcIsacfix_GetDownLinkBwIndex(...) - * - * This function returns index representing the Bandwidth estimate from - * other side to this side. - * - * Input: - * - ISAC_main_inst: iSAC struct - * - * Output: - * - rateIndex : Bandwidth estimate to transmit to other side. - * - */ - -int16_t WebRtcIsacfix_GetDownLinkBwIndex(ISACFIX_MainStruct* ISAC_main_inst, - int16_t* rateIndex) -{ - ISACFIX_SubStruct *ISAC_inst; - - /* typecast pointer to real structure */ - ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst; - - /* Call function to get Bandwidth Estimate */ - *rateIndex = WebRtcIsacfix_GetDownlinkBwIndexImpl(&ISAC_inst->bwestimator_obj); - - return 0; -} - - -/**************************************************************************** - * WebRtcIsacfix_UpdateUplinkBw(...) - * - * This function takes an index representing the Bandwidth estimate from - * this side to other side and updates BWE. - * - * Input: - * - ISAC_main_inst: iSAC struct - * - rateIndex : Bandwidth estimate from other side. - * - */ - -int16_t WebRtcIsacfix_UpdateUplinkBw(ISACFIX_MainStruct* ISAC_main_inst, - int16_t rateIndex) -{ - int16_t err = 0; - ISACFIX_SubStruct *ISAC_inst; - - /* typecast pointer to real structure */ - ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst; - - /* Call function to update BWE with received Bandwidth Estimate */ - err = WebRtcIsacfix_UpdateUplinkBwRec(&ISAC_inst->bwestimator_obj, rateIndex); - if (err < 0) { - ISAC_inst->errorcode = -err; - return (-1); - } - - return 0; -} - -/**************************************************************************** - * WebRtcIsacfix_ReadFrameLen(...) - * - * This function returns the length of the frame represented in the packet. - * - * Input: - * - encoded : Encoded bitstream - * - * Output: - * - frameLength : Length of frame in packet (in samples) - * - */ - -int16_t WebRtcIsacfix_ReadFrameLen(const uint8_t* encoded, - size_t encoded_len_bytes, - size_t* frameLength) -{ - Bitstr_dec streamdata; - int16_t err; - const size_t kRequiredEncodedLenBytes = 10; - - if (encoded_len_bytes < kRequiredEncodedLenBytes) { - return -1; - } - - InitializeDecoderBitstream(encoded_len_bytes, &streamdata); - - read_be16(encoded, kRequiredEncodedLenBytes, streamdata.stream); - - /* decode frame length */ - err = WebRtcIsacfix_DecodeFrameLen(&streamdata, frameLength); - if (err<0) // error check - return err; - - return 0; -} - - -/**************************************************************************** - * WebRtcIsacfix_ReadBwIndex(...) - * - * This function returns the index of the Bandwidth estimate from the bitstream. 
- * - * Input: - * - encoded : Encoded bitstream - * - * Output: - * - frameLength : Length of frame in packet (in samples) - * - rateIndex : Bandwidth estimate in bitstream - * - */ - -int16_t WebRtcIsacfix_ReadBwIndex(const uint8_t* encoded, - size_t encoded_len_bytes, - int16_t* rateIndex) -{ - Bitstr_dec streamdata; - int16_t err; - const size_t kRequiredEncodedLenBytes = 10; - - if (encoded_len_bytes < kRequiredEncodedLenBytes) { - return -1; - } - - InitializeDecoderBitstream(encoded_len_bytes, &streamdata); - - read_be16(encoded, kRequiredEncodedLenBytes, streamdata.stream); - - /* decode frame length, needed to get to the rateIndex in the bitstream */ - size_t frameLength; - err = WebRtcIsacfix_DecodeFrameLen(&streamdata, &frameLength); - if (err<0) // error check - return err; - - /* decode BW estimation */ - err = WebRtcIsacfix_DecodeSendBandwidth(&streamdata, rateIndex); - if (err<0) // error check - return err; - - return 0; -} - - - - -/**************************************************************************** - * WebRtcIsacfix_GetErrorCode(...) - * - * This function can be used to check the error code of an iSAC instance. When - * a function returns -1 a error code will be set for that instance. The - * function below extract the code of the last error that occured in the - * specified instance. - * - * Input: - * - ISAC_main_inst : ISAC instance - * - * Return value : Error code - */ - -int16_t WebRtcIsacfix_GetErrorCode(ISACFIX_MainStruct *ISAC_main_inst) -{ - ISACFIX_SubStruct *ISAC_inst; - /* typecast pointer to real structure */ - ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst; - - return ISAC_inst->errorcode; -} - - - -/**************************************************************************** - * WebRtcIsacfix_GetUplinkBw(...) - * - * This function returns the inst quantized iSAC send bitrate - * - * Input: - * - ISAC_main_inst : iSAC instance - * - * Return value : bitrate - */ - -int32_t WebRtcIsacfix_GetUplinkBw(ISACFIX_MainStruct *ISAC_main_inst) -{ - ISACFIX_SubStruct *ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst; - BwEstimatorstr * bw = (BwEstimatorstr*)&(ISAC_inst->bwestimator_obj); - - return (int32_t) WebRtcIsacfix_GetUplinkBandwidth(bw); -} - -/**************************************************************************** - * WebRtcIsacfix_GetNewFrameLen(...) - * - * This function return the next frame length (in samples) of iSAC. - * - * Input: - * - ISAC_main_inst : iSAC instance - * - * Return value : frame lenght in samples - */ - -int16_t WebRtcIsacfix_GetNewFrameLen(ISACFIX_MainStruct *ISAC_main_inst) -{ - ISACFIX_SubStruct *ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst; - return ISAC_inst->ISACenc_obj.new_framelength; -} - - -/**************************************************************************** - * WebRtcIsacfix_SetMaxPayloadSize(...) - * - * This function sets a limit for the maximum payload size of iSAC. The same - * value is used both for 30 and 60 msec packets. - * The absolute max will be valid until next time the function is called. - * NOTE! 
This function may override the function WebRtcIsacfix_SetMaxRate() - * - * Input: - * - ISAC_main_inst : iSAC instance - * - maxPayloadBytes : maximum size of the payload in bytes - * valid values are between 100 and 400 bytes - * - * - * Return value : 0 if sucessful - * -1 if error happens - */ - -int16_t WebRtcIsacfix_SetMaxPayloadSize(ISACFIX_MainStruct *ISAC_main_inst, - int16_t maxPayloadBytes) -{ - ISACFIX_SubStruct *ISAC_inst; - - /* typecast pointer to real structure */ - ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst; - - if((maxPayloadBytes < 100) || (maxPayloadBytes > 400)) - { - /* maxPayloadBytes is out of valid range */ - return -1; - } - else - { - /* Set new absolute max, which will not change unless this function - is called again with a new value */ - ISAC_inst->ISACenc_obj.maxPayloadBytes = maxPayloadBytes; - - /* Set new maximum values for 30 and 60 msec packets */ - if (maxPayloadBytes < ISAC_inst->ISACenc_obj.maxRateInBytes) { - ISAC_inst->ISACenc_obj.payloadLimitBytes30 = maxPayloadBytes; - } else { - ISAC_inst->ISACenc_obj.payloadLimitBytes30 = ISAC_inst->ISACenc_obj.maxRateInBytes; - } - - if ( maxPayloadBytes < (ISAC_inst->ISACenc_obj.maxRateInBytes << 1)) { - ISAC_inst->ISACenc_obj.payloadLimitBytes60 = maxPayloadBytes; - } else { - ISAC_inst->ISACenc_obj.payloadLimitBytes60 = (ISAC_inst->ISACenc_obj.maxRateInBytes << 1); - } - } - return 0; -} - - -/**************************************************************************** - * WebRtcIsacfix_SetMaxRate(...) - * - * This function sets the maximum rate which the codec may not exceed for a - * singel packet. The maximum rate is set in bits per second. - * The codec has an absolute maximum rate of 53400 bits per second (200 bytes - * per 30 msec). - * It is possible to set a maximum rate between 32000 and 53400 bits per second. - * - * The rate limit is valid until next time the function is called. - * - * NOTE! Packet size will never go above the value set if calling - * WebRtcIsacfix_SetMaxPayloadSize() (default max packet size is 400 bytes). - * - * Input: - * - ISAC_main_inst : iSAC instance - * - maxRateInBytes : maximum rate in bits per second, - * valid values are 32000 to 53400 bits - * - * Return value : 0 if sucessful - * -1 if error happens - */ - -int16_t WebRtcIsacfix_SetMaxRate(ISACFIX_MainStruct *ISAC_main_inst, - int32_t maxRate) -{ - ISACFIX_SubStruct *ISAC_inst; - int16_t maxRateInBytes; - - /* typecast pointer to real structure */ - ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst; - - if((maxRate < 32000) || (maxRate > 53400)) - { - /* maxRate is out of valid range */ - return -1; - } - else - { - /* Calculate maximum number of bytes per 30 msec packets for the given - maximum rate. 
Multiply with 30/1000 to get number of bits per 30 msec, - divide by 8 to get number of bytes per 30 msec: - maxRateInBytes = floor((maxRate * 30/1000) / 8); */ - maxRateInBytes = (int16_t)( WebRtcSpl_DivW32W16ResW16(WEBRTC_SPL_MUL(maxRate, 3), 800) ); - - /* Store the value for usage in the WebRtcIsacfix_SetMaxPayloadSize-function */ - ISAC_inst->ISACenc_obj.maxRateInBytes = maxRateInBytes; - - /* For 30 msec packets: if the new limit is below the maximum - payload size, set a new limit */ - if (maxRateInBytes < ISAC_inst->ISACenc_obj.maxPayloadBytes) { - ISAC_inst->ISACenc_obj.payloadLimitBytes30 = maxRateInBytes; - } else { - ISAC_inst->ISACenc_obj.payloadLimitBytes30 = ISAC_inst->ISACenc_obj.maxPayloadBytes; - } - - /* For 60 msec packets: if the new limit (times 2) is below the - maximum payload size, set a new limit */ - if ( (maxRateInBytes << 1) < ISAC_inst->ISACenc_obj.maxPayloadBytes) { - ISAC_inst->ISACenc_obj.payloadLimitBytes60 = (maxRateInBytes << 1); - } else { - ISAC_inst->ISACenc_obj.payloadLimitBytes60 = ISAC_inst->ISACenc_obj.maxPayloadBytes; - } - } - - return 0; -} - - - -/**************************************************************************** - * WebRtcIsacfix_version(...) - * - * This function returns the version number. - * - * Output: - * - version : Pointer to character string - * - */ - -void WebRtcIsacfix_version(char *version) -{ - strcpy(version, "3.6.0"); -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/lattice.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/lattice.c deleted file mode 100644 index 7bbf4e054a90..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/lattice.c +++ /dev/null @@ -1,321 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * lattice.c - * - * Contains the normalized lattice filter routines (MA and AR) for iSAC codec - * - */ - -#include "modules/audio_coding/codecs/isac/fix/source/codec.h" -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" -#include "rtc_base/sanitizer.h" - -#define LATTICE_MUL_32_32_RSFT16(a32a, a32b, b32) \ - ((int32_t)(WEBRTC_SPL_MUL(a32a, b32) + (WEBRTC_SPL_MUL_16_32_RSFT16(a32b, b32)))) -/* This macro is FORBIDDEN to use elsewhere than in a function in this file and - its corresponding neon version. It might give unpredictable results, since a - general int32_t*int32_t multiplication results in a 64 bit value. - The result is then shifted just 16 steps to the right, giving need for 48 - bits, i.e. in the generel case, it will NOT fit in a int32_t. In the - cases used in here, the int32_t will be enough, since (for a good - reason) the involved multiplicands aren't big enough to overflow a - int32_t after shifting right 16 bits. I have compared the result of a - multiplication between t32 and tmp32, done in two ways: - 1) Using (int32_t) (((float)(tmp32))*((float)(tmp32b))/65536.0); - 2) Using LATTICE_MUL_32_32_RSFT16(t16a, t16b, tmp32b); - By running 25 files, I haven't found any bigger diff than 64 - this was in the - case when method 1) gave 650235648 and 2) gave 650235712. 
-*/ - -/* Function prototype: filtering ar_g_Q0[] and ar_f_Q0[] through an AR filter - with coefficients cth_Q15[] and sth_Q15[]. - Implemented for both generic and ARMv7 platforms. - */ -void WebRtcIsacfix_FilterArLoop(int16_t* ar_g_Q0, - int16_t* ar_f_Q0, - int16_t* cth_Q15, - int16_t* sth_Q15, - size_t order_coef); - -/* Inner loop used for function WebRtcIsacfix_NormLatticeFilterMa(). It does: - for 0 <= n < HALF_SUBFRAMELEN - 1: - *ptr2 = input2 * (*ptr2) + input0 * (*ptr0)); - *ptr1 = input1 * (*ptr0) + input0 * (*ptr2); - Note, function WebRtcIsacfix_FilterMaLoopNeon and WebRtcIsacfix_FilterMaLoopC - are not bit-exact. The accuracy by the ARM Neon function is same or better. -*/ -void WebRtcIsacfix_FilterMaLoopC(int16_t input0, // Filter coefficient - int16_t input1, // Filter coefficient - int32_t input2, // Inverse coeff. (1/input1) - int32_t* ptr0, // Sample buffer - int32_t* ptr1, // Sample buffer - int32_t* ptr2) { // Sample buffer - int n = 0; - - // Separate the 32-bit variable input2 into two 16-bit integers (high 16 and - // low 16 bits), for using LATTICE_MUL_32_32_RSFT16 in the loop. - int16_t t16a = (int16_t)(input2 >> 16); - int16_t t16b = (int16_t)input2; - if (t16b < 0) t16a++; - - // The loop filtering the samples *ptr0, *ptr1, *ptr2 with filter coefficients - // input0, input1, and input2. - for(n = 0; n < HALF_SUBFRAMELEN - 1; n++, ptr0++, ptr1++, ptr2++) { - int32_t tmp32a = 0; - int32_t tmp32b = 0; - - // Calculate *ptr2 = input2 * (*ptr2 + input0 * (*ptr0)); - tmp32a = WEBRTC_SPL_MUL_16_32_RSFT15(input0, *ptr0); // Q15 * Q15 >> 15 = Q15 - tmp32b = *ptr2 + tmp32a; // Q15 + Q15 = Q15 - *ptr2 = LATTICE_MUL_32_32_RSFT16(t16a, t16b, tmp32b); - - // Calculate *ptr1 = input1 * (*ptr0) + input0 * (*ptr2); - tmp32a = WEBRTC_SPL_MUL_16_32_RSFT15(input1, *ptr0); // Q15*Q15>>15 = Q15 - tmp32b = WEBRTC_SPL_MUL_16_32_RSFT15(input0, *ptr2); // Q15*Q15>>15 = Q15 - *ptr1 = tmp32a + tmp32b; // Q15 + Q15 = Q15 - } -} - -/* filter the signal using normalized lattice filter */ -/* MA filter */ -void WebRtcIsacfix_NormLatticeFilterMa(size_t orderCoef, - int32_t *stateGQ15, - int16_t *lat_inQ0, - int16_t *filt_coefQ15, - int32_t *gain_lo_hiQ17, - int16_t lo_hi, - int16_t *lat_outQ9) -{ - int16_t sthQ15[MAX_AR_MODEL_ORDER]; - int16_t cthQ15[MAX_AR_MODEL_ORDER]; - - int u, n; - size_t i, k; - int16_t temp2,temp3; - size_t ord_1 = orderCoef+1; - int32_t inv_cthQ16[MAX_AR_MODEL_ORDER]; - - int32_t gain32, fQtmp; - int16_t gain16; - int16_t gain_sh; - - int32_t tmp32, tmp32b; - int32_t fQ15vec[HALF_SUBFRAMELEN]; - int32_t gQ15[MAX_AR_MODEL_ORDER+1][HALF_SUBFRAMELEN]; - int16_t sh; - int16_t t16a; - int16_t t16b; - - for (u=0;u>15 = Q(17+gain_sh) - inv_cthQ16[k] = WebRtcSpl_DivW32W16((int32_t)2147483647, cthQ15[k]); // 1/cth[k] in Q31/Q15 = Q16 - } - gain16 = (int16_t)(gain32 >> 16); // Q(1+gain_sh). 
- - /* normalized lattice filter */ - /*****************************/ - - /* initial conditions */ - for (i=0;i>15 = Q15 - tmp32b= fQtmp + tmp32; //Q15+Q15=Q15 - tmp32 = inv_cthQ16[i-1]; //Q16 - t16a = (int16_t)(tmp32 >> 16); - t16b = (int16_t)(tmp32 - (t16a << 16)); - if (t16b<0) t16a++; - tmp32 = LATTICE_MUL_32_32_RSFT16(t16a, t16b, tmp32b); - fQtmp = tmp32; // Q15 - - // Calculate g[i][0] = cth[i-1]*stateG[i-1] + sth[i-1]* f[i][0]; - tmp32 = WEBRTC_SPL_MUL_16_32_RSFT15(cthQ15[i-1], stateGQ15[i-1]); //Q15*Q15>>15 = Q15 - tmp32b = WEBRTC_SPL_MUL_16_32_RSFT15(sthQ15[i-1], fQtmp); //Q15*Q15>>15 = Q15 - tmp32 = tmp32 + tmp32b;//Q15+Q15 = Q15 - gQ15[i][0] = tmp32; // Q15 - } - - /* filtering */ - /* save the states */ - for(k=0;k>= gain_sh; // Q(17+gain_sh) -> Q17 - tmp32 = WEBRTC_SPL_MUL_16_32_RSFT16(gain16, fQ15vec[n]); //Q(1+gain_sh)*Q15>>16 = Q(gain_sh) - sh = 9-gain_sh; //number of needed shifts to reach Q9 - t16a = (int16_t) WEBRTC_SPL_SHIFT_W32(tmp32, sh); - lat_outQ9[n + temp1] = t16a; - } - - /* save the states */ - for (i=0;i>15 = Q27 - } - - sh = WebRtcSpl_NormW32(tmp32); // tmp32 is the gain - den16 = (int16_t) WEBRTC_SPL_SHIFT_W32(tmp32, sh-16); //Q(27+sh-16) = Q(sh+11) (all 16 bits are value bits) - inv_gain32 = WebRtcSpl_DivW32W16((int32_t)2147483647, den16); // 1/gain in Q31/Q(sh+11) = Q(20-sh) - - //initial conditions - inv_gain16 = (int16_t)(inv_gain32 >> 2); // 1/gain in Q(20-sh-2) = Q(18-sh) - - for (i=0;iQ26 - tmp32 = WEBRTC_SPL_MUL_16_32_RSFT16(inv_gain16, tmp32); //lat_in[]*inv_gain in (Q(18-sh)*Q26)>>16 = Q(28-sh) - tmp32 = WEBRTC_SPL_SHIFT_W32(tmp32, -(28-sh)); // lat_in[]*inv_gain in Q0 - - ARfQ0vec[i] = (int16_t)WebRtcSpl_SatW32ToW16(tmp32); // Q0 - } - - // Get the state of f & g for the first input, for all orders. - for (i = orderCoef; i > 0; i--) - { - tmp32 = (cthQ15[i - 1] * ARfQ0vec[0] - sthQ15[i - 1] * stateGQ0[i - 1] + - 16384) >> 15; - tmpAR = (int16_t)WebRtcSpl_SatW32ToW16(tmp32); // Q0 - - tmp32 = (sthQ15[i - 1] * ARfQ0vec[0] + cthQ15[i - 1] * stateGQ0[i - 1] + - 16384) >> 15; - ARgQ0vec[i] = (int16_t)WebRtcSpl_SatW32ToW16(tmp32); // Q0 - ARfQ0vec[0] = tmpAR; - } - ARgQ0vec[0] = ARfQ0vec[0]; - - // Filter ARgQ0vec[] and ARfQ0vec[] through coefficients cthQ15[] and sthQ15[]. - WebRtcIsacfix_FilterArLoop(ARgQ0vec, ARfQ0vec, cthQ15, sthQ15, orderCoef); - - for(n=0;n 0; k--) - - ldrh r7, [r3, #-2]! @ sth_Q15[k - 1] - ldrh r6, [r2, #-2]! @ cth_Q15[k - 1] - - ldrh r8, [r0, #-2] @ ar_g_Q0[k - 1] - smlabb r11, r7, r5, r12 @ sth_Q15[k - 1] * tmpAR + 16384 - smlabb r10, r6, r5, r12 @ cth_Q15[k - 1] * tmpAR + 16384 - smulbb r7, r7, r8 @ sth_Q15[k - 1] * ar_g_Q0[k - 1] - smlabb r11, r6, r8, r11 @ cth_Q15[k - 1] * ar_g_Q0[k - 1] + - @ (sth_Q15[k - 1] * tmpAR + 16384) - - sub r10, r10, r7 @ cth_Q15[k - 1] * tmpAR + 16384 - - @ (sth_Q15[k - 1] * ar_g_Q0[k - 1]) - ssat r11, #16, r11, asr #15 - ssat r5, #16, r10, asr #15 - strh r11, [r0], #-2 @ Output: ar_g_Q0[k] - - subs r9, #1 - bgt ORDER_COEF_LOOP - - strh r5, [r0] @ Output: ar_g_Q0[0] = tmpAR; - strh r5, [r1], #2 @ Output: ar_f_Q0[n+1] = tmpAR; - - subs r4, #1 - bne HALF_SUBFRAME_LOOP - - pop {r4-r11} - bx lr diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/lattice_c.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/lattice_c.c deleted file mode 100644 index 43406612e81f..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/lattice_c.c +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. 
All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * Contains the core loop function for the lattice filter AR routine - * for iSAC codec. - * - */ - -#include "common_audio/signal_processing/include/signal_processing_library.h" -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" - -/* Filter ar_g_Q0[] and ar_f_Q0[] through an AR filter with coefficients - * cth_Q15[] and sth_Q15[]. - */ -void WebRtcIsacfix_FilterArLoop(int16_t* ar_g_Q0, // Input samples - int16_t* ar_f_Q0, // Input samples - int16_t* cth_Q15, // Filter coefficients - int16_t* sth_Q15, // Filter coefficients - size_t order_coef) { // order of the filter - int n = 0; - - for (n = 0; n < HALF_SUBFRAMELEN - 1; n++) { - size_t k = 0; - int16_t tmpAR = 0; - int32_t tmp32 = 0; - int32_t tmp32_2 = 0; - - tmpAR = ar_f_Q0[n + 1]; - for (k = order_coef; k > 0; k--) { - tmp32 = (cth_Q15[k - 1] * tmpAR - sth_Q15[k - 1] * ar_g_Q0[k - 1] + - 16384) >> 15; - tmp32_2 = (sth_Q15[k - 1] * tmpAR + cth_Q15[k - 1] * ar_g_Q0[k - 1] + - 16384) >> 15; - tmpAR = (int16_t)WebRtcSpl_SatW32ToW16(tmp32); - ar_g_Q0[k] = (int16_t)WebRtcSpl_SatW32ToW16(tmp32_2); - } - ar_f_Q0[n + 1] = tmpAR; - ar_g_Q0[0] = tmpAR; - } -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/lattice_mips.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/lattice_mips.c deleted file mode 100644 index 318972662916..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/lattice_mips.c +++ /dev/null @@ -1,329 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include -#include - -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" - -// Filter ar_g_Q0[] and ar_f_Q0[] through an AR filter with coefficients -// cth_Q15[] and sth_Q15[]. -void WebRtcIsacfix_FilterArLoop(int16_t* ar_g_Q0, // Input samples - int16_t* ar_f_Q0, // Input samples - int16_t* cth_Q15, // Filter coefficients - int16_t* sth_Q15, // Filter coefficients - size_t order_coef) { // order of the filter - int n = 0; - - for (n = 0; n < HALF_SUBFRAMELEN - 1; n++) { - int count = (int)(order_coef - 1); - int offset; -#if !defined(MIPS_DSP_R1_LE) - int16_t* tmp_cth; - int16_t* tmp_sth; - int16_t* tmp_arg; - int32_t max_q16 = 0x7fff; - int32_t min_q16 = 0xffff8000; -#endif - // Declare variables used as temporary registers. 
- int32_t r0, r1, r2, t0, t1, t2, t_ar; - - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "bltz %[count], 2f \n\t" - " lh %[t_ar], 0(%[tmp]) \n\t" - // Inner loop - "1: \n\t" - "sll %[offset], %[count], 1 \n\t" -#if defined(MIPS_DSP_R1_LE) - "lhx %[r0], %[offset](%[cth_Q15]) \n\t" - "lhx %[r1], %[offset](%[sth_Q15]) \n\t" - "lhx %[r2], %[offset](%[ar_g_Q0]) \n\t" -#else - "addu %[tmp_cth], %[cth_Q15], %[offset] \n\t" - "addu %[tmp_sth], %[sth_Q15], %[offset] \n\t" - "addu %[tmp_arg], %[ar_g_Q0], %[offset] \n\t" - "lh %[r0], 0(%[tmp_cth]) \n\t" - "lh %[r1], 0(%[tmp_sth]) \n\t" - "lh %[r2], 0(%[tmp_arg]) \n\t" -#endif - "mul %[t0], %[r0], %[t_ar] \n\t" - "mul %[t1], %[r1], %[t_ar] \n\t" - "mul %[t2], %[r1], %[r2] \n\t" - "mul %[r0], %[r0], %[r2] \n\t" - "subu %[t0], %[t0], %[t2] \n\t" - "addu %[t1], %[t1], %[r0] \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[t1], %[t1], 15 \n\t" - "shra_r.w %[t0], %[t0], 15 \n\t" -#else - "addiu %[t1], %[t1], 0x4000 \n\t" - "sra %[t1], %[t1], 15 \n\t" - "addiu %[t0], %[t0], 0x4000 \n\t" - "sra %[t0], %[t0], 15 \n\t" -#endif - "addiu %[offset], %[offset], 2 \n\t" -#if defined(MIPS_DSP_R1_LE) - "shll_s.w %[t1], %[t1], 16 \n\t" - "shll_s.w %[t_ar], %[t0], 16 \n\t" -#else - "slt %[r0], %[t1], %[max_q16] \n\t" - "slt %[r1], %[t0], %[max_q16] \n\t" - "movz %[t1], %[max_q16], %[r0] \n\t" - "movz %[t0], %[max_q16], %[r1] \n\t" -#endif - "addu %[offset], %[offset], %[ar_g_Q0] \n\t" -#if defined(MIPS_DSP_R1_LE) - "sra %[t1], %[t1], 16 \n\t" - "sra %[t_ar], %[t_ar], 16 \n\t" -#else - "slt %[r0], %[t1], %[min_q16] \n\t" - "slt %[r1], %[t0], %[min_q16] \n\t" - "movn %[t1], %[min_q16], %[r0] \n\t" - "movn %[t0], %[min_q16], %[r1] \n\t" - "addu %[t_ar], $zero, %[t0] \n\t" -#endif - "sh %[t1], 0(%[offset]) \n\t" - "bgtz %[count], 1b \n\t" - " addiu %[count], %[count], -1 \n\t" - "2: \n\t" - "sh %[t_ar], 0(%[tmp]) \n\t" - "sh %[t_ar], 0(%[ar_g_Q0]) \n\t" - ".set pop \n\t" - : [t_ar] "=&r" (t_ar), [count] "+r" (count), [offset] "=&r" (offset), - [r0] "=&r" (r0), [r1] "=&r" (r1), [r2] "=&r" (r2), [t0] "=&r" (t0), -#if !defined(MIPS_DSP_R1_LE) - [tmp_cth] "=&r" (tmp_cth), [tmp_sth] "=&r" (tmp_sth), - [tmp_arg] "=&r" (tmp_arg), -#endif - [t1] "=&r" (t1), [t2] "=&r" (t2) - : [tmp] "r" (&ar_f_Q0[n+1]), [cth_Q15] "r" (cth_Q15), -#if !defined(MIPS_DSP_R1_LE) - [max_q16] "r" (max_q16), [min_q16] "r" (min_q16), -#endif - [sth_Q15] "r" (sth_Q15), [ar_g_Q0] "r" (ar_g_Q0) - : "memory", "hi", "lo" - ); - } -} - -// MIPS optimization of the inner loop used for function -// WebRtcIsacfix_NormLatticeFilterMa(). It does: -// -// for 0 <= n < HALF_SUBFRAMELEN - 1: -// *ptr2 = input2 * (*ptr2) + input0 * (*ptr0)); -// *ptr1 = input1 * (*ptr0) + input0 * (*ptr2); -// -// Note, function WebRtcIsacfix_FilterMaLoopMIPS and WebRtcIsacfix_FilterMaLoopC -// are not bit-exact. The accuracy of the MIPS function is same or better. -void WebRtcIsacfix_FilterMaLoopMIPS(int16_t input0, // Filter coefficient - int16_t input1, // Filter coefficient - int32_t input2, // Inverse coeff (1/input1) - int32_t* ptr0, // Sample buffer - int32_t* ptr1, // Sample buffer - int32_t* ptr2) { // Sample buffer -#if defined(MIPS_DSP_R2_LE) - // MIPS DSPR2 version. 4 available accumulators allows loop unrolling 4 times. - // This variant is not bit-exact with WebRtcIsacfix_FilterMaLoopC, since we - // are exploiting 64-bit accumulators. The accuracy of the MIPS DSPR2 function - // is same or better. 
- int n = (HALF_SUBFRAMELEN - 1) >> 2; - int m = (HALF_SUBFRAMELEN - 1) & 3; - - int r0, r1, r2, r3; - int t0, t1, t2, t3; - int s0, s1, s2, s3; - - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "1: \n\t" - "lw %[r0], 0(%[ptr0]) \n\t" - "lw %[r1], 4(%[ptr0]) \n\t" - "lw %[r2], 8(%[ptr0]) \n\t" - "lw %[r3], 12(%[ptr0]) \n\t" - "mult $ac0, %[r0], %[input0] \n\t" - "mult $ac1, %[r1], %[input0] \n\t" - "mult $ac2, %[r2], %[input0] \n\t" - "mult $ac3, %[r3], %[input0] \n\t" - "lw %[t0], 0(%[ptr2]) \n\t" - "extr_rs.w %[s0], $ac0, 15 \n\t" - "extr_rs.w %[s1], $ac1, 15 \n\t" - "extr_rs.w %[s2], $ac2, 15 \n\t" - "extr_rs.w %[s3], $ac3, 15 \n\t" - "lw %[t1], 4(%[ptr2]) \n\t" - "lw %[t2], 8(%[ptr2]) \n\t" - "lw %[t3], 12(%[ptr2]) \n\t" - "addu %[t0], %[t0], %[s0] \n\t" - "addu %[t1], %[t1], %[s1] \n\t" - "addu %[t2], %[t2], %[s2] \n\t" - "addu %[t3], %[t3], %[s3] \n\t" - "mult $ac0, %[t0], %[input2] \n\t" - "mult $ac1, %[t1], %[input2] \n\t" - "mult $ac2, %[t2], %[input2] \n\t" - "mult $ac3, %[t3], %[input2] \n\t" - "addiu %[ptr0], %[ptr0], 16 \n\t" - "extr_rs.w %[t0], $ac0, 16 \n\t" - "extr_rs.w %[t1], $ac1, 16 \n\t" - "extr_rs.w %[t2], $ac2, 16 \n\t" - "extr_rs.w %[t3], $ac3, 16 \n\t" - "addiu %[n], %[n], -1 \n\t" - "mult $ac0, %[r0], %[input1] \n\t" - "mult $ac1, %[r1], %[input1] \n\t" - "mult $ac2, %[r2], %[input1] \n\t" - "mult $ac3, %[r3], %[input1] \n\t" - "sw %[t0], 0(%[ptr2]) \n\t" - "extr_rs.w %[s0], $ac0, 15 \n\t" - "extr_rs.w %[s1], $ac1, 15 \n\t" - "extr_rs.w %[s2], $ac2, 15 \n\t" - "extr_rs.w %[s3], $ac3, 15 \n\t" - "sw %[t1], 4(%[ptr2]) \n\t" - "sw %[t2], 8(%[ptr2]) \n\t" - "sw %[t3], 12(%[ptr2]) \n\t" - "mult $ac0, %[t0], %[input0] \n\t" - "mult $ac1, %[t1], %[input0] \n\t" - "mult $ac2, %[t2], %[input0] \n\t" - "mult $ac3, %[t3], %[input0] \n\t" - "addiu %[ptr2], %[ptr2], 16 \n\t" - "extr_rs.w %[t0], $ac0, 15 \n\t" - "extr_rs.w %[t1], $ac1, 15 \n\t" - "extr_rs.w %[t2], $ac2, 15 \n\t" - "extr_rs.w %[t3], $ac3, 15 \n\t" - "addu %[t0], %[t0], %[s0] \n\t" - "addu %[t1], %[t1], %[s1] \n\t" - "addu %[t2], %[t2], %[s2] \n\t" - "addu %[t3], %[t3], %[s3] \n\t" - "sw %[t0], 0(%[ptr1]) \n\t" - "sw %[t1], 4(%[ptr1]) \n\t" - "sw %[t2], 8(%[ptr1]) \n\t" - "sw %[t3], 12(%[ptr1]) \n\t" - "bgtz %[n], 1b \n\t" - " addiu %[ptr1], %[ptr1], 16 \n\t" - "beq %[m], %0, 3f \n\t" - " nop \n\t" - "2: \n\t" - "lw %[r0], 0(%[ptr0]) \n\t" - "lw %[t0], 0(%[ptr2]) \n\t" - "addiu %[ptr0], %[ptr0], 4 \n\t" - "mult $ac0, %[r0], %[input0] \n\t" - "mult $ac1, %[r0], %[input1] \n\t" - "extr_rs.w %[r1], $ac0, 15 \n\t" - "extr_rs.w %[t1], $ac1, 15 \n\t" - "addu %[t0], %[t0], %[r1] \n\t" - "mult $ac0, %[t0], %[input2] \n\t" - "extr_rs.w %[t0], $ac0, 16 \n\t" - "sw %[t0], 0(%[ptr2]) \n\t" - "mult $ac0, %[t0], %[input0] \n\t" - "addiu %[ptr2], %[ptr2], 4 \n\t" - "addiu %[m], %[m], -1 \n\t" - "extr_rs.w %[t0], $ac0, 15 \n\t" - "addu %[t0], %[t0], %[t1] \n\t" - "sw %[t0], 0(%[ptr1]) \n\t" - "bgtz %[m], 2b \n\t" - " addiu %[ptr1], %[ptr1], 4 \n\t" - "3: \n\t" - ".set pop \n\t" - : [r0] "=&r" (r0), [r1] "=&r" (r1), [r2] "=&r" (r2), - [r3] "=&r" (r3), [t0] "=&r" (t0), [t1] "=&r" (t1), - [t2] "=&r" (t2), [t3] "=&r" (t3), [s0] "=&r" (s0), - [s1] "=&r" (s1), [s2] "=&r" (s2), [s3] "=&r" (s3), - [ptr0] "+r" (ptr0), [ptr1] "+r" (ptr1), [m] "+r" (m), - [ptr2] "+r" (ptr2), [n] "+r" (n) - : [input0] "r" (input0), [input1] "r" (input1), - [input2] "r" (input2) - : "memory", "hi", "lo", "$ac1hi", "$ac1lo", "$ac2hi", - "$ac2lo", "$ac3hi", "$ac3lo" - ); -#else - // Non-DSPR2 version of the function. 
Avoiding the accumulator usage due to - // large latencies. This variant is bit-exact with C code. - int n = HALF_SUBFRAMELEN - 1; - int32_t t16a, t16b; - int32_t r0, r1, r2, r3, r4; - - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "sra %[t16a], %[input2], 16 \n\t" - "andi %[t16b], %[input2], 0xFFFF \n\t" -#if defined(MIPS32R2_LE) - "seh %[t16b], %[t16b] \n\t" - "seh %[input0], %[input0] \n\t" - "seh %[input1], %[input1] \n\t" -#else - "sll %[t16b], %[t16b], 16 \n\t" - "sra %[t16b], %[t16b], 16 \n\t" - "sll %[input0], %[input0], 16 \n\t" - "sra %[input0], %[input0], 16 \n\t" - "sll %[input1], %[input1], 16 \n\t" - "sra %[input1], %[input1], 16 \n\t" -#endif - "addiu %[r0], %[t16a], 1 \n\t" - "slt %[r1], %[t16b], $zero \n\t" - "movn %[t16a], %[r0], %[r1] \n\t" - "1: \n\t" - "lw %[r0], 0(%[ptr0]) \n\t" - "lw %[r1], 0(%[ptr2]) \n\t" - "addiu %[ptr0], %[ptr0], 4 \n\t" - "sra %[r2], %[r0], 16 \n\t" - "andi %[r0], %[r0], 0xFFFF \n\t" - "mul %[r3], %[r2], %[input0] \n\t" - "mul %[r4], %[r0], %[input0] \n\t" - "mul %[r2], %[r2], %[input1] \n\t" - "mul %[r0], %[r0], %[input1] \n\t" - "addiu %[ptr2], %[ptr2], 4 \n\t" - "sll %[r3], %[r3], 1 \n\t" - "sra %[r4], %[r4], 1 \n\t" - "addiu %[r4], %[r4], 0x2000 \n\t" - "sra %[r4], %[r4], 14 \n\t" - "addu %[r3], %[r3], %[r4] \n\t" - "addu %[r1], %[r1], %[r3] \n\t" - "sra %[r3], %[r1], 16 \n\t" - "andi %[r4], %[r1], 0xFFFF \n\t" - "sra %[r4], %[r4], 1 \n\t" - "mul %[r1], %[r1], %[t16a] \n\t" - "mul %[r3], %[r3], %[t16b] \n\t" - "mul %[r4], %[r4], %[t16b] \n\t" - "sll %[r2], %[r2], 1 \n\t" - "sra %[r0], %[r0], 1 \n\t" - "addiu %[r0], %[r0], 0x2000 \n\t" - "sra %[r0], %[r0], 14 \n\t" - "addu %[r0], %[r0], %[r2] \n\t" - "addiu %[n], %[n], -1 \n\t" - "addu %[r1], %[r1], %[r3] \n\t" - "addiu %[r4], %[r4], 0x4000 \n\t" - "sra %[r4], %[r4], 15 \n\t" - "addu %[r1], %[r1], %[r4] \n\t" - "sra %[r2], %[r1], 16 \n\t" - "andi %[r3], %[r1], 0xFFFF \n\t" - "mul %[r3], %[r3], %[input0] \n\t" - "mul %[r2], %[r2], %[input0] \n\t" - "sw %[r1], -4(%[ptr2]) \n\t" - "sra %[r3], %[r3], 1 \n\t" - "addiu %[r3], %[r3], 0x2000 \n\t" - "sra %[r3], %[r3], 14 \n\t" - "addu %[r0], %[r0], %[r3] \n\t" - "sll %[r2], %[r2], 1 \n\t" - "addu %[r0], %[r0], %[r2] \n\t" - "sw %[r0], 0(%[ptr1]) \n\t" - "bgtz %[n], 1b \n\t" - " addiu %[ptr1], %[ptr1], 4 \n\t" - ".set pop \n\t" - : [t16a] "=&r" (t16a), [t16b] "=&r" (t16b), [r0] "=&r" (r0), - [r1] "=&r" (r1), [r2] "=&r" (r2), [r3] "=&r" (r3), - [r4] "=&r" (r4), [ptr0] "+r" (ptr0), [ptr1] "+r" (ptr1), - [ptr2] "+r" (ptr2), [n] "+r" (n) - : [input0] "r" (input0), [input1] "r" (input1), - [input2] "r" (input2) - : "hi", "lo", "memory" - ); -#endif -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/lattice_neon.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/lattice_neon.c deleted file mode 100644 index 8ea9b63578c2..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/lattice_neon.c +++ /dev/null @@ -1,195 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */
-
-#include <arm_neon.h>
-
-#include "modules/audio_coding/codecs/isac/fix/source/codec.h"
-#include "modules/audio_coding/codecs/isac/fix/source/settings.h"
-
-// Contains a function for the core loop in the normalized lattice MA
-// filter routine for iSAC codec, optimized for ARM Neon platform.
-// It does:
-//  for 0 <= n < HALF_SUBFRAMELEN - 1:
-//    *ptr2 = input2 * ((*ptr2) + input0 * (*ptr0));
-//    *ptr1 = input1 * (*ptr0) + input0 * (*ptr2);
-// Output is not bit-exact with the reference C code, due to the replacement
-// of WEBRTC_SPL_MUL_16_32_RSFT15 and LATTICE_MUL_32_32_RSFT16 with Neon
-// instructions. The difference should not be bigger than 1.
-void WebRtcIsacfix_FilterMaLoopNeon(int16_t input0,  // Filter coefficient
-                                    int16_t input1,  // Filter coefficient
-                                    int32_t input2,  // Inverse coefficient
-                                    int32_t* ptr0,   // Sample buffer
-                                    int32_t* ptr1,   // Sample buffer
-                                    int32_t* ptr2)   // Sample buffer
-{
-  int n = 0;
-  int loop = (HALF_SUBFRAMELEN - 1) >> 3;
-  int loop_tail = (HALF_SUBFRAMELEN - 1) & 0x7;
-
-  int32x4_t input0_v = vdupq_n_s32((int32_t)input0 << 16);
-  int32x4_t input1_v = vdupq_n_s32((int32_t)input1 << 16);
-  int32x4_t input2_v = vdupq_n_s32(input2);
-  int32x4_t tmp0a, tmp1a, tmp2a, tmp3a;
-  int32x4_t tmp0b, tmp1b, tmp2b, tmp3b;
-  int32x4_t ptr0va, ptr1va, ptr2va;
-  int32x4_t ptr0vb, ptr1vb, ptr2vb;
-
-  int64x2_t tmp2al_low, tmp2al_high, tmp2bl_low, tmp2bl_high;
-  // Unroll to process 8 samples at once.
-  for (n = 0; n < loop; n++) {
-    ptr0va = vld1q_s32(ptr0);
-    ptr0vb = vld1q_s32(ptr0 + 4);
-    ptr0 += 8;
-
-    ptr2va = vld1q_s32(ptr2);
-    ptr2vb = vld1q_s32(ptr2 + 4);
-
-    // Calculate tmp0 = (*ptr0) * input0.
-    tmp0a = vqrdmulhq_s32(ptr0va, input0_v);
-    tmp0b = vqrdmulhq_s32(ptr0vb, input0_v);
-
-    // Calculate tmp1 = (*ptr0) * input1.
-    tmp1a = vqrdmulhq_s32(ptr0va, input1_v);
-    tmp1b = vqrdmulhq_s32(ptr0vb, input1_v);
-
-    // Calculate tmp2 = tmp0 + *(ptr2).
-    tmp2a = vaddq_s32(tmp0a, ptr2va);
-    tmp2b = vaddq_s32(tmp0b, ptr2vb);
-
-    // Calculate *ptr2 = input2 * tmp2.
-    tmp2al_low = vmull_s32(vget_low_s32(tmp2a), vget_low_s32(input2_v));
-#if defined(WEBRTC_ARCH_ARM64)
-    tmp2al_high = vmull_high_s32(tmp2a, input2_v);
-#else
-    tmp2al_high = vmull_s32(vget_high_s32(tmp2a), vget_high_s32(input2_v));
-#endif
-    ptr2va = vcombine_s32(vrshrn_n_s64(tmp2al_low, 16),
-                          vrshrn_n_s64(tmp2al_high, 16));
-
-    tmp2bl_low = vmull_s32(vget_low_s32(tmp2b), vget_low_s32(input2_v));
-#if defined(WEBRTC_ARCH_ARM64)
-    tmp2bl_high = vmull_high_s32(tmp2b, input2_v);
-#else
-    tmp2bl_high = vmull_s32(vget_high_s32(tmp2b), vget_high_s32(input2_v));
-#endif
-    ptr2vb = vcombine_s32(vrshrn_n_s64(tmp2bl_low, 16),
-                          vrshrn_n_s64(tmp2bl_high, 16));
-
-    vst1q_s32(ptr2, ptr2va);
-    vst1q_s32(ptr2 + 4, ptr2vb);
-    ptr2 += 8;
-
-    // Calculate tmp3 = ptr2v * input0.
-    tmp3a = vqrdmulhq_s32(ptr2va, input0_v);
-    tmp3b = vqrdmulhq_s32(ptr2vb, input0_v);
-
-    // Calculate *ptr1 = tmp1 + tmp3.
-    ptr1va = vaddq_s32(tmp1a, tmp3a);
-    ptr1vb = vaddq_s32(tmp1b, tmp3b);
-
-    vst1q_s32(ptr1, ptr1va);
-    vst1q_s32(ptr1 + 4, ptr1vb);
-    ptr1 += 8;
-  }
-
-  // Process four more samples.
-  if (loop_tail & 0x4) {
-    ptr0va = vld1q_s32(ptr0);
-    ptr2va = vld1q_s32(ptr2);
-    ptr0 += 4;
-
-    // Calculate tmp0 = (*ptr0) * input0.
-    tmp0a = vqrdmulhq_s32(ptr0va, input0_v);
-
-    // Calculate tmp1 = (*ptr0) * input1.
-    tmp1a = vqrdmulhq_s32(ptr0va, input1_v);
-
-    // Calculate tmp2 = tmp0 + *(ptr2).
-    tmp2a = vaddq_s32(tmp0a, ptr2va);
-
-    // Calculate *ptr2 = input2 * tmp2. 
-    tmp2al_low = vmull_s32(vget_low_s32(tmp2a), vget_low_s32(input2_v));
-
-#if defined(WEBRTC_ARCH_ARM64)
-    tmp2al_high = vmull_high_s32(tmp2a, input2_v);
-#else
-    tmp2al_high = vmull_s32(vget_high_s32(tmp2a), vget_high_s32(input2_v));
-#endif
-    ptr2va = vcombine_s32(vrshrn_n_s64(tmp2al_low, 16),
-                          vrshrn_n_s64(tmp2al_high, 16));
-
-    vst1q_s32(ptr2, ptr2va);
-    ptr2 += 4;
-
-    // Calculate tmp3 = *(ptr2) * input0.
-    tmp3a = vqrdmulhq_s32(ptr2va, input0_v);
-
-    // Calculate *ptr1 = tmp1 + tmp3.
-    ptr1va = vaddq_s32(tmp1a, tmp3a);
-
-    vst1q_s32(ptr1, ptr1va);
-    ptr1 += 4;
-  }
-
-  // Process two more samples.
-  if (loop_tail & 0x2) {
-    int32x2_t ptr0v_tail, ptr2v_tail, ptr1v_tail;
-    int32x2_t tmp0_tail, tmp1_tail, tmp2_tail, tmp3_tail;
-    int64x2_t tmp2l_tail;
-    ptr0v_tail = vld1_s32(ptr0);
-    ptr2v_tail = vld1_s32(ptr2);
-    ptr0 += 2;
-
-    // Calculate tmp0 = (*ptr0) * input0.
-    tmp0_tail = vqrdmulh_s32(ptr0v_tail, vget_low_s32(input0_v));
-
-    // Calculate tmp1 = (*ptr0) * input1.
-    tmp1_tail = vqrdmulh_s32(ptr0v_tail, vget_low_s32(input1_v));
-
-    // Calculate tmp2 = tmp0 + *(ptr2).
-    tmp2_tail = vadd_s32(tmp0_tail, ptr2v_tail);
-
-    // Calculate *ptr2 = input2 * tmp2.
-    tmp2l_tail = vmull_s32(tmp2_tail, vget_low_s32(input2_v));
-    ptr2v_tail = vrshrn_n_s64(tmp2l_tail, 16);
-
-    vst1_s32(ptr2, ptr2v_tail);
-    ptr2 += 2;
-
-    // Calculate tmp3 = *(ptr2) * input0.
-    tmp3_tail = vqrdmulh_s32(ptr2v_tail, vget_low_s32(input0_v));
-
-    // Calculate *ptr1 = tmp1 + tmp3.
-    ptr1v_tail = vadd_s32(tmp1_tail, tmp3_tail);
-
-    vst1_s32(ptr1, ptr1v_tail);
-    ptr1 += 2;
-  }
-
-  // Process one more sample.
-  if (loop_tail & 0x1) {
-    int16_t t16a = (int16_t)(input2 >> 16);
-    int16_t t16b = (int16_t)input2;
-    if (t16b < 0) t16a++;
-    int32_t tmp32a;
-    int32_t tmp32b;
-
-    // Calculate *ptr2 = input2 * (*ptr2 + input0 * (*ptr0)).
-    tmp32a = WEBRTC_SPL_MUL_16_32_RSFT15(input0, *ptr0);
-    tmp32b = *ptr2 + tmp32a;
-    *ptr2 = (int32_t)(WEBRTC_SPL_MUL(t16a, tmp32b) +
-                      (WEBRTC_SPL_MUL_16_32_RSFT16(t16b, tmp32b)));
-
-    // Calculate *ptr1 = input1 * (*ptr0) + input0 * (*ptr2).
-    tmp32a = WEBRTC_SPL_MUL_16_32_RSFT15(input1, *ptr0);
-    tmp32b = WEBRTC_SPL_MUL_16_32_RSFT15(input0, *ptr2);
-    *ptr1 = tmp32a + tmp32b;
-  }
-}
diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/lpc_masking_model.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/lpc_masking_model.c
deleted file mode 100644
index f151cd1c8865..000000000000
--- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/lpc_masking_model.c
+++ /dev/null
@@ -1,949 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * lpc_masking_model.c
- *
- * LPC analysis and filtering functions
- *
- */
-
-#include "lpc_masking_model.h"
-
-#include <limits.h> /* For LLONG_MAX and LLONG_MIN. 
*/
-
-#include "modules/audio_coding/codecs/isac/fix/source/codec.h"
-#include "modules/audio_coding/codecs/isac/fix/source/entropy_coding.h"
-#include "modules/audio_coding/codecs/isac/fix/source/settings.h"
-
-/* The conversion is implemented by the step-down algorithm */
-void WebRtcSpl_AToK_JSK(
-    int16_t *a16, /* Q11 */
-    int16_t useOrder,
-    int16_t *k16 /* Q15 */
-    )
-{
-  int m, k;
-  int32_t tmp32[MAX_AR_MODEL_ORDER];
-  int32_t tmp32b;
-  int32_t tmp_inv_denum32;
-  int16_t tmp_inv_denum16;
-
-  k16[useOrder-1] = a16[useOrder] << 4; // Q11<<4 => Q15
-
-  for (m=useOrder-1; m>0; m--) {
-    // (1 - k^2) in Q30
-    tmp_inv_denum32 = 1073741823 - k16[m] * k16[m];
-    tmp_inv_denum16 = (int16_t)(tmp_inv_denum32 >> 15); // (1 - k^2) in Q15.
-
-    for (k=1; k<=m; k++) {
-      tmp32b = (a16[k] << 16) - ((k16[m] * a16[m - k + 1]) << 1);
-
-      tmp32[k] = WebRtcSpl_DivW32W16(tmp32b, tmp_inv_denum16); //Q27/Q15 = Q12
-    }
-
-    for (k=1; k<m; k++) {
-      a16[k] = (int16_t)(tmp32[k] >> 1); // Q12>>1 => Q11
-    }
-
-    tmp32[m] = WEBRTC_SPL_SAT(4092, tmp32[m], -4092);
-    k16[m - 1] = (int16_t)(tmp32[m] << 3); // Q12<<3 => Q15
-  }
-
-  return;
-}
-
-
-
-
-
-int16_t WebRtcSpl_LevinsonW32_JSK(
-    int32_t *R, /* (i) Autocorrelation of length >= order+1 */
-    int16_t *A, /* (o) A[0..order] LPC coefficients (Q11) */
-    int16_t *K, /* (o) K[0...order-1] Reflection coefficients (Q15) */
-    int16_t order /* (i) filter order */
-    ) {
-  int16_t i, j;
-  int16_t R_hi[LEVINSON_MAX_ORDER+1], R_low[LEVINSON_MAX_ORDER+1];
-  /* Aurocorr coefficients in high precision */
-  int16_t A_hi[LEVINSON_MAX_ORDER+1], A_low[LEVINSON_MAX_ORDER+1];
-  /* LPC coefficients in high precicion */
-  int16_t A_upd_hi[LEVINSON_MAX_ORDER+1], A_upd_low[LEVINSON_MAX_ORDER+1];
-  /* LPC coefficients for next iteration */
-  int16_t K_hi, K_low; /* reflection coefficient in high precision */
-  int16_t Alpha_hi, Alpha_low, Alpha_exp; /* Prediction gain Alpha in high precision
-                                             and with scale factor */
-  int16_t tmp_hi, tmp_low;
-  int32_t temp1W32, temp2W32, temp3W32;
-  int16_t norm;
-
-  /* Normalize the autocorrelation R[0]...R[order+1] */
-
-  norm = WebRtcSpl_NormW32(R[0]);
-
-  for (i=order;i>=0;i--) {
-    temp1W32 = R[i] << norm;
-    /* Put R in hi and low format */
-    R_hi[i] = (int16_t)(temp1W32 >> 16);
-    R_low[i] = (int16_t)((temp1W32 - ((int32_t)R_hi[i] << 16)) >> 1);
-  }
-
-  /* K = A[1] = -R[1] / R[0] */
-
-  temp2W32 = (R_hi[1] << 16) + (R_low[1] << 1); /* R[1] in Q31 */
-  temp3W32 = WEBRTC_SPL_ABS_W32(temp2W32); /* abs R[1] */
-  temp1W32 = WebRtcSpl_DivW32HiLow(temp3W32, R_hi[0], R_low[0]); /* abs(R[1])/R[0] in Q31 */
-  /* Put back the sign on R[1] */
-  if (temp2W32 > 0) {
-    temp1W32 = -temp1W32;
-  }
-
-  /* Put K in hi and low format */
-  K_hi = (int16_t)(temp1W32 >> 16);
-  K_low = (int16_t)((temp1W32 - ((int32_t)K_hi << 16)) >> 1);
-
-  /* Store first reflection coefficient */
-  K[0] = K_hi;
-
-  temp1W32 >>= 4; /* A[1] in Q27. 
*/ - - /* Put A[1] in hi and low format */ - A_hi[1] = (int16_t)(temp1W32 >> 16); - A_low[1] = (int16_t)((temp1W32 - ((int32_t)A_hi[1] << 16)) >> 1); - - /* Alpha = R[0] * (1-K^2) */ - - temp1W32 = (((K_hi * K_low) >> 14) + K_hi * K_hi) << 1; /* = k^2 in Q31 */ - - temp1W32 = WEBRTC_SPL_ABS_W32(temp1W32); /* Guard against <0 */ - temp1W32 = (int32_t)0x7fffffffL - temp1W32; /* temp1W32 = (1 - K[0]*K[0]) in Q31 */ - - /* Store temp1W32 = 1 - K[0]*K[0] on hi and low format */ - tmp_hi = (int16_t)(temp1W32 >> 16); - tmp_low = (int16_t)((temp1W32 - ((int32_t)tmp_hi << 16)) >> 1); - - /* Calculate Alpha in Q31 */ - temp1W32 = (R_hi[0] * tmp_hi + ((R_hi[0] * tmp_low) >> 15) + - ((R_low[0] * tmp_hi) >> 15)) << 1; - - /* Normalize Alpha and put it in hi and low format */ - - Alpha_exp = WebRtcSpl_NormW32(temp1W32); - temp1W32 <<= Alpha_exp; - Alpha_hi = (int16_t)(temp1W32 >> 16); - Alpha_low = (int16_t)((temp1W32 - ((int32_t)Alpha_hi<< 16)) >> 1); - - /* Perform the iterative calculations in the - Levinson Durbin algorithm */ - - for (i=2; i<=order; i++) - { - - /* ---- - \ - temp1W32 = R[i] + > R[j]*A[i-j] - / - ---- - j=1..i-1 - */ - - temp1W32 = 0; - - for(j=1; j> 15) + - ((R_low[j] * A_hi[i - j]) >> 15)) << 1); - } - - temp1W32 <<= 4; - temp1W32 += (R_hi[i] << 16) + (R_low[i] << 1); - - /* K = -temp1W32 / Alpha */ - temp2W32 = WEBRTC_SPL_ABS_W32(temp1W32); /* abs(temp1W32) */ - temp3W32 = WebRtcSpl_DivW32HiLow(temp2W32, Alpha_hi, Alpha_low); /* abs(temp1W32)/Alpha */ - - /* Put the sign of temp1W32 back again */ - if (temp1W32 > 0) { - temp3W32 = -temp3W32; - } - - /* Use the Alpha shifts from earlier to denormalize */ - norm = WebRtcSpl_NormW32(temp3W32); - if ((Alpha_exp <= norm)||(temp3W32==0)) { - temp3W32 <<= Alpha_exp; - } else { - if (temp3W32 > 0) - { - temp3W32 = (int32_t)0x7fffffffL; - } else - { - temp3W32 = (int32_t)0x80000000L; - } - } - - /* Put K on hi and low format */ - K_hi = (int16_t)(temp3W32 >> 16); - K_low = (int16_t)((temp3W32 - ((int32_t)K_hi << 16)) >> 1); - - /* Store Reflection coefficient in Q15 */ - K[i-1] = K_hi; - - /* Test for unstable filter. If unstable return 0 and let the - user decide what to do in that case - */ - - if ((int32_t)WEBRTC_SPL_ABS_W16(K_hi) > (int32_t)32740) { - return(-i); /* Unstable filter */ - } - - /* - Compute updated LPC coefficient: Anew[i] - Anew[j]= A[j] + K*A[i-j] for j=1..i-1 - Anew[i]= K - */ - - for(j=1; j> 15) + - ((K_low * A_hi[i - j]) >> 15)) << 1; // temp1W32 += K*A[i-j] in Q27. 
- - /* Put Anew in hi and low format */ - A_upd_hi[j] = (int16_t)(temp1W32 >> 16); - A_upd_low[j] = (int16_t)((temp1W32 - ((int32_t)A_upd_hi[j] << 16)) >> 1); - } - - temp3W32 >>= 4; /* temp3W32 = K in Q27 (Convert from Q31 to Q27) */ - - /* Store Anew in hi and low format */ - A_upd_hi[i] = (int16_t)(temp3W32 >> 16); - A_upd_low[i] = (int16_t)((temp3W32 - ((int32_t)A_upd_hi[i] << 16)) >> 1); - - /* Alpha = Alpha * (1-K^2) */ - - temp1W32 = (((K_hi * K_low) >> 14) + K_hi * K_hi) << 1; /* K*K in Q31 */ - - temp1W32 = WEBRTC_SPL_ABS_W32(temp1W32); /* Guard against <0 */ - temp1W32 = (int32_t)0x7fffffffL - temp1W32; /* 1 - K*K in Q31 */ - - /* Convert 1- K^2 in hi and low format */ - tmp_hi = (int16_t)(temp1W32 >> 16); - tmp_low = (int16_t)((temp1W32 - ((int32_t)tmp_hi << 16)) >> 1); - - /* Calculate Alpha = Alpha * (1-K^2) in Q31 */ - temp1W32 = (Alpha_hi * tmp_hi + ((Alpha_hi * tmp_low) >> 15) + - ((Alpha_low * tmp_hi) >> 15)) << 1; - - /* Normalize Alpha and store it on hi and low format */ - - norm = WebRtcSpl_NormW32(temp1W32); - temp1W32 <<= norm; - - Alpha_hi = (int16_t)(temp1W32 >> 16); - Alpha_low = (int16_t)((temp1W32 - ((int32_t)Alpha_hi << 16)) >> 1); - - /* Update the total nomalization of Alpha */ - Alpha_exp = Alpha_exp + norm; - - /* Update A[] */ - - for(j=1; j<=i; j++) - { - A_hi[j] =A_upd_hi[j]; - A_low[j] =A_upd_low[j]; - } - } - - /* - Set A[0] to 1.0 and store the A[i] i=1...order in Q12 - (Convert from Q27 and use rounding) - */ - - A[0] = 2048; - - for(i=1; i<=order; i++) { - /* temp1W32 in Q27 */ - temp1W32 = (A_hi[i] << 16) + (A_low[i] << 1); - /* Round and store upper word */ - A[i] = (int16_t)((temp1W32 + 32768) >> 16); - } - return(1); /* Stable filters */ -} - - - - - -/* window */ -/* Matlab generation of floating point code: - * t = (1:256)/257; r = 1-(1-t).^.45; w = sin(r*pi).^3; w = w/sum(w); plot((1:256)/8, w); grid; - * for k=1:16, fprintf(1, '%.8f, ', w(k*16 + (-15:0))); fprintf(1, '\n'); end - * All values are multiplyed with 2^21 in fixed point code. 
- */ -static const int16_t kWindowAutocorr[WINLEN] = { - 0, 0, 0, 0, 0, 1, 1, 2, 2, 3, 5, 6, - 8, 10, 12, 14, 17, 20, 24, 28, 33, 38, 43, 49, - 56, 63, 71, 79, 88, 98, 108, 119, 131, 143, 157, 171, - 186, 202, 219, 237, 256, 275, 296, 318, 341, 365, 390, 416, - 444, 472, 502, 533, 566, 600, 635, 671, 709, 748, 789, 831, - 875, 920, 967, 1015, 1065, 1116, 1170, 1224, 1281, 1339, 1399, 1461, - 1525, 1590, 1657, 1726, 1797, 1870, 1945, 2021, 2100, 2181, 2263, 2348, - 2434, 2523, 2614, 2706, 2801, 2898, 2997, 3099, 3202, 3307, 3415, 3525, - 3637, 3751, 3867, 3986, 4106, 4229, 4354, 4481, 4611, 4742, 4876, 5012, - 5150, 5291, 5433, 5578, 5725, 5874, 6025, 6178, 6333, 6490, 6650, 6811, - 6974, 7140, 7307, 7476, 7647, 7820, 7995, 8171, 8349, 8529, 8711, 8894, - 9079, 9265, 9453, 9642, 9833, 10024, 10217, 10412, 10607, 10803, 11000, 11199, - 11398, 11597, 11797, 11998, 12200, 12401, 12603, 12805, 13008, 13210, 13412, 13614, - 13815, 14016, 14216, 14416, 14615, 14813, 15009, 15205, 15399, 15591, 15782, 15971, - 16157, 16342, 16524, 16704, 16881, 17056, 17227, 17395, 17559, 17720, 17877, 18030, - 18179, 18323, 18462, 18597, 18727, 18851, 18970, 19082, 19189, 19290, 19384, 19471, - 19551, 19623, 19689, 19746, 19795, 19835, 19867, 19890, 19904, 19908, 19902, 19886, - 19860, 19823, 19775, 19715, 19644, 19561, 19465, 19357, 19237, 19102, 18955, 18793, - 18618, 18428, 18223, 18004, 17769, 17518, 17252, 16970, 16672, 16357, 16025, 15677, - 15311, 14929, 14529, 14111, 13677, 13225, 12755, 12268, 11764, 11243, 10706, 10152, - 9583, 8998, 8399, 7787, 7162, 6527, 5883, 5231, 4576, 3919, 3265, 2620, - 1990, 1386, 825, 333 -}; - - -/* By using a hearing threshold level in dB of -28 dB (higher value gives more noise), - the H_T_H (in float) can be calculated as: - H_T_H = pow(10.0, 0.05 * (-28.0)) = 0.039810717055350 - In Q19, H_T_H becomes round(0.039810717055350*2^19) ~= 20872, i.e. - H_T_H = 20872/524288.0, and H_T_HQ19 = 20872; -*/ - - -/* The bandwidth expansion vectors are created from: - kPolyVecLo=[0.900000,0.810000,0.729000,0.656100,0.590490,0.531441,0.478297,0.430467,0.387420,0.348678,0.313811,0.282430]; - kPolyVecHi=[0.800000,0.640000,0.512000,0.409600,0.327680,0.262144]; - round(kPolyVecLo*32768) - round(kPolyVecHi*32768) -*/ -static const int16_t kPolyVecLo[12] = { - 29491, 26542, 23888, 21499, 19349, 17414, 15673, 14106, 12695, 11425, 10283, 9255 -}; -static const int16_t kPolyVecHi[6] = { - 26214, 20972, 16777, 13422, 10737, 8590 -}; - -static __inline int32_t log2_Q8_LPC( uint32_t x ) { - - int32_t zeros; - int16_t frac; - - zeros=WebRtcSpl_NormU32(x); - frac = (int16_t)(((x << zeros) & 0x7FFFFFFF) >> 23); - - /* log2(x) */ - return ((31 - zeros) << 8) + frac; -} - -static const int16_t kMulPitchGain = -25; /* 200/256 in Q5 */ -static const int16_t kChngFactor = 3523; /* log10(2)*10/4*0.4/1.4=log10(2)/1.4= 0.2150 in Q14 */ -static const int16_t kExp2 = 11819; /* 1/log(2) */ -const int kShiftLowerBand = 11; /* Shift value for lower band in Q domain. */ -const int kShiftHigherBand = 12; /* Shift value for higher band in Q domain. 
*/ - -void WebRtcIsacfix_GetVars(const int16_t *input, const int16_t *pitchGains_Q12, - uint32_t *oldEnergy, int16_t *varscale) -{ - int k; - uint32_t nrgQ[4]; - int16_t nrgQlog[4]; - int16_t tmp16, chng1, chng2, chng3, chng4, tmp, chngQ, oldNrgQlog, pgQ, pg3; - int32_t expPg32; - int16_t expPg, divVal; - int16_t tmp16_1, tmp16_2; - - /* Calculate energies of first and second frame halfs */ - nrgQ[0]=0; - for (k = QLOOKAHEAD/2; k < (FRAMESAMPLES/4 + QLOOKAHEAD) / 2; k++) { - nrgQ[0] += (uint32_t)(input[k] * input[k]); - } - nrgQ[1]=0; - for ( ; k < (FRAMESAMPLES/2 + QLOOKAHEAD) / 2; k++) { - nrgQ[1] += (uint32_t)(input[k] * input[k]); - } - nrgQ[2]=0; - for ( ; k < (FRAMESAMPLES * 3 / 4 + QLOOKAHEAD) / 2; k++) { - nrgQ[2] += (uint32_t)(input[k] * input[k]); - } - nrgQ[3]=0; - for ( ; k < (FRAMESAMPLES + QLOOKAHEAD) / 2; k++) { - nrgQ[3] += (uint32_t)(input[k] * input[k]); - } - - for ( k=0; k<4; k++) { - nrgQlog[k] = (int16_t)log2_Q8_LPC(nrgQ[k]); /* log2(nrgQ) */ - } - oldNrgQlog = (int16_t)log2_Q8_LPC(*oldEnergy); - - /* Calculate average level change */ - chng1 = WEBRTC_SPL_ABS_W16(nrgQlog[3]-nrgQlog[2]); - chng2 = WEBRTC_SPL_ABS_W16(nrgQlog[2]-nrgQlog[1]); - chng3 = WEBRTC_SPL_ABS_W16(nrgQlog[1]-nrgQlog[0]); - chng4 = WEBRTC_SPL_ABS_W16(nrgQlog[0]-oldNrgQlog); - tmp = chng1+chng2+chng3+chng4; - chngQ = (int16_t)(tmp * kChngFactor >> 10); /* Q12 */ - chngQ += 2926; /* + 1.0/1.4 in Q12 */ - - /* Find average pitch gain */ - pgQ = 0; - for (k=0; k<4; k++) - { - pgQ += pitchGains_Q12[k]; - } - - pg3 = (int16_t)(pgQ * pgQ >> 11); // pgQ in Q(12+2)=Q14. Q14*Q14>>11 => Q17 - pg3 = (int16_t)(pgQ * pg3 >> 13); /* Q14*Q17>>13 =>Q18 */ - /* kMulPitchGain = -25 = -200 in Q-3. */ - pg3 = (int16_t)(pg3 * kMulPitchGain >> 5); // Q10 - tmp16=(int16_t)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(kExp2,pg3,13);/* Q13*Q10>>13 => Q10*/ - if (tmp16<0) { - tmp16_2 = (0x0400 | (tmp16 & 0x03FF)); - tmp16_1 = ((uint16_t)(tmp16 ^ 0xFFFF) >> 10) - 3; /* Gives result in Q14 */ - if (tmp16_1<0) - expPg = -(tmp16_2 << -tmp16_1); - else - expPg = -(tmp16_2 >> tmp16_1); - } else - expPg = (int16_t) -16384; /* 1 in Q14, since 2^0=1 */ - - expPg32 = (int32_t)expPg << 8; /* Q22 */ - divVal = WebRtcSpl_DivW32W16ResW16(expPg32, chngQ); /* Q22/Q12=Q10 */ - - tmp16=(int16_t)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(kExp2,divVal,13);/* Q13*Q10>>13 => Q10*/ - if (tmp16<0) { - tmp16_2 = (0x0400 | (tmp16 & 0x03FF)); - tmp16_1 = ((uint16_t)(tmp16 ^ 0xFFFF) >> 10) - 3; /* Gives result in Q14 */ - if (tmp16_1<0) - expPg = tmp16_2 << -tmp16_1; - else - expPg = tmp16_2 >> tmp16_1; - } else - expPg = (int16_t) 16384; /* 1 in Q14, since 2^0=1 */ - - *varscale = expPg-1; - *oldEnergy = nrgQ[3]; -} - - - -static __inline int16_t exp2_Q10_T(int16_t x) { // Both in and out in Q10 - - int16_t tmp16_1, tmp16_2; - - tmp16_2=(int16_t)(0x0400|(x&0x03FF)); - tmp16_1 = -(x >> 10); - if(tmp16_1>0) - return tmp16_2 >> tmp16_1; - else - return tmp16_2 << -tmp16_1; - -} - - -// Declare function pointers. -AutocorrFix WebRtcIsacfix_AutocorrFix; -CalculateResidualEnergy WebRtcIsacfix_CalculateResidualEnergy; - -/* This routine calculates the residual energy for LPC. - * Formula as shown in comments inside. 
- */ -int32_t WebRtcIsacfix_CalculateResidualEnergyC(int lpc_order, - int32_t q_val_corr, - int q_val_polynomial, - int16_t* a_polynomial, - int32_t* corr_coeffs, - int* q_val_residual_energy) { - int i = 0, j = 0; - int shift_internal = 0, shift_norm = 0; - int32_t tmp32 = 0, word32_high = 0, word32_low = 0, residual_energy = 0; - int64_t sum64 = 0, sum64_tmp = 0; - - for (i = 0; i <= lpc_order; i++) { - for (j = i; j <= lpc_order; j++) { - /* For the case of i == 0: residual_energy += - * a_polynomial[j] * corr_coeffs[i] * a_polynomial[j - i]; - * For the case of i != 0: residual_energy += - * a_polynomial[j] * corr_coeffs[i] * a_polynomial[j - i] * 2; - */ - - tmp32 = a_polynomial[j] * a_polynomial[j - i]; - /* tmp32 in Q(q_val_polynomial * 2). */ - if (i != 0) { - tmp32 <<= 1; - } - sum64_tmp = (int64_t)tmp32 * (int64_t)corr_coeffs[i]; - sum64_tmp >>= shift_internal; - - /* Test overflow and sum the result. */ - if(((sum64_tmp > 0 && sum64 > 0) && (LLONG_MAX - sum64 < sum64_tmp)) || - ((sum64_tmp < 0 && sum64 < 0) && (LLONG_MIN - sum64 > sum64_tmp))) { - /* Shift right for overflow. */ - shift_internal += 1; - sum64 >>= 1; - sum64 += sum64_tmp >> 1; - } else { - sum64 += sum64_tmp; - } - } - } - - word32_high = (int32_t)(sum64 >> 32); - word32_low = (int32_t)sum64; - - // Calculate the value of shifting (shift_norm) for the 64-bit sum. - if(word32_high != 0) { - shift_norm = 32 - WebRtcSpl_NormW32(word32_high); - residual_energy = (int32_t)(sum64 >> shift_norm); - } else { - if((word32_low & 0x80000000) != 0) { - shift_norm = 1; - residual_energy = (uint32_t)word32_low >> 1; - } else { - shift_norm = WebRtcSpl_NormW32(word32_low); - residual_energy = word32_low << shift_norm; - shift_norm = -shift_norm; - } - } - - /* Q(q_val_polynomial * 2) * Q(q_val_corr) >> shift_internal >> shift_norm - * = Q(q_val_corr - shift_internal - shift_norm + q_val_polynomial * 2) - */ - *q_val_residual_energy = q_val_corr - shift_internal - shift_norm - + q_val_polynomial * 2; - - return residual_energy; -} - -void WebRtcIsacfix_GetLpcCoef(int16_t *inLoQ0, - int16_t *inHiQ0, - MaskFiltstr_enc *maskdata, - int16_t snrQ10, - const int16_t *pitchGains_Q12, - int32_t *gain_lo_hiQ17, - int16_t *lo_coeffQ15, - int16_t *hi_coeffQ15) -{ - int k, n, ii; - int pos1, pos2; - int sh_lo, sh_hi, sh, ssh, shMem; - int16_t varscaleQ14; - - int16_t tmpQQlo, tmpQQhi; - int32_t tmp32; - int16_t tmp16,tmp16b; - - int16_t polyHI[ORDERHI+1]; - int16_t rcQ15_lo[ORDERLO], rcQ15_hi[ORDERHI]; - - - int16_t DataLoQ6[WINLEN], DataHiQ6[WINLEN]; - int32_t corrloQQ[ORDERLO+2]; - int32_t corrhiQQ[ORDERHI+1]; - int32_t corrlo2QQ[ORDERLO+1]; - int16_t scale; - int16_t QdomLO, QdomHI, newQdomHI, newQdomLO; - - int32_t res_nrgQQ; - int32_t sqrt_nrg; - - /* less-noise-at-low-frequencies factor */ - int16_t aaQ14; - - /* Multiplication with 1/sqrt(12) ~= 0.28901734104046 can be done by convertion to - Q15, i.e. 
round(0.28901734104046*32768) = 9471, and use 9471/32768.0 ~= 0.289032 - */ - int16_t snrq; - int shft; - - int16_t tmp16a; - int32_t tmp32a, tmp32b, tmp32c; - - int16_t a_LOQ11[ORDERLO+1]; - int16_t k_vecloQ15[ORDERLO]; - int16_t a_HIQ12[ORDERHI+1]; - int16_t k_vechiQ15[ORDERHI]; - - int16_t stab; - - snrq=snrQ10; - - /* SNR= C * 2 ^ (D * snrq) ; C=0.289, D=0.05*log2(10)=0.166 (~=172 in Q10)*/ - tmp16 = (int16_t)(snrq * 172 >> 10); // Q10 - tmp16b = exp2_Q10_T(tmp16); // Q10 - snrq = (int16_t)(tmp16b * 285 >> 10); // Q10 - - /* change quallevel depending on pitch gains and level fluctuations */ - WebRtcIsacfix_GetVars(inLoQ0, pitchGains_Q12, &(maskdata->OldEnergy), &varscaleQ14); - - /* less-noise-at-low-frequencies factor */ - /* Calculation of 0.35 * (0.5 + 0.5 * varscale) in fixpoint: - With 0.35 in Q16 (0.35 ~= 22938/65536.0 = 0.3500061) and varscaleQ14 in Q14, - we get Q16*Q14>>16 = Q14 - */ - aaQ14 = (int16_t)((22938 * (8192 + (varscaleQ14 >> 1)) + 32768) >> 16); - - /* Calculate tmp = (1.0 + aa*aa); in Q12 */ - tmp16 = (int16_t)(aaQ14 * aaQ14 >> 15); // Q14*Q14>>15 = Q13 - tmpQQlo = 4096 + (tmp16 >> 1); // Q12 + Q13>>1 = Q12. - - /* Calculate tmp = (1.0+aa) * (1.0+aa); */ - tmp16 = 8192 + (aaQ14 >> 1); // 1+a in Q13. - tmpQQhi = (int16_t)(tmp16 * tmp16 >> 14); // Q13*Q13>>14 = Q12 - - /* replace data in buffer by new look-ahead data */ - for (pos1 = 0; pos1 < QLOOKAHEAD; pos1++) { - maskdata->DataBufferLoQ0[pos1 + WINLEN - QLOOKAHEAD] = inLoQ0[pos1]; - } - - for (k = 0; k < SUBFRAMES; k++) { - - /* Update input buffer and multiply signal with window */ - for (pos1 = 0; pos1 < WINLEN - UPDATE/2; pos1++) { - maskdata->DataBufferLoQ0[pos1] = maskdata->DataBufferLoQ0[pos1 + UPDATE/2]; - maskdata->DataBufferHiQ0[pos1] = maskdata->DataBufferHiQ0[pos1 + UPDATE/2]; - DataLoQ6[pos1] = (int16_t)(maskdata->DataBufferLoQ0[pos1] * - kWindowAutocorr[pos1] >> 15); // Q0*Q21>>15 = Q6 - DataHiQ6[pos1] = (int16_t)(maskdata->DataBufferHiQ0[pos1] * - kWindowAutocorr[pos1] >> 15); // Q0*Q21>>15 = Q6 - } - pos2 = (int16_t)(k * UPDATE / 2); - for (n = 0; n < UPDATE/2; n++, pos1++) { - maskdata->DataBufferLoQ0[pos1] = inLoQ0[QLOOKAHEAD + pos2]; - maskdata->DataBufferHiQ0[pos1] = inHiQ0[pos2++]; - DataLoQ6[pos1] = (int16_t)(maskdata->DataBufferLoQ0[pos1] * - kWindowAutocorr[pos1] >> 15); // Q0*Q21>>15 = Q6 - DataHiQ6[pos1] = (int16_t)(maskdata->DataBufferHiQ0[pos1] * - kWindowAutocorr[pos1] >> 15); // Q0*Q21>>15 = Q6 - } - - /* Get correlation coefficients */ - /* The highest absolute value measured inside DataLo in the test set - For DataHi, corresponding value was 160. - - This means that it should be possible to represent the input values - to WebRtcSpl_AutoCorrelation() as Q6 values (since 307*2^6 = - 19648). Of course, Q0 will also work, but due to the low energy in - DataLo and DataHi, the outputted autocorrelation will be more accurate - and mimic the floating point code better, by being in an high as possible - Q-domain. - */ - - WebRtcIsacfix_AutocorrFix(corrloQQ,DataLoQ6,WINLEN, ORDERLO+1, &scale); - QdomLO = 12-scale; // QdomLO is the Q-domain of corrloQQ - sh_lo = WebRtcSpl_NormW32(corrloQQ[0]); - QdomLO += sh_lo; - for (ii=0; ii> 1) - - (WEBRTC_SPL_MUL_16_32_RSFT16(aaQ14, corrloQQ[1]) >> 2); - - /* Calculate corrlo2[n] = tmpQQlo * corrlo[n] - tmpQQlo * (corrlo[n-1] + corrlo[n+1]);*/ - for (n = 1; n <= ORDERLO; n++) { - - tmp32 = (corrloQQ[n - 1] >> 1) + (corrloQQ[n + 1] >> 1); // Q(QdomLO-1). 
- corrlo2QQ[n] = (WEBRTC_SPL_MUL_16_32_RSFT16(tmpQQlo, corrloQQ[n]) >> 1) - - (WEBRTC_SPL_MUL_16_32_RSFT16(aaQ14, tmp32) >> 2); - } - QdomLO -= 5; - - /* Calculate corrhi[n] = tmpQQhi * corrhi[n]; */ - for (n = 0; n <= ORDERHI; n++) { - corrhiQQ[n] = WEBRTC_SPL_MUL_16_32_RSFT16(tmpQQhi, corrhiQQ[n]); // Q(12+QdomHI-16) = Q(QdomHI-4) - } - QdomHI -= 4; - - /* add white noise floor */ - /* corrlo2QQ is in Q(QdomLO) and corrhiQQ is in Q(QdomHI) */ - /* Calculate corrlo2[0] += 9.5367431640625e-7; and - corrhi[0] += 9.5367431640625e-7, where the constant is 1/2^20 */ - - tmp32 = WEBRTC_SPL_SHIFT_W32((int32_t) 1, QdomLO-20); - corrlo2QQ[0] += tmp32; - tmp32 = WEBRTC_SPL_SHIFT_W32((int32_t) 1, QdomHI-20); - corrhiQQ[0] += tmp32; - - /* corrlo2QQ is in Q(QdomLO) and corrhiQQ is in Q(QdomHI) before the following - code segment, where we want to make sure we get a 1-bit margin */ - for (n = 0; n <= ORDERLO; n++) { - corrlo2QQ[n] >>= 1; // Make sure we have a 1-bit margin. - } - QdomLO -= 1; // Now, corrlo2QQ is in Q(QdomLO), with a 1-bit margin - - for (n = 0; n <= ORDERHI; n++) { - corrhiQQ[n] >>= 1; // Make sure we have a 1-bit margin. - } - QdomHI -= 1; // Now, corrhiQQ is in Q(QdomHI), with a 1-bit margin - - - newQdomLO = QdomLO; - - for (n = 0; n <= ORDERLO; n++) { - int32_t tmp, tmpB, tmpCorr; - int16_t alpha=328; //0.01 in Q15 - int16_t beta=324; //(1-0.01)*0.01=0.0099 in Q15 - int16_t gamma=32440; //(1-0.01)=0.99 in Q15 - - if (maskdata->CorrBufLoQQ[n] != 0) { - shMem=WebRtcSpl_NormW32(maskdata->CorrBufLoQQ[n]); - sh = QdomLO - maskdata->CorrBufLoQdom[n]; - if (sh<=shMem) { - tmp = WEBRTC_SPL_SHIFT_W32(maskdata->CorrBufLoQQ[n], sh); // Get CorrBufLoQQ to same domain as corrlo2 - tmp = WEBRTC_SPL_MUL_16_32_RSFT15(alpha, tmp); - } else if ((sh-shMem)<7){ - tmp = WEBRTC_SPL_SHIFT_W32(maskdata->CorrBufLoQQ[n], shMem); // Shift up CorrBufLoQQ as much as possible - // Shift `alpha` the number of times required to get `tmp` in QdomLO. - tmp = WEBRTC_SPL_MUL_16_32_RSFT15(alpha << (sh - shMem), tmp); - } else { - tmp = WEBRTC_SPL_SHIFT_W32(maskdata->CorrBufLoQQ[n], shMem); // Shift up CorrBufHiQQ as much as possible - // Shift `alpha` as much as possible without overflow the number of - // times required to get `tmp` in QdomLO. 
- tmp = WEBRTC_SPL_MUL_16_32_RSFT15(alpha << 6, tmp); - tmpCorr = corrloQQ[n] >> (sh - shMem - 6); - tmp = tmp + tmpCorr; - maskdata->CorrBufLoQQ[n] = tmp; - newQdomLO = QdomLO-(sh-shMem-6); - maskdata->CorrBufLoQdom[n] = newQdomLO; - } - } else - tmp = 0; - - tmp = tmp + corrlo2QQ[n]; - - maskdata->CorrBufLoQQ[n] = tmp; - maskdata->CorrBufLoQdom[n] = QdomLO; - - tmp=WEBRTC_SPL_MUL_16_32_RSFT15(beta, tmp); - tmpB=WEBRTC_SPL_MUL_16_32_RSFT15(gamma, corrlo2QQ[n]); - corrlo2QQ[n] = tmp + tmpB; - } - if( newQdomLO!=QdomLO) { - for (n = 0; n <= ORDERLO; n++) { - if (maskdata->CorrBufLoQdom[n] != newQdomLO) - corrloQQ[n] >>= maskdata->CorrBufLoQdom[n] - newQdomLO; - } - QdomLO = newQdomLO; - } - - - newQdomHI = QdomHI; - - for (n = 0; n <= ORDERHI; n++) { - int32_t tmp, tmpB, tmpCorr; - int16_t alpha=328; //0.01 in Q15 - int16_t beta=324; //(1-0.01)*0.01=0.0099 in Q15 - int16_t gamma=32440; //(1-0.01)=0.99 in Q1 - if (maskdata->CorrBufHiQQ[n] != 0) { - shMem=WebRtcSpl_NormW32(maskdata->CorrBufHiQQ[n]); - sh = QdomHI - maskdata->CorrBufHiQdom[n]; - if (sh<=shMem) { - tmp = WEBRTC_SPL_SHIFT_W32(maskdata->CorrBufHiQQ[n], sh); // Get CorrBufHiQQ to same domain as corrhi - tmp = WEBRTC_SPL_MUL_16_32_RSFT15(alpha, tmp); - tmpCorr = corrhiQQ[n]; - tmp = tmp + tmpCorr; - maskdata->CorrBufHiQQ[n] = tmp; - maskdata->CorrBufHiQdom[n] = QdomHI; - } else if ((sh-shMem)<7) { - tmp = WEBRTC_SPL_SHIFT_W32(maskdata->CorrBufHiQQ[n], shMem); // Shift up CorrBufHiQQ as much as possible - // Shift `alpha` the number of times required to get `tmp` in QdomHI. - tmp = WEBRTC_SPL_MUL_16_32_RSFT15(alpha << (sh - shMem), tmp); - tmpCorr = corrhiQQ[n]; - tmp = tmp + tmpCorr; - maskdata->CorrBufHiQQ[n] = tmp; - maskdata->CorrBufHiQdom[n] = QdomHI; - } else { - tmp = WEBRTC_SPL_SHIFT_W32(maskdata->CorrBufHiQQ[n], shMem); // Shift up CorrBufHiQQ as much as possible - // Shift `alpha` as much as possible without overflow the number of - // times required to get `tmp` in QdomHI. 
- tmp = WEBRTC_SPL_MUL_16_32_RSFT15(alpha << 6, tmp); - tmpCorr = corrhiQQ[n] >> (sh - shMem - 6); - tmp = tmp + tmpCorr; - maskdata->CorrBufHiQQ[n] = tmp; - newQdomHI = QdomHI-(sh-shMem-6); - maskdata->CorrBufHiQdom[n] = newQdomHI; - } - } else { - tmp = corrhiQQ[n]; - tmpCorr = tmp; - maskdata->CorrBufHiQQ[n] = tmp; - maskdata->CorrBufHiQdom[n] = QdomHI; - } - - tmp=WEBRTC_SPL_MUL_16_32_RSFT15(beta, tmp); - tmpB=WEBRTC_SPL_MUL_16_32_RSFT15(gamma, tmpCorr); - corrhiQQ[n] = tmp + tmpB; - } - - if( newQdomHI!=QdomHI) { - for (n = 0; n <= ORDERHI; n++) { - if (maskdata->CorrBufHiQdom[n] != newQdomHI) - corrhiQQ[n] >>= maskdata->CorrBufHiQdom[n] - newQdomHI; - } - QdomHI = newQdomHI; - } - - stab=WebRtcSpl_LevinsonW32_JSK(corrlo2QQ, a_LOQ11, k_vecloQ15, ORDERLO); - - if (stab<0) { // If unstable use lower order - a_LOQ11[0]=2048; - for (n = 1; n <= ORDERLO; n++) { - a_LOQ11[n]=0; - } - - stab=WebRtcSpl_LevinsonW32_JSK(corrlo2QQ, a_LOQ11, k_vecloQ15, 8); - } - - - WebRtcSpl_LevinsonDurbin(corrhiQQ, a_HIQ12, k_vechiQ15, ORDERHI); - - /* bandwidth expansion */ - for (n = 1; n <= ORDERLO; n++) { - a_LOQ11[n] = (int16_t)((kPolyVecLo[n - 1] * a_LOQ11[n] + (1 << 14)) >> - 15); - } - - - polyHI[0] = a_HIQ12[0]; - for (n = 1; n <= ORDERHI; n++) { - a_HIQ12[n] = (int16_t)(((int32_t)(kPolyVecHi[n - 1] * a_HIQ12[n]) + - (1 << 14)) >> 15); - polyHI[n] = a_HIQ12[n]; - } - - /* Normalize the corrlo2 vector */ - sh = WebRtcSpl_NormW32(corrlo2QQ[0]); - for (n = 0; n <= ORDERLO; n++) { - corrlo2QQ[n] <<= sh; - } - QdomLO += sh; /* Now, corrlo2QQ is still in Q(QdomLO) */ - - - /* residual energy */ - - sh_lo = 31; - res_nrgQQ = WebRtcIsacfix_CalculateResidualEnergy(ORDERLO, QdomLO, - kShiftLowerBand, a_LOQ11, corrlo2QQ, &sh_lo); - - /* Convert to reflection coefficients */ - WebRtcSpl_AToK_JSK(a_LOQ11, ORDERLO, rcQ15_lo); - - if (sh_lo & 0x0001) { - res_nrgQQ >>= 1; - sh_lo-=1; - } - - - if( res_nrgQQ > 0 ) - { - sqrt_nrg=WebRtcSpl_Sqrt(res_nrgQQ); - - /* add hearing threshold and compute the gain */ - /* lo_coeff = varscale * S_N_R / (sqrt_nrg + varscale * H_T_H); */ - - tmp32a = varscaleQ14 >> 1; // H_T_HQ19=65536 (16-17=-1) - ssh = sh_lo >> 1; // sqrt_nrg is in Qssh. - sh = ssh - 14; - tmp32b = WEBRTC_SPL_SHIFT_W32(tmp32a, sh); // Q14->Qssh - tmp32c = sqrt_nrg + tmp32b; // Qssh (denominator) - tmp32a = varscaleQ14 * snrq; // Q24 (numerator) - - sh = WebRtcSpl_NormW32(tmp32c); - shft = 16 - sh; - tmp16a = (int16_t) WEBRTC_SPL_SHIFT_W32(tmp32c, -shft); // Q(ssh-shft) (denominator) - - tmp32b = WebRtcSpl_DivW32W16(tmp32a, tmp16a); // Q(24-ssh+shft) - sh = ssh-shft-7; - *gain_lo_hiQ17 = WEBRTC_SPL_SHIFT_W32(tmp32b, sh); // Gains in Q17 - } - else - { - *gain_lo_hiQ17 = 100; // Gains in Q17 - } - gain_lo_hiQ17++; - - /* copy coefficients to output array */ - for (n = 0; n < ORDERLO; n++) { - *lo_coeffQ15 = (int16_t) (rcQ15_lo[n]); - lo_coeffQ15++; - } - /* residual energy */ - sh_hi = 31; - res_nrgQQ = WebRtcIsacfix_CalculateResidualEnergy(ORDERHI, QdomHI, - kShiftHigherBand, a_HIQ12, corrhiQQ, &sh_hi); - - /* Convert to reflection coefficients */ - WebRtcSpl_LpcToReflCoef(polyHI, ORDERHI, rcQ15_hi); - - if (sh_hi & 0x0001) { - res_nrgQQ >>= 1; - sh_hi-=1; - } - - - if( res_nrgQQ > 0 ) - { - sqrt_nrg=WebRtcSpl_Sqrt(res_nrgQQ); - - - /* add hearing threshold and compute the gain */ - /* hi_coeff = varscale * S_N_R / (sqrt_nrg + varscale * H_T_H); */ - - tmp32a = varscaleQ14 >> 1; // H_T_HQ19=65536 (16-17=-1) - - ssh = sh_hi >> 1; // `sqrt_nrg` is in Qssh. 
- sh = ssh - 14; - tmp32b = WEBRTC_SPL_SHIFT_W32(tmp32a, sh); // Q14->Qssh - tmp32c = sqrt_nrg + tmp32b; // Qssh (denominator) - tmp32a = varscaleQ14 * snrq; // Q24 (numerator) - - sh = WebRtcSpl_NormW32(tmp32c); - shft = 16 - sh; - tmp16a = (int16_t) WEBRTC_SPL_SHIFT_W32(tmp32c, -shft); // Q(ssh-shft) (denominator) - - tmp32b = WebRtcSpl_DivW32W16(tmp32a, tmp16a); // Q(24-ssh+shft) - sh = ssh-shft-7; - *gain_lo_hiQ17 = WEBRTC_SPL_SHIFT_W32(tmp32b, sh); // Gains in Q17 - } - else - { - *gain_lo_hiQ17 = 100; // Gains in Q17 - } - gain_lo_hiQ17++; - - - /* copy coefficients to output array */ - for (n = 0; n < ORDERHI; n++) { - *hi_coeffQ15 = rcQ15_hi[n]; - hi_coeffQ15++; - } - } -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/lpc_masking_model.h b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/lpc_masking_model.h deleted file mode 100644 index 40a99e8a77a2..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/lpc_masking_model.h +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * lpc_masking_model.h - * - * LPC functions - * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_LPC_MASKING_MODEL_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_LPC_MASKING_MODEL_H_ - -#ifdef __cplusplus -extern "C" { -#endif - -#include "modules/audio_coding/codecs/isac/fix/source/structs.h" - -void WebRtcIsacfix_GetVars(const int16_t* input, - const int16_t* pitchGains_Q12, - uint32_t* oldEnergy, - int16_t* varscale); - -void WebRtcIsacfix_GetLpcCoef(int16_t* inLoQ0, - int16_t* inHiQ0, - MaskFiltstr_enc* maskdata, - int16_t snrQ10, - const int16_t* pitchGains_Q12, - int32_t* gain_lo_hiQ17, - int16_t* lo_coeffQ15, - int16_t* hi_coeffQ15); - -typedef int32_t (*CalculateResidualEnergy)(int lpc_order, - int32_t q_val_corr, - int q_val_polynomial, - int16_t* a_polynomial, - int32_t* corr_coeffs, - int* q_val_residual_energy); -extern CalculateResidualEnergy WebRtcIsacfix_CalculateResidualEnergy; - -int32_t WebRtcIsacfix_CalculateResidualEnergyC(int lpc_order, - int32_t q_val_corr, - int q_val_polynomial, - int16_t* a_polynomial, - int32_t* corr_coeffs, - int* q_val_residual_energy); - -#if defined(MIPS_DSP_R2_LE) -int32_t WebRtcIsacfix_CalculateResidualEnergyMIPS(int lpc_order, - int32_t q_val_corr, - int q_val_polynomial, - int16_t* a_polynomial, - int32_t* corr_coeffs, - int* q_val_residual_energy); -#endif - -#ifdef __cplusplus -} /* extern "C" */ -#endif - -#endif /* MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_LPC_MASKING_MODEL_H_ */ diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/lpc_masking_model_mips.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/lpc_masking_model_mips.c deleted file mode 100644 index 727008da3294..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/lpc_masking_model_mips.c +++ /dev/null @@ -1,237 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_coding/codecs/isac/fix/source/lpc_masking_model.h" - -// MIPS DSPR2 optimization for function WebRtcIsacfix_CalculateResidualEnergy -// Bit-exact with WebRtcIsacfix_CalculateResidualEnergyC from file -// lpc_masking_model.c -int32_t WebRtcIsacfix_CalculateResidualEnergyMIPS(int lpc_order, - int32_t q_val_corr, - int q_val_polynomial, - int16_t* a_polynomial, - int32_t* corr_coeffs, - int* q_val_residual_energy) { - - int i = 0, j = 0; - int shift_internal = 0, shift_norm = 0; - int32_t tmp32 = 0, word32_high = 0, word32_low = 0, residual_energy = 0; - int32_t tmp_corr_c = corr_coeffs[0]; - int16_t* tmp_a_poly = &a_polynomial[0]; - int32_t sum64_hi = 0; - int32_t sum64_lo = 0; - - for (j = 0; j <= lpc_order; j++) { - // For the case of i == 0: - // residual_energy += - // a_polynomial[j] * corr_coeffs[i] * a_polynomial[j - i]; - - int32_t tmp2, tmp3; - int16_t sign_1; - int16_t sign_2; - int16_t sign_3; - - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "lh %[tmp2], 0(%[tmp_a_poly]) \n\t" - "mul %[tmp32], %[tmp2], %[tmp2] \n\t" - "addiu %[tmp_a_poly], %[tmp_a_poly], 2 \n\t" - "sra %[sign_2], %[sum64_hi], 31 \n\t" - "mult $ac0, %[tmp32], %[tmp_corr_c] \n\t" - "shilov $ac0, %[shift_internal] \n\t" - "mfhi %[tmp2], $ac0 \n\t" - "mflo %[tmp3], $ac0 \n\t" - "sra %[sign_1], %[tmp2], 31 \n\t" - "xor %[sign_3], %[sign_1], %[sign_2] \n\t" - ".set pop \n\t" - : [tmp2] "=&r" (tmp2), [tmp3] "=&r" (tmp3), [tmp32] "=&r" (tmp32), - [tmp_a_poly] "+r" (tmp_a_poly), [sign_1] "=&r" (sign_1), - [sign_3] "=&r" (sign_3), [sign_2] "=&r" (sign_2), - [sum64_hi] "+r" (sum64_hi), [sum64_lo] "+r" (sum64_lo) - : [tmp_corr_c] "r" (tmp_corr_c), [shift_internal] "r" (shift_internal) - : "hi", "lo", "memory" - ); - - if (sign_3 != 0) { - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "addsc %[sum64_lo], %[sum64_lo], %[tmp3] \n\t" - "addwc %[sum64_hi], %[sum64_hi], %[tmp2] \n\t" - ".set pop \n\t" - : [sum64_hi] "+r" (sum64_hi), [sum64_lo] "+r" (sum64_lo) - : [tmp2] "r" (tmp2), [tmp3] "r" (tmp3) - : "hi", "lo", "memory" - ); - } else { - if (((!(sign_1 || sign_2)) && (0x7FFFFFFF - sum64_hi < tmp2)) || - ((sign_1 && sign_2) && (sum64_hi + tmp2 > 0))) { - // Shift right for overflow. 
- __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "addiu %[shift_internal], %[shift_internal], 1 \n\t" - "prepend %[sum64_lo], %[sum64_hi], 1 \n\t" - "sra %[sum64_hi], %[sum64_hi], 1 \n\t" - "prepend %[tmp3], %[tmp2], 1 \n\t" - "sra %[tmp2], %[tmp2], 1 \n\t" - "addsc %[sum64_lo], %[sum64_lo], %[tmp3] \n\t" - "addwc %[sum64_hi], %[sum64_hi], %[tmp2] \n\t" - ".set pop \n\t" - : [tmp2] "+r" (tmp2), [tmp3] "+r" (tmp3), - [shift_internal] "+r" (shift_internal), - [sum64_hi] "+r" (sum64_hi), [sum64_lo] "+r" (sum64_lo) - : - : "hi", "lo", "memory" - ); - } else { - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "addsc %[sum64_lo], %[sum64_lo], %[tmp3] \n\t" - "addwc %[sum64_hi], %[sum64_hi], %[tmp2] \n\t" - ".set pop \n\t" - : [sum64_hi] "+r" (sum64_hi), [sum64_lo] "+r" (sum64_lo) - : [tmp2] "r" (tmp2), [tmp3] "r" (tmp3) - : "hi", "lo", "memory" - ); - } - } - } - - for (i = 1; i <= lpc_order; i++) { - tmp_corr_c = corr_coeffs[i]; - int16_t* tmp_a_poly_j = &a_polynomial[i]; - int16_t* tmp_a_poly_j_i = &a_polynomial[0]; - for (j = i; j <= lpc_order; j++) { - // For the case of i = 1 .. lpc_order: - // residual_energy += - // a_polynomial[j] * corr_coeffs[i] * a_polynomial[j - i] * 2; - - int32_t tmp2, tmp3; - int16_t sign_1; - int16_t sign_2; - int16_t sign_3; - - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "lh %[tmp3], 0(%[tmp_a_poly_j]) \n\t" - "lh %[tmp2], 0(%[tmp_a_poly_j_i]) \n\t" - "addiu %[tmp_a_poly_j], %[tmp_a_poly_j], 2 \n\t" - "addiu %[tmp_a_poly_j_i], %[tmp_a_poly_j_i], 2 \n\t" - "mul %[tmp32], %[tmp3], %[tmp2] \n\t" - "sll %[tmp32], %[tmp32], 1 \n\t" - "mult $ac0, %[tmp32], %[tmp_corr_c] \n\t" - "shilov $ac0, %[shift_internal] \n\t" - "mfhi %[tmp2], $ac0 \n\t" - "mflo %[tmp3], $ac0 \n\t" - "sra %[sign_1], %[tmp2], 31 \n\t" - "sra %[sign_2], %[sum64_hi], 31 \n\t" - "xor %[sign_3], %[sign_1], %[sign_2] \n\t" - ".set pop \n\t" - : [tmp2] "=&r" (tmp2), [tmp3] "=&r" (tmp3), [tmp32] "=&r" (tmp32), - [tmp_a_poly_j] "+r" (tmp_a_poly_j), [sign_1] "=&r" (sign_1), - [tmp_a_poly_j_i] "+r" (tmp_a_poly_j_i), [sign_2] "=&r" (sign_2), - [sign_3] "=&r" (sign_3), [sum64_hi] "+r" (sum64_hi), - [sum64_lo] "+r" (sum64_lo) - : [tmp_corr_c] "r" (tmp_corr_c), [shift_internal] "r" (shift_internal) - : "hi", "lo", "memory" - ); - if (sign_3 != 0) { - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "addsc %[sum64_lo], %[sum64_lo], %[tmp3] \n\t" - "addwc %[sum64_hi], %[sum64_hi], %[tmp2] \n\t" - ".set pop \n\t" - : [tmp2] "+r" (tmp2), [tmp3] "+r" (tmp3), [sum64_hi] "+r" (sum64_hi), - [sum64_lo] "+r" (sum64_lo) - : - :"memory" - ); - } else { - // Test overflow and sum the result. - if (((!(sign_1 || sign_2)) && (0x7FFFFFFF - sum64_hi < tmp2)) || - ((sign_1 && sign_2) && (sum64_hi + tmp2 > 0))) { - // Shift right for overflow. 
- __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "addiu %[shift_internal], %[shift_internal], 1 \n\t" - "prepend %[sum64_lo], %[sum64_hi], 1 \n\t" - "sra %[sum64_hi], %[sum64_hi], 1 \n\t" - "prepend %[tmp3], %[tmp2], 1 \n\t" - "sra %[tmp2], %[tmp2], 1 \n\t" - "addsc %[sum64_lo], %[sum64_lo], %[tmp3] \n\t" - "addwc %[sum64_hi], %[sum64_hi], %[tmp2] \n\t" - ".set pop \n\t" - : [tmp2] "+r" (tmp2), [tmp3] "+r" (tmp3), - [shift_internal] "+r" (shift_internal), - [sum64_hi] "+r" (sum64_hi), [sum64_lo] "+r" (sum64_lo) - : - : "hi", "lo", "memory" - ); - } else { - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "addsc %[sum64_lo], %[sum64_lo], %[tmp3] \n\t" - "addwc %[sum64_hi], %[sum64_hi], %[tmp2] \n\t" - ".set pop \n\t" - : [tmp2] "+r" (tmp2), [tmp3] "+r" (tmp3), - [sum64_hi] "+r" (sum64_hi), [sum64_lo] "+r" (sum64_lo) - : - : "hi", "lo", "memory" - ); - } - } - } - } - word32_high = sum64_hi; - word32_low = sum64_lo; - - // Calculate the value of shifting (shift_norm) for the 64-bit sum. - if (word32_high != 0) { - shift_norm = 32 - WebRtcSpl_NormW32(word32_high); - int tmp1; - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "srl %[residual_energy], %[sum64_lo], %[shift_norm] \n\t" - "li %[tmp1], 32 \n\t" - "subu %[tmp1], %[tmp1], %[shift_norm] \n\t" - "sll %[tmp1], %[sum64_hi], %[tmp1] \n\t" - "or %[residual_energy], %[residual_energy], %[tmp1] \n\t" - ".set pop \n\t" - : [residual_energy] "=&r" (residual_energy), [tmp1]"=&r"(tmp1), - [sum64_hi] "+r" (sum64_hi), [sum64_lo] "+r" (sum64_lo) - : [shift_norm] "r" (shift_norm) - : "memory" - ); - } else { - if ((word32_low & 0x80000000) != 0) { - shift_norm = 1; - residual_energy = (uint32_t)word32_low >> 1; - } else { - shift_norm = WebRtcSpl_NormW32(word32_low); - residual_energy = word32_low << shift_norm; - shift_norm = -shift_norm; - } - } - - // Q(q_val_polynomial * 2) * Q(q_val_corr) >> shift_internal >> shift_norm - // = Q(q_val_corr - shift_internal - shift_norm + q_val_polynomial * 2) - *q_val_residual_energy = - q_val_corr - shift_internal - shift_norm + q_val_polynomial * 2; - - return residual_energy; -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/lpc_masking_model_unittest.cc b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/lpc_masking_model_unittest.cc deleted file mode 100644 index 82793f1344ff..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/lpc_masking_model_unittest.cc +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_coding/codecs/isac/fix/source/lpc_masking_model.h" - -#include "system_wrappers/include/cpu_features_wrapper.h" -#include "test/gtest.h" - -class LpcMaskingModelTest : public ::testing::Test { - protected: - // Pass a function pointer to the Tester function. 
- void CalculateResidualEnergyTester( - CalculateResidualEnergy CalculateResidualEnergyFunction) { - const int kIntOrder = 10; - const int32_t kInt32QDomain = 5; - const int kIntShift = 11; - int16_t a[kIntOrder + 1] = {32760, 122, 7, 0, -32760, -3958, - -48, 18745, 498, 9, 23456}; - int32_t corr[kIntOrder + 1] = {11443647, -27495, 0, 98745, -11443600, 1, - 1, 498, 9, 888, 23456}; - int q_shift_residual = 0; - int32_t residual_energy = 0; - - // Test the code path where (residual_energy >= 0x10000). - residual_energy = CalculateResidualEnergyFunction( - kIntOrder, kInt32QDomain, kIntShift, a, corr, &q_shift_residual); - EXPECT_EQ(1789023310, residual_energy); - EXPECT_EQ(2, q_shift_residual); - - // Test the code path where (residual_energy < 0x10000) - // and ((energy & 0x8000) != 0). - for (int i = 0; i < kIntOrder + 1; i++) { - a[i] = 24575 >> i; - corr[i] = i; - } - residual_energy = CalculateResidualEnergyFunction( - kIntOrder, kInt32QDomain, kIntShift, a, corr, &q_shift_residual); - EXPECT_EQ(1595279092, residual_energy); - EXPECT_EQ(26, q_shift_residual); - - // Test the code path where (residual_energy <= 0x7fff). - for (int i = 0; i < kIntOrder + 1; i++) { - a[i] = 2457 >> i; - } - residual_energy = CalculateResidualEnergyFunction( - kIntOrder, kInt32QDomain, kIntShift, a, corr, &q_shift_residual); - EXPECT_EQ(2029266944, residual_energy); - EXPECT_EQ(33, q_shift_residual); - } -}; - -TEST_F(LpcMaskingModelTest, CalculateResidualEnergyTest) { - CalculateResidualEnergyTester(WebRtcIsacfix_CalculateResidualEnergyC); -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/lpc_tables.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/lpc_tables.c deleted file mode 100644 index d495d2923577..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/lpc_tables.c +++ /dev/null @@ -1,1281 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/* - * lpc_tables.c - * - * Coding tables for the KLT coefficients - * - */ - - -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" -#include "modules/audio_coding/codecs/isac/fix/source/lpc_tables.h" - -/* indices of KLT coefficients used */ -const uint16_t WebRtcIsacfix_kSelIndGain[12] = { - 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, - 10, 11}; - -const uint16_t WebRtcIsacfix_kSelIndShape[108] = { - 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, - 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, - 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, - 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, - 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, - 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, - 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, - 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, - 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, - 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, - 100, 101, 102, 103, 104, 105, 106, 107 -}; - -/* cdf array for model indicator */ -const uint16_t WebRtcIsacfix_kModelCdf[4] = { - 0, 15434, 37548, 65535 -}; - -/* pointer to cdf array for model indicator */ -const uint16_t *WebRtcIsacfix_kModelCdfPtr[1] = { - WebRtcIsacfix_kModelCdf -}; - -/* initial cdf index for decoder of model indicator */ -const uint16_t WebRtcIsacfix_kModelInitIndex[1] = { - 1 -}; - -/* offset to go from rounded value to quantization index */ -const int16_t WebRtcIsacfix_kQuantMinGain[12] ={ - 3, 6, 4, 6, 6, 9, 5, 16, 11, 34, 32, 47 -}; - -const int16_t WebRtcIsacfix_kQuantMinShape[108] = { - 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, - 1, 1, 1, 1, 2, 2, 2, 3, 0, 0, - 0, 0, 1, 0, 0, 0, 0, 1, 1, 1, - 1, 1, 1, 2, 2, 3, 0, 0, 0, 0, - 1, 0, 1, 1, 1, 1, 1, 1, 1, 2, - 2, 4, 3, 5, 0, 0, 0, 0, 1, 1, - 1, 1, 1, 1, 2, 1, 2, 2, 3, 4, - 4, 7, 0, 0, 1, 1, 1, 1, 1, 1, - 1, 2, 3, 2, 3, 4, 4, 5, 7, 13, - 0, 1, 1, 2, 3, 2, 2, 2, 4, 4, - 5, 6, 7, 11, 9, 13, 12, 26 -}; - -/* maximum quantization index */ -const uint16_t WebRtcIsacfix_kMaxIndGain[12] = { - 6, 12, 8, 14, 10, 19, 12, 31, 22, 56, 52, 138 -}; - -const uint16_t WebRtcIsacfix_kMaxIndShape[108] = { - 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, - 2, 2, 2, 2, 4, 4, 5, 6, 0, 0, - 0, 0, 1, 0, 0, 0, 0, 1, 2, 2, - 2, 2, 3, 4, 5, 7, 0, 0, 0, 0, - 2, 0, 2, 2, 2, 2, 3, 2, 2, 4, - 4, 6, 6, 9, 0, 0, 0, 0, 2, 2, - 2, 2, 2, 2, 3, 2, 4, 4, 7, 7, - 9, 13, 0, 0, 2, 2, 2, 2, 2, 2, - 3, 4, 5, 4, 6, 8, 8, 10, 16, 25, - 0, 2, 2, 4, 5, 4, 4, 4, 7, 8, - 9, 10, 13, 19, 17, 23, 25, 49 -}; - -/* index offset */ -const uint16_t WebRtcIsacfix_kOffsetGain[3][12] = { - { 0, 7, 20, 29, 44, 55, 75, 88, 120, 143, 200, 253}, - { 0, 7, 19, 27, 42, 53, 73, 86, 117, 140, 197, 249}, - { 0, 7, 20, 28, 44, 55, 75, 89, 121, 145, 202, 257} -}; - -const uint16_t WebRtcIsacfix_kOffsetShape[3][108] = { - { - 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, - 11, 14, 17, 20, 23, 28, 33, 39, 46, 47, - 48, 49, 50, 52, 53, 54, 55, 56, 58, 61, - 64, 67, 70, 74, 79, 85, 93, 94, 95, 96, - 97, 100, 101, 104, 107, 110, 113, 117, 120, 123, - 128, 133, 140, 147, 157, 158, 159, 160, 161, 164, - 167, 170, 173, 176, 179, 183, 186, 191, 196, 204, - 212, 222, 236, 237, 238, 241, 244, 247, 250, 253, - 256, 260, 265, 271, 276, 283, 292, 301, 312, 329, - 355, 356, 359, 362, 367, 373, 378, 383, 388, 396, - 405, 415, 426, 440, 460, 478, 502, 528 - }, - { - 0, 1, 2, 3, 4, 6, 7, 8, 9, 11, - 13, 16, 19, 22, 26, 29, 34, 39, 45, 46, - 47, 48, 49, 50, 51, 52, 53, 55, 57, 60, - 63, 66, 70, 73, 78, 84, 91, 92, 93, 94, - 95, 96, 97, 99, 102, 105, 108, 111, 114, 118, - 123, 128, 134, 141, 151, 152, 153, 154, 156, 159, - 162, 165, 168, 171, 174, 177, 181, 186, 194, 200, - 208, 218, 233, 234, 235, 236, 239, 242, 245, 248, - 251, 254, 
258, 263, 270, 277, 288, 297, 308, 324, - 349, 351, 354, 357, 361, 366, 372, 378, 383, 390, - 398, 407, 420, 431, 450, 472, 496, 524 - }, - { - 0, 1, 2, 3, 4, 5, 6, 7, 8, 11, - 14, 17, 20, 23, 26, 29, 34, 40, 47, 48, - 49, 50, 51, 52, 53, 54, 55, 58, 61, 64, - 67, 70, 73, 77, 82, 88, 96, 97, 98, 99, - 101, 102, 104, 107, 110, 113, 116, 119, 122, 125, - 129, 134, 141, 150, 160, 161, 162, 163, 166, 168, - 171, 174, 177, 180, 183, 186, 190, 195, 201, 208, - 216, 226, 243, 244, 245, 248, 251, 254, 257, 260, - 263, 268, 273, 278, 284, 291, 299, 310, 323, 340, - 366, 368, 371, 374, 379, 383, 389, 394, 399, 406, - 414, 422, 433, 445, 461, 480, 505, 533 - } -}; - -/* initial cdf index for KLT coefficients */ -const uint16_t WebRtcIsacfix_kInitIndexGain[3][12] = { - { 3, 6, 4, 7, 5, 10, 6, 16, 11, 28, 26, 69}, - { 3, 6, 4, 7, 5, 10, 6, 15, 11, 28, 26, 69}, - { 3, 6, 4, 8, 5, 10, 7, 16, 12, 28, 27, 70} -}; - -const uint16_t WebRtcIsacfix_kInitIndexShape[3][108] = { - { - 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, - 1, 1, 1, 1, 2, 2, 3, 3, 0, 0, - 0, 0, 1, 0, 0, 0, 0, 1, 1, 1, - 1, 1, 2, 2, 3, 4, 0, 0, 0, 0, - 1, 0, 1, 1, 1, 1, 2, 1, 1, 2, - 2, 3, 3, 5, 0, 0, 0, 0, 1, 1, - 1, 1, 1, 1, 2, 1, 2, 2, 4, 4, - 5, 7, 0, 0, 1, 1, 1, 1, 1, 1, - 2, 2, 3, 2, 3, 4, 4, 5, 8, 13, - 0, 1, 1, 2, 3, 2, 2, 2, 4, 4, - 5, 5, 7, 10, 9, 12, 13, 25 - }, - { - 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, - 1, 1, 1, 2, 1, 2, 2, 3, 0, 0, - 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, - 1, 2, 1, 2, 3, 3, 0, 0, 0, 0, - 0, 0, 1, 1, 1, 1, 1, 1, 2, 2, - 2, 3, 3, 5, 0, 0, 0, 1, 1, 1, - 1, 1, 1, 1, 1, 2, 2, 4, 3, 4, - 5, 7, 0, 0, 0, 1, 1, 1, 1, 1, - 1, 2, 2, 3, 3, 5, 4, 5, 8, 12, - 1, 1, 1, 2, 2, 3, 3, 2, 3, 4, - 4, 6, 5, 9, 11, 12, 14, 25 - }, - { - 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, - 1, 1, 1, 1, 1, 2, 3, 3, 0, 0, - 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, - 1, 1, 2, 2, 3, 4, 0, 0, 0, 1, - 0, 1, 1, 1, 1, 1, 1, 1, 1, 2, - 2, 3, 4, 5, 0, 0, 0, 1, 1, 1, - 1, 1, 1, 1, 1, 2, 2, 3, 3, 4, - 5, 8, 0, 0, 1, 1, 1, 1, 1, 1, - 2, 2, 2, 3, 3, 4, 5, 6, 8, 13, - 1, 1, 1, 2, 2, 3, 2, 2, 3, 4, - 4, 5, 6, 8, 9, 12, 14, 25 - } -}; - -/* offsets for quantizer representation levels*/ -const uint16_t WebRtcIsacfix_kOfLevelsGain[3] = { - 0, 392, 779 -}; - -const uint16_t WebRtcIsacfix_kOfLevelsShape[3] = { - 0, 578, 1152 -}; - -/* quantizer representation levels */ - - - -const int32_t WebRtcIsacfix_kLevelsGainQ17[1176] = { - -364547,-231664,-102123,-573,104807,238257,368823,-758583,-640135,-510291 - ,-377662,-252785,-113177,2627,112906,248601,389461,522691,644517,763974 - ,-538963,-368179,-245823,-106095,-890,104299,241111,350730,493190,-800763 - ,-646230,-510239,-382115,-248623,-111829,-2983,113852,251105,388114,519757 - ,644048,774712,896334,1057931,-770009,-635310,-503690,-375087,-248106,-108525 - ,-105,108259,243472,377948,519271,-1160885,-1032044,-914636,-777593,-647891 - ,-518408,-388028,-254321,-115293,-598,117849,251296,385367,515510,652727 - ,777432,920363,1038625,1153637,1316836,-632287,-505430,-379430,-248458,-118013 - ,-888,118762,250266,381650,513327,652169,766471,932113,-2107480,-1971030 - ,-1827020,-1698263,-1558670,-1436314,-1305377,-1172252,-1047355,-914202,-779651,-651001 - ,-520999,-390394,-255761,-123490,-1893,126839,256703,385493,518607,651760 - ,782750,908693,1044131,1163865,1311066,1424177,1582628,1709823,1831740,1955391 - ,-1423044,-1288917,-1181281,-1043222,-911770,-780354,-646799,-522664,-386721,-258266 - ,-128060,-1101,128233,259996,390336,519590,649290,778701,908010,1040796 - ,1161235,1306889,1441882,-4446002,-4301031,-4194304,-4080591,-3947740,-3808975,-3686530 - 
,-3567839,-3383251,-3287089,-3136577,-3017405,-2869860,-2751321,-2619984,-2482932,-2354790 - ,-2223147,-2090669,-1964135,-1831208,-1706697,-1570817,-1446008,-1305386,-1175773,-1046066 - ,-915356,-785120,-653614,-524331,-393767,-260442,-130187,-799,128841,261466 - ,393616,520542,652117,784613,914159,1045399,1181072,1308971,1442502,1570346 - ,1693912,1843986,1966014,2090474,2224869,2364593,2475934,2628403,2752512,2856640 - ,-4192441,-4063232,-3917821,-3799195,-3666233,-3519199,-3411021,-3269192,-3135684,-3008826 - ,-2880875,-2747342,-2620981,-2494872,-2354979,-2229718,-2098939,-1964971,-1835399,-1703452 - ,-1572806,-1440482,-1311794,-1179338,-1046521,-919823,-785914,-655335,-523416,-395507 - ,-264833,-132184,-2546,131698,256217,391372,522688,651248,789964,909618 - ,1035305,1179145,1313824,1436934,1552353,1693722,1815508,1972826,2096328,2228224 - ,2359296,2490368,2598848,-6160384,-6029312,-5881382,-5767168,-5636096,-5505024,-5373952 - ,-5228418,-5110384,-4954923,-4880576,-4710990,-4587364,-4471340,-4333905,-4211513,-4051293 - ,-3907927,-3800105,-3675961,-3538640,-3413663,-3271148,-3152105,-3019103,-2869647,-2744015 - ,-2620639,-2479385,-2364211,-2227611,-2095427,-1974497,-1834168,-1703561,-1568961,-1439826 - ,-1309192,-1174050,-1050191,-917836,-786015,-656943,-518934,-394831,-257708,-128041 - ,1610,128991,264442,393977,521383,653849,788164,918641,1049122,1181971 - ,1308934,1439505,1571808,1706305,1836318,1966235,2097269,2228990,2357005,2490292 - ,2617400,2749680,2881234,3014880,3145637,3276467,3409099,3536637,3671493,3802918 - ,3929740,4065036,4194143,4325999,4456126,4586857,4717194,4843923,4978676,5110913 - ,5245281,5371394,5499780,5633779,5762611,5897682,6028688,6167546,6296465,6421682 - ,6548882,6682074,6809432,6941956,7078143,7204509,7334296,7475137,7609896,7732044 - ,7861604,8002039,8131670,8259222,8390299,8522399,8650037,8782348,8908402,9037815 - ,9164594,9300338,9434679,9574500,9699702,9833934,9948152,10083972,10244937,10332822 - ,10485760,10600122,10760754,10892964,11010048,11111004,11272192,11403264,11525091,11624984 - ,11796480,11915146,-393216,-262144,-101702,-740,100568,262144,393216,-786432 - ,-655360,-524288,-383907,-243301,-94956,-156,95547,269629,416691,524288 - ,655360,-393216,-262144,-88448,-37,87318,262144,393216,524288,-917504 - ,-786432,-655360,-495894,-373308,-267503,-93211,4119,91308,250895,393216 - ,526138,655360,786432,917504,-786432,-655360,-524288,-393216,-262144,-83497 - ,222,86893,240922,393216,524288,-1048576,-917504,-790472,-655360,-508639 - ,-383609,-262016,-95550,-3775,96692,256797,364847,534906,655360,786432 - ,889679,1048576,1179648,1310720,1441792,-655360,-524288,-377684,-248408,-93690 - ,1261,95441,227519,393216,524288,655360,786432,917504,-2097152,-1966080 - ,-1809470,-1703936,-1572864,-1441792,-1314289,-1195149,-1056205,-917504,-809951,-657769 - ,-521072,-383788,-248747,-106350,-2944,105550,243408,388548,521064,628732 - ,786432,885456,1064548,1179648,1310720,1441792,1572864,1703936,1835008,-1441792 - ,-1310720,-1179648,-1037570,-888492,-767774,-646634,-519935,-373458,-248029,-111915 - ,760,111232,247735,379432,507672,672699,786432,917504,1048576,1179648 - ,1310720,1441792,-4456448,-4325376,-4194304,-4063232,-3932160,-3801088,-3670016,-3538944 - ,-3407872,-3276800,-3145728,-3014656,-2883584,-2752512,-2647002,-2490368,-2359296,-2228224 - ,-2097152,-1951753,-1835008,-1703936,-1594177,-1462001,-1289150,-1160774,-1025917,-924928 - ,-782509,-641294,-516191,-386630,-251910,-118886,5210,121226,253949,386008 - 
,517973,649374,780064,917783,1052462,1183856,1290593,1419389,1556641,1699884 - ,1835008,1988314,2090470,2228224,2359296,2490368,2621440,2752512,2883584,-3801088 - ,-3643514,-3539937,-3409931,-3263294,-3145658,-3012952,-2879230,-2752359,-2622556,-2483471 - ,-2357556,-2226500,-2093112,-1965892,-1833664,-1701035,-1567767,-1440320,-1310556,-1178339 - ,-1049625,-916812,-786477,-655277,-525050,-393773,-264828,-130696,-480,132126 - ,260116,394197,527846,652294,785563,917183,1049511,1175958,1308161,1438759 - ,1572253,1698835,1828535,1967072,2089391,2212798,2348901,2461547,2621440,2752512 - ,2883584,-7309870,-7203780,-7062699,-6939106,-6790819,-6672036,-6553600,-6422317,-6288422 - ,-6164694,-6026456,-5901410,-5754168,-5621459,-5502710,-5369686,-5240454,-5120712,-4976140 - ,-4847970,-4723070,-4589083,-4450923,-4324680,-4189892,-4065551,-3931803,-3800209,-3668539 - ,-3539395,-3404801,-3277470,-3141389,-3016710,-2885724,-2752612,-2618541,-2486762,-2354153 - ,-2225059,-2094984,-1968194,-1830895,-1699508,-1575743,-1444516,-1308683,-1179714,-1053088 - ,-917981,-783707,-653900,-524980,-395409,-260309,-131948,-3452,132113,263241 - ,392185,522597,654134,788288,919810,1045795,1179210,1314201,1444235,1574447 - ,1705193,1834009,1967332,2098102,2229019,2359147,2489859,2619878,2754966,2879671 - ,3014438,3146143,3276733,3405958,3542196,3667493,3798815,3932961,4062458,4187125 - ,4322346,4454875,4587752,4716809,4848274,4975027,5111957,5242215,5373085,5501158 - ,5640140,5762918,5895358,6024008,6157906,6290628,6422713,6546339,6675888,6815606 - ,6955288,7077501,7211630,7337893,7473635,7607175,7728310,7866475,7999658,8127888 - ,8241758,8386483,8522550,8641582,8771915,8922139,9038632,9179385,9313426,9437184 - ,9568256,9699328,9830400,9952933,10120004,10223616,10354688,10474645,10616832,-393216 - ,-262144,-85425,-121,82533,262144,393216,-786432,-655360,-524288,-379928 - ,-222821,-95200,287,95541,227093,393216,493567,655360,786432,-393216 - ,-262144,-86805,510,86722,262144,393216,524288,-1048576,-917504,-786432 - ,-624456,-529951,-395071,-241627,-101168,81,99975,241605,393216,524288 - ,655360,786432,917504,-786432,-655360,-524288,-393216,-230359,-95619,-137 - ,94425,226222,393216,524288,-1179648,-1048576,-917504,-773841,-655360,-492258 - ,-379715,-244707,-103621,-434,104523,242680,381575,523659,650565,786432 - ,917504,1048576,1179648,1310720,-786432,-629344,-524288,-376757,-242858,-101932 - ,-2715,107155,239212,366480,514943,655360,786432,917504,-2228224,-2097152 - ,-1966080,-1835008,-1703936,-1572864,-1441792,-1284584,-1179648,-1048819,-934658,-777181 - ,-626371,-515660,-377493,-248975,-113036,436,113584,248354,379718,512475 - ,653932,796494,917504,1048576,1179648,1310720,1441792,1572864,1703936,1835008 - ,-1572864,-1441792,-1297608,-1161159,-1032316,-917092,-779770,-647384,-515529,-384269 - ,-250003,-119252,1053,118111,249512,380545,512039,648101,770656,907003 - ,1021725,1178082,1310720,1441792,-4587520,-4456448,-4325376,-4194304,-4063232,-3932160 - ,-3801088,-3670016,-3538944,-3407872,-3276800,-3145728,-2999335,-2883584,-2752512,-2621440 - ,-2490368,-2359296,-2228224,-2112691,-1966080,-1848781,-1709830,-1566109,-1438427,-1303530 - ,-1176124,-1040936,-913876,-784585,-652025,-518361,-385267,-256342,-127297,-2733 - ,125422,257792,389363,519911,651106,783805,909407,1044143,1174156,1309267 - ,1436173,1553771,1708958,1814083,1967036,2095386,2255169,2359296,2478303,2621440 - ,2752512,-4456448,-4325376,-4194304,-4063232,-3932160,-3797524,-3670016,-3560250,-3413217 - 
,-3257719,-3166416,-2986626,-2878000,-2781144,-2625383,-2495465,-2346792,-2230930,-2077063 - ,-1949225,-1819274,-1697261,-1568664,-1443074,-1304302,-1175289,-1043794,-913423,-785561 - ,-652104,-522835,-392667,-260517,-130088,-2,129509,260990,391931,522470 - ,655770,784902,917093,1046445,1176951,1303121,1441362,1565401,1702022,1822856 - ,1952852,2090384,2214607,2338436,2457483,2621440,-8781824,-8650752,-8519680,-8388608 - ,-8260828,-8126464,-8003337,-7859030,-7750057,-7602176,-7471104,-7340032,-7193045,-7090588 - ,-6946816,-6843344,-6676635,-6557575,-6447804,-6277614,-6159736,-6035729,-5884723,-5739567 - ,-5634818,-5489867,-5372864,-5243300,-5098939,-4988639,-4856258,-4728494,-4591717,-4447428 - ,-4322409,-4192918,-4062638,-3934141,-3797545,-3673373,-3531587,-3407391,-3277404,-3147797 - ,-3013578,-2886548,-2749811,-2616428,-2490949,-2361301,-2228482,-2096883,-1964343,-1831754 - ,-1702201,-1572495,-1442012,-1309242,-1182451,-1048996,-916905,-786510,-657079,-524730 - ,-393672,-261313,-128743,166,130678,261334,393287,524155,655570,786839 - ,917353,1052167,1179013,1309360,1442634,1571153,1703961,1832027,1965014,2097912 - ,2224861,2355341,2490455,2623051,2753484,2877015,3015783,3144157,3273705,3405255 - ,3542006,3669580,3802417,3935413,4065088,4190896,4333521,4456355,4579781,4713832 - ,4845707,4978625,5113278,5243817,5382318,5500592,5638135,5761179,5900822,6029270 - ,6186398,6297816,6436435,6559163,6666389,6806548,6950461,7086078,7195777,7350973 - ,7480132,7614852,7743514,7847288,8014762,8126464,8257536,8388608,8519680,8650752 - ,8781824,8912896,9043968,9175040,9306112,9437184 -}; - - - -const int16_t WebRtcIsacfix_kLevelsShapeQ10[1735] = { - 0, 0, -1, 0, 0, 1, 0, 1, 0, -821 - , 1, -763, -1, 656, -620, 0, 633, -636, 4, 615 - , -630, 1, 649, -1773, -670, 5, 678, 1810, -1876, -676 - , 0, 691, 1843, -1806, -743, -1, 749, 1795, 2920, -2872 - , -1761, -772, -3, 790, 1763, 2942, 0, 0, 0, 0 - , -792, 2, 0, 0, 1, 0, -854, 0, -702, -1 - , 662, -624, -5, 638, -611, -6, 638, -647, 0, 651 - , -685, -4, 679, 2123, -1814, -693, 0, 664, 1791, -1735 - , -737, 0, 771, 1854, 2873, -2867, -1842, -793, -1, 821 - , 1826, 2805, 3922, 0, 0, 0, -1, -779, 1, 786 - , 1, -708, 0, 789, -799, 1, 797, -663, 2, 646 - , -600, 3, 609, -600, 1, 658, 1807, -627, -3, 612 - , -625, 3, 632, -1732, -674, 1, 672, 2048, -1768, -715 - , 0, 724, 1784, -3881, -3072, -1774, -719, -1, 730, 1811 - , -2963, -1829, -806, -1, 816, 1795, 3050, -5389, -3784, -2942 - , -1893, -865, -12, 867, 1885, 2945, 3928, -2, 1, 4 - , 0, -694, 2, 665, -598, 5, 587, -599, -1, 661 - , -656, -7, 611, -607, 5, 603, -618, -4, 620, -1794 - , -645, -2, 654, -655, -1, 658, -1801, -700, 5, 707 - , 1927, -1752, -745, -8, 752, 1843, -2838, -1781, -801, 11 - , 796, 1811, 2942, 3866, -3849, -3026, -1848, -819, 2, 827 - , 1825, 2963, -3873, -2904, -1869, -910, -6, 903, 1902, 2885 - , 3978, 5286, -7168, -6081, -4989, -3968, -2963, -1970, -943, -2 - , 953, 1951, 2968, 3974, 5009, 6032, -2, 3, -1024, 2 - , 1024, -637, 1, 669, -613, -7, 630, -603, 4, 612 - , -612, 0, 590, -645, -11, 627, -657, -2, 671, 1849 - , -1853, -694, 2, 702, 1838, -3304, -1780, -736, -8, 732 - , 1772, -1709, -755, -6, 760, 1780, -2994, -1780, -800, 8 - , 819, 1830, 2816, -4096, -2822, -1881, -851, -4, 855, 1872 - , 2840, 3899, -3908, -2904, -1878, -887, 6, 897, 1872, 2942 - , 4008, -4992, -3881, -2933, -1915, -928, 1, 937, 1919, 2900 - , 4009, 4881, -6848, -6157, -5065, -3981, -2983, -1972, -978, -1 - , 968, 1979, 2988, 4008, 5007, 6108, 7003, 8051, 9027,-13272 - ,-12012,-11228,-10213, -9261, -8084, 
-7133, -6075, -5052, -4050, -3036 - , -2014, -996, -4, 1007, 2031, 3038, 4049, 5074, 6134, 7069 - , 8094, 9069, 10212, 11049, 12104, 51, -1024, -13, 1024, -609 - , -107, 613, -2048, -687, -95, 667, 2048, -3072, -1724, -785 - , -34, 732, 1819, -2048, -703, -26, 681, 2048, -2048, -686 - , -9, 665, 2048, -2048, -702, 37, 748, 1723, -4096, -2786 - , -1844, -837, 37, 811, 1742, 3072, -4096, -2783, -1848, -881 - , 39, 898, 1843, 2792, 3764, -5120, -4096, -2923, -1833, -852 - , -14, 862, 1824, 2834, 4096, -6144, -5120, -3914, -2842, -1870 - , -886, -27, 888, 1929, 2931, 4051, -7168, -6144, -5120, -3866 - , -2933, -1915, -927, 64, 933, 1902, 2929, 3912, 5063, 6144 - ,-11264,-10240, -9216, -8192, -7086, -6144, -5039, -3972, -2943, -1929 - , -941, 3, 938, 1942, 2959, 3933, 4905, 6088, 6983, 8192 - , -9216, -8192, -7202, -6088, -4983, -4019, -2955, -1975, -966, 17 - , 997, 1981, 2967, 3990, 4948, 6022, 6967, 8192,-13312,-12288 - ,-11264,-10240, -9216, -8049, -6997, -6040, -5026, -4043, -3029, -2034 - , -1015, -23, 984, 1997, 3010, 4038, 5002, 6015, 6946, 8061 - , 9216, 10240,-12381,-11264,-10240, -9060, -8058, -7153, -6085, -5075 - , -4051, -3042, -2037, -1017, -5, 1007, 2028, 3035, 4050, 5088 - , 6111, 7160, 8156, 9215, 10095, 11229, 12202, 13016,-26624,-25600 - ,-24582,-23671,-22674,-21400,-20355,-19508,-18315,-17269,-16361,-15299 - ,-14363,-13294,-12262,-11237,-10203, -9227, -8165, -7156, -6116, -5122 - , -4076, -3056, -2043, -1020, -8, 1027, 2047, 3065, 4110, 5130 - , 6125, 7168, 8195, 9206, 10230, 11227, 12256, 13304, 14281, 15316 - , 16374, 17382, 18428, 19388, 20361, 21468, 22448, 23781, 0, 0 - , -1, 0, -2, 1024, 0, 0, 0, -1, 1024, -1024 - , 1, -1024, 4, 1024, -1024, 2, 1024, -1024, 2, 1024 - , -2048, -1024, -4, 1024, -1024, 2, 1024, -2048, -1024, -3 - , 1024, 2048, -2048, -1024, 4, 1024, 2048, -3072, -2048, -1024 - , -1, 662, 2048, 0, 1, 0, 0, 1, -2, -2 - , 0, 2, 1024, -1, 1024, -1024, 4, 1024, -1024, 1 - , 1024, -1024, 1, 1024, -2048, -781, -4, 844, -807, -5 - , 866, -2048, -726, -13, 777, 2048, -2048, -643, -4, 617 - , 2048, 3072, -3072, -2048, -629, 1, 630, 2048, 3072, 0 - , -1, 1, -2, 2, 1, -1024, 5, -1024, 6, 1024 - , -1024, 4, 1024, -1024, 1, 1024, -1024, -9, 1024, -673 - , -7, 655, -2048, -665, -15, 716, -2048, -647, 4, 640 - , 2048, -2048, -615, -1, 635, 2048, -2048, -613, 10, 637 - , 2048, 3072, -3072, -2048, -647, -3, 641, 2048, 3072, -5120 - , -4096, -3072, -2048, -681, 6, 685, 2048, 3072, 4096, 1 - , 1, 0, -1, 1024, -1024, -3, 1024, -1024, 6, 1024 - , -1024, -1, 769, -733, 0, 1024, -876, -2, 653, -1024 - , -4, 786, -596, -13, 595, -634, -2, 638, 2048, -2048 - , -620, -5, 620, 2048, -4096, -3072, -2048, -639, 11, 655 - , 2048, 3072, -3072, -2048, -659, 5, 663, 2048, -3072, -1823 - , -687, 22, 695, 2048, 3072, 4096, -4096, -3072, -1848, -715 - , -3, 727, 1816, 3072, 4096, 5120, -8192, -7168, -6144, -5120 - , -4096, -2884, -1771, -756, -14, 775, 1844, 3072, 4096, 5120 - , 6144, -1, 1, 0, -1024, 2, 815, -768, 2, 708 - , -1024, -3, 693, -661, -7, 607, -643, -5, 609, -624 - , 3, 631, -682, -3, 691, 2048, -2048, -640, 5, 650 - , 2048, -3072, -2048, -701, 9, 704, 2048, 3072, -3072, -2048 - , -670, 10, 674, 2048, 3072, -5120, -4096, -3072, -1749, -738 - , 0, 733, 1811, 3072, 4096, 5120, -4096, -3072, -1873, -753 - , 0, 756, 1874, 3072, 4096, -5120, -4096, -2900, -1838, -793 - , -6, 793, 1868, 2837, 4096, 5120, -7168, -6144, -5120, -4096 - , -2832, -1891, -828, 1, 828, 1901, 2823, 3912, 5120, 6144 - , 7168, 8192,-13312,-12288,-11264,-10240, -9216, -8192, -7168, -6144 - , -5120, -3976, 
-3004, -1911, -869, 7, 869, 1932, 3024, 3992 - , 5009, 6144, 7168, 8192, 9216, 10240, 11264, -4, 1024, -629 - , -22, 609, -623, 9, 640, -2048, -768, 1, 682, -2048 - , -741, 49, 722, 2048, -3072, -1706, -808, -20, 768, 1750 - , -1684, -727, -29, 788, 1840, 3033, -1758, -784, 0, 801 - , 1702, -3072, -1813, -814, 38, 820, 1884, 2927, -4096, -3241 - , -1839, -922, 25, 882, 1886, 2812, -4096, -2982, -1923, -894 - , 84, 912, 1869, 2778, 4096, -4928, -3965, -2902, -1920, -883 - , 3, 917, 1953, 2921, 3957, 4922, 6144, 7168, -5120, -3916 - , -2897, -1949, -930, 31, 959, 1934, 2901, 3851, 5120, -9216 - , -8192, -7046, -6029, -5030, -4034, -2980, -1969, -1013, -76, 963 - , 1963, 2901, 3929, 4893, 6270, 7168, 8192, 9216,-12288,-11264 - ,-10240, -9216, -8192, -6846, -6123, -5108, -4008, -3000, -1963, -954 - , -6, 958, 1992, 3009, 4020, 5085, 6097, 7168, 8192, 9216 - ,-11264,-10139, -9194, -8127, -7156, -6102, -5053, -4049, -3036, -2025 - , -1009, -34, 974, 1984, 3034, 4028, 5138, 6000, 7057, 8166 - , 9070, 10033, 11360, 12288,-13312,-12288,-10932,-10190, -9120, -8123 - , -7128, -6103, -5074, -4081, -3053, -2029, -989, -4, 1010, 2028 - , 3051, 4073, 5071, 6099, 7132, 8147, 9295, 10159, 11023, 12263 - , 13312, 14336,-25600,-24576,-23552,-22529,-21504,-20480,-19456,-18637 - ,-17425,-16165,-15316,-14327,-13606,-12135,-11182,-10107, -9153, -8144 - , -7146, -6160, -5129, -4095, -3064, -2038, -1025, 1, 1031, 2072 - , 3074, 4088, 5123, 6149, 7157, 8173, 9198, 10244, 11250, 12268 - , 13263, 14289, 15351, 16370, 17402, 18413, 19474, 20337, 21386, 22521 - , 23367, 24350, 0, 0, 0, 0, 0, 0, 0, 0 - , -1024, 0, 1024, -1024, 0, 1024, -1024, 0, 1024, -1024 - , 0, 1024, -1024, 0, 1024, -773, 0, 1024, -674, 0 - , 645, -2048, -745, 0, 628, 2048, -2048, -712, 0, 681 - , 2048, 3072, -3072, -2048, -673, 0, 682, 1964, 3257, 0 - , 0, 0, 0, 0, 0, 0, 0, -1024, 0, 1024 - , -1024, 0, 1024, -1024, 0, 1024, -705, 0, 623, -771 - , 0, 1024, -786, 0, 688, -631, 0, 652, 2048, -2048 - , -627, -1, 666, 2048, -3072, -1756, -694, 0, 674, 2048 - , -3098, -1879, -720, 5, 694, 1886, 2958, 4096, 0, 0 - , 0, 0, 1024, 0, 0, 1024, -769, 0, 1024, -1024 - , 0, 1024, -1024, 0, 1024, -817, 0, 734, -786, 0 - , 651, -638, 0, 637, -623, 0, 671, -652, 0, 619 - , 2048, -2048, -670, -1, 663, 2048, -1908, -680, 1, 686 - , 2048, 3072, 4096, -4096, -3072, -1833, -711, 0, 727, 1747 - , 3072, 4096, -4096, -2971, -1826, -762, 2, 766, 1832, 2852 - , 3928, 5079, 0, 0, 0, -1024, 0, 1024, -1024, 0 - , -656, 0, 1024, -599, 0, 620, -1024, 0, 1024, -603 - , 0, 622, -643, 0, 660, -599, 0, 611, -641, -1 - , 651, 2048, -2048, -648, -2, 647, 1798, -3072, -2048, -672 - , 2, 670, 2048, -3072, -1780, -694, -1, 706, 1751, 3072 - , -3072, -1862, -757, 7, 739, 1798, 3072, 4096, -5120, -4096 - , -3253, -1811, -787, 3, 782, 1887, 3123, 4096, -7252, -6144 - , -5354, -4060, -2864, -1863, -820, -11, 847, 1903, 2970, 3851 - , 4921, 5957, 7168, 8192, 9306, 0, 0, -1024, 0, 1024 - , -726, 0, 706, -692, 0, 593, -598, 0, 616, -624 - , 0, 616, -605, 0, 613, -2048, -652, 1, 635, 2048 - , -2048, -647, -1, 660, 2048, -1811, -668, -2, 685, 2048 - , -1796, -731, -2, 730, 1702, 3072, -3072, -1766, -747, -4 - , 756, 1770, 3072, -4096, -3024, -1762, -783, 4, 771, 1781 - , 3072, -5120, -4057, -2807, -1832, -822, 0, 816, 1804, 2851 - , 3949, 5120, -6144, -4899, -3927, -2920, -1893, -874, -2, 868 - , 1881, 2905, 3960, 4912, 6144, -9216, -8192, -7168, -6225, -4963 - , -3943, -2956, -1890, -902, 0, 897, 1914, 2916, 3984, 4990 - , 6050, 7168,-11264,-10217, -9114, -8132, -7035, -5988, -4984, -4000 - , 
-2980, -1962, -927, 7, 931, 1956, 2981, 4031, 4972, 6213 - , 7227, 8192, 9216, 10240, 11170, 12288, 13312, 14336, 0, 1024 - , -557, 1, 571, -606, -4, 612, -1676, -707, 10, 673 - , 2048, -2048, -727, 5, 686, -3072, -1772, -755, 12, 716 - , 1877, -1856, -786, 2, 786, 1712, -1685, -818, -16, 863 - , 1729, -3072, -1762, -857, 3, 866, 1838, 2841, -3862, -2816 - , -1864, -925, -2, 923, 1897, 2779, -2782, -1838, -920, -28 - , 931, 1951, 2835, 3804, -4815, -4001, -2940, -1934, -959, -22 - , 975, 1957, 2904, 3971, 4835, -5148, -3892, -2944, -1953, -986 - , -11, 989, 1968, 2939, 3949, 4947, 5902, -9216, -8192, -6915 - , -6004, -4965, -4013, -3009, -1977, -987, -1, 982, 1972, 3000 - , 3960, 4939, 5814, -8976, -7888, -7084, -5955, -5043, -4009, -2991 - , -2002, -1000, -8, 993, 2011, 3023, 4026, 5028, 6023, 7052 - , 8014, 9216,-11240,-10036, -9125, -8118, -7105, -6062, -5048, -4047 - , -3044, -2025, -1009, -1, 1011, 2023, 3042, 4074, 5085, 6108 - , 7119, 8142, 9152, 10114, 11141, 12250, 13307,-15360,-14099,-13284 - ,-12291,-11223,-10221, -9152, -8147, -7128, -6104, -5077, -4072, -3062 - , -2033, -1020, 7, 1018, 2038, 3059, 4081, 5084, 6109, 7102 - , 8128, 9134, 10125, 11239, 12080,-23552,-22528,-21504,-20480,-19456 - ,-18159,-17240,-16291,-15364,-14285,-13305,-12271,-11233,-10217, -9198 - , -8175, -7157, -6134, -5122, -4089, -3071, -2047, -1018, 3, 1026 - , 2041, 3077, 4090, 5108, 6131, 7150, 8172, 9175, 10196, 11272 - , 12303, 13273, 14328, 15332, 16334, 17381, 18409, 19423, 20423, 21451 - , 22679, 23391, 24568, 25600, 26589 -}; - -/* cdf tables for quantizer indices */ -const uint16_t WebRtcIsacfix_kCdfGain[1212] = { - 0, 13, 301, 3730, 61784, 65167, 65489, 65535, 0, 17, - 142, 314, 929, 2466, 7678, 56450, 63463, 64740, 65204, 65426, - 65527, 65535, 0, 8, 100, 724, 6301, 60105, 65125, 65510, - 65531, 65535, 0, 13, 117, 368, 1068, 3010, 11928, 53603, - 61177, 63404, 64505, 65108, 65422, 65502, 65531, 65535, 0, 4, - 17, 96, 410, 1859, 12125, 54361, 64103, 65305, 65497, 65535, - 0, 4, 88, 230, 469, 950, 1746, 3228, 6092, 16592, - 44756, 56848, 61256, 63308, 64325, 64920, 65309, 65460, 65502, 65522, - 65535, 0, 88, 352, 1675, 6339, 20749, 46686, 59284, 63525, - 64949, 65359, 65502, 65527, 65535, 0, 13, 38, 63, 117, - 234, 381, 641, 929, 1407, 2043, 2809, 4032, 5753, 8792, - 14407, 24308, 38941, 48947, 55403, 59293, 61411, 62688, 63630, 64329, - 64840, 65188, 65376, 65472, 65506, 65527, 65531, 65535, 0, 8, - 29, 75, 222, 615, 1327, 2801, 5623, 9931, 16094, 24966, - 34419, 43458, 50676, 56186, 60055, 62500, 63936, 64765, 65225, 65435, - 65514, 65535, 0, 8, 13, 15, 17, 21, 33, 59, - 71, 92, 151, 243, 360, 456, 674, 934, 1223, 1583, - 1989, 2504, 3031, 3617, 4354, 5154, 6163, 7411, 8780, 10747, - 12874, 15591, 18974, 23027, 27436, 32020, 36948, 41830, 46205, 49797, - 53042, 56094, 58418, 60360, 61763, 62818, 63559, 64103, 64509, 64798, - 65045, 65162, 65288, 65363, 65447, 65506, 65522, 65531, 65533, 65535, - 0, 4, 6, 25, 38, 71, 138, 264, 519, 808, - 1227, 1825, 2516, 3408, 4279, 5560, 7092, 9197, 11420, 14108, - 16947, 20300, 23926, 27459, 31164, 34827, 38575, 42178, 45540, 48747, - 51444, 54090, 56426, 58460, 60080, 61595, 62734, 63668, 64275, 64673, - 64936, 65112, 65217, 65334, 65426, 65464, 65477, 65489, 65518, 65527, - 65529, 65531, 65533, 65535, 0, 2, 4, 8, 10, 12, - 14, 16, 21, 33, 50, 71, 84, 92, 105, 138, - 180, 255, 318, 377, 435, 473, 511, 590, 682, 758, - 913, 1097, 1256, 1449, 1671, 1884, 2169, 2445, 2772, 3157, - 3563, 3944, 4375, 4848, 5334, 5820, 6448, 7101, 7716, 8378, - 9102, 9956, 10752, 
11648, 12707, 13670, 14758, 15910, 17187, 18472, - 19627, 20649, 21951, 23169, 24283, 25552, 26862, 28227, 29391, 30764, - 31882, 33213, 34432, 35600, 36910, 38116, 39464, 40729, 41872, 43144, - 44371, 45514, 46762, 47813, 48968, 50069, 51032, 51974, 52908, 53737, - 54603, 55445, 56282, 56990, 57572, 58191, 58840, 59410, 59887, 60264, - 60607, 60946, 61269, 61516, 61771, 61960, 62198, 62408, 62558, 62776, - 62985, 63207, 63408, 63546, 63739, 63906, 64070, 64237, 64371, 64551, - 64677, 64836, 64999, 65095, 65213, 65284, 65338, 65380, 65426, 65447, - 65472, 65485, 65487, 65489, 65502, 65510, 65512, 65514, 65516, 65518, - 65522, 65531, 65533, 65535, 0, 2, 4, 6, 65528, 65531, - 65533, 65535, 0, 2, 4, 6, 8, 10, 222, 65321, - 65513, 65528, 65531, 65533, 65535, 0, 2, 4, 50, 65476, - 65529, 65531, 65533, 65535, 0, 2, 4, 6, 8, 12, - 38, 544, 64936, 65509, 65523, 65525, 65529, 65531, 65533, 65535, - 0, 2, 4, 6, 8, 10, 1055, 64508, 65528, 65531, - 65533, 65535, 0, 2, 4, 6, 8, 10, 12, 123, - 3956, 62999, 65372, 65495, 65515, 65521, 65523, 65525, 65527, 65529, - 65531, 65533, 65535, 0, 2, 4, 12, 53, 4707, 59445, - 65467, 65525, 65527, 65529, 65531, 65533, 65535, 0, 2, 4, - 6, 8, 10, 12, 14, 16, 38, 40, 50, 67, - 96, 234, 929, 14345, 55750, 64866, 65389, 65462, 65514, 65517, - 65519, 65521, 65523, 65525, 65527, 65529, 65531, 65533, 65535, 0, - 2, 4, 6, 8, 10, 15, 35, 91, 377, 1946, - 13618, 52565, 63714, 65184, 65465, 65520, 65523, 65525, 65527, 65529, - 65531, 65533, 65535, 0, 2, 4, 6, 8, 10, 12, - 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, - 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, - 54, 82, 149, 362, 751, 1701, 4239, 12893, 38627, 55072, - 60875, 63071, 64158, 64702, 65096, 65283, 65412, 65473, 65494, 65505, - 65508, 65517, 65519, 65521, 65523, 65525, 65527, 65529, 65531, 65533, - 65535, 0, 2, 15, 23, 53, 143, 260, 418, 698, - 988, 1353, 1812, 2411, 3144, 4015, 5143, 6401, 7611, 8999, - 10653, 12512, 14636, 16865, 19404, 22154, 24798, 27521, 30326, 33102, - 35790, 38603, 41415, 43968, 46771, 49435, 52152, 54715, 57143, 59481, - 61178, 62507, 63603, 64489, 64997, 65257, 65427, 65473, 65503, 65520, - 65529, 65531, 65533, 65535, 0, 3, 6, 9, 26, 32, - 44, 46, 64, 94, 111, 164, 205, 254, 327, 409, - 506, 608, 733, 885, 1093, 1292, 1482, 1742, 1993, 2329, - 2615, 3029, 3374, 3798, 4257, 4870, 5405, 5992, 6618, 7225, - 7816, 8418, 9051, 9761, 10532, 11380, 12113, 13010, 13788, 14594, - 15455, 16361, 17182, 18088, 18997, 20046, 20951, 21968, 22947, 24124, - 25296, 26547, 27712, 28775, 29807, 30835, 31709, 32469, 33201, 34014, - 34876, 35773, 36696, 37620, 38558, 39547, 40406, 41277, 42367, 43290, - 44445, 45443, 46510, 47684, 48973, 50157, 51187, 52242, 53209, 54083, - 55006, 55871, 56618, 57293, 57965, 58556, 59222, 59722, 60180, 60554, - 60902, 61250, 61554, 61837, 62100, 62372, 62631, 62856, 63078, 63324, - 63557, 63768, 63961, 64089, 64235, 64352, 64501, 64633, 64770, 64887, - 65001, 65059, 65121, 65188, 65246, 65302, 65346, 65390, 65428, 65463, - 65477, 65506, 65515, 65517, 65519, 65521, 65523, 65525, 65527, 65529, - 65531, 65533, 65535, 0, 2, 4, 109, 65332, 65531, 65533, - 65535, 0, 2, 4, 6, 8, 25, 1817, 63874, 65511, - 65527, 65529, 65531, 65533, 65535, 0, 2, 4, 907, 65014, - 65529, 65531, 65533, 65535, 0, 2, 4, 6, 8, 10, - 12, 132, 2743, 62708, 65430, 65525, 65527, 65529, 65531, 65533, - 65535, 0, 2, 4, 6, 8, 35, 3743, 61666, 65485, - 65531, 65533, 65535, 0, 2, 4, 6, 8, 10, 23, - 109, 683, 6905, 58417, 64911, 65398, 65497, 65518, 65525, 65527, - 65529, 65531, 65533, 65535, 0, 2, 4, 6, 53, 510, - 10209, 
55212, 64573, 65441, 65522, 65529, 65531, 65533, 65535, 0, - 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, - 22, 32, 90, 266, 1037, 3349, 14468, 50488, 62394, 64685, - 65341, 65480, 65514, 65519, 65521, 65523, 65525, 65527, 65529, 65531, - 65533, 65535, 0, 2, 4, 6, 9, 16, 37, 106, - 296, 748, 1868, 5733, 18897, 45553, 60165, 63949, 64926, 65314, - 65441, 65508, 65524, 65529, 65531, 65533, 65535, 0, 2, 4, - 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, - 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, - 46, 48, 50, 83, 175, 344, 667, 1293, 2337, 4357, - 8033, 14988, 28600, 43244, 52011, 57042, 59980, 61779, 63065, 63869, - 64390, 64753, 64988, 65164, 65326, 65422, 65462, 65492, 65506, 65522, - 65524, 65526, 65531, 65533, 65535, 0, 2, 4, 6, 8, - 10, 12, 14, 16, 25, 39, 48, 55, 62, 65, - 85, 106, 139, 169, 194, 252, 323, 485, 688, 1074, - 1600, 2544, 3863, 5733, 8303, 11397, 15529, 20273, 25734, 31455, - 36853, 41891, 46410, 50306, 53702, 56503, 58673, 60479, 61880, 62989, - 63748, 64404, 64852, 65124, 65309, 65424, 65480, 65524, 65528, 65533, - 65535, 0, 2, 4, 6, 8, 10, 12, 14, 21, - 23, 25, 27, 29, 31, 39, 41, 43, 48, 60, - 72, 79, 106, 136, 166, 187, 224, 252, 323, 381, - 427, 478, 568, 660, 783, 912, 1046, 1175, 1365, 1567, - 1768, 2024, 2347, 2659, 3049, 3529, 4033, 4623, 5281, 5925, - 6726, 7526, 8417, 9468, 10783, 12141, 13571, 15222, 16916, 18659, - 20350, 22020, 23725, 25497, 27201, 29026, 30867, 32632, 34323, 36062, - 37829, 39466, 41144, 42654, 43981, 45343, 46579, 47759, 49013, 50171, - 51249, 52283, 53245, 54148, 54938, 55669, 56421, 57109, 57791, 58464, - 59092, 59674, 60105, 60653, 61083, 61407, 61757, 62095, 62388, 62649, - 62873, 63157, 63358, 63540, 63725, 63884, 64046, 64155, 64278, 64426, - 64548, 64654, 64806, 64906, 64994, 65077, 65137, 65215, 65277, 65324, - 65354, 65409, 65437, 65455, 65462, 65490, 65495, 65499, 65508, 65511, - 65513, 65515, 65517, 65519, 65521, 65523, 65525, 65527, 65529, 65531, - 65533, 65535 -}; - -const uint16_t WebRtcIsacfix_kCdfShape[2059] = { - 0, 65535, 0, 65535, 0, 65535, 0, 65535, 0, 65535, - 0, 65535, 0, 65535, 0, 65535, 0, 65535, 0, 4, - 65535, 0, 8, 65514, 65535, 0, 29, 65481, 65535, 0, - 121, 65439, 65535, 0, 239, 65284, 65535, 0, 8, 779, - 64999, 65527, 65535, 0, 8, 888, 64693, 65522, 65535, 0, - 29, 2604, 62843, 65497, 65531, 65535, 0, 25, 176, 4576, - 61164, 65275, 65527, 65535, 0, 65535, 0, 65535, 0, 65535, - 0, 65535, 0, 4, 65535, 0, 65535, 0, 65535, 0, - 65535, 0, 65535, 0, 4, 65535, 0, 33, 65502, 65535, - 0, 54, 65481, 65535, 0, 251, 65309, 65535, 0, 611, - 65074, 65535, 0, 1273, 64292, 65527, 65535, 0, 4, 1809, - 63940, 65518, 65535, 0, 88, 4392, 60603, 65426, 65531, 65535, - 0, 25, 419, 7046, 57756, 64961, 65514, 65531, 65535, 0, - 65535, 0, 65535, 0, 65535, 0, 65535, 0, 4, 65531, - 65535, 0, 65535, 0, 8, 65531, 65535, 0, 4, 65527, - 65535, 0, 17, 65510, 65535, 0, 42, 65481, 65535, 0, - 197, 65342, 65531, 65535, 0, 385, 65154, 65535, 0, 1005, - 64522, 65535, 0, 8, 1985, 63469, 65533, 65535, 0, 38, - 3119, 61884, 65514, 65535, 0, 4, 6, 67, 4961, 60804, - 65472, 65535, 0, 17, 565, 9182, 56538, 65087, 65514, 65535, - 0, 8, 63, 327, 2118, 14490, 52774, 63839, 65376, 65522, - 65535, 0, 65535, 0, 65535, 0, 65535, 0, 65535, 0, - 17, 65522, 65535, 0, 59, 65489, 65535, 0, 50, 65522, - 65535, 0, 54, 65489, 65535, 0, 310, 65179, 65535, 0, - 615, 64836, 65535, 0, 4, 1503, 63965, 65535, 0, 2780, - 63383, 65535, 0, 21, 3919, 61051, 65527, 65535, 0, 84, - 6674, 59929, 65435, 65535, 0, 4, 255, 7976, 55784, 65150, - 65518, 65531, 65535, 0, 4, 8, 582, 10726, 53465, 64949, 
- 65518, 65535, 0, 29, 339, 3006, 17555, 49517, 62956, 65200, - 65497, 65531, 65535, 0, 2, 33, 138, 565, 2324, 7670, - 22089, 45966, 58949, 63479, 64966, 65380, 65518, 65535, 0, 65535, - 0, 65535, 0, 2, 65533, 65535, 0, 46, 65514, 65535, - 0, 414, 65091, 65535, 0, 540, 64911, 65535, 0, 419, - 65162, 65535, 0, 976, 64790, 65535, 0, 2977, 62495, 65531, - 65535, 0, 4, 3852, 61034, 65527, 65535, 0, 4, 29, - 6021, 60243, 65468, 65535, 0, 84, 6711, 58066, 65418, 65535, - 0, 13, 281, 9550, 54917, 65125, 65506, 65535, 0, 2, - 63, 984, 12108, 52644, 64342, 65435, 65527, 65535, 0, 29, - 251, 2014, 14871, 47553, 62881, 65229, 65518, 65535, 0, 13, - 142, 749, 4220, 18497, 45200, 60913, 64823, 65426, 65527, 65535, - 0, 13, 71, 264, 1176, 3789, 10500, 24480, 43488, 56324, - 62315, 64493, 65242, 65464, 65514, 65522, 65531, 65535, 0, 4, - 13, 38, 109, 205, 448, 850, 1708, 3429, 6276, 11371, - 19221, 29734, 40955, 49391, 55411, 59460, 62102, 63793, 64656, 65150, - 65401, 65485, 65522, 65531, 65535, 0, 65535, 0, 2, 65533, - 65535, 0, 1160, 65476, 65535, 0, 2, 6640, 64763, 65533, - 65535, 0, 2, 38, 9923, 61009, 65527, 65535, 0, 2, - 4949, 63092, 65533, 65535, 0, 2, 3090, 63398, 65533, 65535, - 0, 2, 2520, 58744, 65510, 65535, 0, 2, 13, 544, - 8784, 51403, 65148, 65533, 65535, 0, 2, 25, 1017, 10412, - 43550, 63651, 65489, 65527, 65535, 0, 2, 4, 29, 783, - 13377, 52462, 64524, 65495, 65533, 65535, 0, 2, 4, 6, - 100, 1817, 18451, 52590, 63559, 65376, 65531, 65535, 0, 2, - 4, 6, 46, 385, 2562, 11225, 37416, 60488, 65026, 65487, - 65529, 65533, 65535, 0, 2, 4, 6, 8, 10, 12, - 42, 222, 971, 5221, 19811, 45048, 60312, 64486, 65294, 65474, - 65525, 65529, 65533, 65535, 0, 2, 4, 8, 71, 167, - 666, 2533, 7875, 19622, 38082, 54359, 62108, 64633, 65290, 65495, - 65529, 65533, 65535, 0, 2, 4, 6, 8, 10, 13, - 109, 586, 1930, 4949, 11600, 22641, 36125, 48312, 56899, 61495, - 63927, 64932, 65389, 65489, 65518, 65531, 65533, 65535, 0, 4, - 6, 8, 67, 209, 712, 1838, 4195, 8432, 14432, 22834, - 31723, 40523, 48139, 53929, 57865, 60657, 62403, 63584, 64363, 64907, - 65167, 65372, 65472, 65514, 65535, 0, 2, 4, 13, 25, - 42, 46, 50, 75, 113, 147, 281, 448, 657, 909, - 1185, 1591, 1976, 2600, 3676, 5317, 7398, 9914, 12941, 16169, - 19477, 22885, 26464, 29851, 33360, 37228, 41139, 44802, 48654, 52058, - 55181, 57676, 59581, 61022, 62190, 63107, 63676, 64199, 64547, 64924, - 65158, 65313, 65430, 65481, 65518, 65535, 0, 65535, 0, 65535, - 0, 65535, 0, 65535, 0, 65533, 65535, 0, 65535, 0, - 65535, 0, 65535, 0, 65533, 65535, 0, 2, 65535, 0, - 2, 65533, 65535, 0, 2, 65533, 65535, 0, 2, 65533, - 65535, 0, 2, 4, 65533, 65535, 0, 2, 65533, 65535, - 0, 2, 4, 65531, 65533, 65535, 0, 2, 4, 65531, - 65533, 65535, 0, 2, 4, 6, 65524, 65533, 65535, 0, - 65535, 0, 65535, 0, 65535, 0, 65535, 0, 65535, 0, - 65535, 0, 65535, 0, 65535, 0, 65533, 65535, 0, 65533, - 65535, 0, 2, 65533, 65535, 0, 2, 65533, 65535, 0, - 2, 65533, 65535, 0, 2, 4, 65532, 65535, 0, 6, - 65523, 65535, 0, 2, 15, 65530, 65533, 65535, 0, 2, - 35, 65493, 65531, 65533, 65535, 0, 2, 4, 158, 65382, - 65531, 65533, 65535, 0, 65535, 0, 65535, 0, 65535, 0, - 65535, 0, 65535, 0, 65535, 0, 2, 65535, 0, 2, - 65533, 65535, 0, 2, 65533, 65535, 0, 2, 65533, 65535, - 0, 2, 65533, 65535, 0, 9, 65512, 65535, 0, 2, - 12, 65529, 65535, 0, 2, 73, 65434, 65533, 65535, 0, - 2, 240, 65343, 65533, 65535, 0, 2, 476, 65017, 65531, - 65533, 65535, 0, 2, 4, 1046, 64686, 65531, 65533, 65535, - 0, 2, 4, 6, 8, 1870, 63898, 65529, 65531, 65533, - 65535, 0, 65535, 0, 65535, 0, 65535, 0, 65533, 65535, - 0, 2, 
65533, 65535, 0, 2, 65533, 65535, 0, 2, - 65532, 65535, 0, 6, 65533, 65535, 0, 6, 65523, 65535, - 0, 2, 65532, 65535, 0, 137, 65439, 65535, 0, 576, - 64899, 65533, 65535, 0, 2, 289, 65299, 65533, 65535, 0, - 2, 4, 6, 880, 64134, 65531, 65533, 65535, 0, 2, - 4, 1853, 63347, 65533, 65535, 0, 2, 6, 2516, 61762, - 65529, 65531, 65533, 65535, 0, 2, 4, 9, 3980, 61380, - 65503, 65529, 65531, 65533, 65535, 0, 2, 4, 6, 8, - 10, 12, 61, 6393, 59859, 65466, 65527, 65529, 65531, 65533, - 65535, 0, 65535, 0, 65535, 0, 65535, 0, 2, 65532, - 65535, 0, 3, 65529, 65535, 0, 2, 65529, 65535, 0, - 61, 65453, 65535, 0, 234, 65313, 65535, 0, 503, 65138, - 65535, 0, 155, 65402, 65533, 65535, 0, 2, 1058, 64554, - 65533, 65535, 0, 2, 4, 3138, 62109, 65531, 65533, 65535, - 0, 2, 4, 2031, 63339, 65531, 65533, 65535, 0, 2, - 4, 6, 9, 4155, 60778, 65523, 65529, 65531, 65533, 65535, - 0, 2, 4, 41, 6189, 59269, 65490, 65531, 65533, 65535, - 0, 2, 4, 6, 210, 8789, 57043, 65400, 65528, 65531, - 65533, 65535, 0, 2, 4, 6, 8, 26, 453, 10086, - 55499, 64948, 65483, 65524, 65527, 65529, 65531, 65533, 65535, 0, - 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, - 114, 1014, 11202, 52670, 64226, 65356, 65503, 65514, 65523, 65525, - 65527, 65529, 65531, 65533, 65535, 0, 65533, 65535, 0, 15, - 65301, 65535, 0, 152, 64807, 65535, 0, 2, 3328, 63308, - 65535, 0, 2, 4050, 59730, 65533, 65535, 0, 2, 164, - 10564, 61894, 65529, 65535, 0, 15, 6712, 59831, 65076, 65532, - 65535, 0, 32, 7712, 57449, 65459, 65535, 0, 2, 210, - 7849, 53110, 65021, 65523, 65535, 0, 2, 12, 1081, 13883, - 48262, 62870, 65477, 65535, 0, 2, 88, 847, 6145, 37852, - 62012, 65454, 65533, 65535, 0, 9, 47, 207, 1823, 14522, - 45521, 61069, 64891, 65481, 65528, 65531, 65533, 65535, 0, 2, - 9, 488, 2881, 12758, 38703, 58412, 64420, 65410, 65533, 65535, - 0, 2, 4, 6, 61, 333, 1891, 6486, 19720, 43188, - 57547, 62472, 64796, 65421, 65497, 65523, 65529, 65531, 65533, 65535, - 0, 2, 4, 6, 8, 10, 12, 29, 117, 447, - 1528, 6138, 21242, 43133, 56495, 62432, 64746, 65362, 65500, 65529, - 65531, 65533, 65535, 0, 2, 18, 105, 301, 760, 1490, - 3472, 7568, 15002, 26424, 40330, 53029, 60048, 62964, 64274, 64890, - 65337, 65445, 65489, 65513, 65527, 65530, 65533, 65535, 0, 2, - 4, 6, 41, 102, 409, 853, 2031, 4316, 7302, 11328, - 16869, 24825, 34926, 43481, 50877, 56126, 59874, 62103, 63281, 63857, - 64166, 64675, 65382, 65522, 65531, 65533, 65535, 0, 2, 4, - 6, 8, 10, 12, 14, 16, 18, 29, 38, 53, - 58, 96, 181, 503, 1183, 2849, 5590, 8600, 11379, 13942, - 16478, 19453, 22638, 26039, 29411, 32921, 37596, 41433, 44998, 48560, - 51979, 55106, 57666, 59892, 61485, 62616, 63484, 64018, 64375, 64685, - 64924, 65076, 65278, 65395, 65471, 65509, 65529, 65535, 0, 65535, - 0, 65535, 0, 65535, 0, 65535, 0, 65535, 0, 65535, - 0, 65535, 0, 65535, 0, 2, 65533, 65535, 0, 2, - 65533, 65535, 0, 2, 65533, 65535, 0, 2, 65533, 65535, - 0, 2, 65533, 65535, 0, 2, 65533, 65535, 0, 7, - 65519, 65535, 0, 2, 14, 65491, 65533, 65535, 0, 2, - 81, 65427, 65531, 65533, 65535, 0, 2, 4, 312, 65293, - 65528, 65533, 65535, 0, 65535, 0, 65535, 0, 65535, 0, - 65535, 0, 65535, 0, 65535, 0, 65535, 0, 65535, 0, - 2, 65533, 65535, 0, 2, 65533, 65535, 0, 2, 65533, - 65535, 0, 5, 65523, 65535, 0, 2, 65533, 65535, 0, - 7, 65526, 65535, 0, 46, 65464, 65533, 65535, 0, 2, - 120, 65309, 65533, 65535, 0, 2, 5, 362, 65097, 65533, - 65535, 0, 2, 18, 1164, 64785, 65528, 65531, 65533, 65535, - 0, 65535, 0, 65535, 0, 65535, 0, 65533, 65535, 0, - 65535, 0, 65533, 65535, 0, 2, 65533, 65535, 0, 2, - 65533, 65535, 0, 2, 65533, 65535, 0, 2, 65530, 65535, 
- 0, 2, 65523, 65535, 0, 69, 65477, 65535, 0, 141, - 65459, 65535, 0, 194, 65325, 65533, 65535, 0, 2, 543, - 64912, 65533, 65535, 0, 5, 1270, 64301, 65529, 65531, 65533, - 65535, 0, 2, 4, 12, 2055, 63538, 65508, 65531, 65533, - 65535, 0, 2, 7, 102, 3775, 61970, 65429, 65526, 65528, - 65533, 65535, 0, 65535, 0, 65535, 0, 65535, 0, 2, - 65533, 65535, 0, 2, 65535, 0, 9, 65533, 65535, 0, - 25, 65512, 65535, 0, 2, 65533, 65535, 0, 44, 65480, - 65535, 0, 48, 65475, 65535, 0, 162, 65373, 65535, 0, - 637, 64806, 65533, 65535, 0, 2, 935, 64445, 65533, 65535, - 0, 2, 4, 1662, 64083, 65533, 65535, 0, 2, 12, - 3036, 62469, 65521, 65533, 65535, 0, 2, 120, 5405, 60468, - 65469, 65531, 65533, 65535, 0, 2, 4, 18, 254, 6663, - 58999, 65272, 65528, 65533, 65535, 0, 2, 4, 9, 12, - 67, 591, 8981, 56781, 64564, 65365, 65508, 65524, 65526, 65529, - 65531, 65533, 65535, 0, 65535, 0, 65535, 0, 2, 65533, - 65535, 0, 9, 65526, 65535, 0, 14, 65503, 65535, 0, - 127, 65390, 65535, 0, 517, 64990, 65535, 0, 178, 65330, - 65535, 0, 2, 1055, 64533, 65533, 65535, 0, 2, 1558, - 63942, 65533, 65535, 0, 2, 2205, 63173, 65533, 65535, 0, - 25, 4493, 60862, 65505, 65533, 65535, 0, 2, 48, 5890, - 59442, 65482, 65533, 65535, 0, 2, 4, 127, 7532, 58191, - 65394, 65533, 65535, 0, 2, 5, 32, 550, 10388, 54924, - 65046, 65510, 65531, 65533, 65535, 0, 2, 4, 30, 150, - 1685, 14340, 51375, 63619, 65288, 65503, 65528, 65533, 65535, 0, - 2, 4, 6, 8, 28, 97, 473, 2692, 15407, 50020, - 62880, 65064, 65445, 65508, 65531, 65533, 65535, 0, 2, 4, - 12, 32, 79, 150, 372, 907, 2184, 5868, 18207, 45431, - 59856, 64031, 65096, 65401, 65481, 65507, 65521, 65523, 65525, 65527, - 65529, 65531, 65533, 65535, 0, 65533, 65535, 0, 182, 65491, - 65535, 0, 877, 64286, 65535, 0, 9, 2708, 63612, 65533, - 65535, 0, 2, 6038, 59532, 65535, 0, 2, 92, 5500, - 60539, 65533, 65535, 0, 268, 8908, 56512, 65385, 65535, 0, - 129, 13110, 52742, 65036, 65535, 0, 2, 806, 14003, 51929, - 64732, 65523, 65535, 0, 7, 92, 2667, 18159, 47678, 62610, - 65355, 65535, 0, 32, 1836, 19676, 48237, 61677, 64960, 65526, - 65535, 0, 21, 159, 967, 5668, 22782, 44709, 58317, 64020, - 65406, 65528, 65535, 0, 7, 162, 1838, 8328, 23929, 43014, - 56394, 63374, 65216, 65484, 65521, 65535, 0, 2, 4, 6, - 28, 268, 1120, 3613, 10688, 24185, 40989, 54917, 61684, 64510, - 65403, 65530, 65535, 0, 2, 16, 44, 139, 492, 1739, - 5313, 13558, 26766, 41566, 52446, 58937, 62815, 64480, 65201, 65454, - 65524, 65533, 65535, 0, 7, 25, 76, 263, 612, 1466, - 3325, 6832, 12366, 20152, 29466, 39255, 47360, 53506, 57740, 60726, - 62845, 64131, 64882, 65260, 65459, 65521, 65528, 65530, 65535, 0, - 2, 4, 14, 48, 136, 312, 653, 1240, 2369, 4327, - 7028, 10759, 15449, 21235, 28027, 35386, 42938, 49562, 54990, 59119, - 62086, 63916, 64863, 65249, 65445, 65493, 65523, 65535, 0, 2, - 4, 6, 8, 10, 12, 21, 83, 208, 409, 723, - 1152, 1868, 2951, 4463, 6460, 8979, 11831, 15195, 18863, 22657, - 26762, 30881, 34963, 39098, 43054, 47069, 50620, 53871, 56821, 59386, - 61340, 62670, 63512, 64023, 64429, 64750, 64944, 65126, 65279, 65366, - 65413, 65445, 65473, 65505, 65510, 65521, 65528, 65530, 65535 -}; - -/* pointers to cdf tables for quantizer indices */ -const uint16_t *WebRtcIsacfix_kCdfGainPtr[3][12] = { - { WebRtcIsacfix_kCdfGain +0 +0, WebRtcIsacfix_kCdfGain +0 +8, WebRtcIsacfix_kCdfGain +0 +22, - WebRtcIsacfix_kCdfGain +0 +32, WebRtcIsacfix_kCdfGain +0 +48, WebRtcIsacfix_kCdfGain +0 +60, - WebRtcIsacfix_kCdfGain +0 +81, WebRtcIsacfix_kCdfGain +0 +95, WebRtcIsacfix_kCdfGain +0 +128, - WebRtcIsacfix_kCdfGain +0 +152, 
WebRtcIsacfix_kCdfGain +0 +210, WebRtcIsacfix_kCdfGain +0 +264 - }, - { WebRtcIsacfix_kCdfGain +404 +0, WebRtcIsacfix_kCdfGain +404 +8, WebRtcIsacfix_kCdfGain +404 +21, - WebRtcIsacfix_kCdfGain +404 +30, WebRtcIsacfix_kCdfGain +404 +46, WebRtcIsacfix_kCdfGain +404 +58, - WebRtcIsacfix_kCdfGain +404 +79, WebRtcIsacfix_kCdfGain +404 +93, WebRtcIsacfix_kCdfGain +404 +125, - WebRtcIsacfix_kCdfGain +404 +149, WebRtcIsacfix_kCdfGain +404 +207, WebRtcIsacfix_kCdfGain +404 +260 - }, - { WebRtcIsacfix_kCdfGain +803 +0, WebRtcIsacfix_kCdfGain +803 +8, WebRtcIsacfix_kCdfGain +803 +22, - WebRtcIsacfix_kCdfGain +803 +31, WebRtcIsacfix_kCdfGain +803 +48, WebRtcIsacfix_kCdfGain +803 +60, - WebRtcIsacfix_kCdfGain +803 +81, WebRtcIsacfix_kCdfGain +803 +96, WebRtcIsacfix_kCdfGain +803 +129, - WebRtcIsacfix_kCdfGain +803 +154, WebRtcIsacfix_kCdfGain +803 +212, WebRtcIsacfix_kCdfGain +803 +268 - } -}; - -const uint16_t *WebRtcIsacfix_kCdfShapePtr[3][108] = { - { WebRtcIsacfix_kCdfShape +0 +0, WebRtcIsacfix_kCdfShape +0 +2, WebRtcIsacfix_kCdfShape +0 +4, - WebRtcIsacfix_kCdfShape +0 +6, WebRtcIsacfix_kCdfShape +0 +8, WebRtcIsacfix_kCdfShape +0 +10, - WebRtcIsacfix_kCdfShape +0 +12, WebRtcIsacfix_kCdfShape +0 +14, WebRtcIsacfix_kCdfShape +0 +16, - WebRtcIsacfix_kCdfShape +0 +18, WebRtcIsacfix_kCdfShape +0 +21, WebRtcIsacfix_kCdfShape +0 +25, - WebRtcIsacfix_kCdfShape +0 +29, WebRtcIsacfix_kCdfShape +0 +33, WebRtcIsacfix_kCdfShape +0 +37, - WebRtcIsacfix_kCdfShape +0 +43, WebRtcIsacfix_kCdfShape +0 +49, WebRtcIsacfix_kCdfShape +0 +56, - WebRtcIsacfix_kCdfShape +0 +64, WebRtcIsacfix_kCdfShape +0 +66, WebRtcIsacfix_kCdfShape +0 +68, - WebRtcIsacfix_kCdfShape +0 +70, WebRtcIsacfix_kCdfShape +0 +72, WebRtcIsacfix_kCdfShape +0 +75, - WebRtcIsacfix_kCdfShape +0 +77, WebRtcIsacfix_kCdfShape +0 +79, WebRtcIsacfix_kCdfShape +0 +81, - WebRtcIsacfix_kCdfShape +0 +83, WebRtcIsacfix_kCdfShape +0 +86, WebRtcIsacfix_kCdfShape +0 +90, - WebRtcIsacfix_kCdfShape +0 +94, WebRtcIsacfix_kCdfShape +0 +98, WebRtcIsacfix_kCdfShape +0 +102, - WebRtcIsacfix_kCdfShape +0 +107, WebRtcIsacfix_kCdfShape +0 +113, WebRtcIsacfix_kCdfShape +0 +120, - WebRtcIsacfix_kCdfShape +0 +129, WebRtcIsacfix_kCdfShape +0 +131, WebRtcIsacfix_kCdfShape +0 +133, - WebRtcIsacfix_kCdfShape +0 +135, WebRtcIsacfix_kCdfShape +0 +137, WebRtcIsacfix_kCdfShape +0 +141, - WebRtcIsacfix_kCdfShape +0 +143, WebRtcIsacfix_kCdfShape +0 +147, WebRtcIsacfix_kCdfShape +0 +151, - WebRtcIsacfix_kCdfShape +0 +155, WebRtcIsacfix_kCdfShape +0 +159, WebRtcIsacfix_kCdfShape +0 +164, - WebRtcIsacfix_kCdfShape +0 +168, WebRtcIsacfix_kCdfShape +0 +172, WebRtcIsacfix_kCdfShape +0 +178, - WebRtcIsacfix_kCdfShape +0 +184, WebRtcIsacfix_kCdfShape +0 +192, WebRtcIsacfix_kCdfShape +0 +200, - WebRtcIsacfix_kCdfShape +0 +211, WebRtcIsacfix_kCdfShape +0 +213, WebRtcIsacfix_kCdfShape +0 +215, - WebRtcIsacfix_kCdfShape +0 +217, WebRtcIsacfix_kCdfShape +0 +219, WebRtcIsacfix_kCdfShape +0 +223, - WebRtcIsacfix_kCdfShape +0 +227, WebRtcIsacfix_kCdfShape +0 +231, WebRtcIsacfix_kCdfShape +0 +235, - WebRtcIsacfix_kCdfShape +0 +239, WebRtcIsacfix_kCdfShape +0 +243, WebRtcIsacfix_kCdfShape +0 +248, - WebRtcIsacfix_kCdfShape +0 +252, WebRtcIsacfix_kCdfShape +0 +258, WebRtcIsacfix_kCdfShape +0 +264, - WebRtcIsacfix_kCdfShape +0 +273, WebRtcIsacfix_kCdfShape +0 +282, WebRtcIsacfix_kCdfShape +0 +293, - WebRtcIsacfix_kCdfShape +0 +308, WebRtcIsacfix_kCdfShape +0 +310, WebRtcIsacfix_kCdfShape +0 +312, - WebRtcIsacfix_kCdfShape +0 +316, WebRtcIsacfix_kCdfShape +0 +320, WebRtcIsacfix_kCdfShape +0 +324, - 
WebRtcIsacfix_kCdfShape +0 +328, WebRtcIsacfix_kCdfShape +0 +332, WebRtcIsacfix_kCdfShape +0 +336, - WebRtcIsacfix_kCdfShape +0 +341, WebRtcIsacfix_kCdfShape +0 +347, WebRtcIsacfix_kCdfShape +0 +354, - WebRtcIsacfix_kCdfShape +0 +360, WebRtcIsacfix_kCdfShape +0 +368, WebRtcIsacfix_kCdfShape +0 +378, - WebRtcIsacfix_kCdfShape +0 +388, WebRtcIsacfix_kCdfShape +0 +400, WebRtcIsacfix_kCdfShape +0 +418, - WebRtcIsacfix_kCdfShape +0 +445, WebRtcIsacfix_kCdfShape +0 +447, WebRtcIsacfix_kCdfShape +0 +451, - WebRtcIsacfix_kCdfShape +0 +455, WebRtcIsacfix_kCdfShape +0 +461, WebRtcIsacfix_kCdfShape +0 +468, - WebRtcIsacfix_kCdfShape +0 +474, WebRtcIsacfix_kCdfShape +0 +480, WebRtcIsacfix_kCdfShape +0 +486, - WebRtcIsacfix_kCdfShape +0 +495, WebRtcIsacfix_kCdfShape +0 +505, WebRtcIsacfix_kCdfShape +0 +516, - WebRtcIsacfix_kCdfShape +0 +528, WebRtcIsacfix_kCdfShape +0 +543, WebRtcIsacfix_kCdfShape +0 +564, - WebRtcIsacfix_kCdfShape +0 +583, WebRtcIsacfix_kCdfShape +0 +608, WebRtcIsacfix_kCdfShape +0 +635 - }, - { WebRtcIsacfix_kCdfShape +686 +0, WebRtcIsacfix_kCdfShape +686 +2, WebRtcIsacfix_kCdfShape +686 +4, - WebRtcIsacfix_kCdfShape +686 +6, WebRtcIsacfix_kCdfShape +686 +8, WebRtcIsacfix_kCdfShape +686 +11, - WebRtcIsacfix_kCdfShape +686 +13, WebRtcIsacfix_kCdfShape +686 +15, WebRtcIsacfix_kCdfShape +686 +17, - WebRtcIsacfix_kCdfShape +686 +20, WebRtcIsacfix_kCdfShape +686 +23, WebRtcIsacfix_kCdfShape +686 +27, - WebRtcIsacfix_kCdfShape +686 +31, WebRtcIsacfix_kCdfShape +686 +35, WebRtcIsacfix_kCdfShape +686 +40, - WebRtcIsacfix_kCdfShape +686 +44, WebRtcIsacfix_kCdfShape +686 +50, WebRtcIsacfix_kCdfShape +686 +56, - WebRtcIsacfix_kCdfShape +686 +63, WebRtcIsacfix_kCdfShape +686 +65, WebRtcIsacfix_kCdfShape +686 +67, - WebRtcIsacfix_kCdfShape +686 +69, WebRtcIsacfix_kCdfShape +686 +71, WebRtcIsacfix_kCdfShape +686 +73, - WebRtcIsacfix_kCdfShape +686 +75, WebRtcIsacfix_kCdfShape +686 +77, WebRtcIsacfix_kCdfShape +686 +79, - WebRtcIsacfix_kCdfShape +686 +82, WebRtcIsacfix_kCdfShape +686 +85, WebRtcIsacfix_kCdfShape +686 +89, - WebRtcIsacfix_kCdfShape +686 +93, WebRtcIsacfix_kCdfShape +686 +97, WebRtcIsacfix_kCdfShape +686 +102, - WebRtcIsacfix_kCdfShape +686 +106, WebRtcIsacfix_kCdfShape +686 +112, WebRtcIsacfix_kCdfShape +686 +119, - WebRtcIsacfix_kCdfShape +686 +127, WebRtcIsacfix_kCdfShape +686 +129, WebRtcIsacfix_kCdfShape +686 +131, - WebRtcIsacfix_kCdfShape +686 +133, WebRtcIsacfix_kCdfShape +686 +135, WebRtcIsacfix_kCdfShape +686 +137, - WebRtcIsacfix_kCdfShape +686 +139, WebRtcIsacfix_kCdfShape +686 +142, WebRtcIsacfix_kCdfShape +686 +146, - WebRtcIsacfix_kCdfShape +686 +150, WebRtcIsacfix_kCdfShape +686 +154, WebRtcIsacfix_kCdfShape +686 +158, - WebRtcIsacfix_kCdfShape +686 +162, WebRtcIsacfix_kCdfShape +686 +167, WebRtcIsacfix_kCdfShape +686 +173, - WebRtcIsacfix_kCdfShape +686 +179, WebRtcIsacfix_kCdfShape +686 +186, WebRtcIsacfix_kCdfShape +686 +194, - WebRtcIsacfix_kCdfShape +686 +205, WebRtcIsacfix_kCdfShape +686 +207, WebRtcIsacfix_kCdfShape +686 +209, - WebRtcIsacfix_kCdfShape +686 +211, WebRtcIsacfix_kCdfShape +686 +214, WebRtcIsacfix_kCdfShape +686 +218, - WebRtcIsacfix_kCdfShape +686 +222, WebRtcIsacfix_kCdfShape +686 +226, WebRtcIsacfix_kCdfShape +686 +230, - WebRtcIsacfix_kCdfShape +686 +234, WebRtcIsacfix_kCdfShape +686 +238, WebRtcIsacfix_kCdfShape +686 +242, - WebRtcIsacfix_kCdfShape +686 +247, WebRtcIsacfix_kCdfShape +686 +253, WebRtcIsacfix_kCdfShape +686 +262, - WebRtcIsacfix_kCdfShape +686 +269, WebRtcIsacfix_kCdfShape +686 +278, WebRtcIsacfix_kCdfShape +686 +289, - 
WebRtcIsacfix_kCdfShape +686 +305, WebRtcIsacfix_kCdfShape +686 +307, WebRtcIsacfix_kCdfShape +686 +309, - WebRtcIsacfix_kCdfShape +686 +311, WebRtcIsacfix_kCdfShape +686 +315, WebRtcIsacfix_kCdfShape +686 +319, - WebRtcIsacfix_kCdfShape +686 +323, WebRtcIsacfix_kCdfShape +686 +327, WebRtcIsacfix_kCdfShape +686 +331, - WebRtcIsacfix_kCdfShape +686 +335, WebRtcIsacfix_kCdfShape +686 +340, WebRtcIsacfix_kCdfShape +686 +346, - WebRtcIsacfix_kCdfShape +686 +354, WebRtcIsacfix_kCdfShape +686 +362, WebRtcIsacfix_kCdfShape +686 +374, - WebRtcIsacfix_kCdfShape +686 +384, WebRtcIsacfix_kCdfShape +686 +396, WebRtcIsacfix_kCdfShape +686 +413, - WebRtcIsacfix_kCdfShape +686 +439, WebRtcIsacfix_kCdfShape +686 +442, WebRtcIsacfix_kCdfShape +686 +446, - WebRtcIsacfix_kCdfShape +686 +450, WebRtcIsacfix_kCdfShape +686 +455, WebRtcIsacfix_kCdfShape +686 +461, - WebRtcIsacfix_kCdfShape +686 +468, WebRtcIsacfix_kCdfShape +686 +475, WebRtcIsacfix_kCdfShape +686 +481, - WebRtcIsacfix_kCdfShape +686 +489, WebRtcIsacfix_kCdfShape +686 +498, WebRtcIsacfix_kCdfShape +686 +508, - WebRtcIsacfix_kCdfShape +686 +522, WebRtcIsacfix_kCdfShape +686 +534, WebRtcIsacfix_kCdfShape +686 +554, - WebRtcIsacfix_kCdfShape +686 +577, WebRtcIsacfix_kCdfShape +686 +602, WebRtcIsacfix_kCdfShape +686 +631 - }, - { WebRtcIsacfix_kCdfShape +1368 +0, WebRtcIsacfix_kCdfShape +1368 +2, WebRtcIsacfix_kCdfShape +1368 +4, - WebRtcIsacfix_kCdfShape +1368 +6, WebRtcIsacfix_kCdfShape +1368 +8, WebRtcIsacfix_kCdfShape +1368 +10, - WebRtcIsacfix_kCdfShape +1368 +12, WebRtcIsacfix_kCdfShape +1368 +14, WebRtcIsacfix_kCdfShape +1368 +16, - WebRtcIsacfix_kCdfShape +1368 +20, WebRtcIsacfix_kCdfShape +1368 +24, WebRtcIsacfix_kCdfShape +1368 +28, - WebRtcIsacfix_kCdfShape +1368 +32, WebRtcIsacfix_kCdfShape +1368 +36, WebRtcIsacfix_kCdfShape +1368 +40, - WebRtcIsacfix_kCdfShape +1368 +44, WebRtcIsacfix_kCdfShape +1368 +50, WebRtcIsacfix_kCdfShape +1368 +57, - WebRtcIsacfix_kCdfShape +1368 +65, WebRtcIsacfix_kCdfShape +1368 +67, WebRtcIsacfix_kCdfShape +1368 +69, - WebRtcIsacfix_kCdfShape +1368 +71, WebRtcIsacfix_kCdfShape +1368 +73, WebRtcIsacfix_kCdfShape +1368 +75, - WebRtcIsacfix_kCdfShape +1368 +77, WebRtcIsacfix_kCdfShape +1368 +79, WebRtcIsacfix_kCdfShape +1368 +81, - WebRtcIsacfix_kCdfShape +1368 +85, WebRtcIsacfix_kCdfShape +1368 +89, WebRtcIsacfix_kCdfShape +1368 +93, - WebRtcIsacfix_kCdfShape +1368 +97, WebRtcIsacfix_kCdfShape +1368 +101, WebRtcIsacfix_kCdfShape +1368 +105, - WebRtcIsacfix_kCdfShape +1368 +110, WebRtcIsacfix_kCdfShape +1368 +116, WebRtcIsacfix_kCdfShape +1368 +123, - WebRtcIsacfix_kCdfShape +1368 +132, WebRtcIsacfix_kCdfShape +1368 +134, WebRtcIsacfix_kCdfShape +1368 +136, - WebRtcIsacfix_kCdfShape +1368 +138, WebRtcIsacfix_kCdfShape +1368 +141, WebRtcIsacfix_kCdfShape +1368 +143, - WebRtcIsacfix_kCdfShape +1368 +146, WebRtcIsacfix_kCdfShape +1368 +150, WebRtcIsacfix_kCdfShape +1368 +154, - WebRtcIsacfix_kCdfShape +1368 +158, WebRtcIsacfix_kCdfShape +1368 +162, WebRtcIsacfix_kCdfShape +1368 +166, - WebRtcIsacfix_kCdfShape +1368 +170, WebRtcIsacfix_kCdfShape +1368 +174, WebRtcIsacfix_kCdfShape +1368 +179, - WebRtcIsacfix_kCdfShape +1368 +185, WebRtcIsacfix_kCdfShape +1368 +193, WebRtcIsacfix_kCdfShape +1368 +203, - WebRtcIsacfix_kCdfShape +1368 +214, WebRtcIsacfix_kCdfShape +1368 +216, WebRtcIsacfix_kCdfShape +1368 +218, - WebRtcIsacfix_kCdfShape +1368 +220, WebRtcIsacfix_kCdfShape +1368 +224, WebRtcIsacfix_kCdfShape +1368 +227, - WebRtcIsacfix_kCdfShape +1368 +231, WebRtcIsacfix_kCdfShape +1368 +235, WebRtcIsacfix_kCdfShape 
+1368 +239, - WebRtcIsacfix_kCdfShape +1368 +243, WebRtcIsacfix_kCdfShape +1368 +247, WebRtcIsacfix_kCdfShape +1368 +251, - WebRtcIsacfix_kCdfShape +1368 +256, WebRtcIsacfix_kCdfShape +1368 +262, WebRtcIsacfix_kCdfShape +1368 +269, - WebRtcIsacfix_kCdfShape +1368 +277, WebRtcIsacfix_kCdfShape +1368 +286, WebRtcIsacfix_kCdfShape +1368 +297, - WebRtcIsacfix_kCdfShape +1368 +315, WebRtcIsacfix_kCdfShape +1368 +317, WebRtcIsacfix_kCdfShape +1368 +319, - WebRtcIsacfix_kCdfShape +1368 +323, WebRtcIsacfix_kCdfShape +1368 +327, WebRtcIsacfix_kCdfShape +1368 +331, - WebRtcIsacfix_kCdfShape +1368 +335, WebRtcIsacfix_kCdfShape +1368 +339, WebRtcIsacfix_kCdfShape +1368 +343, - WebRtcIsacfix_kCdfShape +1368 +349, WebRtcIsacfix_kCdfShape +1368 +355, WebRtcIsacfix_kCdfShape +1368 +361, - WebRtcIsacfix_kCdfShape +1368 +368, WebRtcIsacfix_kCdfShape +1368 +376, WebRtcIsacfix_kCdfShape +1368 +385, - WebRtcIsacfix_kCdfShape +1368 +397, WebRtcIsacfix_kCdfShape +1368 +411, WebRtcIsacfix_kCdfShape +1368 +429, - WebRtcIsacfix_kCdfShape +1368 +456, WebRtcIsacfix_kCdfShape +1368 +459, WebRtcIsacfix_kCdfShape +1368 +463, - WebRtcIsacfix_kCdfShape +1368 +467, WebRtcIsacfix_kCdfShape +1368 +473, WebRtcIsacfix_kCdfShape +1368 +478, - WebRtcIsacfix_kCdfShape +1368 +485, WebRtcIsacfix_kCdfShape +1368 +491, WebRtcIsacfix_kCdfShape +1368 +497, - WebRtcIsacfix_kCdfShape +1368 +505, WebRtcIsacfix_kCdfShape +1368 +514, WebRtcIsacfix_kCdfShape +1368 +523, - WebRtcIsacfix_kCdfShape +1368 +535, WebRtcIsacfix_kCdfShape +1368 +548, WebRtcIsacfix_kCdfShape +1368 +565, - WebRtcIsacfix_kCdfShape +1368 +585, WebRtcIsacfix_kCdfShape +1368 +611, WebRtcIsacfix_kCdfShape +1368 +640 - } -}; - -/* code length for all coefficients using different models */ - -const int16_t WebRtcIsacfix_kCodeLenGainQ11[392] = { - 25189, 16036, 8717, 358, 8757, 15706, 21456, 24397, 18502, 17559 - , 13794, 11088, 7480, 873, 6603, 11636, 14627, 16805, 19132, 26624 - , 26624, 19408, 13751, 7280, 583, 7591, 15178, 23773, 28672, 25189 - , 19045, 16442, 13412, 10397, 5893, 1338, 6376, 9992, 12074, 13853 - , 15781, 19821, 22819, 28672, 28672, 25189, 19858, 15781, 11262, 5477 - , 1298, 5632, 11814, 17234, 22020, 28672, 19677, 18125, 16587, 14521 - , 13032, 11196, 9249, 5411, 2495, 4994, 7975, 10234, 12308, 13892 - , 15148, 17944, 21725, 23917, 25189, 19539, 16293, 11531, 7808, 4475 - , 2739, 4872, 8089, 11314, 14992, 18105, 23257, 26624, 25189, 23257 - , 23257, 20982, 18697, 18023, 16338, 16036, 14539, 13695, 13146, 11763 - , 10754, 9074, 7260, 5584, 4430, 5553, 6848, 8344, 10141, 11636 - , 12535, 13416, 14342, 15477, 17296, 19282, 22349, 23773, 28672, 28672 - , 26624, 23773, 21456, 18023, 15118, 13362, 11212, 9293, 8043, 6985 - , 5908, 5721, 5853, 6518, 7316, 8360, 9716, 11289, 12912, 14652 - , 16969, 19858, 23773, 26624, 28013, 30720, 30720, 28672, 25426, 23141 - , 25426, 23773, 20720, 19408, 18697, 19282, 16859, 16338, 16026, 15377 - , 15021, 14319, 14251, 13937, 13260, 13017, 12332, 11703, 11430, 10359 - , 10128, 9405, 8757, 8223, 7974, 7859, 7646, 7673, 7997, 8580 - , 8880, 9061, 9866, 10397, 11358, 12200, 13244, 14157, 15021, 16026 - , 16490, 18697, 18479, 20011, 19677, 20720, 24576, 26276, 30720, 30720 - , 28672, 30720, 24068, 25189, 22437, 20345, 18479, 16396, 16026, 14928 - , 13877, 13450, 12696, 12766, 11626, 11098, 10159, 9998, 9437, 9275 - , 8783, 8552, 8629, 8488, 8522, 8454, 8571, 8775, 8915, 9427 - , 9483, 9851, 10260, 10933, 11131, 11974, 12560, 13833, 15080, 16304 - , 17491, 19017, 18697, 19408, 22020, 25189, 25426, 22819, 26276, 30720 - , 30720, 
30720, 30720, 30720, 30720, 28672, 30720, 30720, 30720, 30720 - , 28013, 25426, 24397, 23773, 25189, 26624, 25189, 22437, 21725, 20011 - , 20527, 20720, 20771, 22020, 22020, 19858, 19408, 19972, 17866, 17360 - , 17791, 17219, 16805, 16927, 16067, 16162, 15661, 15178, 15021, 15209 - , 14845, 14570, 14490, 14490, 13733, 13617, 13794, 13577, 13312, 12824 - , 13032, 12683, 12189, 12469, 12109, 11940, 11636, 11617, 11932, 12294 - , 11578, 11775, 12039, 11654, 11560, 11439, 11909, 11421, 12029, 11513 - , 11773, 11899, 11560, 11805, 11476, 11664, 11963, 11647, 11754, 11963 - , 11703, 12211, 11932, 12074, 12469, 12535, 12560, 12912, 12783, 12866 - , 12884, 13378, 13957, 13775, 13635, 14019, 14545, 15240, 15520, 15554 - , 15697, 16490, 16396, 17281, 16599, 16969, 17963, 16859, 16983, 16805 - , 17099, 18210, 17219, 17646, 17700, 17646, 18297, 17425, 18479, 17791 - , 17718, 19282, 18672, 20173, 20982, 21725, 21456, 23773, 23257, 25189 - , 30720, 30720, 25189, 26624, 30720, 30720, 30720, 30720, 28672, 26276 - , 30720, 30720 -}; - -const int16_t WebRtcIsacfix_kCodeLenShapeQ11[578] = { - 0, 0, 0, 0, 0, 0, 0, 0, 0, 28672 - , 0, 26624, 1, 23773, 22819, 4, 20982, 18598, 10, 19282 - , 16587, 22, 16442, 26624, 13126, 60, 14245, 26624, 26624, 12736 - , 79, 12912, 25189, 22819, 9563, 249, 9474, 22349, 28672, 23257 - , 17944, 7980, 434, 8181, 16431, 26624, 0, 0, 0, 0 - , 28672, 0, 0, 0, 0, 0, 28672, 0, 22437, 3 - , 22437, 20982, 5, 20982, 16442, 22, 16752, 13814, 49, 14646 - , 11645, 116, 11734, 26624, 28672, 10613, 158, 11010, 24397, 19539 - , 8046, 453, 7709, 19017, 28672, 23257, 15110, 6770, 758, 6523 - , 14108, 24397, 28672, 0, 0, 0, 0, 28672, 0, 28672 - , 0, 26624, 1, 28672, 28672, 1, 26624, 24397, 2, 23257 - , 21725, 4, 20982, 17158, 18, 17281, 28672, 15178, 35, 15209 - , 12343, 92, 12320, 26624, 10344, 189, 10217, 30720, 22020, 9033 - , 322, 8549, 23773, 28672, 30720, 20622, 7666, 473, 7806, 20527 - , 24397, 14135, 5995, 960, 6018, 14872, 23773, 26624, 20928, 16293 - , 10636, 4926, 1588, 5256, 11088, 18043, 25189, 0, 0, 0 - , 0, 24397, 1, 25189, 20720, 5, 21456, 21209, 3, 25189 - , 20982, 5, 21456, 15818, 30, 15410, 13794, 60, 13416, 28672 - , 11162, 142, 11025, 9337, 231, 10094, 23773, 8338, 405, 7930 - , 26624, 19677, 6787, 613, 7318, 19161, 28672, 16442, 6319, 932 - , 5748, 15312, 25189, 28672, 28672, 28672, 13998, 5513, 1263, 5146 - , 14024, 24397, 22819, 15818, 9460, 4447, 2122, 4681, 9970, 15945 - , 22349, 28672, 30720, 22622, 19017, 14872, 10689, 7405, 4473, 2983 - , 4783, 7894, 11186, 14964, 18210, 24397, 0, 0, 30720, 0 - , 30720, 21456, 3, 23773, 14964, 39, 14757, 14179, 53, 13751 - , 14928, 36, 15272, 12430, 79, 13228, 9135, 285, 9077, 28672 - , 28672, 8377, 403, 7919, 26624, 28672, 23257, 7068, 560, 7473 - , 20345, 19677, 6770, 720, 6464, 18697, 25189, 16249, 5779, 1087 - , 5494, 15209, 22819, 30720, 20622, 12601, 5240, 1419, 5091, 12095 - , 19408, 26624, 22819, 16805, 10683, 4812, 2056, 4293, 9836, 16026 - , 24397, 25189, 18409, 13833, 8681, 4503, 2653, 4220, 8329, 13853 - , 19132, 26624, 25189, 20771, 17219, 12630, 9520, 6733, 4565, 3657 - , 4817, 7069, 10058, 13212, 16805, 21209, 26624, 26276, 28672, 28672 - , 26276, 23257, 20173, 19282, 16538, 15051, 12811, 10754, 9267, 7547 - , 6270, 5407, 5214, 6057, 7054, 8226, 9488, 10806, 12793, 14442 - , 16442, 19677, 22099, 26276, 28672, 0, 30720, 0, 30720, 11920 - , 56, 20720, 30720, 6766, 355, 13130, 30720, 30720, 22180, 5589 - , 736, 7902, 26624, 30720, 7634, 354, 9721, 30720, 30720, 9027 - , 246, 10117, 30720, 30720, 9630, 453, 6709, 23257, 
30720, 25683 - , 14228, 6127, 1271, 4615, 15178, 30720, 30720, 23504, 12382, 5739 - , 2015, 3492, 10560, 22020, 26624, 30720, 30720, 23257, 13192, 4873 - , 1527, 5001, 12445, 22020, 30720, 30720, 30720, 30720, 19344, 10761 - , 4051, 1927, 5281, 10594, 17866, 28672, 30720, 30720, 30720, 21869 - , 15554, 10060, 5979, 2710, 3085, 7889, 14646, 21725, 28672, 30720 - , 30720, 30720, 30720, 30720, 30720, 30720, 22719, 17425, 13212, 8083 - , 4439, 2820, 4305, 8136, 12988, 17425, 21151, 28672, 28672, 30720 - , 30720, 30720, 28672, 20527, 19282, 14412, 10513, 7407, 5079, 3744 - , 4115, 6308, 9621, 13599, 17040, 22349, 28672, 30720, 30720, 30720 - , 30720, 30720, 30720, 29522, 19282, 14545, 11485, 9093, 6760, 5262 - , 4672, 4970, 6005, 7852, 9732, 12343, 14672, 19161, 22819, 25189 - , 30720, 30720, 28672, 30720, 30720, 20720, 18125, 14388, 12007, 9825 - , 8092, 7064, 6069, 5903, 5932, 6359, 7169, 8310, 9324, 10711 - , 11867, 13096, 14157, 16338, 17040, 19161, 21725, 23773, 30720, 30720 - , 26276, 25426, 24397, 28672, 28672, 23257, 22020, 22349, 18297, 17646 - , 16983, 16431, 16162, 15021, 15178, 13751, 12142, 10895, 10193, 9632 - , 9086, 8896, 8823, 8735, 8591, 8754, 8649, 8361, 8329, 8522 - , 8373, 8739, 8993, 9657, 10454, 11279, 11899, 12614, 14024, 14273 - , 15477, 15240, 16649, 17866, 18697, 21151, 22099, 0 - // The final 0 was added due to http://bugs.webrtc.org/10584. -}; - -/* left KLT transforms */ -const int16_t WebRtcIsacfix_kT1GainQ15[3][4] = { - { -26130, 19773, 19773, 26130 }, - { -26664, 19046, 19046, 26664 }, - { -23538, 22797, 22797, 23538 } -}; - - - -const int16_t WebRtcIsacfix_kT1ShapeQ15[3][324] = { - { 52,16,168,7,439,-138,-89,306,671,882, - 157,1301,291,1598,-3571,-1943,-1119,32404,96,-12, - 379,-64,-307,345,-836,539,1045,2541,-2865,-992, - 1683,-4717,5808,7427,30599,2319,183,-73,451,481, - 933,-198,781,-397,1244,-777,3690,-2414,149,-1356, - -2593,-31140,8289,-1737,-202,-14,-214,360,501,450, - -245,-7,797,3638,-2804,3042,-337,22137,-22103,2264, - 6838,-3381,305,172,263,-195,-355,351,179,513, - 2234,3343,5509,7531,19075,-17740,-16836,2244,-629,-1505, - -153,108,124,-324,2694,-124,1492,-850,5347,4285, - 7439,-10229,-22822,-12467,-12891,3645,822,-232,131,13, - 374,565,536,4681,1294,-1935,1926,-5734,-10643,26462, - -12480,-5589,-1038,-2468,964,-704,-247,-106,186,-558, - -4050,3760,2972,2141,-7393,6294,26740,11991,-3251,5461, - 5341,1574,2208,-51,-552,-297,-753,-154,2068,-5371, - 3578,4106,28043,-10533,8041,2353,2389,4609,3410,1906, - 351,-249,18,-15,1117,539,2870,9084,17585,-24528, - -366,-6490,2009,-3170,2942,1116,-232,1672,1065,606, - -399,-388,-518,38,3728,28948,-11936,4543,4104,-4441, - 1545,-4044,1485,622,-68,186,-473,135,-280,125, - -546,-1813,6989,6606,23711,19376,-2636,2870,-4553,-1687, - 878,-375,205,-208,-409,-108,-200,-45,-1670,-337, - 8213,-5524,-2334,5240,-12939,-26205,5937,-1582,-592,-959, - -5374,2449,3400,559,349,-492,668,12379,-27684,3419, - 5117,4415,-297,-8270,-1252,-3490,-1272,-1199,-3159,191, - 630,488,-797,-3071,12912,-27783,-10249,1047,647,619, - 111,-3722,-915,-1055,-502,5,-1384,-306,221,68, - 5219,13173,-26474,-11663,-5626,927,806,-1127,236,-589, - -522,-230,-312,-315,-428,-573,426,192,-11830,-26883, - -14121,-2785,-1429,-109,410,-832,-302,539,-459,104, - 1,-530,-202,-289,153,116,30082,-12944,-671,20, - 649,98,103,215,234,0,280,-51,-169,298, - 31,230,-73,-51 - }, - { -154,-7,-192,61,-739,-389,-947,-162,-60,94, - 511,-716,1520,-1428,4168,-2214,1816,32270,-123,-77, - -199,-99,-42,-588,203,-240,-930,-35,1580,234, - 
3206,-5507,-1495,-10946,30000,-2667,-136,-176,-240,-175, - -204,-661,-1796,-1039,-1271,498,3143,734,2663,2699, - -8127,29333,10495,2356,-72,113,-91,118,-2840,-723, - -1733,-1158,-389,-2116,-3054,-3,-5179,8071,29546,6308, - 5657,-3178,-186,-294,-473,-635,1213,-983,-1437,-1715, - -1094,1280,-92,-9573,948,29576,-7060,-5921,2954,1349, - -337,-108,-1099,962,418,-413,-1149,-334,1241,3975, - -6825,26725,-14377,7051,-4772,-1707,2335,2008,-150,570, - 1371,42,-1649,-619,2039,3369,-1225,1583,-2755,-15207, - -27504,-4855,-4304,1495,2733,1324,15,-448,403,353, - 3016,-1242,2338,2673,2064,-7496,-30447,-3686,5833,-1301, - -2455,2122,1519,608,43,-653,773,-3072,912,-1537, - 4505,10284,30237,1549,3200,-691,205,1702,658,1014, - 1499,148,79,-322,-1162,-4639,-813,7536,3204,29109, - -10747,-26,1611,2286,2114,2561,1022,372,348,207, - 1062,-1088,-443,-9849,2381,5671,29097,-7612,-2927,3853, - 194,1155,275,1438,1438,1312,581,888,-784,906, - 112,-11103,25104,14438,-9311,-3068,1210,368,370,-940, - -2434,-1148,1925,392,657,258,-526,1475,-2281,-4265, - -1880,1534,2185,-1472,959,-30934,6306,3114,-4109,1768, - -2612,-703,45,644,2185,2033,5670,7211,19114,-22427, - 6432,5150,-4090,-2694,3860,1245,-596,293,1829,369, - -319,229,-3256,2170,-6374,-26216,-4570,-16053,-5766,-262, - -2006,2873,-1477,147,378,-1544,-344,-544,-985,-481, - 4210,4542,30757,-7291,-4863,1529,-2079,-628,-603,-783, - -408,1646,697,808,-620,-292,181,158,-13313,-29173, - 5984,-1262,859,-1776,-558,-24,-883,-1421,739,210, - -531,-285,131,-160,-246,-56,29345,-13706,-2859,-2966, - -300,-970,-2382,-268,-103,-636,-12,-62,-691,-253, - -147,-127,27,66 - }, - { 55,-212,-198,489,-274,81,682,399,328,-934, - -389,-37,1357,-3632,5276,6581,-9493,-29921,29,-45, - 2,190,172,-15,311,-130,-1085,-25,324,-684, - 3223,-6580,4485,-5280,-29521,9933,82,-320,-530,229, - -705,-533,-414,848,-1842,-4473,1390,-857,6717,-6692, - 4648,29397,576,8339,-68,-85,238,-330,264,-1012, - -381,-203,-3384,-3329,3906,6810,3790,-6250,28312,-8078, - 8089,1565,160,-569,-612,-613,-1063,-1928,-1125,3421, - -7481,-7484,4942,-6984,4330,-25591,-10574,-6982,5682,-1781, - -308,89,178,-1715,-420,-3530,-5776,1219,-8617,-7137, - 7015,4981,24875,12657,-5408,-3356,-785,-1972,326,-858, - -506,-3382,-986,-6258,-2259,4015,-8374,-10482,3127,23826, - -14126,-514,-5417,2178,-2912,-17,-587,80,67,-5881, - -1702,-5351,-4481,398,-10156,-225,20727,-15460,-11603,7752, - 3660,1714,-2001,-359,499,-527,-1225,-7820,-1297,-6326, - -8526,7900,-18328,13311,-17488,-2926,-196,-17,2281,873, - 480,-160,-624,471,780,-8729,1707,-14262,-20647,1721, - 18590,-2206,-1214,-1066,312,-2602,783,-412,-113,49, - -119,1305,-2371,-15132,-1833,-18252,20295,-8316,2227,341, - -2074,-702,3082,-262,-465,-198,430,30,-70,-788, - 2342,-25132,-4863,19783,-484,2137,2811,-1906,799,1586, - 962,-734,-191,-30,-129,-93,-1126,1729,5860,-2030, - 8953,603,-3338,-10869,-1144,22070,12130,10513,3191,-6881, - -3514,2090,711,-666,1843,-5997,-5681,2921,-17641,-2801, - 4969,18590,7169,12214,8587,4405,3008,-1074,-371,-77, - 253,331,-5611,5014,13152,-1985,18483,-1696,8043,20463, - 2381,-393,1688,-1205,618,1220,457,248,-83,176, - 7920,-13676,-22139,-3038,17402,2036,844,3258,994,719, - 2087,-44,426,494,12,-91,46,5,-14204,22912, - -18156,-361,442,2298,-829,2229,386,1433,1335,1323, - 55,-592,-139,49,-12,-57,27783,17134,350,-282, - 552,158,142,2488,465,329,1087,118,143,10, - 56,65,-15,-31 - } -}; - -/* right KLT transforms */ -const int16_t WebRtcIsacfix_kT2GainQ15[3][36] = { - { 4775, -14892, 20313, -17104, 10533, -3613, -6782, 16044, -8889, - -11019, 21330, -10720, 13193, -15678, 
-11101, 14461, 12250, -13096, - -16951, 2167, 16066, 15569, -702, -16754, -19195, -12823, -4321, - 5128, 13348, 17825, 13232, 13404, 13494, 13490, 13383, 13261 - }, - { -3725, 11408, -18493, 20031, -13097, 3865, 9344, -19294, 10740, - 8856, -18432, 8982, 13975, -14444, -11930, 11774, 14285, -13594, - -16323, -4, 16340, 15609, 359, -17220, -18401, -13471, -4643, - 5225, 13375, 18053, 13124, 13463, 13621, 13583, 13393, 13072 - }, - { -3513, 11402, -17883, 19504, -14399, 4885, 8702, -19513, 12046, - 8533, -18110, 8447, 12778, -14838, -12444, 13177, 14107, -12759, - -17268, 914, 15822, 15661, 838, -16686, -18907, -12936, -4820, - 4175, 12398, 18830, 12913, 13215, 13433, 13572, 13601, 13518 - } -}; - -const int16_t WebRtcIsacfix_kT2ShapeQ15[3][36] = { - { 4400, -11512, 17205, -19470, 14770, -5345, 9784, -19222, 11228, - 6842, -18371, 9909, 14191, -13496, -11563, 14015, 11827, -14839, - -15439, 948, 17802, 14827, -2053, -17132, 18723, 14516, 4135, - -6822, -13869, -16016, 12975, 13341, 13563, 13603, 13478, 13296 - }, - { 5420, -14215, 19060, -18073, 11709, -3911, 9645, -18335, 7717, - 10842, -19283, 9777, 14898, -12555, -13661, 11668, 13520, -13733, - -15936, -1358, 15671, 16728, 328, -17100, 17527, 13973, 5587, - -5194, -14165, -17677, 12970, 13446, 13693, 13660, 13462, 13015 - }, - { 4386, -12426, 18019, -18895, 13894, -5034, 9713, -19270, 10283, - 8692, -18439, 9317, 13992, -13454, -13241, 12850, 13366, -13336, - -16334, -498, 15976, 16213, -114, -16987, 18191, 13659, 4958, - -5116, -13444, -18021, 12911, 13424, 13718, 13674, 13464, 13054 - } -}; - -/* means of log gains and LAR coefficients*/ -const int16_t WebRtcIsacfix_kMeansGainQ8[3][12] = { - { -1758, -1370, -1758, -1373, -1757, -1375, - -1758, -1374, -1758, -1373, -1755, -1370 - }, - { -1569, -1224, -1569, -1225, -1569, -1227, - -1569, -1226, -1567, -1225, -1565, -1224 - }, - { -1452, -957, -1447, -951, -1438, -944, - -1431, -938, -1419, -931, -1406, -926 - } -}; - - -const int32_t WebRtcIsacfix_kMeansShapeQ17[3][108] = { - { -119581, 34418, -44193, 11112, -4428, 18906, 9222, 8068, 1953, 5425, - 1871, 1689, 109933, 33751, 10471, -2566, 1090, 2320, -119219, 33728, - -43759, 11450, -4870, 19117, 9174, 8037, 1972, 5331, 1872, 1843, - 109899, 34301, 10629, -2316, 1272, 2562, -118608, 32318, -44012, 11591, - -4914, 18932, 9456, 8088, 1900, 5419, 1723, 1853, 109963, 35059, - 10745, -2335, 1161, 2520, -119174, 32107, -44462, 11635, -4694, 18611, - 9757, 8108, 1969, 5486, 1673, 1777, 109636, 34907, 10643, -2406, - 1034, 2420, -118597, 32320, -44590, 10854, -4569, 18821, 9701, 7866, - 2003, 5577, 1732, 1626, 109913, 34448, 10714, -2752, 990, 2228, - -118138, 32996, -44352, 10334, -3772, 18488, 9464, 7865, 2208, 5540, - 1745, 1664, 109880, 33381, 10640, -2779, 980, 2054 - }, - { -146328, 46370, 1047, 26431, 10035, 13933, 6415, 14359, -2368, 6661, - 2269, 1764, 96623, 7802, 4163, 10742, 1643, 2954, -146871, 46561, 1127, - 26225, 10113, 14096, 6771, 14323, -2037, 6788, 2297, 1761, 96324, 8382, - 4309, 10450, 1695, 3016, -146502, 46475, 1580, 26118, 10487, 14179, 6622, - 14439, -2034, 6757, 2342, 1761, 95869, 8966, 4347, 10358, 1999, 2855, - -146958, 47717, 826, 25952, 10263, 14061, 5266, 13681, -2417, 6582, 2047, - 1608, 96257, 9107, 4452, 10301, 1792, 2676, -146992, 47123, 446, 25822, - 10405, 14292, 5140, 13804, -2403, 6496, 1834, 1735, 97489, 9253, 4414, - 10684, 1549, 2721, -145811, 46182, 901, 26482, 10241, 14524, 6075, 14514, - -2147, 6691, 2196, 1899, 97011, 8178, 4102, 10758, 1638, 2869 - }, - { -166617, 46969, -43908, 17726, 6330, 25615, 
6913, 5450, -2301, 1984, - 507, 2883, 149998, 28709, 19333, 16703, 11093, 8965, -168254, 46604, - -44315, 17862, 6474, 25746, 7018, 5373, -2343, 1930, 513, 2819, 150391, - 28627, 19194, 16678, 10998, 8929, -169093, 46084, -44767, 17427, 6401, - 25674, 7147, 5472, -2336, 1820, 491, 2802, 149860, 28430, 19064, 16524, - 10898, 8875, -170205, 46189, -44877, 17403, 6190, 25209, 7035, 5673, -2173, - 1894, 574, 2756, 148830, 28230, 18819, 16418, 10789, 8811, -171263, 45045, - -44834, 16858, 6103, 24726, 7014, 5713, -2103, 1877, 518, 2729, 147073, - 27744, 18629, 16277, 10690, 8703, -171720, 44153, -45062, 15951, 5872, - 24429, 7044, 5585, -2082, 1807, 519, 2769, 144791, 27402, 18490, 16126, - 10548, 8635 - } -}; diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/lpc_tables.h b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/lpc_tables.h deleted file mode 100644 index 50e1b124598a..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/lpc_tables.h +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * lpc_tables.h - * - * header file for coding tables for the LPC coefficients - * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_LPC_TABLES_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_LPC_TABLES_H_ - -#include - -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" - -/* indices of KLT coefficients used */ -extern const uint16_t WebRtcIsacfix_kSelIndGain[12]; - -extern const uint16_t WebRtcIsacfix_kSelIndShape[108]; - -/* cdf array for model indicator */ -extern const uint16_t WebRtcIsacfix_kModelCdf[KLT_NUM_MODELS + 1]; - -/* pointer to cdf array for model indicator */ -extern const uint16_t* WebRtcIsacfix_kModelCdfPtr[1]; - -/* initial cdf index for decoder of model indicator */ -extern const uint16_t WebRtcIsacfix_kModelInitIndex[1]; - -/* offset to go from rounded value to quantization index */ -extern const int16_t WebRtcIsacfix_kQuantMinGain[12]; - -extern const int16_t WebRtcIsacfix_kQuantMinShape[108]; - -/* maximum quantization index */ -extern const uint16_t WebRtcIsacfix_kMaxIndGain[12]; - -extern const uint16_t WebRtcIsacfix_kMaxIndShape[108]; - -/* index offset */ -extern const uint16_t WebRtcIsacfix_kOffsetGain[KLT_NUM_MODELS][12]; - -extern const uint16_t WebRtcIsacfix_kOffsetShape[KLT_NUM_MODELS][108]; - -/* initial cdf index for KLT coefficients */ -extern const uint16_t WebRtcIsacfix_kInitIndexGain[KLT_NUM_MODELS][12]; - -extern const uint16_t WebRtcIsacfix_kInitIndexShape[KLT_NUM_MODELS][108]; - -/* offsets for quantizer representation levels */ -extern const uint16_t WebRtcIsacfix_kOfLevelsGain[3]; - -extern const uint16_t WebRtcIsacfix_kOfLevelsShape[3]; - -/* quantizer representation levels */ -extern const int32_t WebRtcIsacfix_kLevelsGainQ17[1176]; - -extern const int16_t WebRtcIsacfix_kLevelsShapeQ10[1735]; - -/* cdf tables for quantizer indices */ -extern const uint16_t WebRtcIsacfix_kCdfGain[1212]; - -extern const uint16_t WebRtcIsacfix_kCdfShape[2059]; - -/* pointers to cdf tables for quantizer indices */ -extern const uint16_t* 
WebRtcIsacfix_kCdfGainPtr[KLT_NUM_MODELS][12]; - -extern const uint16_t* WebRtcIsacfix_kCdfShapePtr[KLT_NUM_MODELS][108]; - -/* code length for all coefficients using different models */ -extern const int16_t WebRtcIsacfix_kCodeLenGainQ11[392]; - -extern const int16_t WebRtcIsacfix_kCodeLenShapeQ11[578]; - -/* left KLT transforms */ -extern const int16_t WebRtcIsacfix_kT1GainQ15[KLT_NUM_MODELS][4]; - -extern const int16_t WebRtcIsacfix_kT1ShapeQ15[KLT_NUM_MODELS][324]; - -/* right KLT transforms */ -extern const int16_t WebRtcIsacfix_kT2GainQ15[KLT_NUM_MODELS][36]; - -extern const int16_t WebRtcIsacfix_kT2ShapeQ15[KLT_NUM_MODELS][36]; - -/* means of log gains and LAR coefficients */ -extern const int16_t WebRtcIsacfix_kMeansGainQ8[KLT_NUM_MODELS][12]; - -extern const int32_t WebRtcIsacfix_kMeansShapeQ17[3][108]; - -#endif /* MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_LPC_TABLES_H_ */ diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/pitch_estimator.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/pitch_estimator.c deleted file mode 100644 index 78cb93f7ae14..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/pitch_estimator.c +++ /dev/null @@ -1,435 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_coding/codecs/isac/fix/source/pitch_estimator.h" -#include "common_audio/signal_processing/include/signal_processing_library.h" -#include "rtc_base/compile_assert_c.h" - -/* log2[0.2, 0.5, 0.98] in Q8 */ -static const int16_t kLogLagWinQ8[3] = { - -594, -256, -7 -}; - -/* [1 -0.75 0.25] in Q12 */ -static const int16_t kACoefQ12[3] = { - 4096, -3072, 1024 -}; - -int32_t WebRtcIsacfix_Log2Q8(uint32_t x) { - int32_t zeros; - int16_t frac; - - zeros=WebRtcSpl_NormU32(x); - frac = (int16_t)(((x << zeros) & 0x7FFFFFFF) >> 23); - /* log2(magn(i)) */ - - return ((31 - zeros) << 8) + frac; -} - -static __inline int16_t Exp2Q10(int16_t x) { // Both in and out in Q10 - - int16_t tmp16_1, tmp16_2; - - tmp16_2=(int16_t)(0x0400|(x&0x03FF)); - tmp16_1 = -(x >> 10); - if(tmp16_1>0) - return tmp16_2 >> tmp16_1; - else - return tmp16_2 << -tmp16_1; - -} - - - -/* 1D parabolic interpolation . 
All input and output values are in Q8 */ -static __inline void Intrp1DQ8(int32_t *x, int32_t *fx, int32_t *y, int32_t *fy) { - - int16_t sign1=1, sign2=1; - int32_t r32, q32, t32, nom32, den32; - int16_t t16, tmp16, tmp16_1; - - if ((fx[0]>0) && (fx[2]>0)) { - r32=fx[1]-fx[2]; - q32=fx[0]-fx[1]; - nom32=q32+r32; - den32 = (q32 - r32) * 2; - if (nom32<0) - sign1=-1; - if (den32<0) - sign2=-1; - - /* t = (q32+r32)/(2*(q32-r32)) = (fx[0]-fx[1] + fx[1]-fx[2])/(2 * fx[0]-fx[1] - (fx[1]-fx[2]))*/ - /* (Signs are removed because WebRtcSpl_DivResultInQ31 can't handle negative numbers) */ - /* t in Q31, without signs */ - t32 = WebRtcSpl_DivResultInQ31(nom32 * sign1, den32 * sign2); - - t16 = (int16_t)(t32 >> 23); /* Q8 */ - t16=t16*sign1*sign2; /* t in Q8 with signs */ - - *y = x[0]+t16; /* Q8 */ - // *y = x[1]+t16; /* Q8 */ - - /* The following code calculates fy in three steps */ - /* fy = 0.5 * t * (t-1) * fx[0] + (1-t*t) * fx[1] + 0.5 * t * (t+1) * fx[2]; */ - - /* Part I: 0.5 * t * (t-1) * fx[0] */ - tmp16_1 = (int16_t)(t16 * t16); /* Q8*Q8=Q16 */ - tmp16_1 >>= 2; /* Q16>>2 = Q14 */ - t16 <<= 6; /* Q8<<6 = Q14 */ - tmp16 = tmp16_1-t16; - *fy = WEBRTC_SPL_MUL_16_32_RSFT15(tmp16, fx[0]); /* (Q14 * Q8 >>15)/2 = Q8 */ - - /* Part II: (1-t*t) * fx[1] */ - tmp16 = 16384-tmp16_1; /* 1 in Q14 - Q14 */ - *fy += WEBRTC_SPL_MUL_16_32_RSFT14(tmp16, fx[1]);/* Q14 * Q8 >> 14 = Q8 */ - - /* Part III: 0.5 * t * (t+1) * fx[2] */ - tmp16 = tmp16_1+t16; - *fy += WEBRTC_SPL_MUL_16_32_RSFT15(tmp16, fx[2]);/* (Q14 * Q8 >>15)/2 = Q8 */ - } else { - *y = x[0]; - *fy= fx[1]; - } -} - - -static void FindFour32(int32_t *in, int16_t length, int16_t *bestind) -{ - int32_t best[4]= {-100, -100, -100, -100}; - int16_t k; - - for (k=0; k best[3]) { - if (in[k] > best[2]) { - if (in[k] > best[1]) { - if (in[k] > best[0]) { // The Best - best[3] = best[2]; - bestind[3] = bestind[2]; - best[2] = best[1]; - bestind[2] = bestind[1]; - best[1] = best[0]; - bestind[1] = bestind[0]; - best[0] = in[k]; - bestind[0] = k; - } else { // 2nd best - best[3] = best[2]; - bestind[3] = bestind[2]; - best[2] = best[1]; - bestind[2] = bestind[1]; - best[1] = in[k]; - bestind[1] = k; - } - } else { // 3rd best - best[3] = best[2]; - bestind[3] = bestind[2]; - best[2] = in[k]; - bestind[2] = k; - } - } else { // 4th best - best[3] = in[k]; - bestind[3] = k; - } - } - } -} - - - - - -extern void WebRtcIsacfix_PCorr2Q32(const int16_t *in, int32_t *logcorQ8); - - - -void WebRtcIsacfix_InitialPitch(const int16_t *in, /* Q0 */ - PitchAnalysisStruct *State, - int16_t *lagsQ7 /* Q7 */ - ) -{ - int16_t buf_dec16[PITCH_CORR_LEN2+PITCH_CORR_STEP2+PITCH_MAX_LAG/2+2]; - int32_t *crrvecQ8_1,*crrvecQ8_2; - int32_t cv1q[PITCH_LAG_SPAN2+2],cv2q[PITCH_LAG_SPAN2+2], peakvq[PITCH_LAG_SPAN2+2]; - int k; - int16_t peaks_indq; - int16_t peakiq[PITCH_LAG_SPAN2]; - int32_t corr; - int32_t corr32, corr_max32, corr_max_o32; - int16_t npkq; - int16_t best4q[4]={0,0,0,0}; - int32_t xq[3],yq[1],fyq[1]; - int32_t *fxq; - int32_t best_lag1q, best_lag2q; - int32_t tmp32a,tmp32b,lag32,ratq; - int16_t start; - int16_t oldgQ12, tmp16a, tmp16b, gain_bias16,tmp16c, tmp16d, bias16; - int32_t tmp32c,tmp32d, tmp32e; - int16_t old_lagQ; - int32_t old_lagQ8; - int32_t lagsQ8[4]; - - old_lagQ = State->PFstr_wght.oldlagQ7; // Q7 - old_lagQ8 = old_lagQ << 1; // Q8 - - oldgQ12= State->PFstr_wght.oldgainQ12; - - crrvecQ8_1=&cv1q[1]; - crrvecQ8_2=&cv2q[1]; - - - /* copy old values from state buffer */ - memcpy(buf_dec16, State->dec_buffer16, sizeof(State->dec_buffer16)); - - /* decimation; 
put result after the old values */ - WebRtcIsacfix_DecimateAllpass32(in, State->decimator_state32, PITCH_FRAME_LEN, - &buf_dec16[PITCH_CORR_LEN2+PITCH_CORR_STEP2+PITCH_MAX_LAG/2-PITCH_FRAME_LEN/2+2]); - - /* low-pass filtering */ - start= PITCH_CORR_LEN2+PITCH_CORR_STEP2+PITCH_MAX_LAG/2-PITCH_FRAME_LEN/2+2; - WebRtcSpl_FilterARFastQ12(&buf_dec16[start],&buf_dec16[start],(int16_t*)kACoefQ12,3, PITCH_FRAME_LEN/2); - - /* copy end part back into state buffer */ - for (k = 0; k < (PITCH_CORR_LEN2+PITCH_CORR_STEP2+PITCH_MAX_LAG/2-PITCH_FRAME_LEN/2+2); k++) - State->dec_buffer16[k] = buf_dec16[k+PITCH_FRAME_LEN/2]; - - - /* compute correlation for first and second half of the frame */ - WebRtcIsacfix_PCorr2Q32(buf_dec16, crrvecQ8_1); - WebRtcIsacfix_PCorr2Q32(buf_dec16 + PITCH_CORR_STEP2, crrvecQ8_2); - - - /* bias towards pitch lag of previous frame */ - tmp32a = WebRtcIsacfix_Log2Q8((uint32_t) old_lagQ8) - 2304; - // log2(0.5*oldlag) in Q8 - tmp32b = oldgQ12 * oldgQ12 >> 10; // Q12 & * 4.0; - gain_bias16 = (int16_t) tmp32b; //Q12 - if (gain_bias16 > 3276) gain_bias16 = 3276; // 0.8 in Q12 - - - for (k = 0; k < PITCH_LAG_SPAN2; k++) - { - if (crrvecQ8_1[k]>0) { - tmp32b = WebRtcIsacfix_Log2Q8((uint32_t) (k + (PITCH_MIN_LAG/2-2))); - tmp16a = (int16_t) (tmp32b - tmp32a); // Q8 & fabs(ratio)<4 - tmp32c = tmp16a * tmp16a >> 6; // Q10 - tmp16b = (int16_t) tmp32c; // Q10 & <8 - tmp32d = tmp16b * 177 >> 8; // mult with ln2 in Q8 - tmp16c = (int16_t) tmp32d; // Q10 & <4 - tmp16d = Exp2Q10((int16_t) -tmp16c); //Q10 - tmp32c = gain_bias16 * tmp16d >> 13; // Q10 & * 0.5 - bias16 = (int16_t) (1024 + tmp32c); // Q10 - tmp32b = WebRtcIsacfix_Log2Q8((uint32_t)bias16) - 2560; - // Q10 in -> Q8 out with 10*2^8 offset - crrvecQ8_1[k] += tmp32b ; // -10*2^8 offset - } - } - - /* taper correlation functions */ - for (k = 0; k < 3; k++) { - crrvecQ8_1[k] += kLogLagWinQ8[k]; - crrvecQ8_2[k] += kLogLagWinQ8[k]; - - crrvecQ8_1[PITCH_LAG_SPAN2-1-k] += kLogLagWinQ8[k]; - crrvecQ8_2[PITCH_LAG_SPAN2-1-k] += kLogLagWinQ8[k]; - } - - - /* Make zeropadded corr vectors */ - cv1q[0]=0; - cv2q[0]=0; - cv1q[PITCH_LAG_SPAN2+1]=0; - cv2q[PITCH_LAG_SPAN2+1]=0; - corr_max32 = 0; - - for (k = 1; k <= PITCH_LAG_SPAN2; k++) - { - - - corr32=crrvecQ8_1[k-1]; - if (corr32 > corr_max32) - corr_max32 = corr32; - - corr32=crrvecQ8_2[k-1]; - corr32 += -4; // Compensate for later (log2(0.99)) - - if (corr32 > corr_max32) - corr_max32 = corr32; - - } - - /* threshold value to qualify as a peak */ - // corr_max32 += -726; // log(0.14)/log(2.0) in Q8 - corr_max32 += -1000; // log(0.14)/log(2.0) in Q8 - corr_max_o32 = corr_max32; - - - /* find peaks in corr1 */ - peaks_indq = 0; - for (k = 1; k <= PITCH_LAG_SPAN2; k++) - { - corr32=cv1q[k]; - if (corr32>corr_max32) { // Disregard small peaks - if ((corr32>=cv1q[k-1]) && (corr32>cv1q[k+1])) { // Peak? 
- peakvq[peaks_indq] = corr32; - peakiq[peaks_indq++] = k; - } - } - } - - - /* find highest interpolated peak */ - corr_max32=0; - best_lag1q =0; - if (peaks_indq > 0) { - FindFour32(peakvq, (int16_t) peaks_indq, best4q); - npkq = WEBRTC_SPL_MIN(peaks_indq, 4); - - for (k=0;k> 8; - tmp32c= tmp32b + 256; - *fyq += tmp32c; - if (*fyq > corr_max32) { - corr_max32 = *fyq; - best_lag1q = *yq; - } - } - tmp32b = (best_lag1q - OFFSET_Q8) * 2; - lagsQ8[0] = tmp32b + PITCH_MIN_LAG_Q8; - lagsQ8[1] = lagsQ8[0]; - } else { - lagsQ8[0] = old_lagQ8; - lagsQ8[1] = lagsQ8[0]; - } - - /* Bias towards constant pitch */ - tmp32a = lagsQ8[0] - PITCH_MIN_LAG_Q8; - ratq = (tmp32a >> 1) + OFFSET_Q8; - - for (k = 1; k <= PITCH_LAG_SPAN2; k++) - { - tmp32a = k << 7; // 0.5*k Q8 - tmp32b = tmp32a * 2 - ratq; // Q8 - tmp32c = (int16_t)tmp32b * (int16_t)tmp32b >> 8; // Q8 - - tmp32b = tmp32c + (ratq >> 1); - // (k-r)^2 + 0.5 * r Q8 - tmp32c = WebRtcIsacfix_Log2Q8((uint32_t)tmp32a) - 2048; - // offset 8*2^8 , log2(0.5*k) Q8 - tmp32d = WebRtcIsacfix_Log2Q8((uint32_t)tmp32b) - 2048; - // offset 8*2^8 , log2(0.5*k) Q8 - tmp32e = tmp32c - tmp32d; - - cv2q[k] += tmp32e >> 1; - - } - - /* find peaks in corr2 */ - corr_max32 = corr_max_o32; - peaks_indq = 0; - - for (k = 1; k <= PITCH_LAG_SPAN2; k++) - { - corr=cv2q[k]; - if (corr>corr_max32) { // Disregard small peaks - if ((corr>=cv2q[k-1]) && (corr>cv2q[k+1])) { // Peak? - peakvq[peaks_indq] = corr; - peakiq[peaks_indq++] = k; - } - } - } - - - - /* find highest interpolated peak */ - corr_max32 = 0; - best_lag2q =0; - if (peaks_indq > 0) { - - FindFour32(peakvq, (int16_t) peaks_indq, best4q); - npkq = WEBRTC_SPL_MIN(peaks_indq, 4); - for (k=0;k> 8; - tmp32c= tmp32b + 256; - *fyq += tmp32c; - if (*fyq > corr_max32) { - corr_max32 = *fyq; - best_lag2q = *yq; - } - } - - tmp32b = (best_lag2q - OFFSET_Q8) * 2; - lagsQ8[2] = tmp32b + PITCH_MIN_LAG_Q8; - lagsQ8[3] = lagsQ8[2]; - } else { - lagsQ8[2] = lagsQ8[0]; - lagsQ8[3] = lagsQ8[0]; - } - - lagsQ7[0] = (int16_t)(lagsQ8[0] >> 1); - lagsQ7[1] = (int16_t)(lagsQ8[1] >> 1); - lagsQ7[2] = (int16_t)(lagsQ8[2] >> 1); - lagsQ7[3] = (int16_t)(lagsQ8[3] >> 1); -} - - - -void WebRtcIsacfix_PitchAnalysis(const int16_t *inn, /* PITCH_FRAME_LEN samples */ - int16_t *outQ0, /* PITCH_FRAME_LEN+QLOOKAHEAD samples */ - PitchAnalysisStruct *State, - int16_t *PitchLags_Q7, - int16_t *PitchGains_Q12) -{ - int16_t inbufQ0[PITCH_FRAME_LEN + QLOOKAHEAD]; - int16_t k; - - /* inital pitch estimate */ - WebRtcIsacfix_InitialPitch(inn, State, PitchLags_Q7); - - - /* Calculate gain */ - WebRtcIsacfix_PitchFilterGains(inn, &(State->PFstr_wght), PitchLags_Q7, PitchGains_Q12); - - /* concatenate previous input's end and current input */ - for (k = 0; k < QLOOKAHEAD; k++) { - inbufQ0[k] = State->inbuf[k]; - } - for (k = 0; k < PITCH_FRAME_LEN; k++) { - inbufQ0[k+QLOOKAHEAD] = (int16_t) inn[k]; - } - - /* lookahead pitch filtering for masking analysis */ - WebRtcIsacfix_PitchFilter(inbufQ0, outQ0, &(State->PFstr), PitchLags_Q7,PitchGains_Q12, 2); - - - /* store last part of input */ - for (k = 0; k < QLOOKAHEAD; k++) { - State->inbuf[k] = inbufQ0[k + PITCH_FRAME_LEN]; - } -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/pitch_estimator.h b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/pitch_estimator.h deleted file mode 100644 index 4303c82711df..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/pitch_estimator.h +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright (c) 2012 
The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * pitch_estimator.h - * - * Pitch functions - * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_PITCH_ESTIMATOR_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_PITCH_ESTIMATOR_H_ - -#include "modules/audio_coding/codecs/isac/fix/source/structs.h" - -void WebRtcIsacfix_PitchAnalysis( - const int16_t* in, /* PITCH_FRAME_LEN samples */ - int16_t* outQ0, /* PITCH_FRAME_LEN+QLOOKAHEAD samples */ - PitchAnalysisStruct* State, - int16_t* lagsQ7, - int16_t* PitchGains_Q12); - -void WebRtcIsacfix_InitialPitch(const int16_t* in, - PitchAnalysisStruct* State, - int16_t* qlags); - -void WebRtcIsacfix_PitchFilter(int16_t* indatFix, - int16_t* outdatQQ, - PitchFiltstr* pfp, - int16_t* lagsQ7, - int16_t* gainsQ12, - int16_t type); - -void WebRtcIsacfix_PitchFilterCore(int loopNumber, - int16_t gain, - size_t index, - int16_t sign, - int16_t* inputState, - int16_t* outputBuff2, - const int16_t* coefficient, - int16_t* inputBuf, - int16_t* outputBuf, - int* index2); - -void WebRtcIsacfix_PitchFilterGains(const int16_t* indatQ0, - PitchFiltstr* pfp, - int16_t* lagsQ7, - int16_t* gainsQ12); - -void WebRtcIsacfix_DecimateAllpass32( - const int16_t* in, - int32_t* state_in, /* array of size: 2*ALLPASSSECTIONS+1 */ - int16_t N, /* number of input samples */ - int16_t* out); /* array of size N/2 */ - -int32_t WebRtcIsacfix_Log2Q8(uint32_t x); - -void WebRtcIsacfix_PCorr2Q32(const int16_t* in, int32_t* logcorQ8); - -#endif /* MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_PITCH_ESTIMATOR_H_ */ diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/pitch_estimator_c.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/pitch_estimator_c.c deleted file mode 100644 index c4af9ab32a19..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/pitch_estimator_c.c +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/audio_coding/codecs/isac/fix/source/pitch_estimator.h" - -#ifdef WEBRTC_HAS_NEON -#include -#endif - -#include "common_audio/signal_processing/include/signal_processing_library.h" -#include "rtc_base/compile_assert_c.h" - -extern int32_t WebRtcIsacfix_Log2Q8(uint32_t x); - -void WebRtcIsacfix_PCorr2Q32(const int16_t* in, int32_t* logcorQ8) { - int16_t scaling,n,k; - int32_t csum32, lys, lcs; - int64_t ysum64; - const int32_t oneQ8 = 1 << 8; // 1.00 in Q8 - const int16_t* x; - const int16_t* inptr; - - x = in + PITCH_MAX_LAG / 2 + 2; - scaling = WebRtcSpl_GetScalingSquare((int16_t*)in, - PITCH_CORR_LEN2, - PITCH_CORR_LEN2); - ysum64 = 1; - csum32 = 0; - x = in + PITCH_MAX_LAG / 2 + 2; - for (n = 0; n < PITCH_CORR_LEN2; n++) { - ysum64 += in[n] * in[n] >> scaling; // Q0 - csum32 += x[n] * in[n] >> scaling; // Q0 - } - logcorQ8 += PITCH_LAG_SPAN2 - 1; - lys = WebRtcIsacfix_Log2Q8((uint32_t)ysum64) >> 1; // Q8, sqrt(ysum) - if (csum32 > 0) { - lcs = WebRtcIsacfix_Log2Q8((uint32_t)csum32); // 2log(csum) in Q8 - if (lcs > (lys + oneQ8)) { // csum/sqrt(ysum) > 2 in Q8 - *logcorQ8 = lcs - lys; // log2(csum/sqrt(ysum)) - } else { - *logcorQ8 = oneQ8; // 1.00 - } - } else { - *logcorQ8 = 0; - } - - - for (k = 1; k < PITCH_LAG_SPAN2; k++) { - inptr = &in[k]; - ysum64 -= in[k - 1] * in[k - 1] >> scaling; - ysum64 += (int32_t)(in[PITCH_CORR_LEN2 + k - 1]) - * in[PITCH_CORR_LEN2 + k - 1] >> scaling; - -#ifdef WEBRTC_HAS_NEON - { - int32_t vbuff[4]; - int32x4_t int_32x4_sum = vmovq_n_s32(0); - // Can't shift a Neon register to right with a non-constant shift value. - int32x4_t int_32x4_scale = vdupq_n_s32(-scaling); - // Assert a codition used in loop unrolling at compile-time. - RTC_COMPILE_ASSERT(PITCH_CORR_LEN2 %4 == 0); - - for (n = 0; n < PITCH_CORR_LEN2; n += 4) { - int16x4_t int_16x4_x = vld1_s16(&x[n]); - int16x4_t int_16x4_in = vld1_s16(&inptr[n]); - int32x4_t int_32x4 = vmull_s16(int_16x4_x, int_16x4_in); - int_32x4 = vshlq_s32(int_32x4, int_32x4_scale); - int_32x4_sum = vaddq_s32(int_32x4_sum, int_32x4); - } - - // Use vector store to avoid long stall from data trasferring - // from vector to general register. - vst1q_s32(vbuff, int_32x4_sum); - csum32 = vbuff[0] + vbuff[1]; - csum32 += vbuff[2]; - csum32 += vbuff[3]; - } -#else - int64_t csum64_tmp = 0; - if(scaling == 0) { - for (n = 0; n < PITCH_CORR_LEN2; n++) { - csum64_tmp += (int32_t)(x[n]) * inptr[n]; - } - } else { - for (n = 0; n < PITCH_CORR_LEN2; n++) { - csum64_tmp += ((int32_t)(x[n]) * inptr[n]) >> scaling; - } - } - csum32 = csum64_tmp; -#endif - - logcorQ8--; - - lys = WebRtcIsacfix_Log2Q8((uint32_t)ysum64) >> 1; // Q8, sqrt(ysum) - - if (csum32 > 0) { - lcs = WebRtcIsacfix_Log2Q8((uint32_t)csum32); // 2log(csum) in Q8 - if (lcs > (lys + oneQ8)) { // csum/sqrt(ysum) > 2 - *logcorQ8 = lcs - lys; // log2(csum/sqrt(ysum)) - } else { - *logcorQ8 = oneQ8; // 1.00 - } - } else { - *logcorQ8 = 0; - } - } -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/pitch_estimator_mips.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/pitch_estimator_mips.c deleted file mode 100644 index 4ead84c492d1..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/pitch_estimator_mips.c +++ /dev/null @@ -1,193 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. 
An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_coding/codecs/isac/fix/source/pitch_estimator.h" -#include "common_audio/signal_processing/include/signal_processing_library.h" -#include "rtc_base/compile_assert_c.h" - -extern int32_t WebRtcIsacfix_Log2Q8(uint32_t x); - -void WebRtcIsacfix_PCorr2Q32(const int16_t* in, int32_t* logcorQ8) { - int16_t scaling,n,k; - int32_t ysum32,csum32, lys, lcs; - const int32_t oneQ8 = 1 << 8; // 1.00 in Q8 - const int16_t* x; - const int16_t* inptr; - - x = in + PITCH_MAX_LAG / 2 + 2; - scaling = WebRtcSpl_GetScalingSquare((int16_t*)in, - PITCH_CORR_LEN2, - PITCH_CORR_LEN2); - ysum32 = 1; - csum32 = 0; - x = in + PITCH_MAX_LAG / 2 + 2; - { - const int16_t* tmp_x = x; - const int16_t* tmp_in = in; - int32_t tmp1, tmp2, tmp3, tmp4, tmp5, tmp6, tmp7, tmp8; - n = PITCH_CORR_LEN2; - RTC_COMPILE_ASSERT(PITCH_CORR_LEN2 % 4 == 0); - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "1: \n\t" - "lh %[tmp1], 0(%[tmp_in]) \n\t" - "lh %[tmp2], 2(%[tmp_in]) \n\t" - "lh %[tmp3], 4(%[tmp_in]) \n\t" - "lh %[tmp4], 6(%[tmp_in]) \n\t" - "lh %[tmp5], 0(%[tmp_x]) \n\t" - "lh %[tmp6], 2(%[tmp_x]) \n\t" - "lh %[tmp7], 4(%[tmp_x]) \n\t" - "lh %[tmp8], 6(%[tmp_x]) \n\t" - "mul %[tmp5], %[tmp1], %[tmp5] \n\t" - "mul %[tmp1], %[tmp1], %[tmp1] \n\t" - "mul %[tmp6], %[tmp2], %[tmp6] \n\t" - "mul %[tmp2], %[tmp2], %[tmp2] \n\t" - "mul %[tmp7], %[tmp3], %[tmp7] \n\t" - "mul %[tmp3], %[tmp3], %[tmp3] \n\t" - "mul %[tmp8], %[tmp4], %[tmp8] \n\t" - "mul %[tmp4], %[tmp4], %[tmp4] \n\t" - "addiu %[n], %[n], -4 \n\t" - "srav %[tmp5], %[tmp5], %[scaling] \n\t" - "srav %[tmp1], %[tmp1], %[scaling] \n\t" - "srav %[tmp6], %[tmp6], %[scaling] \n\t" - "srav %[tmp2], %[tmp2], %[scaling] \n\t" - "srav %[tmp7], %[tmp7], %[scaling] \n\t" - "srav %[tmp3], %[tmp3], %[scaling] \n\t" - "srav %[tmp8], %[tmp8], %[scaling] \n\t" - "srav %[tmp4], %[tmp4], %[scaling] \n\t" - "addu %[ysum32], %[ysum32], %[tmp1] \n\t" - "addu %[csum32], %[csum32], %[tmp5] \n\t" - "addu %[ysum32], %[ysum32], %[tmp2] \n\t" - "addu %[csum32], %[csum32], %[tmp6] \n\t" - "addu %[ysum32], %[ysum32], %[tmp3] \n\t" - "addu %[csum32], %[csum32], %[tmp7] \n\t" - "addu %[ysum32], %[ysum32], %[tmp4] \n\t" - "addu %[csum32], %[csum32], %[tmp8] \n\t" - "addiu %[tmp_in], %[tmp_in], 8 \n\t" - "bgtz %[n], 1b \n\t" - " addiu %[tmp_x], %[tmp_x], 8 \n\t" - ".set pop \n\t" - : [tmp1] "=&r" (tmp1), [tmp2] "=&r" (tmp2), [tmp3] "=&r" (tmp3), - [tmp4] "=&r" (tmp4), [tmp5] "=&r" (tmp5), [tmp6] "=&r" (tmp6), - [tmp7] "=&r" (tmp7), [tmp8] "=&r" (tmp8), [tmp_in] "+r" (tmp_in), - [ysum32] "+r" (ysum32), [tmp_x] "+r" (tmp_x), [csum32] "+r" (csum32), - [n] "+r" (n) - : [scaling] "r" (scaling) - : "memory", "hi", "lo" - ); - } - logcorQ8 += PITCH_LAG_SPAN2 - 1; - lys = WebRtcIsacfix_Log2Q8((uint32_t)ysum32) >> 1; // Q8, sqrt(ysum) - if (csum32 > 0) { - lcs = WebRtcIsacfix_Log2Q8((uint32_t)csum32); // 2log(csum) in Q8 - if (lcs > (lys + oneQ8)) { // csum/sqrt(ysum) > 2 in Q8 - *logcorQ8 = lcs - lys; // log2(csum/sqrt(ysum)) - } else { - *logcorQ8 = oneQ8; // 1.00 - } - } else { - *logcorQ8 = 0; - } - - for (k = 1; k < PITCH_LAG_SPAN2; k++) { - inptr = &in[k]; - const int16_t* tmp_in1 = &in[k - 1]; - const int16_t* tmp_in2 = &in[PITCH_CORR_LEN2 + k - 1]; - const int16_t* tmp_x = x; - int32_t tmp1, tmp2, tmp3, tmp4, tmp5, tmp6, tmp7, tmp8; - n = PITCH_CORR_LEN2; - csum32 = 0; - 
__asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "lh %[tmp1], 0(%[tmp_in1]) \n\t" - "lh %[tmp2], 0(%[tmp_in2]) \n\t" - "mul %[tmp1], %[tmp1], %[tmp1] \n\t" - "mul %[tmp2], %[tmp2], %[tmp2] \n\t" - "srav %[tmp1], %[tmp1], %[scaling] \n\t" - "srav %[tmp2], %[tmp2], %[scaling] \n\t" - "subu %[ysum32], %[ysum32], %[tmp1] \n\t" - "bnez %[scaling], 2f \n\t" - " addu %[ysum32], %[ysum32], %[tmp2] \n\t" - "1: \n\t" - "lh %[tmp1], 0(%[inptr]) \n\t" - "lh %[tmp2], 0(%[tmp_x]) \n\t" - "lh %[tmp3], 2(%[inptr]) \n\t" - "lh %[tmp4], 2(%[tmp_x]) \n\t" - "lh %[tmp5], 4(%[inptr]) \n\t" - "lh %[tmp6], 4(%[tmp_x]) \n\t" - "lh %[tmp7], 6(%[inptr]) \n\t" - "lh %[tmp8], 6(%[tmp_x]) \n\t" - "mul %[tmp1], %[tmp1], %[tmp2] \n\t" - "mul %[tmp2], %[tmp3], %[tmp4] \n\t" - "mul %[tmp3], %[tmp5], %[tmp6] \n\t" - "mul %[tmp4], %[tmp7], %[tmp8] \n\t" - "addiu %[n], %[n], -4 \n\t" - "addiu %[inptr], %[inptr], 8 \n\t" - "addiu %[tmp_x], %[tmp_x], 8 \n\t" - "addu %[csum32], %[csum32], %[tmp1] \n\t" - "addu %[csum32], %[csum32], %[tmp2] \n\t" - "addu %[csum32], %[csum32], %[tmp3] \n\t" - "bgtz %[n], 1b \n\t" - " addu %[csum32], %[csum32], %[tmp4] \n\t" - "b 3f \n\t" - " nop \n\t" - "2: \n\t" - "lh %[tmp1], 0(%[inptr]) \n\t" - "lh %[tmp2], 0(%[tmp_x]) \n\t" - "lh %[tmp3], 2(%[inptr]) \n\t" - "lh %[tmp4], 2(%[tmp_x]) \n\t" - "lh %[tmp5], 4(%[inptr]) \n\t" - "lh %[tmp6], 4(%[tmp_x]) \n\t" - "lh %[tmp7], 6(%[inptr]) \n\t" - "lh %[tmp8], 6(%[tmp_x]) \n\t" - "mul %[tmp1], %[tmp1], %[tmp2] \n\t" - "mul %[tmp2], %[tmp3], %[tmp4] \n\t" - "mul %[tmp3], %[tmp5], %[tmp6] \n\t" - "mul %[tmp4], %[tmp7], %[tmp8] \n\t" - "addiu %[n], %[n], -4 \n\t" - "addiu %[inptr], %[inptr], 8 \n\t" - "addiu %[tmp_x], %[tmp_x], 8 \n\t" - "srav %[tmp1], %[tmp1], %[scaling] \n\t" - "srav %[tmp2], %[tmp2], %[scaling] \n\t" - "srav %[tmp3], %[tmp3], %[scaling] \n\t" - "srav %[tmp4], %[tmp4], %[scaling] \n\t" - "addu %[csum32], %[csum32], %[tmp1] \n\t" - "addu %[csum32], %[csum32], %[tmp2] \n\t" - "addu %[csum32], %[csum32], %[tmp3] \n\t" - "bgtz %[n], 2b \n\t" - " addu %[csum32], %[csum32], %[tmp4] \n\t" - "3: \n\t" - ".set pop \n\t" - : [tmp1] "=&r" (tmp1), [tmp2] "=&r" (tmp2), [tmp3] "=&r" (tmp3), - [tmp4] "=&r" (tmp4), [tmp5] "=&r" (tmp5), [tmp6] "=&r" (tmp6), - [tmp7] "=&r" (tmp7), [tmp8] "=&r" (tmp8), [inptr] "+r" (inptr), - [csum32] "+r" (csum32), [tmp_x] "+r" (tmp_x), [ysum32] "+r" (ysum32), - [n] "+r" (n) - : [tmp_in1] "r" (tmp_in1), [tmp_in2] "r" (tmp_in2), - [scaling] "r" (scaling) - : "memory", "hi", "lo" - ); - - logcorQ8--; - lys = WebRtcIsacfix_Log2Q8((uint32_t)ysum32) >> 1; // Q8, sqrt(ysum) - if (csum32 > 0) { - lcs = WebRtcIsacfix_Log2Q8((uint32_t)csum32); // 2log(csum) in Q8 - if (lcs > (lys + oneQ8)) { // csum/sqrt(ysum) > 2 - *logcorQ8 = lcs - lys; // log2(csum/sqrt(ysum)) - } else { - *logcorQ8 = oneQ8; // 1.00 - } - } else { - *logcorQ8 = 0; - } - } -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/pitch_filter.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/pitch_filter.c deleted file mode 100644 index 735533020e13..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/pitch_filter.c +++ /dev/null @@ -1,248 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_coding/codecs/isac/fix/source/pitch_estimator.h" -#include "common_audio/signal_processing/include/signal_processing_library.h" -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" -#include "modules/audio_coding/codecs/isac/fix/source/structs.h" -#include "rtc_base/compile_assert_c.h" - -// Number of segments in a pitch subframe. -static const int kSegments = 5; - -// A division factor of 1/5 in Q15. -static const int16_t kDivFactor = 6553; - -// Interpolation coefficients; generated by design_pitch_filter.m. -// Coefficients are stored in Q14. -static const int16_t kIntrpCoef[PITCH_FRACS][PITCH_FRACORDER] = { - {-367, 1090, -2706, 9945, 10596, -3318, 1626, -781, 287}, - {-325, 953, -2292, 7301, 12963, -3320, 1570, -743, 271}, - {-240, 693, -1622, 4634, 14809, -2782, 1262, -587, 212}, - {-125, 358, -817, 2144, 15982, -1668, 721, -329, 118}, - { 0, 0, -1, 1, 16380, 1, -1, 0, 0}, - { 118, -329, 721, -1668, 15982, 2144, -817, 358, -125}, - { 212, -587, 1262, -2782, 14809, 4634, -1622, 693, -240}, - { 271, -743, 1570, -3320, 12963, 7301, -2292, 953, -325} -}; - -static __inline size_t CalcLrIntQ(int16_t fixVal, - int16_t qDomain) { - int32_t roundVal = 1 << (qDomain - 1); - - return (fixVal + roundVal) >> qDomain; -} - -void WebRtcIsacfix_PitchFilter(int16_t* indatQQ, // Q10 if type is 1 or 4, - // Q0 if type is 2. - int16_t* outdatQQ, - PitchFiltstr* pfp, - int16_t* lagsQ7, - int16_t* gainsQ12, - int16_t type) { - int k, ind, cnt; - int16_t sign = 1; - int16_t inystateQQ[PITCH_DAMPORDER]; - int16_t ubufQQ[PITCH_INTBUFFSIZE + QLOOKAHEAD]; - const int16_t Gain = 21299; // 1.3 in Q14 - int16_t oldLagQ7; - int16_t oldGainQ12, lagdeltaQ7, curLagQ7, gaindeltaQ12, curGainQ12; - size_t frcQQ = 0; - int32_t indW32 = 0; - const int16_t* fracoeffQQ = NULL; - - // Assumptions in ARM assembly for WebRtcIsacfix_PitchFilterCoreARM(). - RTC_COMPILE_ASSERT(PITCH_FRACORDER == 9); - RTC_COMPILE_ASSERT(PITCH_DAMPORDER == 5); - - // Set up buffer and states. - memcpy(ubufQQ, pfp->ubufQQ, sizeof(pfp->ubufQQ)); - memcpy(inystateQQ, pfp->ystateQQ, sizeof(inystateQQ)); - - // Get old lag and gain value from memory. - oldLagQ7 = pfp->oldlagQ7; - oldGainQ12 = pfp->oldgainQ12; - - if (type == 4) { - sign = -1; - - // Make output more periodic. - for (k = 0; k < PITCH_SUBFRAMES; k++) { - gainsQ12[k] = (int16_t)(gainsQ12[k] * Gain >> 14); - } - } - - // No interpolation if pitch lag step is big. - if (((lagsQ7[0] * 3 >> 1) < oldLagQ7) || (lagsQ7[0] > (oldLagQ7 * 3 >> 1))) { - oldLagQ7 = lagsQ7[0]; - oldGainQ12 = gainsQ12[0]; - } - - ind = 0; - - for (k = 0; k < PITCH_SUBFRAMES; k++) { - // Calculate interpolation steps. - lagdeltaQ7 = lagsQ7[k] - oldLagQ7; - lagdeltaQ7 = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND( - lagdeltaQ7, kDivFactor, 15); - curLagQ7 = oldLagQ7; - gaindeltaQ12 = gainsQ12[k] - oldGainQ12; - gaindeltaQ12 = (int16_t)(gaindeltaQ12 * kDivFactor >> 15); - - curGainQ12 = oldGainQ12; - oldLagQ7 = lagsQ7[k]; - oldGainQ12 = gainsQ12[k]; - - // Each frame has 4 60-sample pitch subframes, and each subframe has 5 - // 12-sample segments. Each segment need to be processed with - // newly-updated parameters, so we break the pitch filtering into - // two for-loops (5 x 12) below. It's also why kDivFactor = 0.2 (in Q15). - for (cnt = 0; cnt < kSegments; cnt++) { - // Update parameters for each segment. 
- curGainQ12 += gaindeltaQ12; - curLagQ7 += lagdeltaQ7; - indW32 = CalcLrIntQ(curLagQ7, 7); - if (indW32 < PITCH_FRACORDER - 2) { - // WebRtcIsacfix_PitchFilterCore requires indW32 >= PITCH_FRACORDER - - // 2; otherwise, it will read from entries of ubufQQ that haven't been - // written yet. (This problem has only been seen in fuzzer tests, not - // in real life.) See Chromium bug 581901. - indW32 = PITCH_FRACORDER - 2; - } - frcQQ = ((indW32 << 7) + 64 - curLagQ7) >> 4; - - if (frcQQ >= PITCH_FRACS) { - frcQQ = 0; - } - fracoeffQQ = kIntrpCoef[frcQQ]; - - // Pitch filtering. - WebRtcIsacfix_PitchFilterCore(PITCH_SUBFRAME_LEN / kSegments, curGainQ12, - indW32, sign, inystateQQ, ubufQQ, fracoeffQQ, indatQQ, outdatQQ, &ind); - } - } - - // Export buffer and states. - memcpy(pfp->ubufQQ, ubufQQ + PITCH_FRAME_LEN, sizeof(pfp->ubufQQ)); - memcpy(pfp->ystateQQ, inystateQQ, sizeof(pfp->ystateQQ)); - - pfp->oldlagQ7 = oldLagQ7; - pfp->oldgainQ12 = oldGainQ12; - - if (type == 2) { - // Filter look-ahead segment. - WebRtcIsacfix_PitchFilterCore(QLOOKAHEAD, curGainQ12, indW32, 1, inystateQQ, - ubufQQ, fracoeffQQ, indatQQ, outdatQQ, &ind); - } -} - - -void WebRtcIsacfix_PitchFilterGains(const int16_t* indatQ0, - PitchFiltstr* pfp, - int16_t* lagsQ7, - int16_t* gainsQ12) { - int k, n, m; - size_t ind, pos, pos3QQ; - - int16_t ubufQQ[PITCH_INTBUFFSIZE]; - int16_t oldLagQ7, lagdeltaQ7, curLagQ7; - const int16_t* fracoeffQQ = NULL; - int16_t scale; - int16_t cnt = 0, tmpW16; - size_t frcQQ, indW16 = 0; - int32_t tmpW32, tmp2W32, csum1QQ, esumxQQ; - - // Set up buffer and states. - memcpy(ubufQQ, pfp->ubufQQ, sizeof(pfp->ubufQQ)); - oldLagQ7 = pfp->oldlagQ7; - - // No interpolation if pitch lag step is big. - if (((lagsQ7[0] * 3 >> 1) < oldLagQ7) || (lagsQ7[0] > (oldLagQ7 * 3 >> 1))) { - oldLagQ7 = lagsQ7[0]; - } - - ind = 0; - pos = ind + PITCH_BUFFSIZE; - scale = 0; - for (k = 0; k < PITCH_SUBFRAMES; k++) { - - // Calculate interpolation steps. - lagdeltaQ7 = lagsQ7[k] - oldLagQ7; - lagdeltaQ7 = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND( - lagdeltaQ7, kDivFactor, 15); - curLagQ7 = oldLagQ7; - oldLagQ7 = lagsQ7[k]; - - csum1QQ = 1; - esumxQQ = 1; - - // Same as function WebRtcIsacfix_PitchFilter(), we break the pitch - // filtering into two for-loops (5 x 12) below. - for (cnt = 0; cnt < kSegments; cnt++) { - // Update parameters for each segment. - curLagQ7 += lagdeltaQ7; - indW16 = CalcLrIntQ(curLagQ7, 7); - frcQQ = ((indW16 << 7) + 64 - curLagQ7) >> 4; - - if (frcQQ >= PITCH_FRACS) { - frcQQ = 0; - } - fracoeffQQ = kIntrpCoef[frcQQ]; - - pos3QQ = pos - (indW16 + 4); - - for (n = 0; n < PITCH_SUBFRAME_LEN / kSegments; n++) { - // Filter to get fractional pitch. - - tmpW32 = 0; - for (m = 0; m < PITCH_FRACORDER; m++) { - tmpW32 += ubufQQ[pos3QQ + m] * fracoeffQQ[m]; - } - - // Subtract from input and update buffer. - ubufQQ[pos] = indatQ0[ind]; - - tmp2W32 = WEBRTC_SPL_MUL_16_32_RSFT14(indatQ0[ind], tmpW32); - tmpW32 += 8192; - tmpW16 = tmpW32 >> 14; - tmpW32 = tmpW16 * tmpW16; - - if ((tmp2W32 > 1073700000) || (csum1QQ > 1073700000) || - (tmpW32 > 1073700000) || (esumxQQ > 1073700000)) { // 2^30 - scale++; - csum1QQ >>= 1; - esumxQQ >>= 1; - } - csum1QQ += tmp2W32 >> scale; - esumxQQ += tmpW32 >> scale; - - ind++; - pos++; - pos3QQ++; - } - } - - if (csum1QQ < esumxQQ) { - tmp2W32 = WebRtcSpl_DivResultInQ31(csum1QQ, esumxQQ); - - // Gain should be half the correlation. 
- tmpW32 = tmp2W32 >> 20; - } else { - tmpW32 = 4096; - } - gainsQ12[k] = (int16_t)WEBRTC_SPL_SAT(PITCH_MAX_GAIN_Q12, tmpW32, 0); - } - - // Export buffer and states. - memcpy(pfp->ubufQQ, ubufQQ + PITCH_FRAME_LEN, sizeof(pfp->ubufQQ)); - pfp->oldlagQ7 = lagsQ7[PITCH_SUBFRAMES - 1]; - pfp->oldgainQ12 = gainsQ12[PITCH_SUBFRAMES - 1]; - -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/pitch_filter_armv6.S b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/pitch_filter_armv6.S deleted file mode 100644 index 065946856f8f..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/pitch_filter_armv6.S +++ /dev/null @@ -1,143 +0,0 @@ -@ -@ Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. -@ -@ Use of this source code is governed by a BSD-style license -@ that can be found in the LICENSE file in the root of the source -@ tree. An additional intellectual property rights grant can be found -@ in the file PATENTS. All contributing project authors may -@ be found in the AUTHORS file in the root of the source tree. -@ - -@ Contains the core loop routine for the pitch filter function in iSAC, -@ optimized for ARMv7 platforms. -@ -@ Output is bit-exact with the reference C code in pitch_filter.c. - -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" -#include "rtc_base/system/asm_defines.h" - -GLOBAL_FUNCTION WebRtcIsacfix_PitchFilterCore -.align 2 - -@ void WebRtcIsacfix_PitchFilterCore(int loopNumber, -@ int16_t gain, -@ size_t index, -@ int16_t sign, -@ int16_t* inputState, -@ int16_t* outputBuf2, -@ const int16_t* coefficient, -@ int16_t* inputBuf, -@ int16_t* outputBuf, -@ int* index2) { -DEFINE_FUNCTION WebRtcIsacfix_PitchFilterCore - push {r4-r11} - sub sp, #8 - - str r0, [sp] @ loopNumber - str r3, [sp, #4] @ sign - ldr r3, [sp, #44] @ outputBuf2 - ldr r6, [sp, #60] @ index2 - ldr r7, [r6] @ *index2 - ldr r8, [sp, #52] @ inputBuf - ldr r12, [sp, #56] @ outputBuf - - add r4, r7, r0 - str r4, [r6] @ Store return value to index2. - - mov r10, r7, asl #1 - add r12, r10 @ &outputBuf[*index2] - add r8, r10 @ &inputBuf[*index2] - - add r4, r7, #PITCH_BUFFSIZE @ *index2 + PITCH_BUFFSIZE - add r6, r3, r4, lsl #1 @ &outputBuf2[*index2 + PITCH_BUFFSIZE] - sub r4, r2 @ r2: index - sub r4, #2 @ *index2 + PITCH_BUFFSIZE - index - 2 - add r3, r4, lsl #1 @ &ubufQQpos2[*index2] - ldr r9, [sp, #48] @ coefficient - -LOOP: -@ Usage of registers in the loop: -@ r0: loop counter -@ r1: gain -@ r2: tmpW32 -@ r3: &ubufQQpos2[] -@ r6: &outputBuf2[] -@ r8: &inputBuf[] -@ r9: &coefficient[] -@ r12: &outputBuf[] -@ r4, r5, r7, r10, r11: scratch - - @ Filter to get fractional pitch. - @ The pitch filter loop here is unrolled with 9 multipications. - pld [r3] - ldr r10, [r3], #4 @ ubufQQpos2[*index2 + 0, *index2 + 1] - ldr r4, [r9], #4 @ coefficient[0, 1] - ldr r11, [r3], #4 - ldr r5, [r9], #4 - smuad r2, r10, r4 - smlad r2, r11, r5, r2 - - ldr r10, [r3], #4 - ldr r4, [r9], #4 - ldr r11, [r3], #4 - ldr r5, [r9], #4 - smlad r2, r10, r4, r2 - ldrh r10, [r3], #-14 @ r3 back to &ubufQQpos2[*index2]. - ldrh r4, [r9], #-16 @ r9 back to &coefficient[0]. - smlad r2, r11, r5, r2 - smlabb r2, r10, r4, r2 - - @ Saturate to avoid overflow in tmpW16. - asr r2, #1 - add r4, r2, #0x1000 - ssat r7, #16, r4, asr #13 - - @ Shift low pass filter state, and excute the low pass filter. - @ The memmove() and the low pass filter loop are unrolled and mixed. 
- smulbb r5, r1, r7 - add r7, r5, #0x800 - asr r7, #12 @ Get the value for inputState[0]. - ldr r11, [sp, #40] @ inputState - pld [r11] - adr r10, kDampFilter - ldrsh r4, [r10], #2 @ kDampFilter[0] - mul r2, r7, r4 - ldr r4, [r11] @ inputState[0, 1], before shift. - strh r7, [r11] @ inputState[0], after shift. - ldr r5, [r11, #4] @ inputState[2, 3], before shift. - ldr r7, [r10], #4 @ kDampFilter[1, 2] - ldr r10, [r10] @ kDampFilter[3, 4] - str r4, [r11, #2] @ inputState[1, 2], after shift. - str r5, [r11, #6] @ inputState[3, 4], after shift. - smlad r2, r4, r7, r2 - smlad r2, r5, r10, r2 - - @ Saturate to avoid overflow. - @ First shift the sample to the range of [0xC0000000, 0x3FFFFFFF], - @ to avoid overflow in the next saturation step. - asr r2, #1 - add r10, r2, #0x2000 - ssat r10, #16, r10, asr #14 - - @ Subtract from input and update buffer. - ldr r11, [sp, #4] @ sign - ldrsh r4, [r8] - ldrsh r7, [r8], #2 @ inputBuf[*index2] - smulbb r5, r11, r10 - subs r0, #1 - sub r4, r5 - ssat r2, #16, r4 - strh r2, [r12], #2 @ outputBuf[*index2] - - add r2, r7 - ssat r2, #16, r2 - strh r2, [r6], #2 @ outputBuff2[*index2 + PITCH_BUFFSIZE] - bgt LOOP - - add sp, #8 - pop {r4-r11} - bx lr - -.align 2 -kDampFilter: - .short -2294, 8192, 20972, 8192, -2294 diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/pitch_filter_c.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/pitch_filter_c.c deleted file mode 100644 index f23d19de9cb9..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/pitch_filter_c.c +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "common_audio/signal_processing/include/signal_processing_library.h" -#include "modules/audio_coding/codecs/isac/fix/source/pitch_estimator.h" - -/* Filter coefficicients in Q15. */ -static const int16_t kDampFilter[PITCH_DAMPORDER] = { - -2294, 8192, 20972, 8192, -2294 -}; - -void WebRtcIsacfix_PitchFilterCore(int loopNumber, - int16_t gain, - size_t index, - int16_t sign, - int16_t* inputState, - int16_t* outputBuf2, - const int16_t* coefficient, - int16_t* inputBuf, - int16_t* outputBuf, - int* index2) { - int i = 0, j = 0; /* Loop counters. */ - int16_t* ubufQQpos2 = &outputBuf2[PITCH_BUFFSIZE - (index + 2)]; - int16_t tmpW16 = 0; - - for (i = 0; i < loopNumber; i++) { - int32_t tmpW32 = 0; - - /* Filter to get fractional pitch. */ - for (j = 0; j < PITCH_FRACORDER; j++) { - tmpW32 += ubufQQpos2[*index2 + j] * coefficient[j]; - } - - /* Saturate to avoid overflow in tmpW16. */ - tmpW32 = WEBRTC_SPL_SAT(536862719, tmpW32, -536879104); - tmpW32 += 8192; - tmpW16 = (int16_t)(tmpW32 >> 14); - - /* Shift low pass filter state. */ - memmove(&inputState[1], &inputState[0], - (PITCH_DAMPORDER - 1) * sizeof(int16_t)); - inputState[0] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND( - gain, tmpW16, 12); - - /* Low pass filter. */ - tmpW32 = 0; - /* TODO(kma): Define a static inline function WebRtcSpl_DotProduct() - in spl_inl.h to replace this and other similar loops. 
*/ - for (j = 0; j < PITCH_DAMPORDER; j++) { - tmpW32 += inputState[j] * kDampFilter[j]; - } - - /* Saturate to avoid overflow in tmpW16. */ - tmpW32 = WEBRTC_SPL_SAT(1073725439, tmpW32, -1073758208); - tmpW32 += 16384; - tmpW16 = (int16_t)(tmpW32 >> 15); - - /* Subtract from input and update buffer. */ - tmpW32 = inputBuf[*index2] - sign * tmpW16; - outputBuf[*index2] = WebRtcSpl_SatW32ToW16(tmpW32); - tmpW32 = inputBuf[*index2] + outputBuf[*index2]; - outputBuf2[*index2 + PITCH_BUFFSIZE] = WebRtcSpl_SatW32ToW16(tmpW32); - - (*index2)++; - } -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/pitch_filter_mips.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/pitch_filter_mips.c deleted file mode 100644 index 785fd9464fe9..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/pitch_filter_mips.c +++ /dev/null @@ -1,133 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_coding/codecs/isac/fix/source/pitch_estimator.h" - -void WebRtcIsacfix_PitchFilterCore(int loopNumber, - int16_t gain, - size_t index, - int16_t sign, - int16_t* inputState, - int16_t* outputBuf2, - const int16_t* coefficient, - int16_t* inputBuf, - int16_t* outputBuf, - int* index2) { - int ind2t = *index2; - int i = 0; - int16_t* out2_pos2 = &outputBuf2[PITCH_BUFFSIZE - (index + 2)] + ind2t; - int32_t w1, w2, w3, w4, w5, gain32, sign32; - int32_t coef1, coef2, coef3, coef4, coef5 = 0; - // Define damp factors as int32_t (pair of int16_t) - int32_t kDampF0 = 0x0000F70A; - int32_t kDampF1 = 0x51EC2000; - int32_t kDampF2 = 0xF70A2000; - int16_t* input1 = inputBuf + ind2t; - int16_t* output1 = outputBuf + ind2t; - int16_t* output2 = outputBuf2 + ind2t + PITCH_BUFFSIZE; - - // Load coefficients outside the loop and sign-extend gain and sign - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "lwl %[coef1], 3(%[coefficient]) \n\t" - "lwl %[coef2], 7(%[coefficient]) \n\t" - "lwl %[coef3], 11(%[coefficient]) \n\t" - "lwl %[coef4], 15(%[coefficient]) \n\t" - "lwr %[coef1], 0(%[coefficient]) \n\t" - "lwr %[coef2], 4(%[coefficient]) \n\t" - "lwr %[coef3], 8(%[coefficient]) \n\t" - "lwr %[coef4], 12(%[coefficient]) \n\t" - "lhu %[coef5], 16(%[coefficient]) \n\t" - "seh %[gain32], %[gain] \n\t" - "seh %[sign32], %[sign] \n\t" - ".set pop \n\t" - : [coef1] "=&r" (coef1), [coef2] "=&r" (coef2), [coef3] "=&r" (coef3), - [coef4] "=&r" (coef4), [coef5] "=&r" (coef5), [gain32] "=&r" (gain32), - [sign32] "=&r" (sign32) - : [coefficient] "r" (coefficient), [gain] "r" (gain), - [sign] "r" (sign) - : "memory" - ); - - for (i = 0; i < loopNumber; i++) { - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - // Filter to get fractional pitch - "li %[w1], 8192 \n\t" - "mtlo %[w1] \n\t" - "mthi $0 \n\t" - "lwl %[w1], 3(%[out2_pos2]) \n\t" - "lwl %[w2], 7(%[out2_pos2]) \n\t" - "lwl %[w3], 11(%[out2_pos2]) \n\t" - "lwl %[w4], 15(%[out2_pos2]) \n\t" - "lwr %[w1], 0(%[out2_pos2]) \n\t" - "lwr %[w2], 4(%[out2_pos2]) \n\t" - "lwr %[w3], 8(%[out2_pos2]) \n\t" - "lwr %[w4], 12(%[out2_pos2]) \n\t" - "lhu %[w5], 16(%[out2_pos2]) \n\t" - "dpa.w.ph $ac0, %[w1], 
%[coef1] \n\t" - "dpa.w.ph $ac0, %[w2], %[coef2] \n\t" - "dpa.w.ph $ac0, %[w3], %[coef3] \n\t" - "dpa.w.ph $ac0, %[w4], %[coef4] \n\t" - "dpa.w.ph $ac0, %[w5], %[coef5] \n\t" - "addiu %[out2_pos2], %[out2_pos2], 2 \n\t" - "mthi $0, $ac1 \n\t" - "lwl %[w2], 3(%[inputState]) \n\t" - "lwl %[w3], 7(%[inputState]) \n\t" - // Fractional pitch shift & saturation - "extr_s.h %[w1], $ac0, 14 \n\t" - "li %[w4], 16384 \n\t" - "lwr %[w2], 0(%[inputState]) \n\t" - "lwr %[w3], 4(%[inputState]) \n\t" - "mtlo %[w4], $ac1 \n\t" - // Shift low pass filter state - "swl %[w2], 5(%[inputState]) \n\t" - "swl %[w3], 9(%[inputState]) \n\t" - "mul %[w1], %[gain32], %[w1] \n\t" - "swr %[w2], 2(%[inputState]) \n\t" - "swr %[w3], 6(%[inputState]) \n\t" - // Low pass filter accumulation - "dpa.w.ph $ac1, %[kDampF1], %[w2] \n\t" - "dpa.w.ph $ac1, %[kDampF2], %[w3] \n\t" - "lh %[w4], 0(%[input1]) \n\t" - "addiu %[input1], %[input1], 2 \n\t" - "shra_r.w %[w1], %[w1], 12 \n\t" - "sh %[w1], 0(%[inputState]) \n\t" - "dpa.w.ph $ac1, %[kDampF0], %[w1] \n\t" - // Low pass filter shift & saturation - "extr_s.h %[w2], $ac1, 15 \n\t" - "mul %[w2], %[w2], %[sign32] \n\t" - // Buffer update - "subu %[w2], %[w4], %[w2] \n\t" - "shll_s.w %[w2], %[w2], 16 \n\t" - "sra %[w2], %[w2], 16 \n\t" - "sh %[w2], 0(%[output1]) \n\t" - "addu %[w2], %[w2], %[w4] \n\t" - "shll_s.w %[w2], %[w2], 16 \n\t" - "addiu %[output1], %[output1], 2 \n\t" - "sra %[w2], %[w2], 16 \n\t" - "sh %[w2], 0(%[output2]) \n\t" - "addiu %[output2], %[output2], 2 \n\t" - ".set pop \n\t" - : [w1] "=&r" (w1), [w2] "=&r" (w2), [w3] "=&r" (w3), [w4] "=&r" (w4), - [w5] "=&r" (w5), [input1] "+r" (input1), [out2_pos2] "+r" (out2_pos2), - [output1] "+r" (output1), [output2] "+r" (output2) - : [coefficient] "r" (coefficient), [inputState] "r" (inputState), - [gain32] "r" (gain32), [sign32] "r" (sign32), [kDampF0] "r" (kDampF0), - [kDampF1] "r" (kDampF1), [kDampF2] "r" (kDampF2), - [coef1] "r" (coef1), [coef2] "r" (coef2), [coef3] "r" (coef3), - [coef4] "r" (coef4), [coef5] "r" (coef5) - : "hi", "lo", "$ac1hi", "$ac1lo", "memory" - ); - } - (*index2) += loopNumber; -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/pitch_gain_tables.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/pitch_gain_tables.c deleted file mode 100644 index bfbab1950d45..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/pitch_gain_tables.c +++ /dev/null @@ -1,149 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * pitch_gain_tables.c - * - * This file contains tables for the pitch filter side-info in the entropy coder. 
- * - */ - -#include "modules/audio_coding/codecs/isac/fix/source/pitch_gain_tables.h" - - -/********************* Pitch Filter Gain Coefficient Tables ************************/ - -/* cdf for quantized pitch filter gains */ -const uint16_t WebRtcIsacfix_kPitchGainCdf[255] = { - 0, 2, 4, 6, 64, 901, 903, 905, 16954, 16956, - 16961, 17360, 17362, 17364, 17366, 17368, 17370, 17372, 17374, 17411, - 17514, 17516, 17583, 18790, 18796, 18802, 20760, 20777, 20782, 21722, - 21724, 21728, 21738, 21740, 21742, 21744, 21746, 21748, 22224, 22227, - 22230, 23214, 23229, 23239, 25086, 25108, 25120, 26088, 26094, 26098, - 26175, 26177, 26179, 26181, 26183, 26185, 26484, 26507, 26522, 27705, - 27731, 27750, 29767, 29799, 29817, 30866, 30883, 30885, 31025, 31029, - 31031, 31033, 31035, 31037, 31114, 31126, 31134, 32687, 32722, 32767, - 35718, 35742, 35757, 36943, 36952, 36954, 37115, 37128, 37130, 37132, - 37134, 37136, 37143, 37145, 37152, 38843, 38863, 38897, 47458, 47467, - 47474, 49040, 49061, 49063, 49145, 49157, 49159, 49161, 49163, 49165, - 49167, 49169, 49171, 49757, 49770, 49782, 61333, 61344, 61346, 62860, - 62883, 62885, 62887, 62889, 62891, 62893, 62895, 62897, 62899, 62901, - 62903, 62905, 62907, 62909, 65496, 65498, 65500, 65521, 65523, 65525, - 65527, 65529, 65531, 65533, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535 -}; - -/* index limits and ranges */ -const int16_t WebRtcIsacfix_kLowerlimiGain[3] = { - -7, -2, -1 -}; - -const int16_t WebRtcIsacfix_kUpperlimitGain[3] = { - 0, 3, 1 -}; - -const uint16_t WebRtcIsacfix_kMultsGain[2] = { - 18, 3 -}; - -/* size of cdf table */ -const uint16_t WebRtcIsacfix_kCdfTableSizeGain[1] = { - 256 -}; - -/* mean values of pitch filter gains in FIXED point Q12 */ -const int16_t WebRtcIsacfix_kPitchGain1[144] = { - 843, 1092, 1336, 1222, 1405, 1656, 1500, 1815, 1843, 1838, 1839, - 1843, 1843, 1843, 1843, 1843, 1843, 1843, 814, 846, 1092, 1013, - 1174, 1383, 1391, 1511, 1584, 1734, 1753, 1843, 1843, 1843, 1843, - 1843, 1843, 1843, 524, 689, 777, 845, 947, 1069, 1090, 1263, - 1380, 1447, 1559, 1676, 1645, 1749, 1843, 1843, 1843, 1843, 81, - 477, 563, 611, 706, 806, 849, 1012, 1192, 1128, 1330, 1489, - 1425, 1576, 1826, 1741, 1843, 1843, 0, 290, 305, 356, 488, - 575, 602, 741, 890, 835, 1079, 1196, 1182, 1376, 1519, 1506, - 1680, 1843, 0, 47, 97, 69, 289, 381, 385, 474, 617, - 664, 803, 1079, 935, 1160, 1269, 1265, 1506, 1741, 0, 0, - 0, 0, 112, 120, 190, 283, 442, 343, 526, 809, 684, - 935, 1134, 1020, 1265, 1506, 0, 0, 0, 0, 0, 0, - 0, 111, 256, 87, 373, 597, 430, 684, 935, 770, 1020, - 1265 -}; - -const int16_t WebRtcIsacfix_kPitchGain2[144] = { - 1760, 1525, 1285, 1747, 1671, 1393, 1843, 1826, 1555, 1843, 1784, - 1606, 1843, 1843, 1711, 1843, 1843, 1814, 1389, 1275, 1040, 1564, - 1414, 1252, 1610, 1495, 1343, 1753, 
1592, 1405, 1804, 1720, 1475, - 1843, 1814, 1581, 1208, 1061, 856, 1349, 1148, 994, 1390, 1253, - 1111, 1495, 1343, 1178, 1770, 1465, 1234, 1814, 1581, 1342, 1040, - 793, 713, 1053, 895, 737, 1128, 1003, 861, 1277, 1094, 981, - 1475, 1192, 1019, 1581, 1342, 1098, 855, 570, 483, 833, 648, - 540, 948, 744, 572, 1009, 844, 636, 1234, 934, 685, 1342, - 1217, 984, 537, 318, 124, 603, 423, 350, 687, 479, 322, - 791, 581, 430, 987, 671, 488, 1098, 849, 597, 283, 27, - 0, 397, 222, 38, 513, 271, 124, 624, 325, 157, 737, - 484, 233, 849, 597, 343, 27, 0, 0, 141, 0, 0, - 256, 69, 0, 370, 87, 0, 484, 229, 0, 597, 343, - 87 -}; - -const int16_t WebRtcIsacfix_kPitchGain3[144] = { - 1843, 1843, 1711, 1843, 1818, 1606, 1843, 1827, 1511, 1814, 1639, - 1393, 1760, 1525, 1285, 1656, 1419, 1176, 1835, 1718, 1475, 1841, - 1650, 1387, 1648, 1498, 1287, 1600, 1411, 1176, 1522, 1299, 1040, - 1419, 1176, 928, 1773, 1461, 1128, 1532, 1355, 1202, 1429, 1260, - 1115, 1398, 1151, 1025, 1172, 1080, 790, 1176, 928, 677, 1475, - 1147, 1019, 1276, 1096, 922, 1214, 1010, 901, 1057, 893, 800, - 1040, 796, 734, 928, 677, 424, 1137, 897, 753, 1120, 830, - 710, 875, 751, 601, 795, 642, 583, 790, 544, 475, 677, - 474, 140, 987, 750, 482, 697, 573, 450, 691, 487, 303, - 661, 394, 332, 537, 303, 220, 424, 168, 0, 737, 484, - 229, 624, 348, 153, 441, 261, 136, 397, 166, 51, 283, - 27, 0, 168, 0, 0, 484, 229, 0, 370, 57, 0, - 256, 43, 0, 141, 0, 0, 27, 0, 0, 0, 0, - 0 -}; - - -const int16_t WebRtcIsacfix_kPitchGain4[144] = { - 1843, 1843, 1843, 1843, 1841, 1843, 1500, 1821, 1843, 1222, 1434, - 1656, 843, 1092, 1336, 504, 757, 1007, 1843, 1843, 1843, 1838, - 1791, 1843, 1265, 1505, 1599, 965, 1219, 1425, 730, 821, 1092, - 249, 504, 757, 1783, 1819, 1843, 1351, 1567, 1727, 1096, 1268, - 1409, 805, 961, 1131, 444, 670, 843, 0, 249, 504, 1425, - 1655, 1743, 1096, 1324, 1448, 822, 1019, 1199, 490, 704, 867, - 81, 450, 555, 0, 0, 249, 1247, 1428, 1530, 881, 1073, - 1283, 610, 759, 939, 278, 464, 645, 0, 200, 270, 0, - 0, 0, 935, 1163, 1410, 528, 790, 1068, 377, 499, 717, - 173, 240, 274, 0, 43, 62, 0, 0, 0, 684, 935, - 1182, 343, 551, 735, 161, 262, 423, 0, 55, 27, 0, - 0, 0, 0, 0, 0, 430, 684, 935, 87, 377, 597, - 0, 46, 256, 0, 0, 0, 0, 0, 0, 0, 0, - 0 -}; - - - -/* transform matrix in Q12*/ -const int16_t WebRtcIsacfix_kTransform[4][4] = { - { -2048, -2048, -2048, -2048 }, - { 2748, 916, -916, -2748 }, - { 2048, -2048, -2048, 2048 }, - { 916, -2748, 2748, -916 } -}; diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/pitch_gain_tables.h b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/pitch_gain_tables.h deleted file mode 100644 index 59e1738bce16..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/pitch_gain_tables.h +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * pitch_gain_tables.h - * - * This file contains tables for the pitch filter side-info in the entropy - * coder. 
- * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_PITCH_GAIN_TABLES_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_PITCH_GAIN_TABLES_H_ - -#include - -/********************* Pitch Filter Gain Coefficient Tables - * ************************/ -/* cdf for quantized pitch filter gains */ -extern const uint16_t WebRtcIsacfix_kPitchGainCdf[255]; - -/* index limits and ranges */ -extern const int16_t WebRtcIsacfix_kLowerlimiGain[3]; -extern const int16_t WebRtcIsacfix_kUpperlimitGain[3]; -extern const uint16_t WebRtcIsacfix_kMultsGain[2]; - -/* mean values of pitch filter gains in Q12*/ -extern const int16_t WebRtcIsacfix_kPitchGain1[144]; -extern const int16_t WebRtcIsacfix_kPitchGain2[144]; -extern const int16_t WebRtcIsacfix_kPitchGain3[144]; -extern const int16_t WebRtcIsacfix_kPitchGain4[144]; - -/* size of cdf table */ -extern const uint16_t WebRtcIsacfix_kCdfTableSizeGain[1]; - -/* transform matrix */ -extern const int16_t WebRtcIsacfix_kTransform[4][4]; - -#endif /* MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_PITCH_GAIN_TABLES_H_ */ diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/pitch_lag_tables.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/pitch_lag_tables.c deleted file mode 100644 index 894716e73980..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/pitch_lag_tables.c +++ /dev/null @@ -1,306 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * pitch_lag_tables.c - * - * This file contains tables for the pitch filter side-info in the entropy coder. 
- * - */ - -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" -#include "modules/audio_coding/codecs/isac/fix/source/pitch_lag_tables.h" - - -/********************* Pitch Filter Gain Coefficient Tables ************************/ - -/* tables for use with small pitch gain */ - -/* cdf for quantized pitch filter lags */ -const uint16_t WebRtcIsacfix_kPitchLagCdf1Lo[127] = { - 0, 134, 336, 549, 778, 998, 1264, 1512, 1777, 2070, - 2423, 2794, 3051, 3361, 3708, 3979, 4315, 4610, 4933, 5269, - 5575, 5896, 6155, 6480, 6816, 7129, 7477, 7764, 8061, 8358, - 8718, 9020, 9390, 9783, 10177, 10543, 10885, 11342, 11795, 12213, - 12680, 13096, 13524, 13919, 14436, 14903, 15349, 15795, 16267, 16734, - 17266, 17697, 18130, 18632, 19080, 19447, 19884, 20315, 20735, 21288, - 21764, 22264, 22723, 23193, 23680, 24111, 24557, 25022, 25537, 26082, - 26543, 27090, 27620, 28139, 28652, 29149, 29634, 30175, 30692, 31273, - 31866, 32506, 33059, 33650, 34296, 34955, 35629, 36295, 36967, 37726, - 38559, 39458, 40364, 41293, 42256, 43215, 44231, 45253, 46274, 47359, - 48482, 49678, 50810, 51853, 53016, 54148, 55235, 56263, 57282, 58363, - 59288, 60179, 61076, 61806, 62474, 63129, 63656, 64160, 64533, 64856, - 65152, 65535, 65535, 65535, 65535, 65535, 65535 -}; - -const uint16_t WebRtcIsacfix_kPitchLagCdf2Lo[20] = { - 0, 429, 3558, 5861, 8558, 11639, 15210, 19502, 24773, 31983, - 42602, 48567, 52601, 55676, 58160, 60172, 61889, 63235, 65383, 65535 -}; - -const uint16_t WebRtcIsacfix_kPitchLagCdf3Lo[2] = { - 0, 65535 -}; - -const uint16_t WebRtcIsacfix_kPitchLagCdf4Lo[10] = { - 0, 2966, 6368, 11182, 19431, 37793, 48532, 55353, 60626, 65535 -}; - -const uint16_t *WebRtcIsacfix_kPitchLagPtrLo[4] = { - WebRtcIsacfix_kPitchLagCdf1Lo, - WebRtcIsacfix_kPitchLagCdf2Lo, - WebRtcIsacfix_kPitchLagCdf3Lo, - WebRtcIsacfix_kPitchLagCdf4Lo -}; - -/* size of first cdf table */ -const uint16_t WebRtcIsacfix_kPitchLagSizeLo[1] = { - 128 -}; - -/* index limits and ranges */ -const int16_t WebRtcIsacfix_kLowerLimitLo[4] = { - -140, -9, 0, -4 -}; - -const int16_t WebRtcIsacfix_kUpperLimitLo[4] = { - -20, 9, 0, 4 -}; - -/* initial index for arithmetic decoder */ -const uint16_t WebRtcIsacfix_kInitIndLo[3] = { - 10, 1, 5 -}; - -/* mean values of pitch filter lags in Q10 */ - -const int16_t WebRtcIsacfix_kMeanLag2Lo[19] = { - -17627, -16207, -14409, -12319, -10253, -8200, -6054, -3986, -1948, -19, - 1937, 3974, 6064, 8155, 10229, 12270, 14296, 16127, 17520 -}; - -const int16_t WebRtcIsacfix_kMeanLag4Lo[9] = { - -7949, -6063, -4036, -1941, 38, 1977, 4060, 6059 -}; - - - -/* tables for use with medium pitch gain */ - -/* cdf for quantized pitch filter lags */ -const uint16_t WebRtcIsacfix_kPitchLagCdf1Mid[255] = { - 0, 28, 61, 88, 121, 149, 233, 331, 475, 559, - 624, 661, 689, 712, 745, 791, 815, 843, 866, 922, - 959, 1024, 1061, 1117, 1178, 1238, 1280, 1350, 1453, 1513, - 1564, 1625, 1671, 1741, 1788, 1904, 2072, 2421, 2626, 2770, - 2840, 2900, 2942, 3012, 3068, 3115, 3147, 3194, 3254, 3319, - 3366, 3520, 3678, 3780, 3850, 3911, 3957, 4032, 4106, 4185, - 4292, 4474, 4683, 4842, 5019, 5191, 5321, 5428, 5540, 5675, - 5763, 5847, 5959, 6127, 6304, 6564, 6839, 7090, 7263, 7421, - 7556, 7728, 7872, 7984, 8142, 8361, 8580, 8743, 8938, 9227, - 9409, 9539, 9674, 9795, 9930, 10060, 10177, 10382, 10614, 10861, - 11038, 11271, 11415, 11629, 11792, 12044, 12193, 12416, 12574, 12821, - 13007, 13235, 13445, 13654, 13901, 14134, 14488, 15000, 15703, 16285, - 16504, 16797, 17086, 17328, 17579, 17807, 17998, 18268, 18538, 18836, - 19087, 19274, 
19474, 19716, 19935, 20270, 20833, 21303, 21532, 21741, - 21978, 22207, 22523, 22770, 23054, 23613, 23943, 24204, 24399, 24651, - 24832, 25074, 25270, 25549, 25759, 26015, 26150, 26424, 26713, 27048, - 27342, 27504, 27681, 27854, 28021, 28207, 28412, 28664, 28859, 29064, - 29278, 29548, 29748, 30107, 30377, 30656, 30856, 31164, 31452, 31755, - 32011, 32328, 32626, 32919, 33319, 33789, 34329, 34925, 35396, 35973, - 36443, 36964, 37551, 38156, 38724, 39357, 40023, 40908, 41587, 42602, - 43924, 45037, 45810, 46597, 47421, 48291, 49092, 50051, 51448, 52719, - 53440, 54241, 54944, 55977, 56676, 57299, 57872, 58389, 59059, 59688, - 60237, 60782, 61094, 61573, 61890, 62290, 62658, 63030, 63217, 63454, - 63622, 63882, 64003, 64273, 64427, 64529, 64581, 64697, 64758, 64902, - 65414, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535 -}; - -const uint16_t WebRtcIsacfix_kPitchLagCdf2Mid[36] = { - 0, 71, 335, 581, 836, 1039, 1323, 1795, 2258, 2608, - 3005, 3591, 4243, 5344, 7163, 10583, 16848, 28078, 49448, 57007, - 60357, 61850, 62837, 63437, 63872, 64188, 64377, 64614, 64774, 64949, - 65039, 65115, 65223, 65360, 65474, 65535 -}; - -const uint16_t WebRtcIsacfix_kPitchLagCdf3Mid[2] = { - 0, 65535 -}; - -const uint16_t WebRtcIsacfix_kPitchLagCdf4Mid[20] = { - 0, 28, 246, 459, 667, 1045, 1523, 2337, 4337, 11347, - 44231, 56709, 60781, 62243, 63161, 63969, 64608, 65062, 65502, 65535 -}; - -const uint16_t *WebRtcIsacfix_kPitchLagPtrMid[4] = { - WebRtcIsacfix_kPitchLagCdf1Mid, - WebRtcIsacfix_kPitchLagCdf2Mid, - WebRtcIsacfix_kPitchLagCdf3Mid, - WebRtcIsacfix_kPitchLagCdf4Mid -}; - -/* size of first cdf table */ -const uint16_t WebRtcIsacfix_kPitchLagSizeMid[1] = { - 256 -}; - -/* index limits and ranges */ -const int16_t WebRtcIsacfix_kLowerLimitMid[4] = { - -280, -17, 0, -9 -}; - -const int16_t WebRtcIsacfix_kUpperLimitMid[4] = { - -40, 17, 0, 9 -}; - -/* initial index for arithmetic decoder */ -const uint16_t WebRtcIsacfix_kInitIndMid[3] = { - 18, 1, 10 -}; - -/* mean values of pitch filter lags in Q10 */ - -const int16_t WebRtcIsacfix_kMeanLag2Mid[35] = { - -17297, -16250, -15416, -14343, -13341, -12363, -11270, - -10355, -9122, -8217, -7172, -6083, -5102, -4004, -3060, - -1982, -952, -18, 935, 1976, 3040, 4032, - 5082, 6065, 7257, 8202, 9264, 10225, 11242, - 12234, 13337, 14336, 15374, 16187, 17347 -}; - - -const int16_t WebRtcIsacfix_kMeanLag4Mid[19] = { - -8811, -8081, -7203, -6003, -5057, -4025, -2983, -1964, - -891, 29, 921, 1920, 2988, 4064, 5187, 6079, 7173, 8074, 8849 -}; - - -/* tables for use with large pitch gain */ - -/* cdf for quantized pitch filter lags */ -const uint16_t WebRtcIsacfix_kPitchLagCdf1Hi[511] = { - 0, 7, 18, 33, 69, 105, 156, 228, 315, 612, - 680, 691, 709, 724, 735, 738, 742, 746, 749, 753, - 756, 760, 764, 774, 782, 785, 789, 796, 800, 803, - 807, 814, 818, 822, 829, 832, 847, 854, 858, 869, - 876, 883, 898, 908, 934, 977, 1010, 1050, 1060, 1064, - 1075, 1078, 1086, 1089, 1093, 1104, 1111, 1122, 1133, 1136, - 1151, 1162, 1183, 1209, 1252, 1281, 1339, 1364, 1386, 1401, - 1411, 1415, 1426, 1430, 1433, 1440, 1448, 1455, 1462, 1477, - 1487, 1495, 1502, 1506, 1509, 1516, 1524, 1531, 1535, 1542, - 1553, 1556, 1578, 1589, 1611, 1625, 1639, 1643, 1654, 1665, - 1672, 1687, 1694, 1705, 1708, 1719, 1730, 1744, 1752, 1759, - 1791, 1795, 1820, 1867, 1886, 1915, 1936, 1943, 1965, 1987, - 2041, 2099, 2161, 2175, 2200, 2211, 2226, 2233, 2244, 2251, - 2266, 2280, 2287, 2298, 2309, 2316, 2331, 2342, 2356, 2378, - 2403, 2418, 2447, 2497, 2544, 
2602, 2863, 2895, 2903, 2935, - 2950, 2971, 3004, 3011, 3018, 3029, 3040, 3062, 3087, 3127, - 3152, 3170, 3199, 3243, 3293, 3322, 3340, 3377, 3402, 3427, - 3474, 3518, 3543, 3579, 3601, 3637, 3659, 3706, 3731, 3760, - 3818, 3847, 3869, 3901, 3920, 3952, 4068, 4169, 4220, 4271, - 4524, 4571, 4604, 4632, 4672, 4730, 4777, 4806, 4857, 4904, - 4951, 5002, 5031, 5060, 5107, 5150, 5212, 5266, 5331, 5382, - 5432, 5490, 5544, 5610, 5700, 5762, 5812, 5874, 5972, 6022, - 6091, 6163, 6232, 6305, 6402, 6540, 6685, 6880, 7090, 7271, - 7379, 7452, 7542, 7625, 7687, 7770, 7843, 7911, 7966, 8024, - 8096, 8190, 8252, 8320, 8411, 8501, 8585, 8639, 8751, 8842, - 8918, 8986, 9066, 9127, 9203, 9269, 9345, 9406, 9464, 9536, - 9612, 9667, 9735, 9844, 9931, 10036, 10119, 10199, 10260, 10358, - 10441, 10514, 10666, 10734, 10872, 10951, 11053, 11125, 11223, 11324, - 11516, 11664, 11737, 11816, 11892, 12008, 12120, 12200, 12280, 12392, - 12490, 12576, 12685, 12812, 12917, 13003, 13108, 13210, 13300, 13384, - 13470, 13579, 13673, 13771, 13879, 13999, 14136, 14201, 14368, 14614, - 14759, 14867, 14958, 15030, 15121, 15189, 15280, 15385, 15461, 15555, - 15653, 15768, 15884, 15971, 16069, 16145, 16210, 16279, 16380, 16463, - 16539, 16615, 16688, 16818, 16919, 17017, 18041, 18338, 18523, 18649, - 18790, 18917, 19047, 19167, 19315, 19460, 19601, 19731, 19858, 20068, - 20173, 20318, 20466, 20625, 20741, 20911, 21045, 21201, 21396, 21588, - 21816, 22022, 22305, 22547, 22786, 23072, 23322, 23600, 23879, 24168, - 24433, 24769, 25120, 25511, 25895, 26289, 26792, 27219, 27683, 28077, - 28566, 29094, 29546, 29977, 30491, 30991, 31573, 32105, 32594, 33173, - 33788, 34497, 35181, 35833, 36488, 37255, 37921, 38645, 39275, 39894, - 40505, 41167, 41790, 42431, 43096, 43723, 44385, 45134, 45858, 46607, - 47349, 48091, 48768, 49405, 49955, 50555, 51167, 51985, 52611, 53078, - 53494, 53965, 54435, 54996, 55601, 56125, 56563, 56838, 57244, 57566, - 57967, 58297, 58771, 59093, 59419, 59647, 59886, 60143, 60461, 60693, - 60917, 61170, 61416, 61634, 61891, 62122, 62310, 62455, 62632, 62839, - 63103, 63436, 63639, 63805, 63906, 64015, 64192, 64355, 64475, 64558, - 64663, 64742, 64811, 64865, 64916, 64956, 64981, 65025, 65068, 65115, - 65195, 65314, 65419, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535 -}; - -const uint16_t WebRtcIsacfix_kPitchLagCdf2Hi[68] = { - 0, 7, 11, 22, 37, 52, 56, 59, 81, 85, - 89, 96, 115, 130, 137, 152, 170, 181, 193, 200, - 207, 233, 237, 259, 289, 318, 363, 433, 592, 992, - 1607, 3062, 6149, 12206, 25522, 48368, 58223, 61918, 63640, 64584, - 64943, 65098, 65206, 65268, 65294, 65335, 65350, 65372, 65387, 65402, - 65413, 65420, 65428, 65435, 65439, 65450, 65454, 65468, 65472, 65476, - 65483, 65491, 65498, 65505, 65516, 65520, 65528, 65535 -}; - -const uint16_t WebRtcIsacfix_kPitchLagCdf3Hi[2] = { - 0, 65535 -}; - -const uint16_t WebRtcIsacfix_kPitchLagCdf4Hi[35] = { - 0, 7, 19, 30, 41, 48, 63, 74, 82, 96, - 122, 152, 215, 330, 701, 2611, 10931, 48106, 61177, 64341, - 65112, 65238, 65309, 65338, 65364, 65379, 65401, 65427, 65453, - 65465, 65476, 65490, 65509, 65528, 65535 -}; - -const uint16_t *WebRtcIsacfix_kPitchLagPtrHi[4] = { - WebRtcIsacfix_kPitchLagCdf1Hi, - WebRtcIsacfix_kPitchLagCdf2Hi, - WebRtcIsacfix_kPitchLagCdf3Hi, - WebRtcIsacfix_kPitchLagCdf4Hi -}; - -/* size of first cdf table */ 
-const uint16_t WebRtcIsacfix_kPitchLagSizeHi[1] = { - 512 -}; - -/* index limits and ranges */ -const int16_t WebRtcIsacfix_kLowerLimitHi[4] = { - -552, -34, 0, -16 -}; - -const int16_t WebRtcIsacfix_kUpperLimitHi[4] = { - -80, 32, 0, 17 -}; - -/* initial index for arithmetic decoder */ -const uint16_t WebRtcIsacfix_kInitIndHi[3] = { - 34, 1, 18 -}; - -/* mean values of pitch filter lags */ - -const int16_t WebRtcIsacfix_kMeanLag2Hi[67] = { - -17482, -16896, -16220, -15929, -15329, -14848, -14336, -13807, -13312, -12800, -12218, -11720, - -11307, -10649, -10396, -9742, -9148, -8668, -8297, -7718, -7155, -6656, -6231, -5600, -5129, - -4610, -4110, -3521, -3040, -2525, -2016, -1506, -995, -477, -5, 469, 991, 1510, 2025, 2526, 3079, - 3555, 4124, 4601, 5131, 5613, 6194, 6671, 7140, 7645, 8207, 8601, 9132, 9728, 10359, 10752, 11302, - 11776, 12288, 12687, 13204, 13759, 14295, 14810, 15360, 15764, 16350 -}; - - -const int16_t WebRtcIsacfix_kMeanLag4Hi[34] = { - -8175, -7659, -7205, -6684, -6215, -5651, -5180, -4566, -4087, -3536, -3096, - -2532, -1990, -1482, -959, -440, 11, 451, 954, 1492, 2020, 2562, 3059, - 3577, 4113, 4618, 5134, 5724, 6060, 6758, 7015, 7716, 8066, 8741 -}; diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/pitch_lag_tables.h b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/pitch_lag_tables.h deleted file mode 100644 index 228da26731f9..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/pitch_lag_tables.h +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * pitch_lag_tables.h - * - * This file contains tables for the pitch filter side-info in the entropy - * coder. 
- * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_PITCH_LAG_TABLES_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_PITCH_LAG_TABLES_H_ - -#include - -/********************* Pitch Filter Lag Coefficient Tables - * ************************/ - -/* tables for use with small pitch gain */ - -/* cdfs for quantized pitch lags */ -extern const uint16_t WebRtcIsacfix_kPitchLagCdf1Lo[127]; -extern const uint16_t WebRtcIsacfix_kPitchLagCdf2Lo[20]; -extern const uint16_t WebRtcIsacfix_kPitchLagCdf3Lo[2]; -extern const uint16_t WebRtcIsacfix_kPitchLagCdf4Lo[10]; - -extern const uint16_t* WebRtcIsacfix_kPitchLagPtrLo[4]; - -/* size of first cdf table */ -extern const uint16_t WebRtcIsacfix_kPitchLagSizeLo[1]; - -/* index limits and ranges */ -extern const int16_t WebRtcIsacfix_kLowerLimitLo[4]; -extern const int16_t WebRtcIsacfix_kUpperLimitLo[4]; - -/* initial index for arithmetic decoder */ -extern const uint16_t WebRtcIsacfix_kInitIndLo[3]; - -/* mean values of pitch filter lags */ -extern const int16_t WebRtcIsacfix_kMeanLag2Lo[19]; -extern const int16_t WebRtcIsacfix_kMeanLag4Lo[9]; - -/* tables for use with medium pitch gain */ - -/* cdfs for quantized pitch lags */ -extern const uint16_t WebRtcIsacfix_kPitchLagCdf1Mid[255]; -extern const uint16_t WebRtcIsacfix_kPitchLagCdf2Mid[36]; -extern const uint16_t WebRtcIsacfix_kPitchLagCdf3Mid[2]; -extern const uint16_t WebRtcIsacfix_kPitchLagCdf4Mid[20]; - -extern const uint16_t* WebRtcIsacfix_kPitchLagPtrMid[4]; - -/* size of first cdf table */ -extern const uint16_t WebRtcIsacfix_kPitchLagSizeMid[1]; - -/* index limits and ranges */ -extern const int16_t WebRtcIsacfix_kLowerLimitMid[4]; -extern const int16_t WebRtcIsacfix_kUpperLimitMid[4]; - -/* initial index for arithmetic decoder */ -extern const uint16_t WebRtcIsacfix_kInitIndMid[3]; - -/* mean values of pitch filter lags */ -extern const int16_t WebRtcIsacfix_kMeanLag2Mid[35]; -extern const int16_t WebRtcIsacfix_kMeanLag4Mid[19]; - -/* tables for use with large pitch gain */ - -/* cdfs for quantized pitch lags */ -extern const uint16_t WebRtcIsacfix_kPitchLagCdf1Hi[511]; -extern const uint16_t WebRtcIsacfix_kPitchLagCdf2Hi[68]; -extern const uint16_t WebRtcIsacfix_kPitchLagCdf3Hi[2]; -extern const uint16_t WebRtcIsacfix_kPitchLagCdf4Hi[35]; - -extern const uint16_t* WebRtcIsacfix_kPitchLagPtrHi[4]; - -/* size of first cdf table */ -extern const uint16_t WebRtcIsacfix_kPitchLagSizeHi[1]; - -/* index limits and ranges */ -extern const int16_t WebRtcIsacfix_kLowerLimitHi[4]; -extern const int16_t WebRtcIsacfix_kUpperLimitHi[4]; - -/* initial index for arithmetic decoder */ -extern const uint16_t WebRtcIsacfix_kInitIndHi[3]; - -/* mean values of pitch filter lags */ -extern const int16_t WebRtcIsacfix_kMeanLag2Hi[67]; -extern const int16_t WebRtcIsacfix_kMeanLag4Hi[34]; - -#endif /* MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_PITCH_LAG_TABLES_H_ */ diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/settings.h b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/settings.h deleted file mode 100644 index 03a2d05457d7..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/settings.h +++ /dev/null @@ -1,211 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. 
An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * settings.h - * - * Declaration of #defines used in the iSAC codec - * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_SETTINGS_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_SETTINGS_H_ - -/* sampling frequency (Hz) */ -#define FS 16000 -/* 1.5 times Sampling frequency */ -#define FS_1_HALF (uint32_t)24000 -/* Three times Sampling frequency */ -#define FS3 (uint32_t)48000 -/* Eight times Sampling frequency */ -#define FS8 (uint32_t)128000 - -/* number of samples per frame (either 480 (30ms) or 960 (60ms)) */ -#define INITIAL_FRAMESAMPLES 960 - -/* miliseconds */ -#define FRAMESIZE 30 -/* number of samples per frame processed in the encoder (30ms) */ -#define FRAMESAMPLES 480 /* ((FRAMESIZE*FS)/1000) */ -#define FRAMESAMPLES_HALF 240 -/* max number of samples per frame (= 60 ms frame) */ -#define MAX_FRAMESAMPLES 960 -/* number of samples per 10ms frame */ -#define FRAMESAMPLES_10ms 160 /* ((10*FS)/1000) */ -/* Number of samples per 1 ms */ -#define SAMPLES_PER_MSEC 16 -/* number of subframes */ -#define SUBFRAMES 6 -/* length of a subframe */ -#define UPDATE 80 -/* length of half a subframe (low/high band) */ -#define HALF_SUBFRAMELEN 40 /* (UPDATE/2) */ -/* samples of look ahead (in a half-band, so actually half the samples of look - * ahead @ FS) */ -#define QLOOKAHEAD 24 /* 3 ms */ - -/* order of AR model in spectral entropy coder */ -#define AR_ORDER 6 -#define MAX_ORDER 13 -#define LEVINSON_MAX_ORDER 12 - -/* window length (masking analysis) */ -#define WINLEN 256 -/* order of low-band pole filter used to approximate masking curve */ -#define ORDERLO 12 -/* order of hi-band pole filter used to approximate masking curve */ -#define ORDERHI 6 - -#define KLT_NUM_AVG_GAIN 0 -#define KLT_NUM_AVG_SHAPE 0 -#define KLT_NUM_MODELS 3 -#define LPC_SHAPE_ORDER 18 /* (ORDERLO + ORDERHI) */ - -#define KLT_ORDER_GAIN 12 /* (2 * SUBFRAMES) */ -#define KLT_ORDER_SHAPE 108 /* (LPC_SHAPE_ORDER * SUBFRAMES) */ - -/* order for post_filter_bank */ -#define POSTQORDER 3 -/* order for pre-filterbank */ -#define QORDER 3 -/* for decimator */ -#define ALLPASSSECTIONS 2 -/* The number of composite all-pass filter factors */ -#define NUMBEROFCOMPOSITEAPSECTIONS 4 - -/* The number of all-pass filter factors in an upper or lower channel*/ -#define NUMBEROFCHANNELAPSECTIONS 2 - -#define DPMIN_Q10 -10240 /* -10.00 in Q10 */ -#define DPMAX_Q10 10240 /* 10.00 in Q10 */ -#define MINBITS_Q10 10240 /* 10.0 in Q10 */ - -/* array size for byte stream in number of Word16. */ -#define STREAM_MAXW16 \ - 300 /* The old maximum size still needed for the decoding */ -#define STREAM_MAXW16_30MS \ - 100 /* 100 Word16 = 200 bytes = 53.4 kbit/s @ 30 ms.framelength */ -#define STREAM_MAXW16_60MS \ - 200 /* 200 Word16 = 400 bytes = 53.4 kbit/s @ 60 ms.framelength */ -/* This is used only at the decoder bit-stream struct. - * - The encoder and decoder bitstream containers are of different size because - * old iSAC limited the encoded bitstream to 600 bytes. But newer versions - * restrict to shorter bitstream. - * - We add 10 bytes of guards to the internal bitstream container. The reason - * is that entropy decoder might read few bytes (3 according to our - * observations) more than the actual size of the bitstream. 
To avoid reading - * outside memory, in rare occasion of full-size bitstream we add 10 bytes - * of guard. */ -#define INTERNAL_STREAM_SIZE_W16 (STREAM_MAXW16 + 5) - -/* storage size for bit counts */ -//#define BIT_COUNTER_SIZE 30 -/* maximum order of any AR model or filter */ -#define MAX_AR_MODEL_ORDER 12 - -/* Maximum number of iterations allowed to limit payload size */ -#define MAX_PAYLOAD_LIMIT_ITERATION 1 - -/* Bandwidth estimator */ - -#define MIN_ISAC_BW 10000 /* Minimum bandwidth in bits per sec */ -#define MAX_ISAC_BW 32000 /* Maxmum bandwidth in bits per sec */ -#define MIN_ISAC_MD 5 /* Minimum Max Delay in ?? */ -#define MAX_ISAC_MD 25 /* Maxmum Max Delay in ?? */ -#define DELAY_CORRECTION_MAX 717 -#define DELAY_CORRECTION_MED 819 -#define Thld_30_60 18000 -#define Thld_60_30 27000 - -/* assumed header size; we don't know the exact number (header compression may - * be used) */ -#define HEADER_SIZE 35 /* bytes */ -#define INIT_FRAME_LEN 60 -#define INIT_BN_EST 20000 -#define INIT_BN_EST_Q7 2560000 /* 20 kbps in Q7 */ -#define INIT_REC_BN_EST_Q5 789312 /* INIT_BN_EST + INIT_HDR_RATE in Q5 */ - -/* 8738 in Q18 is ~ 1/30 */ -/* #define INIT_HDR_RATE (((HEADER_SIZE * 8 * 1000) * 8738) >> NUM_BITS_TO_SHIFT - * (INIT_FRAME_LEN)) */ -#define INIT_HDR_RATE 4666 -/* number of packets in a row for a high rate burst */ -#define BURST_LEN 3 -/* ms, max time between two full bursts */ -#define BURST_INTERVAL 800 -/* number of packets in a row for initial high rate burst */ -#define INIT_BURST_LEN 5 -/* bits/s, rate for the first BURST_LEN packets */ -#define INIT_RATE 10240000 /* INIT_BN_EST in Q9 */ - -/* For pitch analysis */ -#define PITCH_FRAME_LEN 240 /* (FRAMESAMPLES/2) 30 ms */ -#define PITCH_MAX_LAG 140 /* 57 Hz */ -#define PITCH_MIN_LAG 20 /* 400 Hz */ -#define PITCH_MIN_LAG_Q8 5120 /* 256 * PITCH_MIN_LAG */ -#define OFFSET_Q8 768 /* 256 * 3 */ - -#define PITCH_MAX_GAIN_Q12 1843 /* 0.45 */ -#define PITCH_LAG_SPAN2 65 /* (PITCH_MAX_LAG/2-PITCH_MIN_LAG/2+5) */ -#define PITCH_CORR_LEN2 60 /* 15 ms */ -#define PITCH_CORR_STEP2 60 /* (PITCH_FRAME_LEN/4) */ -#define PITCH_SUBFRAMES 4 -#define PITCH_SUBFRAME_LEN 60 /* (PITCH_FRAME_LEN/PITCH_SUBFRAMES) */ - -/* For pitch filter */ -#define PITCH_BUFFSIZE \ - 190 /* (PITCH_MAX_LAG + 50) Extra 50 for fraction and LP filters */ -#define PITCH_INTBUFFSIZE 430 /* (PITCH_FRAME_LEN+PITCH_BUFFSIZE) */ -#define PITCH_FRACS 8 -#define PITCH_FRACORDER 9 -#define PITCH_DAMPORDER 5 - -/* Order of high pass filter */ -#define HPORDER 2 - -/* PLC */ -#define DECAY_RATE \ - 10 /* Q15, 20% of decay every lost frame apllied linearly sample by sample*/ -#define PLC_WAS_USED 1 -#define PLC_NOT_USED 3 -#define RECOVERY_OVERLAP 80 -#define RESAMP_RES 256 -#define RESAMP_RES_BIT 8 - -/* Define Error codes */ -/* 6000 General */ -#define ISAC_MEMORY_ALLOCATION_FAILED 6010 -#define ISAC_MODE_MISMATCH 6020 -#define ISAC_DISALLOWED_BOTTLENECK 6030 -#define ISAC_DISALLOWED_FRAME_LENGTH 6040 -/* 6200 Bandwidth estimator */ -#define ISAC_RANGE_ERROR_BW_ESTIMATOR 6240 -/* 6400 Encoder */ -#define ISAC_ENCODER_NOT_INITIATED 6410 -#define ISAC_DISALLOWED_CODING_MODE 6420 -#define ISAC_DISALLOWED_FRAME_MODE_ENCODER 6430 -#define ISAC_DISALLOWED_BITSTREAM_LENGTH 6440 -#define ISAC_PAYLOAD_LARGER_THAN_LIMIT 6450 -/* 6600 Decoder */ -#define ISAC_DECODER_NOT_INITIATED 6610 -#define ISAC_EMPTY_PACKET 6620 -#define ISAC_PACKET_TOO_SHORT 6625 -#define ISAC_DISALLOWED_FRAME_MODE_DECODER 6630 -#define ISAC_RANGE_ERROR_DECODE_FRAME_LENGTH 6640 -#define 
ISAC_RANGE_ERROR_DECODE_BANDWIDTH 6650 -#define ISAC_RANGE_ERROR_DECODE_PITCH_GAIN 6660 -#define ISAC_RANGE_ERROR_DECODE_PITCH_LAG 6670 -#define ISAC_RANGE_ERROR_DECODE_LPC 6680 -#define ISAC_RANGE_ERROR_DECODE_SPECTRUM 6690 -#define ISAC_LENGTH_MISMATCH 6730 -/* 6800 Call setup formats */ -#define ISAC_INCOMPATIBLE_FORMATS 6810 - -#endif /* MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_SETTINGS_H_ */ diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/spectrum_ar_model_tables.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/spectrum_ar_model_tables.c deleted file mode 100644 index 4ef9a338ccb2..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/spectrum_ar_model_tables.c +++ /dev/null @@ -1,193 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * spectrum_ar_model_tables.c - * - * This file contains tables with AR coefficients, Gain coefficients - * and cosine tables. - * - */ - -#include "modules/audio_coding/codecs/isac/fix/source/spectrum_ar_model_tables.h" -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" - -/********************* AR Coefficient Tables ************************/ - -/* cdf for quantized reflection coefficient 1 */ -const uint16_t WebRtcIsacfix_kRc1Cdf[12] = { - 0, 2, 4, 129, 7707, 57485, 65495, 65527, 65529, 65531, - 65533, 65535 -}; - -/* cdf for quantized reflection coefficient 2 */ -const uint16_t WebRtcIsacfix_kRc2Cdf[12] = { - 0, 2, 4, 7, 531, 25298, 64525, 65526, 65529, 65531, - 65533, 65535 -}; - -/* cdf for quantized reflection coefficient 3 */ -const uint16_t WebRtcIsacfix_kRc3Cdf[12] = { - 0, 2, 4, 6, 620, 22898, 64843, 65527, 65529, 65531, - 65533, 65535 -}; - -/* cdf for quantized reflection coefficient 4 */ -const uint16_t WebRtcIsacfix_kRc4Cdf[12] = { - 0, 2, 4, 6, 35, 10034, 60733, 65506, 65529, 65531, - 65533, 65535 -}; - -/* cdf for quantized reflection coefficient 5 */ -const uint16_t WebRtcIsacfix_kRc5Cdf[12] = { - 0, 2, 4, 6, 36, 7567, 56727, 65385, 65529, 65531, - 65533, 65535 -}; - -/* cdf for quantized reflection coefficient 6 */ -const uint16_t WebRtcIsacfix_kRc6Cdf[12] = { - 0, 2, 4, 6, 14, 6579, 57360, 65409, 65529, 65531, - 65533, 65535 -}; - -/* representation levels for quantized reflection coefficient 1 */ -const int16_t WebRtcIsacfix_kRc1Levels[11] = { - -32104, -29007, -23202, -15496, -9279, -2577, 5934, 17535, 24512, 29503, 32104 -}; - -/* representation levels for quantized reflection coefficient 2 */ -const int16_t WebRtcIsacfix_kRc2Levels[11] = { - -32104, -29503, -23494, -15261, -7309, -1399, 6158, 16381, 24512, 29503, 32104 -}; - -/* representation levels for quantized reflection coefficient 3 */ -const int16_t WebRtcIsacfix_kRc3Levels[11] = { - -32104, -29503, -23157, -15186, -7347, -1359, 5829, 17535, 24512, 29503, 32104 -}; - -/* representation levels for quantized reflection coefficient 4 */ -const int16_t WebRtcIsacfix_kRc4Levels[11] = { - -32104, -29503, -24512, -15362, -6665, -342, 6596, 14585, 24512, 29503, 32104 -}; - -/* representation levels for quantized reflection coefficient 5 */ -const int16_t WebRtcIsacfix_kRc5Levels[11] = { - -32104, -29503, 
-24512, -15005, -6564, -106, 7123, 14920, 24512, 29503, 32104 -}; - -/* representation levels for quantized reflection coefficient 6 */ -const int16_t WebRtcIsacfix_kRc6Levels[11] = { - -32104, -29503, -24512, -15096, -6656, -37, 7036, 14847, 24512, 29503, 32104 -}; - -/* quantization boundary levels for reflection coefficients */ -const int16_t WebRtcIsacfix_kRcBound[12] = { - -32768, -31441, -27566, -21458, -13612, -4663, - 4663, 13612, 21458, 27566, 31441, 32767 -}; - -/* initial index for AR reflection coefficient quantizer and cdf table search */ -const uint16_t WebRtcIsacfix_kRcInitInd[6] = { - 5, 5, 5, 5, 5, 5 -}; - -/* pointers to AR cdf tables */ -const uint16_t *WebRtcIsacfix_kRcCdfPtr[AR_ORDER] = { - WebRtcIsacfix_kRc1Cdf, - WebRtcIsacfix_kRc2Cdf, - WebRtcIsacfix_kRc3Cdf, - WebRtcIsacfix_kRc4Cdf, - WebRtcIsacfix_kRc5Cdf, - WebRtcIsacfix_kRc6Cdf -}; - -/* pointers to AR representation levels tables */ -const int16_t *WebRtcIsacfix_kRcLevPtr[AR_ORDER] = { - WebRtcIsacfix_kRc1Levels, - WebRtcIsacfix_kRc2Levels, - WebRtcIsacfix_kRc3Levels, - WebRtcIsacfix_kRc4Levels, - WebRtcIsacfix_kRc5Levels, - WebRtcIsacfix_kRc6Levels -}; - - -/******************** GAIN Coefficient Tables ***********************/ - -/* cdf for Gain coefficient */ -const uint16_t WebRtcIsacfix_kGainCdf[19] = { - 0, 2, 4, 6, 8, 10, 12, 14, 16, 1172, - 11119, 29411, 51699, 64445, 65527, 65529, 65531, 65533, 65535 -}; - -/* representation levels for quantized squared Gain coefficient */ -const int32_t WebRtcIsacfix_kGain2Lev[18] = { - 128, 128, 128, 128, 128, 215, 364, 709, 1268, - 1960, 3405, 6078, 11286, 17827, 51918, 134498, 487432, 2048000 -}; - -/* quantization boundary levels for squared Gain coefficient */ -const int32_t WebRtcIsacfix_kGain2Bound[19] = { - 0, 21, 35, 59, 99, 166, 280, 475, 815, 1414, - 2495, 4505, 8397, 16405, 34431, 81359, 240497, 921600, 0x7FFFFFFF -}; - -/* pointers to Gain cdf table */ -const uint16_t *WebRtcIsacfix_kGainPtr[1] = { - WebRtcIsacfix_kGainCdf -}; - -/* gain initial index for gain quantizer and cdf table search */ -const uint16_t WebRtcIsacfix_kGainInitInd[1] = { - 11 -}; - - -/************************* Cosine Tables ****************************/ - -/* cosine table */ -const int16_t WebRtcIsacfix_kCos[6][60] = { - { 512, 512, 511, 510, 508, 507, 505, 502, 499, 496, - 493, 489, 485, 480, 476, 470, 465, 459, 453, 447, - 440, 433, 426, 418, 410, 402, 394, 385, 376, 367, - 357, 348, 338, 327, 317, 306, 295, 284, 273, 262, - 250, 238, 226, 214, 202, 190, 177, 165, 152, 139, - 126, 113, 100, 87, 73, 60, 47, 33, 20, 7 }, - { 512, 510, 508, 503, 498, 491, 483, 473, 462, 450, - 437, 422, 406, 389, 371, 352, 333, 312, 290, 268, - 244, 220, 196, 171, 145, 120, 93, 67, 40, 13, - -13, -40, -67, -93, -120, -145, -171, -196, -220, -244, - -268, -290, -312, -333, -352, -371, -389, -406, -422, -437, - -450, -462, -473, -483, -491, -498, -503, -508, -510, -512 }, - { 512, 508, 502, 493, 480, 465, 447, 426, 402, 376, - 348, 317, 284, 250, 214, 177, 139, 100, 60, 20, - -20, -60, -100, -139, -177, -214, -250, -284, -317, -348, - -376, -402, -426, -447, -465, -480, -493, -502, -508, -512, - -512, -508, -502, -493, -480, -465, -447, -426, -402, -376, - -348, -317, -284, -250, -214, -177, -139, -100, -60, -20 }, - { 511, 506, 495, 478, 456, 429, 398, 362, 322, 279, - 232, 183, 133, 80, 27, -27, -80, -133, -183, -232, - -279, -322, -362, -398, -429, -456, -478, -495, -506, -511, - -511, -506, -495, -478, -456, -429, -398, -362, -322, -279, - -232, -183, -133, -80, -27, 27, 80, 133, 183, 232, - 279, 
322, 362, 398, 429, 456, 478, 495, 506, 511 }, - { 511, 502, 485, 459, 426, 385, 338, 284, 226, 165, - 100, 33, -33, -100, -165, -226, -284, -338, -385, -426, - -459, -485, -502, -511, -511, -502, -485, -459, -426, -385, - -338, -284, -226, -165, -100, -33, 33, 100, 165, 226, - 284, 338, 385, 426, 459, 485, 502, 511, 511, 502, - 485, 459, 426, 385, 338, 284, 226, 165, 100, 33 }, - { 510, 498, 473, 437, 389, 333, 268, 196, 120, 40, - -40, -120, -196, -268, -333, -389, -437, -473, -498, -510, - -510, -498, -473, -437, -389, -333, -268, -196, -120, -40, - 40, 120, 196, 268, 333, 389, 437, 473, 498, 510, - 510, 498, 473, 437, 389, 333, 268, 196, 120, 40, - -40, -120, -196, -268, -333, -389, -437, -473, -498, -510 } -}; diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/spectrum_ar_model_tables.h b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/spectrum_ar_model_tables.h deleted file mode 100644 index 2282a369cb68..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/spectrum_ar_model_tables.h +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * spectrum_ar_model_tables.h - * - * This file contains definitions of tables with AR coefficients, - * Gain coefficients and cosine tables. - * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_SPECTRUM_AR_MODEL_TABLES_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_SPECTRUM_AR_MODEL_TABLES_H_ - -#include - -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" - -/********************* AR Coefficient Tables ************************/ -/* cdf for quantized reflection coefficient 1 */ -extern const uint16_t WebRtcIsacfix_kRc1Cdf[12]; - -/* cdf for quantized reflection coefficient 2 */ -extern const uint16_t WebRtcIsacfix_kRc2Cdf[12]; - -/* cdf for quantized reflection coefficient 3 */ -extern const uint16_t WebRtcIsacfix_kRc3Cdf[12]; - -/* cdf for quantized reflection coefficient 4 */ -extern const uint16_t WebRtcIsacfix_kRc4Cdf[12]; - -/* cdf for quantized reflection coefficient 5 */ -extern const uint16_t WebRtcIsacfix_kRc5Cdf[12]; - -/* cdf for quantized reflection coefficient 6 */ -extern const uint16_t WebRtcIsacfix_kRc6Cdf[12]; - -/* representation levels for quantized reflection coefficient 1 */ -extern const int16_t WebRtcIsacfix_kRc1Levels[11]; - -/* representation levels for quantized reflection coefficient 2 */ -extern const int16_t WebRtcIsacfix_kRc2Levels[11]; - -/* representation levels for quantized reflection coefficient 3 */ -extern const int16_t WebRtcIsacfix_kRc3Levels[11]; - -/* representation levels for quantized reflection coefficient 4 */ -extern const int16_t WebRtcIsacfix_kRc4Levels[11]; - -/* representation levels for quantized reflection coefficient 5 */ -extern const int16_t WebRtcIsacfix_kRc5Levels[11]; - -/* representation levels for quantized reflection coefficient 6 */ -extern const int16_t WebRtcIsacfix_kRc6Levels[11]; - -/* quantization boundary levels for reflection coefficients */ -extern const int16_t WebRtcIsacfix_kRcBound[12]; - -/* initial indices for AR reflection coefficient quantizer and cdf table search - */ 
-extern const uint16_t WebRtcIsacfix_kRcInitInd[AR_ORDER]; - -/* pointers to AR cdf tables */ -extern const uint16_t* WebRtcIsacfix_kRcCdfPtr[AR_ORDER]; - -/* pointers to AR representation levels tables */ -extern const int16_t* WebRtcIsacfix_kRcLevPtr[AR_ORDER]; - -/******************** GAIN Coefficient Tables ***********************/ -/* cdf for Gain coefficient */ -extern const uint16_t WebRtcIsacfix_kGainCdf[19]; - -/* representation levels for quantized Gain coefficient */ -extern const int32_t WebRtcIsacfix_kGain2Lev[18]; - -/* squared quantization boundary levels for Gain coefficient */ -extern const int32_t WebRtcIsacfix_kGain2Bound[19]; - -/* pointer to Gain cdf table */ -extern const uint16_t* WebRtcIsacfix_kGainPtr[1]; - -/* Gain initial index for gain quantizer and cdf table search */ -extern const uint16_t WebRtcIsacfix_kGainInitInd[1]; - -/************************* Cosine Tables ****************************/ -/* Cosine table */ -extern const int16_t WebRtcIsacfix_kCos[6][60]; - -#endif /* MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_SPECTRUM_AR_MODEL_TABLES_H_ \ - */ diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/structs.h b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/structs.h deleted file mode 100644 index 3044d5176b58..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/structs.h +++ /dev/null @@ -1,345 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * structs.h - * - * This header file contains all the structs used in the ISAC codec - * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_STRUCTS_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_STRUCTS_H_ - -#include "common_audio/signal_processing/include/signal_processing_library.h" -#include "modules/audio_coding/codecs/isac/bandwidth_info.h" -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" - -/* Bitstream struct for decoder */ -typedef struct Bitstreamstruct_dec { - uint16_t stream[INTERNAL_STREAM_SIZE_W16]; /* Array bytestream to decode */ - uint32_t W_upper; /* Upper boundary of interval W */ - uint32_t streamval; - uint16_t stream_index; /* Index to the current position in bytestream */ - int16_t full; /* 0 - first byte in memory filled, second empty*/ - /* 1 - both bytes are empty (we just filled the previous memory */ - - size_t stream_size; /* The size of stream in bytes. 
*/ -} Bitstr_dec; - -/* Bitstream struct for encoder */ -typedef struct Bitstreamstruct_enc { - uint16_t - stream[STREAM_MAXW16_60MS]; /* Vector for adding encoded bytestream */ - uint32_t W_upper; /* Upper boundary of interval W */ - uint32_t streamval; - uint16_t stream_index; /* Index to the current position in bytestream */ - int16_t full; /* 0 - first byte in memory filled, second empty*/ - /* 1 - both bytes are empty (we just filled the previous memory */ - -} Bitstr_enc; - -typedef struct { - int16_t DataBufferLoQ0[WINLEN]; - int16_t DataBufferHiQ0[WINLEN]; - - int32_t CorrBufLoQQ[ORDERLO + 1]; - int32_t CorrBufHiQQ[ORDERHI + 1]; - - int16_t CorrBufLoQdom[ORDERLO + 1]; - int16_t CorrBufHiQdom[ORDERHI + 1]; - - int32_t PreStateLoGQ15[ORDERLO + 1]; - int32_t PreStateHiGQ15[ORDERHI + 1]; - - uint32_t OldEnergy; - -} MaskFiltstr_enc; - -typedef struct { - int16_t PostStateLoGQ0[ORDERLO + 1]; - int16_t PostStateHiGQ0[ORDERHI + 1]; - - uint32_t OldEnergy; - -} MaskFiltstr_dec; - -typedef struct { - // state vectors for each of the two analysis filters - - int32_t INSTAT1_fix[2 * (QORDER - 1)]; - int32_t INSTAT2_fix[2 * (QORDER - 1)]; - int16_t INLABUF1_fix[QLOOKAHEAD]; - int16_t INLABUF2_fix[QLOOKAHEAD]; - - /* High pass filter */ - int32_t HPstates_fix[HPORDER]; - -} PreFiltBankstr; - -typedef struct { - // state vectors for each of the two analysis filters - int32_t STATE_0_LOWER_fix[2 * POSTQORDER]; - int32_t STATE_0_UPPER_fix[2 * POSTQORDER]; - - /* High pass filter */ - - int32_t HPstates1_fix[HPORDER]; - int32_t HPstates2_fix[HPORDER]; - -} PostFiltBankstr; - -typedef struct { - /* data buffer for pitch filter */ - int16_t ubufQQ[PITCH_BUFFSIZE]; - - /* low pass state vector */ - int16_t ystateQQ[PITCH_DAMPORDER]; - - /* old lag and gain */ - int16_t oldlagQ7; - int16_t oldgainQ12; - -} PitchFiltstr; - -typedef struct { - // for inital estimator - int16_t dec_buffer16[PITCH_CORR_LEN2 + PITCH_CORR_STEP2 + PITCH_MAX_LAG / 2 - - PITCH_FRAME_LEN / 2 + 2]; - int32_t decimator_state32[2 * ALLPASSSECTIONS + 1]; - int16_t inbuf[QLOOKAHEAD]; - - PitchFiltstr PFstr_wght; - PitchFiltstr PFstr; - -} PitchAnalysisStruct; - -typedef struct { - /* Parameters used in PLC to avoid re-computation */ - - /* --- residual signals --- */ - int16_t prevPitchInvIn[FRAMESAMPLES / 2]; - int16_t prevPitchInvOut[PITCH_MAX_LAG + 10]; // [FRAMESAMPLES/2]; save 90 - int32_t prevHP[PITCH_MAX_LAG + 10]; // [FRAMESAMPLES/2]; save 90 - - int16_t decayCoeffPriodic; /* how much to supress a sample */ - int16_t decayCoeffNoise; - int16_t used; /* if PLC is used */ - - int16_t* lastPitchLP; // [FRAMESAMPLES/2]; saved 240; - - /* --- LPC side info --- */ - int16_t lofilt_coefQ15[ORDERLO]; - int16_t hifilt_coefQ15[ORDERHI]; - int32_t gain_lo_hiQ17[2]; - - /* --- LTP side info --- */ - int16_t AvgPitchGain_Q12; - int16_t lastPitchGain_Q12; - int16_t lastPitchLag_Q7; - - /* --- Add-overlap in recovery packet --- */ - int16_t overlapLP[RECOVERY_OVERLAP]; // [FRAMESAMPLES/2]; saved 160 - - int16_t pitchCycles; - int16_t A; - int16_t B; - size_t pitchIndex; - size_t stretchLag; - int16_t* prevPitchLP; // [ FRAMESAMPLES/2 ]; saved 240 - int16_t seed; - - int16_t std; -} PLCstr; - -/* Have instance of struct together with other iSAC structs */ -typedef struct { - int16_t prevFrameSizeMs; /* Previous frame size (in ms) */ - uint16_t prevRtpNumber; /* Previous RTP timestamp from received packet */ - /* (in samples relative beginning) */ - uint32_t prevSendTime; /* Send time for previous packet, from RTP header */ - uint32_t 
prevArrivalTime; /* Arrival time for previous packet (in ms using - timeGetTime()) */ - uint16_t prevRtpRate; /* rate of previous packet, derived from RTP timestamps - (in bits/s) */ - uint32_t lastUpdate; /* Time since the last update of the Bottle Neck estimate - (in samples) */ - uint32_t lastReduction; /* Time sinse the last reduction (in samples) */ - int32_t countUpdates; /* How many times the estimate was update in the - beginning */ - - /* The estimated bottle neck rate from there to here (in bits/s) */ - uint32_t recBw; - uint32_t recBwInv; - uint32_t recBwAvg; - uint32_t recBwAvgQ; - - uint32_t minBwInv; - uint32_t maxBwInv; - - /* The estimated mean absolute jitter value, as seen on this side (in ms) */ - int32_t recJitter; - int32_t recJitterShortTerm; - int32_t recJitterShortTermAbs; - int32_t recMaxDelay; - int32_t recMaxDelayAvgQ; - - int16_t recHeaderRate; /* (assumed) bitrate for headers (bps) */ - - uint32_t sendBwAvg; /* The estimated bottle neck rate from here to there (in - bits/s) */ - int32_t sendMaxDelayAvg; /* The estimated mean absolute jitter value, as seen - on the other siee (in ms) */ - - int16_t countRecPkts; /* number of packets received since last update */ - int16_t highSpeedRec; /* flag for marking that a high speed network has been - detected downstream */ - - /* number of consecutive pkts sent during which the bwe estimate has - remained at a value greater than the downstream threshold for determining - highspeed network */ - int16_t countHighSpeedRec; - - /* flag indicating bwe should not adjust down immediately for very late pckts - */ - int16_t inWaitPeriod; - - /* variable holding the time of the start of a window of time when - bwe should not adjust down immediately for very late pckts */ - uint32_t startWaitPeriod; - - /* number of consecutive pkts sent during which the bwe estimate has - remained at a value greater than the upstream threshold for determining - highspeed network */ - int16_t countHighSpeedSent; - - /* flag indicated the desired number of packets over threshold rate have been - sent and bwe will assume the connection is over broadband network */ - int16_t highSpeedSend; - - IsacBandwidthInfo external_bw_info; -} BwEstimatorstr; - -typedef struct { - /* boolean, flags if previous packet exceeded B.N. */ - int16_t PrevExceed; - /* ms */ - int16_t ExceedAgo; - /* packets left to send in current burst */ - int16_t BurstCounter; - /* packets */ - int16_t InitCounter; - /* ms remaining in buffer when next packet will be sent */ - int16_t StillBuffered; - -} RateModel; - -/* The following strutc is used to store data from encoding, to make it - fast and easy to construct a new bitstream with a different Bandwidth - estimate. All values (except framelength and minBytes) is double size to - handle 60 ms of data. 
-*/ -typedef struct { - /* Used to keep track of if it is first or second part of 60 msec packet */ - int startIdx; - - /* Frame length in samples */ - int16_t framelength; - - /* Pitch Gain */ - int16_t pitchGain_index[2]; - - /* Pitch Lag */ - int32_t meanGain[2]; - int16_t pitchIndex[PITCH_SUBFRAMES * 2]; - - /* LPC */ - int32_t LPCcoeffs_g[12 * 2]; /* KLT_ORDER_GAIN = 12 */ - int16_t LPCindex_s[108 * 2]; /* KLT_ORDER_SHAPE = 108 */ - int16_t LPCindex_g[12 * 2]; /* KLT_ORDER_GAIN = 12 */ - - /* Encode Spec */ - int16_t fre[FRAMESAMPLES]; - int16_t fim[FRAMESAMPLES]; - int16_t AvgPitchGain[2]; - - /* Used in adaptive mode only */ - int minBytes; - -} IsacSaveEncoderData; - -typedef struct { - Bitstr_enc bitstr_obj; - MaskFiltstr_enc maskfiltstr_obj; - PreFiltBankstr prefiltbankstr_obj; - PitchFiltstr pitchfiltstr_obj; - PitchAnalysisStruct pitchanalysisstr_obj; - RateModel rate_data_obj; - - int16_t buffer_index; - int16_t current_framesamples; - - int16_t data_buffer_fix[FRAMESAMPLES]; // the size was MAX_FRAMESAMPLES - - int16_t frame_nb; - int16_t BottleNeck; - int16_t MaxDelay; - int16_t new_framelength; - int16_t s2nr; - uint16_t MaxBits; - - int16_t bitstr_seed; - - IsacSaveEncoderData* SaveEnc_ptr; - int16_t payloadLimitBytes30; /* Maximum allowed number of bits for a 30 msec - packet */ - int16_t payloadLimitBytes60; /* Maximum allowed number of bits for a 30 msec - packet */ - int16_t maxPayloadBytes; /* Maximum allowed number of bits for both 30 and 60 - msec packet */ - int16_t maxRateInBytes; /* Maximum allowed rate in bytes per 30 msec packet */ - int16_t enforceFrameSize; /* If set iSAC will never change packet size */ - -} IsacFixEncoderInstance; - -typedef struct { - Bitstr_dec bitstr_obj; - MaskFiltstr_dec maskfiltstr_obj; - PostFiltBankstr postfiltbankstr_obj; - PitchFiltstr pitchfiltstr_obj; - PLCstr plcstr_obj; /* TS; for packet loss concealment */ -} IsacFixDecoderInstance; - -typedef struct { - IsacFixEncoderInstance ISACenc_obj; - IsacFixDecoderInstance ISACdec_obj; - BwEstimatorstr bwestimator_obj; - int16_t CodingMode; /* 0 = adaptive; 1 = instantaneous */ - int16_t errorcode; - int16_t initflag; /* 0 = nothing initiated; 1 = encoder or decoder */ - /* not initiated; 2 = all initiated */ -} ISACFIX_SubStruct; - -typedef struct { - int32_t lpcGains[12]; /* 6 lower-band & 6 upper-band we may need to double it - for 60*/ - /* */ - uint32_t W_upper; /* Upper boundary of interval W */ - uint32_t streamval; - uint16_t stream_index; /* Index to the current position in bytestream */ - int16_t full; /* 0 - first byte in memory filled, second empty*/ - /* 1 - both bytes are empty (we just filled the previous memory */ - uint16_t beforeLastWord; - uint16_t lastWord; -} transcode_obj; - -// Bitstr_enc myBitStr; - -#endif /* MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_STRUCTS_H_ */ diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/transform.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/transform.c deleted file mode 100644 index 80b244b5f149..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/transform.c +++ /dev/null @@ -1,214 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * WebRtcIsacfix_kTransform.c - * - * Transform functions - * - */ - -#include "modules/audio_coding/codecs/isac/fix/source/codec.h" -#include "modules/audio_coding/codecs/isac/fix/source/fft.h" -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" -#include "modules/third_party/fft/fft.h" - -/* Tables are defined in transform_tables.c file or ARM assembly files. */ -/* Cosine table 1 in Q14 */ -extern const int16_t WebRtcIsacfix_kCosTab1[FRAMESAMPLES/2]; -/* Sine table 1 in Q14 */ -extern const int16_t WebRtcIsacfix_kSinTab1[FRAMESAMPLES/2]; -/* Sine table 2 in Q14 */ -extern const int16_t WebRtcIsacfix_kSinTab2[FRAMESAMPLES/4]; - -void WebRtcIsacfix_Time2SpecC(int16_t *inre1Q9, - int16_t *inre2Q9, - int16_t *outreQ7, - int16_t *outimQ7) -{ - - int k; - int32_t tmpreQ16[FRAMESAMPLES/2], tmpimQ16[FRAMESAMPLES/2]; - int16_t tmp1rQ14, tmp1iQ14; - int32_t xrQ16, xiQ16, yrQ16, yiQ16; - int32_t v1Q16, v2Q16; - int16_t factQ19, sh; - - /* Multiply with complex exponentials and combine into one complex vector */ - factQ19 = 16921; // 0.5/sqrt(240) in Q19 is round(.5/sqrt(240)*(2^19)) = 16921 - for (k = 0; k < FRAMESAMPLES/2; k++) { - tmp1rQ14 = WebRtcIsacfix_kCosTab1[k]; - tmp1iQ14 = WebRtcIsacfix_kSinTab1[k]; - xrQ16 = (tmp1rQ14 * inre1Q9[k] + tmp1iQ14 * inre2Q9[k]) >> 7; - xiQ16 = (tmp1rQ14 * inre2Q9[k] - tmp1iQ14 * inre1Q9[k]) >> 7; - // Q-domains below: (Q16*Q19>>16)>>3 = Q16 - tmpreQ16[k] = (WEBRTC_SPL_MUL_16_32_RSFT16(factQ19, xrQ16) + 4) >> 3; - tmpimQ16[k] = (WEBRTC_SPL_MUL_16_32_RSFT16(factQ19, xiQ16) + 4) >> 3; - } - - - xrQ16 = WebRtcSpl_MaxAbsValueW32(tmpreQ16, FRAMESAMPLES/2); - yrQ16 = WebRtcSpl_MaxAbsValueW32(tmpimQ16, FRAMESAMPLES/2); - if (yrQ16>xrQ16) { - xrQ16 = yrQ16; - } - - sh = WebRtcSpl_NormW32(xrQ16); - sh = sh-24; //if sh becomes >=0, then we should shift sh steps to the left, and the domain will become Q(16+sh) - //if sh becomes <0, then we should shift -sh steps to the right, and the domain will become Q(16+sh) - - //"Fastest" vectors - if (sh>=0) { - for (k=0; k> -sh); // Q(16+sh) - inre2Q9[k] = (int16_t)((tmpimQ16[k] + round) >> -sh); // Q(16+sh) - } - } - - /* Get DFT */ - WebRtcIsacfix_FftRadix16Fastest(inre1Q9, inre2Q9, -1); // real call - - //"Fastest" vectors - if (sh>=0) { - for (k=0; k> sh; // Q(16+sh) -> Q16 - tmpimQ16[k] = inre2Q9[k] >> sh; // Q(16+sh) -> Q16 - } - } else { - for (k=0; k Q16 - tmpimQ16[k] = inre2Q9[k] << -sh; // Q(16+sh) -> Q16 - } - } - - - /* Use symmetry to separate into two complex vectors and center frames in time around zero */ - for (k = 0; k < FRAMESAMPLES/4; k++) { - xrQ16 = tmpreQ16[k] + tmpreQ16[FRAMESAMPLES/2 - 1 - k]; - yiQ16 = -tmpreQ16[k] + tmpreQ16[FRAMESAMPLES/2 - 1 - k]; - xiQ16 = tmpimQ16[k] - tmpimQ16[FRAMESAMPLES/2 - 1 - k]; - yrQ16 = tmpimQ16[k] + tmpimQ16[FRAMESAMPLES/2 - 1 - k]; - tmp1rQ14 = -WebRtcIsacfix_kSinTab2[FRAMESAMPLES/4 - 1 - k]; - tmp1iQ14 = WebRtcIsacfix_kSinTab2[k]; - v1Q16 = WEBRTC_SPL_MUL_16_32_RSFT14(tmp1rQ14, xrQ16) - WEBRTC_SPL_MUL_16_32_RSFT14(tmp1iQ14, xiQ16); - v2Q16 = WEBRTC_SPL_MUL_16_32_RSFT14(tmp1iQ14, xrQ16) + WEBRTC_SPL_MUL_16_32_RSFT14(tmp1rQ14, xiQ16); - outreQ7[k] = (int16_t)(v1Q16 >> 9); - outimQ7[k] = (int16_t)(v2Q16 >> 9); - v1Q16 = -WEBRTC_SPL_MUL_16_32_RSFT14(tmp1iQ14, yrQ16) - WEBRTC_SPL_MUL_16_32_RSFT14(tmp1rQ14, yiQ16); - v2Q16 = -WEBRTC_SPL_MUL_16_32_RSFT14(tmp1rQ14, yrQ16) + WEBRTC_SPL_MUL_16_32_RSFT14(tmp1iQ14, yiQ16); - // CalcLrIntQ(v1Q16, 9); - 
outreQ7[FRAMESAMPLES / 2 - 1 - k] = (int16_t)(v1Q16 >> 9); - // CalcLrIntQ(v2Q16, 9); - outimQ7[FRAMESAMPLES / 2 - 1 - k] = (int16_t)(v2Q16 >> 9); - - } -} - - -void WebRtcIsacfix_Spec2TimeC(int16_t *inreQ7, int16_t *inimQ7, int32_t *outre1Q16, int32_t *outre2Q16) -{ - - int k; - int16_t tmp1rQ14, tmp1iQ14; - int32_t xrQ16, xiQ16, yrQ16, yiQ16; - int32_t tmpInRe, tmpInIm, tmpInRe2, tmpInIm2; - int16_t factQ11; - int16_t sh; - - for (k = 0; k < FRAMESAMPLES/4; k++) { - /* Move zero in time to beginning of frames */ - tmp1rQ14 = -WebRtcIsacfix_kSinTab2[FRAMESAMPLES/4 - 1 - k]; - tmp1iQ14 = WebRtcIsacfix_kSinTab2[k]; - - tmpInRe = inreQ7[k] * (1 << 9); // Q7 -> Q16 - tmpInIm = inimQ7[k] * (1 << 9); // Q7 -> Q16 - tmpInRe2 = inreQ7[FRAMESAMPLES / 2 - 1 - k] * (1 << 9); // Q7 -> Q16 - tmpInIm2 = inimQ7[FRAMESAMPLES / 2 - 1 - k] * (1 << 9); // Q7 -> Q16 - - xrQ16 = WEBRTC_SPL_MUL_16_32_RSFT14(tmp1rQ14, tmpInRe) + WEBRTC_SPL_MUL_16_32_RSFT14(tmp1iQ14, tmpInIm); - xiQ16 = WEBRTC_SPL_MUL_16_32_RSFT14(tmp1rQ14, tmpInIm) - WEBRTC_SPL_MUL_16_32_RSFT14(tmp1iQ14, tmpInRe); - yrQ16 = -WEBRTC_SPL_MUL_16_32_RSFT14(tmp1rQ14, tmpInIm2) - WEBRTC_SPL_MUL_16_32_RSFT14(tmp1iQ14, tmpInRe2); - yiQ16 = -WEBRTC_SPL_MUL_16_32_RSFT14(tmp1rQ14, tmpInRe2) + WEBRTC_SPL_MUL_16_32_RSFT14(tmp1iQ14, tmpInIm2); - - /* Combine into one vector, z = x + j * y */ - outre1Q16[k] = xrQ16 - yiQ16; - outre1Q16[FRAMESAMPLES/2 - 1 - k] = xrQ16 + yiQ16; - outre2Q16[k] = xiQ16 + yrQ16; - outre2Q16[FRAMESAMPLES/2 - 1 - k] = -xiQ16 + yrQ16; - } - - /* Get IDFT */ - tmpInRe = WebRtcSpl_MaxAbsValueW32(outre1Q16, 240); - tmpInIm = WebRtcSpl_MaxAbsValueW32(outre2Q16, 240); - if (tmpInIm>tmpInRe) { - tmpInRe = tmpInIm; - } - - sh = WebRtcSpl_NormW32(tmpInRe); - sh = sh-24; //if sh becomes >=0, then we should shift sh steps to the left, and the domain will become Q(16+sh) - //if sh becomes <0, then we should shift -sh steps to the right, and the domain will become Q(16+sh) - - //"Fastest" vectors - if (sh>=0) { - for (k=0; k<240; k++) { - inreQ7[k] = (int16_t)(outre1Q16[k] << sh); // Q(16+sh) - inimQ7[k] = (int16_t)(outre2Q16[k] << sh); // Q(16+sh) - } - } else { - int32_t round = 1 << (-sh - 1); - for (k=0; k<240; k++) { - inreQ7[k] = (int16_t)((outre1Q16[k] + round) >> -sh); // Q(16+sh) - inimQ7[k] = (int16_t)((outre2Q16[k] + round) >> -sh); // Q(16+sh) - } - } - - WebRtcIsacfix_FftRadix16Fastest(inreQ7, inimQ7, 1); // real call - - //"Fastest" vectors - if (sh>=0) { - for (k=0; k<240; k++) { - outre1Q16[k] = inreQ7[k] >> sh; // Q(16+sh) -> Q16 - outre2Q16[k] = inimQ7[k] >> sh; // Q(16+sh) -> Q16 - } - } else { - for (k=0; k<240; k++) { - outre1Q16[k] = inreQ7[k] * (1 << -sh); // Q(16+sh) -> Q16 - outre2Q16[k] = inimQ7[k] * (1 << -sh); // Q(16+sh) -> Q16 - } - } - - /* Divide through by the normalizing constant: */ - /* scale all values with 1/240, i.e. 
with 273 in Q16 */ - /* 273/65536 ~= 0.0041656 */ - /* 1/240 ~= 0.0041666 */ - for (k=0; k<240; k++) { - outre1Q16[k] = WEBRTC_SPL_MUL_16_32_RSFT16(273, outre1Q16[k]); - outre2Q16[k] = WEBRTC_SPL_MUL_16_32_RSFT16(273, outre2Q16[k]); - } - - /* Demodulate and separate */ - factQ11 = 31727; // sqrt(240) in Q11 is round(15.49193338482967*2048) = 31727 - for (k = 0; k < FRAMESAMPLES/2; k++) { - tmp1rQ14 = WebRtcIsacfix_kCosTab1[k]; - tmp1iQ14 = WebRtcIsacfix_kSinTab1[k]; - xrQ16 = WEBRTC_SPL_MUL_16_32_RSFT14(tmp1rQ14, outre1Q16[k]) - WEBRTC_SPL_MUL_16_32_RSFT14(tmp1iQ14, outre2Q16[k]); - xiQ16 = WEBRTC_SPL_MUL_16_32_RSFT14(tmp1rQ14, outre2Q16[k]) + WEBRTC_SPL_MUL_16_32_RSFT14(tmp1iQ14, outre1Q16[k]); - xrQ16 = WEBRTC_SPL_MUL_16_32_RSFT11(factQ11, xrQ16); - xiQ16 = WEBRTC_SPL_MUL_16_32_RSFT11(factQ11, xiQ16); - outre2Q16[k] = xiQ16; - outre1Q16[k] = xrQ16; - } -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/transform_mips.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/transform_mips.c deleted file mode 100644 index a87b3b54f267..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/transform_mips.c +++ /dev/null @@ -1,1294 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_coding/codecs/isac/fix/source/codec.h" -#include "modules/audio_coding/codecs/isac/fix/source/fft.h" -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" - -// The tables are defined in transform_tables.c file. -extern const int16_t WebRtcIsacfix_kCosTab1[FRAMESAMPLES/2]; -extern const int16_t WebRtcIsacfix_kSinTab1[FRAMESAMPLES/2]; -extern const int16_t WebRtcIsacfix_kCosTab2[FRAMESAMPLES/4]; -extern const int16_t WebRtcIsacfix_kSinTab2[FRAMESAMPLES/4]; - -// MIPS DSPr2 version of the WebRtcIsacfix_Time2Spec function -// is not bit-exact with the C version. -// The accuracy of the MIPS DSPr2 version is same or better. -void WebRtcIsacfix_Time2SpecMIPS(int16_t* inre1Q9, - int16_t* inre2Q9, - int16_t* outreQ7, - int16_t* outimQ7) { - int k = FRAMESAMPLES / 2; - int32_t tmpreQ16[FRAMESAMPLES / 2], tmpimQ16[FRAMESAMPLES / 2]; - int32_t r0, r1, r2, r3, r4, r5, r6, r7, r8, r9; - int32_t inre1, inre2, tmpre, tmpim, factor, max, max1; - int16_t* cosptr; - int16_t* sinptr; - - cosptr = (int16_t*)WebRtcIsacfix_kCosTab1; - sinptr = (int16_t*)WebRtcIsacfix_kSinTab1; - - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "addiu %[inre1], %[inre1Q9], 0 \n\t" - "addiu %[inre2], %[inre2Q9], 0 \n\t" - "addiu %[tmpre], %[tmpreQ16], 0 \n\t" - "addiu %[tmpim], %[tmpimQ16], 0 \n\t" - "addiu %[factor], $zero, 16921 \n\t" - "mul %[max], $zero, $zero \n\t" - // Multiply with complex exponentials and combine into one complex vector. - // Also, calculate the maximal absolute value in the same loop. 
- "1: \n\t" -#if defined(MIPS_DSP_R2_LE) - "lwl %[r0], 0(%[inre1]) \n\t" - "lwl %[r2], 0(%[cosptr]) \n\t" - "lwl %[r3], 0(%[sinptr]) \n\t" - "lwl %[r1], 0(%[inre2]) \n\t" - "lwr %[r0], 0(%[inre1]) \n\t" - "lwr %[r2], 0(%[cosptr]) \n\t" - "lwr %[r3], 0(%[sinptr]) \n\t" - "lwr %[r1], 0(%[inre2]) \n\t" - "muleq_s.w.phr %[r4], %[r2], %[r0] \n\t" - "muleq_s.w.phr %[r5], %[r3], %[r0] \n\t" - "muleq_s.w.phr %[r6], %[r3], %[r1] \n\t" - "muleq_s.w.phr %[r7], %[r2], %[r1] \n\t" - "muleq_s.w.phl %[r8], %[r2], %[r0] \n\t" - "muleq_s.w.phl %[r0], %[r3], %[r0] \n\t" - "muleq_s.w.phl %[r3], %[r3], %[r1] \n\t" - "muleq_s.w.phl %[r1], %[r2], %[r1] \n\t" - "addiu %[k], %[k], -2 \n\t" - "addu %[r4], %[r4], %[r6] \n\t" - "subu %[r5], %[r7], %[r5] \n\t" - "sra %[r4], %[r4], 8 \n\t" - "sra %[r5], %[r5], 8 \n\t" - "mult $ac0, %[factor], %[r4] \n\t" - "mult $ac1, %[factor], %[r5] \n\t" - "addu %[r3], %[r8], %[r3] \n\t" - "subu %[r0], %[r1], %[r0] \n\t" - "sra %[r3], %[r3], 8 \n\t" - "sra %[r0], %[r0], 8 \n\t" - "mult $ac2, %[factor], %[r3] \n\t" - "mult $ac3, %[factor], %[r0] \n\t" - "extr_r.w %[r4], $ac0, 16 \n\t" - "extr_r.w %[r5], $ac1, 16 \n\t" - "addiu %[inre1], %[inre1], 4 \n\t" - "addiu %[inre2], %[inre2], 4 \n\t" - "extr_r.w %[r6], $ac2, 16 \n\t" - "extr_r.w %[r7], $ac3, 16 \n\t" - "addiu %[cosptr], %[cosptr], 4 \n\t" - "addiu %[sinptr], %[sinptr], 4 \n\t" - "shra_r.w %[r4], %[r4], 3 \n\t" - "shra_r.w %[r5], %[r5], 3 \n\t" - "sw %[r4], 0(%[tmpre]) \n\t" - "absq_s.w %[r4], %[r4] \n\t" - "sw %[r5], 0(%[tmpim]) \n\t" - "absq_s.w %[r5], %[r5] \n\t" - "shra_r.w %[r6], %[r6], 3 \n\t" - "shra_r.w %[r7], %[r7], 3 \n\t" - "sw %[r6], 4(%[tmpre]) \n\t" - "absq_s.w %[r6], %[r6] \n\t" - "sw %[r7], 4(%[tmpim]) \n\t" - "absq_s.w %[r7], %[r7] \n\t" - "slt %[r0], %[r4], %[r5] \n\t" - "movn %[r4], %[r5], %[r0] \n\t" - "slt %[r1], %[r6], %[r7] \n\t" - "movn %[r6], %[r7], %[r1] \n\t" - "slt %[r0], %[max], %[r4] \n\t" - "movn %[max], %[r4], %[r0] \n\t" - "slt %[r1], %[max], %[r6] \n\t" - "movn %[max], %[r6], %[r1] \n\t" - "addiu %[tmpre], %[tmpre], 8 \n\t" - "bgtz %[k], 1b \n\t" - " addiu %[tmpim], %[tmpim], 8 \n\t" -#else // #if defined(MIPS_DSP_R2_LE) - "lh %[r0], 0(%[inre1]) \n\t" - "lh %[r1], 0(%[inre2]) \n\t" - "lh %[r2], 0(%[cosptr]) \n\t" - "lh %[r3], 0(%[sinptr]) \n\t" - "addiu %[k], %[k], -1 \n\t" - "mul %[r4], %[r0], %[r2] \n\t" - "mul %[r5], %[r1], %[r3] \n\t" - "mul %[r0], %[r0], %[r3] \n\t" - "mul %[r2], %[r1], %[r2] \n\t" - "addiu %[inre1], %[inre1], 2 \n\t" - "addiu %[inre2], %[inre2], 2 \n\t" - "addiu %[cosptr], %[cosptr], 2 \n\t" - "addiu %[sinptr], %[sinptr], 2 \n\t" - "addu %[r1], %[r4], %[r5] \n\t" - "sra %[r1], %[r1], 7 \n\t" - "sra %[r3], %[r1], 16 \n\t" - "andi %[r1], %[r1], 0xFFFF \n\t" - "sra %[r1], %[r1], 1 \n\t" - "mul %[r1], %[factor], %[r1] \n\t" - "mul %[r3], %[factor], %[r3] \n\t" - "subu %[r0], %[r2], %[r0] \n\t" - "sra %[r0], %[r0], 7 \n\t" - "sra %[r2], %[r0], 16 \n\t" - "andi %[r0], %[r0], 0xFFFF \n\t" - "sra %[r0], %[r0], 1 \n\t" - "mul %[r0], %[factor], %[r0] \n\t" - "mul %[r2], %[factor], %[r2] \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r1], %[r1], 15 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r1], %[r1], 0x4000 \n\t" - "sra %[r1], %[r1], 15 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "addu %[r1], %[r3], %[r1] \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r1], %[r1], 3 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r1], %[r1], 4 \n\t" - "sra %[r1], %[r1], 3 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "sw %[r1], 0(%[tmpre]) \n\t" - "addiu %[tmpre], 
%[tmpre], 4 \n\t" -#if defined(MIPS_DSP_R1_LE) - "absq_s.w %[r1], %[r1] \n\t" - "shra_r.w %[r0], %[r0], 15 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "negu %[r4], %[r1] \n\t" - "slt %[r3], %[r1], $zero \n\t" - "movn %[r1], %[r4], %[r3] \n\t" - "addiu %[r0], %[r0], 0x4000 \n\t" - "sra %[r0], %[r0], 15 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "addu %[r0], %[r0], %[r2] \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r0], %[r0], 3 \n\t" - "sw %[r0], 0(%[tmpim]) \n\t" - "absq_s.w %[r0], %[r0] \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r0], %[r0], 4 \n\t" - "sra %[r0], %[r0], 3 \n\t" - "sw %[r0], 0(%[tmpim]) \n\t" - "negu %[r2], %[r0] \n\t" - "slt %[r3], %[r0], $zero \n\t" - "movn %[r0], %[r2], %[r3] \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "slt %[r2], %[max], %[r1] \n\t" - "movn %[max], %[r1], %[r2] \n\t" - "slt %[r2], %[max], %[r0] \n\t" - "movn %[max], %[r0], %[r2] \n\t" - "bgtz %[k], 1b \n\t" - " addiu %[tmpim], %[tmpim], 4 \n\t" -#endif // #if defined(MIPS_DSP_R2_LE) - // Calculate WebRtcSpl_NormW32(max). - // If max gets value >=0, we should shift max steps to the left, and the - // domain will be Q(16+shift). If max gets value <0, we should shift -max - // steps to the right, and the domain will be Q(16+max) - "clz %[max], %[max] \n\t" - "addiu %[max], %[max], -25 \n\t" - ".set pop \n\t" - : [k] "+r" (k), [inre1] "=&r" (inre1), [inre2] "=&r" (inre2), - [r0] "=&r" (r0), [r1] "=&r" (r1), [r2] "=&r" (r2), - [r3] "=&r" (r3), [r4] "=&r" (r4), [tmpre] "=&r" (tmpre), - [tmpim] "=&r" (tmpim), [max] "=&r" (max), [factor] "=&r" (factor), -#if defined(MIPS_DSP_R2_LE) - [r6] "=&r" (r6), [r7] "=&r" (r7), [r8] "=&r" (r8), -#endif // #if defined(MIPS_DSP_R2_LE) - [r5] "=&r" (r5) - : [inre1Q9] "r" (inre1Q9), [inre2Q9] "r" (inre2Q9), - [tmpreQ16] "r" (tmpreQ16), [tmpimQ16] "r" (tmpimQ16), - [cosptr] "r" (cosptr), [sinptr] "r" (sinptr) - : "hi", "lo", "memory" -#if defined(MIPS_DSP_R2_LE) - , "$ac1hi", "$ac1lo", "$ac2hi", "$ac2lo", "$ac3hi", "$ac3lo" -#endif // #if defined(MIPS_DSP_R2_LE) - ); - - // "Fastest" vectors - k = FRAMESAMPLES / 4; - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "addiu %[tmpre], %[tmpreQ16], 0 \n\t" - "addiu %[tmpim], %[tmpimQ16], 0 \n\t" - "addiu %[inre1], %[inre1Q9], 0 \n\t" - "addiu %[inre2], %[inre2Q9], 0 \n\t" - "blez %[max], 2f \n\t" - " subu %[max1], $zero, %[max] \n\t" - "1: \n\t" - "lw %[r0], 0(%[tmpre]) \n\t" - "lw %[r1], 0(%[tmpim]) \n\t" - "lw %[r2], 4(%[tmpre]) \n\t" - "lw %[r3], 4(%[tmpim]) \n\t" - "addiu %[k], %[k], -1 \n\t" - "sllv %[r0], %[r0], %[max] \n\t" - "sllv %[r1], %[r1], %[max] \n\t" - "sllv %[r2], %[r2], %[max] \n\t" - "sllv %[r3], %[r3], %[max] \n\t" - "addiu %[tmpre], %[tmpre], 8 \n\t" - "addiu %[tmpim], %[tmpim], 8 \n\t" - "sh %[r0], 0(%[inre1]) \n\t" - "sh %[r1], 0(%[inre2]) \n\t" - "sh %[r2], 2(%[inre1]) \n\t" - "sh %[r3], 2(%[inre2]) \n\t" - "addiu %[inre1], %[inre1], 4 \n\t" - "bgtz %[k], 1b \n\t" - " addiu %[inre2], %[inre2], 4 \n\t" - "b 4f \n\t" - " nop \n\t" - "2: \n\t" -#if !defined(MIPS_DSP_R1_LE) - "addiu %[r4], %[max1], -1 \n\t" - "addiu %[r5], $zero, 1 \n\t" - "sllv %[r4], %[r5], %[r4] \n\t" -#endif // #if !defined(MIPS_DSP_R1_LE) - "3: \n\t" - "lw %[r0], 0(%[tmpre]) \n\t" - "lw %[r1], 0(%[tmpim]) \n\t" - "lw %[r2], 4(%[tmpre]) \n\t" - "lw %[r3], 4(%[tmpim]) \n\t" - "addiu %[k], %[k], -1 \n\t" -#if defined(MIPS_DSP_R1_LE) - "shrav_r.w %[r0], %[r0], %[max1] \n\t" - "shrav_r.w %[r1], %[r1], %[max1] \n\t" - "shrav_r.w %[r2], %[r2], %[max1] \n\t" - "shrav_r.w %[r3], %[r3], %[max1] \n\t" -#else // 
#if !defined(MIPS_DSP_R1_LE) - "addu %[r0], %[r0], %[r4] \n\t" - "addu %[r1], %[r1], %[r4] \n\t" - "addu %[r2], %[r2], %[r4] \n\t" - "addu %[r3], %[r3], %[r4] \n\t" - "srav %[r0], %[r0], %[max1] \n\t" - "srav %[r1], %[r1], %[max1] \n\t" - "srav %[r2], %[r2], %[max1] \n\t" - "srav %[r3], %[r3], %[max1] \n\t" -#endif // #if !defined(MIPS_DSP_R1_LE) - "addiu %[tmpre], %[tmpre], 8 \n\t" - "addiu %[tmpim], %[tmpim], 8 \n\t" - "sh %[r0], 0(%[inre1]) \n\t" - "sh %[r1], 0(%[inre2]) \n\t" - "sh %[r2], 2(%[inre1]) \n\t" - "sh %[r3], 2(%[inre2]) \n\t" - "addiu %[inre1], %[inre1], 4 \n\t" - "bgtz %[k], 3b \n\t" - " addiu %[inre2], %[inre2], 4 \n\t" - "4: \n\t" - ".set pop \n\t" - : [tmpre] "=&r" (tmpre), [tmpim] "=&r" (tmpim), [inre1] "=&r" (inre1), - [inre2] "=&r" (inre2), [k] "+r" (k), [max1] "=&r" (max1), -#if !defined(MIPS_DSP_R1_LE) - [r4] "=&r" (r4), [r5] "=&r" (r5), -#endif // #if !defined(MIPS_DSP_R1_LE) - [r0] "=&r" (r0), [r1] "=&r" (r1), [r2] "=&r" (r2), [r3] "=&r" (r3) - : [tmpreQ16] "r" (tmpreQ16), [tmpimQ16] "r" (tmpimQ16), - [inre1Q9] "r" (inre1Q9), [inre2Q9] "r" (inre2Q9), [max] "r" (max) - : "memory" - ); - - // Get DFT - WebRtcIsacfix_FftRadix16Fastest(inre1Q9, inre2Q9, -1); // real call - - // "Fastest" vectors and - // Use symmetry to separate into two complex vectors - // and center frames in time around zero - // merged into one loop - cosptr = (int16_t*)WebRtcIsacfix_kCosTab2; - sinptr = (int16_t*)WebRtcIsacfix_kSinTab2; - k = FRAMESAMPLES / 4; - factor = FRAMESAMPLES - 2; // offset for FRAMESAMPLES / 2 - 1 array member - - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "addiu %[inre1], %[inre1Q9], 0 \n\t" - "addiu %[inre2], %[inre2Q9], 0 \n\t" - "addiu %[tmpre], %[outreQ7], 0 \n\t" - "addiu %[tmpim], %[outimQ7], 0 \n\t" - "bltz %[max], 2f \n\t" - " subu %[max1], $zero, %[max] \n\t" - "1: \n\t" -#if !defined(MIPS_DSP_R1_LE) - "addu %[r4], %[inre1], %[offset] \n\t" - "addu %[r5], %[inre2], %[offset] \n\t" -#endif // #if !defined(MIPS_DSP_R1_LE) - "lh %[r0], 0(%[inre1]) \n\t" - "lh %[r1], 0(%[inre2]) \n\t" -#if defined(MIPS_DSP_R1_LE) - "lhx %[r2], %[offset](%[inre1]) \n\t" - "lhx %[r3], %[offset](%[inre2]) \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "lh %[r2], 0(%[r4]) \n\t" - "lh %[r3], 0(%[r5]) \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "srav %[r0], %[r0], %[max] \n\t" - "srav %[r1], %[r1], %[max] \n\t" - "srav %[r2], %[r2], %[max] \n\t" - "srav %[r3], %[r3], %[max] \n\t" - "addu %[r4], %[r0], %[r2] \n\t" - "subu %[r0], %[r2], %[r0] \n\t" - "subu %[r2], %[r1], %[r3] \n\t" - "addu %[r1], %[r1], %[r3] \n\t" - "lh %[r3], 0(%[cosptr]) \n\t" - "lh %[r5], 0(%[sinptr]) \n\t" - "andi %[r6], %[r4], 0xFFFF \n\t" - "sra %[r4], %[r4], 16 \n\t" - "mul %[r7], %[r3], %[r6] \n\t" - "mul %[r8], %[r3], %[r4] \n\t" - "mul %[r6], %[r5], %[r6] \n\t" - "mul %[r4], %[r5], %[r4] \n\t" - "addiu %[k], %[k], -1 \n\t" - "addiu %[inre1], %[inre1], 2 \n\t" - "addiu %[inre2], %[inre2], 2 \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r7], %[r7], 14 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r7], %[r7], 0x2000 \n\t" - "sra %[r7], %[r7], 14 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "sll %[r8], %[r8], 2 \n\t" - "addu %[r8], %[r8], %[r7] \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r6], %[r6], 14 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r6], %[r6], 0x2000 \n\t" - "sra %[r6], %[r6], 14 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "sll %[r4], %[r4], 2 \n\t" - "addu %[r4], %[r4], %[r6] \n\t" - "andi %[r6], %[r2], 0xFFFF \n\t" - "sra %[r2], %[r2], 16 \n\t" 
- "mul %[r7], %[r5], %[r6] \n\t" - "mul %[r9], %[r5], %[r2] \n\t" - "mul %[r6], %[r3], %[r6] \n\t" - "mul %[r2], %[r3], %[r2] \n\t" - "addiu %[cosptr], %[cosptr], 2 \n\t" - "addiu %[sinptr], %[sinptr], 2 \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r7], %[r7], 14 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r7], %[r7], 0x2000 \n\t" - "sra %[r7], %[r7], 14 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "sll %[r9], %[r9], 2 \n\t" - "addu %[r9], %[r7], %[r9] \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r6], %[r6], 14 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r6], %[r6], 0x2000 \n\t" - "sra %[r6], %[r6], 14 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "sll %[r2], %[r2], 2 \n\t" - "addu %[r2], %[r6], %[r2] \n\t" - "subu %[r8], %[r8], %[r9] \n\t" - "sra %[r8], %[r8], 9 \n\t" - "addu %[r2], %[r4], %[r2] \n\t" - "sra %[r2], %[r2], 9 \n\t" - "sh %[r8], 0(%[tmpre]) \n\t" - "sh %[r2], 0(%[tmpim]) \n\t" - - "andi %[r4], %[r1], 0xFFFF \n\t" - "sra %[r1], %[r1], 16 \n\t" - "andi %[r6], %[r0], 0xFFFF \n\t" - "sra %[r0], %[r0], 16 \n\t" - "mul %[r7], %[r5], %[r4] \n\t" - "mul %[r9], %[r5], %[r1] \n\t" - "mul %[r4], %[r3], %[r4] \n\t" - "mul %[r1], %[r3], %[r1] \n\t" - "mul %[r8], %[r3], %[r0] \n\t" - "mul %[r3], %[r3], %[r6] \n\t" - "mul %[r6], %[r5], %[r6] \n\t" - "mul %[r0], %[r5], %[r0] \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r7], %[r7], 14 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r7], %[r7], 0x2000 \n\t" - "sra %[r7], %[r7], 14 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "sll %[r9], %[r9], 2 \n\t" - "addu %[r9], %[r9], %[r7] \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r4], %[r4], 14 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r4], %[r4], 0x2000 \n\t" - "sra %[r4], %[r4], 14 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "sll %[r1], %[r1], 2 \n\t" - "addu %[r1], %[r1], %[r4] \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r3], %[r3], 14 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r3], %[r3], 0x2000 \n\t" - "sra %[r3], %[r3], 14 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "sll %[r8], %[r8], 2 \n\t" - "addu %[r8], %[r8], %[r3] \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r6], %[r6], 14 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r6], %[r6], 0x2000 \n\t" - "sra %[r6], %[r6], 14 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "sll %[r0], %[r0], 2 \n\t" - "addu %[r0], %[r0], %[r6] \n\t" - "addu %[r3], %[tmpre], %[offset] \n\t" - "addu %[r2], %[tmpim], %[offset] \n\t" - "addu %[r9], %[r9], %[r8] \n\t" - "negu %[r9], %[r9] \n\t" - "sra %[r9], %[r9], 9 \n\t" - "subu %[r0], %[r0], %[r1] \n\t" - "addiu %[offset], %[offset], -4 \n\t" - "sh %[r9], 0(%[r3]) \n\t" - "sh %[r0], 0(%[r2]) \n\t" - "addiu %[tmpre], %[tmpre], 2 \n\t" - "bgtz %[k], 1b \n\t" - " addiu %[tmpim], %[tmpim], 2 \n\t" - "b 3f \n\t" - " nop \n\t" - "2: \n\t" -#if !defined(MIPS_DSP_R1_LE) - "addu %[r4], %[inre1], %[offset] \n\t" - "addu %[r5], %[inre2], %[offset] \n\t" -#endif // #if !defined(MIPS_DSP_R1_LE) - "lh %[r0], 0(%[inre1]) \n\t" - "lh %[r1], 0(%[inre2]) \n\t" -#if defined(MIPS_DSP_R1_LE) - "lhx %[r2], %[offset](%[inre1]) \n\t" - "lhx %[r3], %[offset](%[inre2]) \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "lh %[r2], 0(%[r4]) \n\t" - "lh %[r3], 0(%[r5]) \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "sllv %[r0], %[r0], %[max1] \n\t" - "sllv %[r1], %[r1], %[max1] \n\t" - "sllv %[r2], %[r2], %[max1] \n\t" - "sllv %[r3], %[r3], %[max1] \n\t" - "addu %[r4], %[r0], %[r2] \n\t" - "subu %[r0], %[r2], %[r0] \n\t" - "subu %[r2], %[r1], %[r3] 
\n\t" - "addu %[r1], %[r1], %[r3] \n\t" - "lh %[r3], 0(%[cosptr]) \n\t" - "lh %[r5], 0(%[sinptr]) \n\t" - "andi %[r6], %[r4], 0xFFFF \n\t" - "sra %[r4], %[r4], 16 \n\t" - "mul %[r7], %[r3], %[r6] \n\t" - "mul %[r8], %[r3], %[r4] \n\t" - "mul %[r6], %[r5], %[r6] \n\t" - "mul %[r4], %[r5], %[r4] \n\t" - "addiu %[k], %[k], -1 \n\t" - "addiu %[inre1], %[inre1], 2 \n\t" - "addiu %[inre2], %[inre2], 2 \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r7], %[r7], 14 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r7], %[r7], 0x2000 \n\t" - "sra %[r7], %[r7], 14 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "sll %[r8], %[r8], 2 \n\t" - "addu %[r8], %[r8], %[r7] \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r6], %[r6], 14 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r6], %[r6], 0x2000 \n\t" - "sra %[r6], %[r6], 14 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "sll %[r4], %[r4], 2 \n\t" - "addu %[r4], %[r4], %[r6] \n\t" - "andi %[r6], %[r2], 0xFFFF \n\t" - "sra %[r2], %[r2], 16 \n\t" - "mul %[r7], %[r5], %[r6] \n\t" - "mul %[r9], %[r5], %[r2] \n\t" - "mul %[r6], %[r3], %[r6] \n\t" - "mul %[r2], %[r3], %[r2] \n\t" - "addiu %[cosptr], %[cosptr], 2 \n\t" - "addiu %[sinptr], %[sinptr], 2 \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r7], %[r7], 14 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r7], %[r7], 0x2000 \n\t" - "sra %[r7], %[r7], 14 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "sll %[r9], %[r9], 2 \n\t" - "addu %[r9], %[r7], %[r9] \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r6], %[r6], 14 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r6], %[r6], 0x2000 \n\t" - "sra %[r6], %[r6], 14 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "sll %[r2], %[r2], 2 \n\t" - "addu %[r2], %[r6], %[r2] \n\t" - "subu %[r8], %[r8], %[r9] \n\t" - "sra %[r8], %[r8], 9 \n\t" - "addu %[r2], %[r4], %[r2] \n\t" - "sra %[r2], %[r2], 9 \n\t" - "sh %[r8], 0(%[tmpre]) \n\t" - "sh %[r2], 0(%[tmpim]) \n\t" - "andi %[r4], %[r1], 0xFFFF \n\t" - "sra %[r1], %[r1], 16 \n\t" - "andi %[r6], %[r0], 0xFFFF \n\t" - "sra %[r0], %[r0], 16 \n\t" - "mul %[r7], %[r5], %[r4] \n\t" - "mul %[r9], %[r5], %[r1] \n\t" - "mul %[r4], %[r3], %[r4] \n\t" - "mul %[r1], %[r3], %[r1] \n\t" - "mul %[r8], %[r3], %[r0] \n\t" - "mul %[r3], %[r3], %[r6] \n\t" - "mul %[r6], %[r5], %[r6] \n\t" - "mul %[r0], %[r5], %[r0] \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r7], %[r7], 14 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r7], %[r7], 0x2000 \n\t" - "sra %[r7], %[r7], 14 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "sll %[r9], %[r9], 2 \n\t" - "addu %[r9], %[r9], %[r7] \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r4], %[r4], 14 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r4], %[r4], 0x2000 \n\t" - "sra %[r4], %[r4], 14 \n\t" -#endif - "sll %[r1], %[r1], 2 \n\t" - "addu %[r1], %[r1], %[r4] \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r3], %[r3], 14 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r3], %[r3], 0x2000 \n\t" - "sra %[r3], %[r3], 14 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "sll %[r8], %[r8], 2 \n\t" - "addu %[r8], %[r8], %[r3] \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r6], %[r6], 14 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r6], %[r6], 0x2000 \n\t" - "sra %[r6], %[r6], 14 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "sll %[r0], %[r0], 2 \n\t" - "addu %[r0], %[r0], %[r6] \n\t" - "addu %[r3], %[tmpre], %[offset] \n\t" - "addu %[r2], %[tmpim], %[offset] \n\t" - "addu %[r9], %[r9], %[r8] \n\t" - "negu %[r9], %[r9] \n\t" - "sra %[r9], %[r9], 
9 \n\t" - "subu %[r0], %[r0], %[r1] \n\t" - "sra %[r0], %[r0], 9 \n\t" - "addiu %[offset], %[offset], -4 \n\t" - "sh %[r9], 0(%[r3]) \n\t" - "sh %[r0], 0(%[r2]) \n\t" - "addiu %[tmpre], %[tmpre], 2 \n\t" - "bgtz %[k], 2b \n\t" - " addiu %[tmpim], %[tmpim], 2 \n\t" - "3: \n\t" - ".set pop \n\t" - : [inre1] "=&r" (inre1), [inre2] "=&r" (inre2), [tmpre] "=&r" (tmpre), - [tmpim] "=&r" (tmpim), [offset] "+r" (factor), [k] "+r" (k), - [r0] "=&r" (r0), [r1] "=&r" (r1), [r2] "=&r" (r2), [r3] "=&r" (r3), - [r4] "=&r" (r4), [r5] "=&r" (r5), [r6] "=&r" (r6), [r7] "=&r" (r7), - [r8] "=&r" (r8), [r9] "=&r" (r9), [max1] "=&r" (max1) - : [inre1Q9] "r" (inre1Q9), [inre2Q9] "r" (inre2Q9), - [outreQ7] "r" (outreQ7), [outimQ7] "r" (outimQ7), - [max] "r" (max), [cosptr] "r" (cosptr), [sinptr] "r" (sinptr) - : "hi", "lo", "memory" - ); -} - -void WebRtcIsacfix_Spec2TimeMIPS(int16_t *inreQ7, - int16_t *inimQ7, - int32_t *outre1Q16, - int32_t *outre2Q16) { - int k = FRAMESAMPLES / 4; - int16_t* inre; - int16_t* inim; - int32_t* outre1; - int32_t* outre2; - int16_t* cosptr = (int16_t*)WebRtcIsacfix_kCosTab2; - int16_t* sinptr = (int16_t*)WebRtcIsacfix_kSinTab2; - int32_t r0, r1, r2, r3, r4, r5, r6, r7, r8, r9, max, max1; -#if defined(MIPS_DSP_R1_LE) - int32_t offset = FRAMESAMPLES - 4; -#else // #if defined(MIPS_DSP_R1_LE) - int32_t offset = FRAMESAMPLES - 2; -#endif // #if defined(MIPS_DSP_R1_LE) - - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "addiu %[inre], %[inreQ7], 0 \n\t" - "addiu %[inim] , %[inimQ7], 0 \n\t" - "addiu %[outre1], %[outre1Q16], 0 \n\t" - "addiu %[outre2], %[outre2Q16], 0 \n\t" - "mul %[max], $zero, $zero \n\t" - "1: \n\t" -#if defined(MIPS_DSP_R1_LE) - // Process two samples in one iteration avoiding left shift before - // multiplication. MaxAbsValueW32 function inlined into the loop. 
- "addu %[r8], %[inre], %[offset] \n\t" - "addu %[r9], %[inim], %[offset] \n\t" - "lwl %[r4], 0(%[r8]) \n\t" - "lwl %[r5], 0(%[r9]) \n\t" - "lwl %[r0], 0(%[inre]) \n\t" - "lwl %[r1], 0(%[inim]) \n\t" - "lwl %[r2], 0(%[cosptr]) \n\t" - "lwl %[r3], 0(%[sinptr]) \n\t" - "lwr %[r4], 0(%[r8]) \n\t" - "lwr %[r5], 0(%[r9]) \n\t" - "lwr %[r0], 0(%[inre]) \n\t" - "lwr %[r1], 0(%[inim]) \n\t" - "lwr %[r2], 0(%[cosptr]) \n\t" - "lwr %[r3], 0(%[sinptr]) \n\t" - "packrl.ph %[r4], %[r4], %[r4] \n\t" - "packrl.ph %[r5], %[r5], %[r5] \n\t" - "muleq_s.w.phr %[r6], %[r0], %[r2] \n\t" - "muleq_s.w.phr %[r7], %[r1], %[r3] \n\t" - "muleq_s.w.phr %[r8], %[r4], %[r2] \n\t" - "muleq_s.w.phr %[r9], %[r5], %[r3] \n\t" - "addiu %[k], %[k], -2 \n\t" - "addiu %[cosptr], %[cosptr], 4 \n\t" - "addiu %[sinptr], %[sinptr], 4 \n\t" - "addiu %[inre], %[inre], 4 \n\t" - "addiu %[inim], %[inim], 4 \n\t" - "shra_r.w %[r6], %[r6], 6 \n\t" - "shra_r.w %[r7], %[r7], 6 \n\t" - "shra_r.w %[r8], %[r8], 6 \n\t" - "shra_r.w %[r9], %[r9], 6 \n\t" - "addu %[r6], %[r6], %[r7] \n\t" - "subu %[r9], %[r9], %[r8] \n\t" - "subu %[r7], %[r6], %[r9] \n\t" - "addu %[r6], %[r6], %[r9] \n\t" - "sw %[r7], 0(%[outre1]) \n\t" - "absq_s.w %[r7], %[r7] \n\t" - "slt %[r8], %[max], %[r7] \n\t" - "movn %[max], %[r7], %[r8] \n\t" - "sll %[r7], %[offset], 1 \n\t" - "addu %[r7], %[outre1], %[r7] \n\t" - "sw %[r6], 4(%[r7]) \n\t" - "absq_s.w %[r6], %[r6] \n\t" - "slt %[r8], %[max], %[r6] \n\t" - "movn %[max], %[r6], %[r8] \n\t" - "muleq_s.w.phl %[r6], %[r0], %[r2] \n\t" - "muleq_s.w.phl %[r7], %[r1], %[r3] \n\t" - "muleq_s.w.phl %[r8], %[r4], %[r2] \n\t" - "muleq_s.w.phl %[r9], %[r5], %[r3] \n\t" - "shra_r.w %[r6], %[r6], 6 \n\t" - "shra_r.w %[r7], %[r7], 6 \n\t" - "shra_r.w %[r8], %[r8], 6 \n\t" - "shra_r.w %[r9], %[r9], 6 \n\t" - "addu %[r6], %[r6], %[r7] \n\t" - "subu %[r9], %[r9], %[r8] \n\t" - "subu %[r7], %[r6], %[r9] \n\t" - "addu %[r6], %[r6], %[r9] \n\t" - "sw %[r7], 4(%[outre1]) \n\t" - "absq_s.w %[r7], %[r7] \n\t" - "slt %[r8], %[max], %[r7] \n\t" - "movn %[max], %[r7], %[r8] \n\t" - "sll %[r7], %[offset], 1 \n\t" - "addu %[r7], %[outre1], %[r7] \n\t" - "sw %[r6], 0(%[r7]) \n\t" - "absq_s.w %[r6], %[r6] \n\t" - "slt %[r8], %[max], %[r6] \n\t" - "movn %[max], %[r6], %[r8] \n\t" - "muleq_s.w.phr %[r6], %[r1], %[r2] \n\t" - "muleq_s.w.phr %[r7], %[r0], %[r3] \n\t" - "muleq_s.w.phr %[r8], %[r5], %[r2] \n\t" - "muleq_s.w.phr %[r9], %[r4], %[r3] \n\t" - "addiu %[outre1], %[outre1], 8 \n\t" - "shra_r.w %[r6], %[r6], 6 \n\t" - "shra_r.w %[r7], %[r7], 6 \n\t" - "shra_r.w %[r8], %[r8], 6 \n\t" - "shra_r.w %[r9], %[r9], 6 \n\t" - "subu %[r6], %[r6], %[r7] \n\t" - "addu %[r9], %[r9], %[r8] \n\t" - "subu %[r7], %[r6], %[r9] \n\t" - "addu %[r6], %[r9], %[r6] \n\t" - "negu %[r6], %[r6] \n\t" - "sw %[r7], 0(%[outre2]) \n\t" - "absq_s.w %[r7], %[r7] \n\t" - "slt %[r8], %[max], %[r7] \n\t" - "movn %[max], %[r7], %[r8] \n\t" - "sll %[r7], %[offset], 1 \n\t" - "addu %[r7], %[outre2], %[r7] \n\t" - "sw %[r6], 4(%[r7]) \n\t" - "absq_s.w %[r6], %[r6] \n\t" - "slt %[r8], %[max], %[r6] \n\t" - "movn %[max], %[r6], %[r8] \n\t" - "muleq_s.w.phl %[r6], %[r1], %[r2] \n\t" - "muleq_s.w.phl %[r7], %[r0], %[r3] \n\t" - "muleq_s.w.phl %[r8], %[r5], %[r2] \n\t" - "muleq_s.w.phl %[r9], %[r4], %[r3] \n\t" - "addiu %[offset], %[offset], -8 \n\t" - "shra_r.w %[r6], %[r6], 6 \n\t" - "shra_r.w %[r7], %[r7], 6 \n\t" - "shra_r.w %[r8], %[r8], 6 \n\t" - "shra_r.w %[r9], %[r9], 6 \n\t" - "subu %[r6], %[r6], %[r7] \n\t" - "addu %[r9], %[r9], %[r8] \n\t" - "subu %[r7], %[r6], %[r9] \n\t" - 
"addu %[r6], %[r9], %[r6] \n\t" - "negu %[r6], %[r6] \n\t" - "sw %[r7], 4(%[outre2]) \n\t" - "absq_s.w %[r7], %[r7] \n\t" - "slt %[r8], %[max], %[r7] \n\t" - "movn %[max], %[r7], %[r8] \n\t" - "sll %[r7], %[offset], 1 \n\t" - "addu %[r7], %[outre2], %[r7] \n\t" - "sw %[r6], 0(%[r7]) \n\t" - "absq_s.w %[r6], %[r6] \n\t" - "slt %[r8], %[max], %[r6] \n\t" - "movn %[max], %[r6], %[r8] \n\t" - "bgtz %[k], 1b \n\t" - " addiu %[outre2], %[outre2], 8 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "lh %[r0], 0(%[inre]) \n\t" - "lh %[r1], 0(%[inim]) \n\t" - "lh %[r4], 0(%[cosptr]) \n\t" - "lh %[r5], 0(%[sinptr]) \n\t" - "addiu %[k], %[k], -1 \n\t" - "mul %[r2], %[r0], %[r4] \n\t" - "mul %[r0], %[r0], %[r5] \n\t" - "mul %[r3], %[r1], %[r5] \n\t" - "mul %[r1], %[r1], %[r4] \n\t" - "addiu %[cosptr], %[cosptr], 2 \n\t" - "addiu %[sinptr], %[sinptr], 2 \n\t" - "addu %[r8], %[inre], %[offset] \n\t" - "addu %[r9], %[inim], %[offset] \n\t" - "addiu %[r2], %[r2], 16 \n\t" - "sra %[r2], %[r2], 5 \n\t" - "addiu %[r0], %[r0], 16 \n\t" - "sra %[r0], %[r0], 5 \n\t" - "addiu %[r3], %[r3], 16 \n\t" - "sra %[r3], %[r3], 5 \n\t" - "lh %[r6], 0(%[r8]) \n\t" - "lh %[r7], 0(%[r9]) \n\t" - "addiu %[r1], %[r1], 16 \n\t" - "sra %[r1], %[r1], 5 \n\t" - "mul %[r8], %[r7], %[r4] \n\t" - "mul %[r7], %[r7], %[r5] \n\t" - "mul %[r9], %[r6], %[r4] \n\t" - "mul %[r6], %[r6], %[r5] \n\t" - "addu %[r2], %[r2], %[r3] \n\t" - "subu %[r1], %[r1], %[r0] \n\t" - "sll %[r0], %[offset], 1 \n\t" - "addu %[r4], %[outre1], %[r0] \n\t" - "addu %[r5], %[outre2], %[r0] \n\t" - "addiu %[r8], %[r8], 16 \n\t" - "sra %[r8], %[r8], 5 \n\t" - "addiu %[r7], %[r7], 16 \n\t" - "sra %[r7], %[r7], 5 \n\t" - "addiu %[r6], %[r6], 16 \n\t" - "sra %[r6], %[r6], 5 \n\t" - "addiu %[r9], %[r9], 16 \n\t" - "sra %[r9], %[r9], 5 \n\t" - "addu %[r8], %[r8], %[r6] \n\t" - "negu %[r8], %[r8] \n\t" - "subu %[r7], %[r7], %[r9] \n\t" - "subu %[r6], %[r2], %[r7] \n\t" - "addu %[r0], %[r2], %[r7] \n\t" - "addu %[r3], %[r1], %[r8] \n\t" - "subu %[r1], %[r8], %[r1] \n\t" - "sw %[r6], 0(%[outre1]) \n\t" - "sw %[r0], 0(%[r4]) \n\t" - "sw %[r3], 0(%[outre2]) \n\t" - "sw %[r1], 0(%[r5]) \n\t" - "addiu %[outre1], %[outre1], 4 \n\t" - "addiu %[offset], %[offset], -4 \n\t" - "addiu %[inre], %[inre], 2 \n\t" - "addiu %[inim], %[inim], 2 \n\t" - // Inlined WebRtcSpl_MaxAbsValueW32 - "negu %[r5], %[r6] \n\t" - "slt %[r2], %[r6], $zero \n\t" - "movn %[r6], %[r5], %[r2] \n\t" - "negu %[r5], %[r0] \n\t" - "slt %[r2], %[r0], $zero \n\t" - "movn %[r0], %[r5], %[r2] \n\t" - "negu %[r5], %[r3] \n\t" - "slt %[r2], %[r3], $zero \n\t" - "movn %[r3], %[r5], %[r2] \n\t" - "negu %[r5], %[r1] \n\t" - "slt %[r2], %[r1], $zero \n\t" - "movn %[r1], %[r5], %[r2] \n\t" - "slt %[r2], %[r6], %[r0] \n\t" - "slt %[r5], %[r3], %[r1] \n\t" - "movn %[r6], %[r0], %[r2] \n\t" - "movn %[r3], %[r1], %[r5] \n\t" - "slt %[r2], %[r6], %[r3] \n\t" - "movn %[r6], %[r3], %[r2] \n\t" - "slt %[r2], %[max], %[r6] \n\t" - "movn %[max], %[r6], %[r2] \n\t" - "bgtz %[k], 1b \n\t" - " addiu %[outre2], %[outre2], 4 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "clz %[max], %[max] \n\t" - "addiu %[max], %[max], -25 \n\t" - ".set pop \n\t" - : [inre] "=&r" (inre), [inim] "=&r" (inim), - [outre1] "=&r" (outre1), [outre2] "=&r" (outre2), - [offset] "+r" (offset), [k] "+r" (k), [r0] "=&r" (r0), - [r1] "=&r" (r1), [r2] "=&r" (r2), [r3] "=&r" (r3), - [r4] "=&r" (r4), [r5] "=&r" (r5), [r6] "=&r" (r6), - [r7] "=&r" (r7), [r8] "=&r" (r8), [r9] "=&r" (r9), - [max] "=&r" (max) - : [inreQ7] "r" (inreQ7), [inimQ7] "r" (inimQ7), - [cosptr] "r" 
(cosptr), [sinptr] "r" (sinptr), - [outre1Q16] "r" (outre1Q16), [outre2Q16] "r" (outre2Q16) - : "hi", "lo", "memory" - ); - - // "Fastest" vectors - k = FRAMESAMPLES / 4; - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "addiu %[inre], %[inreQ7], 0 \n\t" - "addiu %[inim], %[inimQ7], 0 \n\t" - "addiu %[outre1], %[outre1Q16], 0 \n\t" - "addiu %[outre2], %[outre2Q16], 0 \n\t" - "bltz %[max], 2f \n\t" - " subu %[max1], $zero, %[max] \n\t" - "1: \n\t" - "lw %[r0], 0(%[outre1]) \n\t" - "lw %[r1], 0(%[outre2]) \n\t" - "lw %[r2], 4(%[outre1]) \n\t" - "lw %[r3], 4(%[outre2]) \n\t" - "sllv %[r0], %[r0], %[max] \n\t" - "sllv %[r1], %[r1], %[max] \n\t" - "sllv %[r2], %[r2], %[max] \n\t" - "sllv %[r3], %[r3], %[max] \n\t" - "addiu %[k], %[k], -1 \n\t" - "addiu %[outre1], %[outre1], 8 \n\t" - "addiu %[outre2], %[outre2], 8 \n\t" - "sh %[r0], 0(%[inre]) \n\t" - "sh %[r1], 0(%[inim]) \n\t" - "sh %[r2], 2(%[inre]) \n\t" - "sh %[r3], 2(%[inim]) \n\t" - "addiu %[inre], %[inre], 4 \n\t" - "bgtz %[k], 1b \n\t" - " addiu %[inim], %[inim], 4 \n\t" - "b 4f \n\t" - " nop \n\t" - "2: \n\t" -#if !defined(MIPS_DSP_R1_LE) - "addiu %[r4], $zero, 1 \n\t" - "addiu %[r5], %[max1], -1 \n\t" - "sllv %[r4], %[r4], %[r5] \n\t" -#endif // #if !defined(MIPS_DSP_R1_LE) - "3: \n\t" - "lw %[r0], 0(%[outre1]) \n\t" - "lw %[r1], 0(%[outre2]) \n\t" - "lw %[r2], 4(%[outre1]) \n\t" - "lw %[r3], 4(%[outre2]) \n\t" -#if defined(MIPS_DSP_R1_LE) - "shrav_r.w %[r0], %[r0], %[max1] \n\t" - "shrav_r.w %[r1], %[r1], %[max1] \n\t" - "shrav_r.w %[r2], %[r2], %[max1] \n\t" - "shrav_r.w %[r3], %[r3], %[max1] \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addu %[r0], %[r0], %[r4] \n\t" - "addu %[r1], %[r1], %[r4] \n\t" - "addu %[r2], %[r2], %[r4] \n\t" - "addu %[r3], %[r3], %[r4] \n\t" - "srav %[r0], %[r0], %[max1] \n\t" - "srav %[r1], %[r1], %[max1] \n\t" - "srav %[r2], %[r2], %[max1] \n\t" - "srav %[r3], %[r3], %[max1] \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "addiu %[outre1], %[outre1], 8 \n\t" - "addiu %[outre2], %[outre2], 8 \n\t" - "sh %[r0], 0(%[inre]) \n\t" - "sh %[r1], 0(%[inim]) \n\t" - "sh %[r2], 2(%[inre]) \n\t" - "sh %[r3], 2(%[inim]) \n\t" - "addiu %[k], %[k], -1 \n\t" - "addiu %[inre], %[inre], 4 \n\t" - "bgtz %[k], 3b \n\t" - " addiu %[inim], %[inim], 4 \n\t" - "4: \n\t" - ".set pop \n\t" - : [k] "+r" (k), [max1] "=&r" (max1), [r0] "=&r" (r0), - [inre] "=&r" (inre), [inim] "=&r" (inim), - [outre1] "=&r" (outre1), [outre2] "=&r" (outre2), -#if !defined(MIPS_DSP_R1_LE) - [r4] "=&r" (r4), [r5] "=&r" (r5), -#endif // #if !defined(MIPS_DSP_R1_LE) - [r1] "=&r" (r1), [r2] "=&r" (r2), [r3] "=&r" (r3) - : [max] "r" (max), [inreQ7] "r" (inreQ7), - [inimQ7] "r" (inimQ7), [outre1Q16] "r" (outre1Q16), - [outre2Q16] "r" (outre2Q16) - : "memory" - ); - - WebRtcIsacfix_FftRadix16Fastest(inreQ7, inimQ7, 1); // real call - - // All the remaining processing is done inside a single loop to avoid - // unnecessary memory accesses. MIPS DSPr2 version processes two samples - // at a time. 
- cosptr = (int16_t*)WebRtcIsacfix_kCosTab1; - sinptr = (int16_t*)WebRtcIsacfix_kSinTab1; - k = FRAMESAMPLES / 2; - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "addiu %[inre], %[inreQ7], 0 \n\t" - "addiu %[inim], %[inimQ7], 0 \n\t" - "addiu %[outre1], %[outre1Q16], 0 \n\t" - "addiu %[outre2], %[outre2Q16], 0 \n\t" - "addiu %[r4], $zero, 273 \n\t" - "addiu %[r5], $zero, 31727 \n\t" -#if defined(MIPS_DSP_R2_LE) - "addiu %[max], %[max], 16 \n\t" - "replv.ph %[r4], %[r4] \n\t" -#endif // #if defined(MIPS_DSP_R2_LE) - "bltz %[max], 2f \n\t" - " subu %[max1], $zero, %[max] \n\t" -#if defined(MIPS_DSP_R2_LE) - "addiu %[max], %[max], 1 \n\t" -#endif // #if defined(MIPS_DSP_R2_LE) - "1: \n\t" -#if defined(MIPS_DSP_R2_LE) - "lwl %[r0], 0(%[inre]) \n\t" - "lwl %[r1], 0(%[inim]) \n\t" - "lh %[r2], 0(%[cosptr]) \n\t" - "lwr %[r0], 0(%[inre]) \n\t" - "lwr %[r1], 0(%[inim]) \n\t" - "lh %[r3], 0(%[sinptr]) \n\t" - "muleq_s.w.phr %[r6], %[r0], %[r4] \n\t" - "muleq_s.w.phr %[r7], %[r1], %[r4] \n\t" - "muleq_s.w.phl %[r0], %[r0], %[r4] \n\t" - "muleq_s.w.phl %[r1], %[r1], %[r4] \n\t" - "addiu %[k], %[k], -2 \n\t" - "addiu %[inre], %[inre], 4 \n\t" - "addiu %[inim], %[inim], 4 \n\t" - "shrav_r.w %[r6], %[r6], %[max] \n\t" - "shrav_r.w %[r7], %[r7], %[max] \n\t" - "mult $ac0, %[r2], %[r6] \n\t" - "mult $ac1, %[r3], %[r7] \n\t" - "mult $ac2, %[r2], %[r7] \n\t" - "mult $ac3, %[r3], %[r6] \n\t" - "lh %[r2], 2(%[cosptr]) \n\t" - "lh %[r3], 2(%[sinptr]) \n\t" - "extr_r.w %[r6], $ac0, 14 \n\t" - "extr_r.w %[r7], $ac1, 14 \n\t" - "extr_r.w %[r8], $ac2, 14 \n\t" - "extr_r.w %[r9], $ac3, 14 \n\t" - "shrav_r.w %[r0], %[r0], %[max] \n\t" - "shrav_r.w %[r1], %[r1], %[max] \n\t" - "mult $ac0, %[r2], %[r0] \n\t" - "mult $ac1, %[r3], %[r1] \n\t" - "mult $ac2, %[r2], %[r1] \n\t" - "mult $ac3, %[r3], %[r0] \n\t" - "addiu %[cosptr], %[cosptr], 4 \n\t" - "extr_r.w %[r0], $ac0, 14 \n\t" - "extr_r.w %[r1], $ac1, 14 \n\t" - "extr_r.w %[r2], $ac2, 14 \n\t" - "extr_r.w %[r3], $ac3, 14 \n\t" - "subu %[r6], %[r6], %[r7] \n\t" - "addu %[r8], %[r8], %[r9] \n\t" - "mult $ac0, %[r5], %[r6] \n\t" - "mult $ac1, %[r5], %[r8] \n\t" - "addiu %[sinptr], %[sinptr], 4 \n\t" - "subu %[r0], %[r0], %[r1] \n\t" - "addu %[r2], %[r2], %[r3] \n\t" - "extr_r.w %[r1], $ac0, 11 \n\t" - "extr_r.w %[r3], $ac1, 11 \n\t" - "mult $ac2, %[r5], %[r0] \n\t" - "mult $ac3, %[r5], %[r2] \n\t" - "sw %[r1], 0(%[outre1]) \n\t" - "sw %[r3], 0(%[outre2]) \n\t" - "addiu %[outre1], %[outre1], 8 \n\t" - "extr_r.w %[r0], $ac2, 11 \n\t" - "extr_r.w %[r2], $ac3, 11 \n\t" - "sw %[r0], -4(%[outre1]) \n\t" - "sw %[r2], 4(%[outre2]) \n\t" - "bgtz %[k], 1b \n\t" - " addiu %[outre2], %[outre2], 8 \n\t" - "b 3f \n\t" -#else // #if defined(MIPS_DSP_R2_LE) - "lh %[r0], 0(%[inre]) \n\t" - "lh %[r1], 0(%[inim]) \n\t" - "addiu %[k], %[k], -1 \n\t" - "srav %[r0], %[r0], %[max] \n\t" - "srav %[r1], %[r1], %[max] \n\t" - "sra %[r2], %[r0], 16 \n\t" - "andi %[r0], %[r0], 0xFFFF \n\t" - "sra %[r0], %[r0], 1 \n\t" - "sra %[r3], %[r1], 16 \n\t" - "andi %[r1], %[r1], 0xFFFF \n\t" - "sra %[r1], %[r1], 1 \n\t" - "mul %[r2], %[r2], %[r4] \n\t" - "mul %[r0], %[r0], %[r4] \n\t" - "mul %[r3], %[r3], %[r4] \n\t" - "mul %[r1], %[r1], %[r4] \n\t" - "addiu %[inre], %[inre], 2 \n\t" - "addiu %[inim], %[inim], 2 \n\t" - "lh %[r6], 0(%[cosptr]) \n\t" - "lh %[r7], 0(%[sinptr]) \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r0], %[r0], 15 \n\t" - "shra_r.w %[r1], %[r1], 15 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r0], %[r0], 0x4000 \n\t" - "addiu %[r1], %[r1], 0x4000 \n\t" - 
"sra %[r0], %[r0], 15 \n\t" - "sra %[r1], %[r1], 15 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "addu %[r0], %[r2], %[r0] \n\t" - "addu %[r1], %[r3], %[r1] \n\t" - "sra %[r2], %[r0], 16 \n\t" - "andi %[r0], %[r0], 0xFFFF \n\t" - "mul %[r9], %[r2], %[r6] \n\t" - "mul %[r2], %[r2], %[r7] \n\t" - "mul %[r8], %[r0], %[r6] \n\t" - "mul %[r0], %[r0], %[r7] \n\t" - "sra %[r3], %[r3], 16 \n\t" - "andi %[r1], %[r1], 0xFFFF \n\t" - "sll %[r9], %[r9], 2 \n\t" - "sll %[r2], %[r2], 2 \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r8], %[r8], 14 \n\t" - "shra_r.w %[r0], %[r0], 14 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r8], %[r8], 0x2000 \n\t" - "addiu %[r0], %[r0], 0x2000 \n\t" - "sra %[r8], %[r8], 14 \n\t" - "sra %[r0], %[r0], 14 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "addu %[r9], %[r9], %[r8] \n\t" - "addu %[r2], %[r2], %[r0] \n\t" - "mul %[r0], %[r3], %[r6] \n\t" - "mul %[r3], %[r3], %[r7] \n\t" - "mul %[r8], %[r1], %[r6] \n\t" - "mul %[r1], %[r1], %[r8] \n\t" - "addiu %[cosptr], %[cosptr], 2 \n\t" - "addiu %[sinptr], %[sinptr], 2 \n\t" - "sll %[r0], %[r0], 2 \n\t" - "sll %[r3], %[r3], 2 \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r8], %[r8], 14 \n\t" - "shra_r.w %[r1], %[r1], 14 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r8], %[r8], 0x2000 \n\t" - "addiu %[r1], %[r1], 0x2000 \n\t" - "sra %[r8], %[r8], 14 \n\t" - "sra %[r1], %[r1], 14 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "addu %[r0], %[r0], %[r8] \n\t" - "addu %[r3], %[r3], %[r1] \n\t" - "subu %[r9], %[r9], %[r3] \n\t" - "addu %[r0], %[r0], %[r2] \n\t" - "sra %[r1], %[r9], 16 \n\t" - "andi %[r9], %[r9], 0xFFFF \n\t" - "mul %[r1], %[r1], %[r5] \n\t" - "mul %[r9], %[r9], %[r5] \n\t" - "sra %[r2], %[r0], 16 \n\t" - "andi %[r0], %[r0], 0xFFFF \n\t" - "mul %[r2], %[r2], %[r5] \n\t" - "mul %[r0], %[r0], %[r5] \n\t" - "sll %[r1], %[r1], 5 \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r9], %[r9], 11 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r9], %[r9], 0x400 \n\t" - "sra %[r9], %[r9], 11 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "addu %[r1], %[r1], %[r9] \n\t" - "sll %[r2], %[r2], 5 \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r0], %[r0], 11 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r0], %[r0], 0x400 \n\t" - "sra %[r0], %[r0], 11 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "addu %[r0], %[r0], %[r2] \n\t" - "sw %[r1], 0(%[outre1]) \n\t" - "addiu %[outre1], %[outre1], 4 \n\t" - "sw %[r0], 0(%[outre2]) \n\t" - "bgtz %[k], 1b \n\t" - " addiu %[outre2], %[outre2], 4 \n\t" - "b 3f \n\t" - " nop \n\t" -#endif // #if defined(MIPS_DSP_R2_LE) - "2: \n\t" -#if defined(MIPS_DSP_R2_LE) - "addiu %[max1], %[max1], -1 \n\t" - "21: \n\t" - "lwl %[r0], 0(%[inre]) \n\t" - "lwl %[r1], 0(%[inim]) \n\t" - "lh %[r2], 0(%[cosptr]) \n\t" - "lwr %[r0], 0(%[inre]) \n\t" - "lwr %[r1], 0(%[inim]) \n\t" - "lh %[r3], 0(%[sinptr]) \n\t" - "muleq_s.w.phr %[r6], %[r0], %[r4] \n\t" - "muleq_s.w.phr %[r7], %[r1], %[r4] \n\t" - "muleq_s.w.phl %[r0], %[r0], %[r4] \n\t" - "muleq_s.w.phl %[r1], %[r1], %[r4] \n\t" - "addiu %[k], %[k], -2 \n\t" - "addiu %[inre], %[inre], 4 \n\t" - "addiu %[inim], %[inim], 4 \n\t" - "sllv %[r6], %[r6], %[max1] \n\t" - "sllv %[r7], %[r7], %[max1] \n\t" - "mult $ac0, %[r2], %[r6] \n\t" - "mult $ac1, %[r3], %[r7] \n\t" - "mult $ac2, %[r2], %[r7] \n\t" - "mult $ac3, %[r3], %[r6] \n\t" - "lh %[r2], 2(%[cosptr]) \n\t" - "lh %[r3], 2(%[sinptr]) \n\t" - "extr_r.w %[r6], $ac0, 14 \n\t" - "extr_r.w %[r7], $ac1, 14 \n\t" - "extr_r.w %[r8], $ac2, 14 \n\t" - "extr_r.w %[r9], $ac3, 14 
\n\t" - "sllv %[r0], %[r0], %[max1] \n\t" - "sllv %[r1], %[r1], %[max1] \n\t" - "mult $ac0, %[r2], %[r0] \n\t" - "mult $ac1, %[r3], %[r1] \n\t" - "mult $ac2, %[r2], %[r1] \n\t" - "mult $ac3, %[r3], %[r0] \n\t" - "addiu %[cosptr], %[cosptr], 4 \n\t" - "extr_r.w %[r0], $ac0, 14 \n\t" - "extr_r.w %[r1], $ac1, 14 \n\t" - "extr_r.w %[r2], $ac2, 14 \n\t" - "extr_r.w %[r3], $ac3, 14 \n\t" - "subu %[r6], %[r6], %[r7] \n\t" - "addu %[r8], %[r8], %[r9] \n\t" - "mult $ac0, %[r5], %[r6] \n\t" - "mult $ac1, %[r5], %[r8] \n\t" - "addiu %[sinptr], %[sinptr], 4 \n\t" - "subu %[r0], %[r0], %[r1] \n\t" - "addu %[r2], %[r2], %[r3] \n\t" - "extr_r.w %[r1], $ac0, 11 \n\t" - "extr_r.w %[r3], $ac1, 11 \n\t" - "mult $ac2, %[r5], %[r0] \n\t" - "mult $ac3, %[r5], %[r2] \n\t" - "sw %[r1], 0(%[outre1]) \n\t" - "sw %[r3], 0(%[outre2]) \n\t" - "addiu %[outre1], %[outre1], 8 \n\t" - "extr_r.w %[r0], $ac2, 11 \n\t" - "extr_r.w %[r2], $ac3, 11 \n\t" - "sw %[r0], -4(%[outre1]) \n\t" - "sw %[r2], 4(%[outre2]) \n\t" - "bgtz %[k], 21b \n\t" - " addiu %[outre2], %[outre2], 8 \n\t" - "b 3f \n\t" - " nop \n\t" -#else // #if defined(MIPS_DSP_R2_LE) - "lh %[r0], 0(%[inre]) \n\t" - "lh %[r1], 0(%[inim]) \n\t" - "addiu %[k], %[k], -1 \n\t" - "sllv %[r0], %[r0], %[max1] \n\t" - "sllv %[r1], %[r1], %[max1] \n\t" - "sra %[r2], %[r0], 16 \n\t" - "andi %[r0], %[r0], 0xFFFF \n\t" - "sra %[r0], %[r0], 1 \n\t" - "sra %[r3], %[r1], 16 \n\t" - "andi %[r1], %[r1], 0xFFFF \n\t" - "sra %[r1], %[r1], 1 \n\t" - "mul %[r2], %[r2], %[r4] \n\t" - "mul %[r0], %[r0], %[r4] \n\t" - "mul %[r3], %[r3], %[r4] \n\t" - "mul %[r1], %[r1], %[r4] \n\t" - "addiu %[inre], %[inre], 2 \n\t" - "addiu %[inim], %[inim], 2 \n\t" - "lh %[r6], 0(%[cosptr]) \n\t" - "lh %[r7], 0(%[sinptr]) \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r0], %[r0], 15 \n\t" - "shra_r.w %[r1], %[r1], 15 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r0], %[r0], 0x4000 \n\t" - "addiu %[r1], %[r1], 0x4000 \n\t" - "sra %[r0], %[r0], 15 \n\t" - "sra %[r1], %[r1], 15 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "addu %[r0], %[r2], %[r0] \n\t" - "addu %[r1], %[r3], %[r1] \n\t" - "sra %[r2], %[r0], 16 \n\t" - "andi %[r0], %[r0], 0xFFFF \n\t" - "mul %[r9], %[r2], %[r6] \n\t" - "mul %[r2], %[r2], %[r7] \n\t" - "mul %[r8], %[r0], %[r6] \n\t" - "mul %[r0], %[r0], %[r7] \n\t" - "sra %[r3], %[r1], 16 \n\t" - "andi %[r1], %[r1], 0xFFFF \n\t" - "sll %[r9], %[r9], 2 \n\t" - "sll %[r2], %[r2], 2 \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r8], %[r8], 14 \n\t" - "shra_r.w %[r0], %[r0], 14 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r8], %[r8], 0x2000 \n\t" - "addiu %[r0], %[r0], 0x2000 \n\t" - "sra %[r8], %[r8], 14 \n\t" - "sra %[r0], %[r0], 14 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "addu %[r9], %[r9], %[r8] \n\t" - "addu %[r2], %[r2], %[r0] \n\t" - "mul %[r0], %[r3], %[r6] \n\t" - "mul %[r3], %[r3], %[r7] \n\t" - "mul %[r8], %[r1], %[r6] \n\t" - "mul %[r1], %[r1], %[r7] \n\t" - "addiu %[cosptr], %[cosptr], 2 \n\t" - "addiu %[sinptr], %[sinptr], 2 \n\t" - "sll %[r0], %[r0], 2 \n\t" - "sll %[r3], %[r3], 2 \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r8], %[r8], 14 \n\t" - "shra_r.w %[r1], %[r1], 14 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r8], %[r8], 0x2000 \n\t" - "addiu %[r1], %[r1], 0x2000 \n\t" - "sra %[r8], %[r8], 14 \n\t" - "sra %[r1], %[r1], 14 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "addu %[r0], %[r0], %[r8] \n\t" - "addu %[r3], %[r3], %[r1] \n\t" - "subu %[r9], %[r9], %[r3] \n\t" - "addu %[r0], %[r0], %[r2] \n\t" - "sra %[r1], %[r9], 16 
\n\t" - "andi %[r9], %[r9], 0xFFFF \n\t" - "mul %[r1], %[r1], %[r5] \n\t" - "mul %[r9], %[r9], %[r5] \n\t" - "sra %[r2], %[r0], 16 \n\t" - "andi %[r0], %[r0], 0xFFFF \n\t" - "mul %[r2], %[r2], %[r5] \n\t" - "mul %[r0], %[r0], %[r5] \n\t" - "sll %[r1], %[r1], 5 \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r9], %[r9], 11 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r9], %[r9], 0x400 \n\t" - "sra %[r9], %[r9], 11 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "addu %[r1], %[r1], %[r9] \n\t" - "sll %[r2], %[r2], 5 \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r0], %[r0], 11 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r0], %[r0], 0x400 \n\t" - "sra %[r0], %[r0], 11 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "addu %[r0], %[r0], %[r2] \n\t" - "sw %[r1], 0(%[outre1]) \n\t" - "addiu %[outre1], %[outre1], 4 \n\t" - "sw %[r0], 0(%[outre2]) \n\t" - "bgtz %[k], 2b \n\t" - " addiu %[outre2], %[outre2], 4 \n\t" -#endif // #if defined(MIPS_DSP_R2_LE) - "3: \n\t" - ".set pop \n\t" - : [k] "+r" (k), [r0] "=&r" (r0), [r1] "=&r" (r1), - [r2] "=&r" (r2), [r3] "=&r" (r3), [r4] "=&r" (r4), - [r5] "=&r" (r5), [r6] "=&r" (r6), [r7] "=&r" (r7), - [r8] "=&r" (r8), [r9] "=&r" (r9), [max1] "=&r" (max1), - [inre] "=&r" (inre), [inim] "=&r" (inim), - [outre1] "=&r" (outre1), [outre2] "=&r" (outre2) - : [max] "r" (max), [inreQ7] "r" (inreQ7), - [inimQ7] "r" (inimQ7), [cosptr] "r" (cosptr), - [sinptr] "r" (sinptr), [outre1Q16] "r" (outre1Q16), - [outre2Q16] "r" (outre2Q16) - : "hi", "lo", "memory" -#if defined(MIPS_DSP_R2_LE) - , "$ac1hi", "$ac1lo", "$ac2hi", "$ac2lo", "$ac3hi", "$ac3lo" -#endif // #if defined(MIPS_DSP_R2_LE) - ); -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/transform_neon.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/transform_neon.c deleted file mode 100644 index 79dadc46008d..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/transform_neon.c +++ /dev/null @@ -1,479 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include - -#include "modules/audio_coding/codecs/isac/fix/source/codec.h" -#include "modules/audio_coding/codecs/isac/fix/source/fft.h" -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" - -// Tables are defined in transform_tables.c file. -// Cosine table 1 in Q14. -extern const int16_t WebRtcIsacfix_kCosTab1[FRAMESAMPLES/2]; -// Sine table 1 in Q14. -extern const int16_t WebRtcIsacfix_kSinTab1[FRAMESAMPLES/2]; -// Sine table 2 in Q14. -extern const int16_t WebRtcIsacfix_kSinTab2[FRAMESAMPLES/4]; - -static inline int32_t ComplexMulAndFindMaxNeon(int16_t* inre1Q9, - int16_t* inre2Q9, - int32_t* outreQ16, - int32_t* outimQ16) { - int k; - const int16_t* kCosTab = &WebRtcIsacfix_kCosTab1[0]; - const int16_t* kSinTab = &WebRtcIsacfix_kSinTab1[0]; - // 0.5 / sqrt(240) in Q19 is round((.5 / sqrt(240)) * (2^19)) = 16921. - // Use "16921 << 5" and vqdmulh, instead of ">> 26" as in the C code. 
- int32_t fact = 16921 << 5; - int32x4_t factq = vdupq_n_s32(fact); - uint32x4_t max_r = vdupq_n_u32(0); - uint32x4_t max_i = vdupq_n_u32(0); - - for (k = 0; k < FRAMESAMPLES/2; k += 8) { - int16x8_t tmpr = vld1q_s16(kCosTab); - int16x8_t tmpi = vld1q_s16(kSinTab); - int16x8_t inre1 = vld1q_s16(inre1Q9); - int16x8_t inre2 = vld1q_s16(inre2Q9); - kCosTab += 8; - kSinTab += 8; - inre1Q9 += 8; - inre2Q9 += 8; - - // Use ">> 26", instead of ">> 7", ">> 16" and then ">> 3" as in the C code. - int32x4_t tmp0 = vmull_s16(vget_low_s16(tmpr), vget_low_s16(inre1)); - int32x4_t tmp1 = vmull_s16(vget_low_s16(tmpr), vget_low_s16(inre2)); - tmp0 = vmlal_s16(tmp0, vget_low_s16(tmpi), vget_low_s16(inre2)); - tmp1 = vmlsl_s16(tmp1, vget_low_s16(tmpi), vget_low_s16(inre1)); -#if defined(WEBRTC_ARCH_ARM64) - int32x4_t tmp2 = vmull_high_s16(tmpr, inre1); - int32x4_t tmp3 = vmull_high_s16(tmpr, inre2); - tmp2 = vmlal_high_s16(tmp2, tmpi, inre2); - tmp3 = vmlsl_high_s16(tmp3, tmpi, inre1); -#else - int32x4_t tmp2 = vmull_s16(vget_high_s16(tmpr), vget_high_s16(inre1)); - int32x4_t tmp3 = vmull_s16(vget_high_s16(tmpr), vget_high_s16(inre2)); - tmp2 = vmlal_s16(tmp2, vget_high_s16(tmpi), vget_high_s16(inre2)); - tmp3 = vmlsl_s16(tmp3, vget_high_s16(tmpi), vget_high_s16(inre1)); -#endif - - int32x4_t outr_0 = vqdmulhq_s32(tmp0, factq); - int32x4_t outr_1 = vqdmulhq_s32(tmp2, factq); - int32x4_t outi_0 = vqdmulhq_s32(tmp1, factq); - int32x4_t outi_1 = vqdmulhq_s32(tmp3, factq); - vst1q_s32(outreQ16, outr_0); - outreQ16 += 4; - vst1q_s32(outreQ16, outr_1); - outreQ16 += 4; - vst1q_s32(outimQ16, outi_0); - outimQ16 += 4; - vst1q_s32(outimQ16, outi_1); - outimQ16 += 4; - - // Find the absolute maximum in the vectors. - tmp0 = vabsq_s32(outr_0); - tmp1 = vabsq_s32(outr_1); - tmp2 = vabsq_s32(outi_0); - tmp3 = vabsq_s32(outi_1); - // vabs doesn't change the value of 0x80000000. - // Use u32 so we don't lose the value 0x80000000. 
- max_r = vmaxq_u32(max_r, vreinterpretq_u32_s32(tmp0)); - max_i = vmaxq_u32(max_i, vreinterpretq_u32_s32(tmp2)); - max_r = vmaxq_u32(max_r, vreinterpretq_u32_s32(tmp1)); - max_i = vmaxq_u32(max_i, vreinterpretq_u32_s32(tmp3)); - } - - max_r = vmaxq_u32(max_r, max_i); -#if defined(WEBRTC_ARCH_ARM64) - uint32_t maximum = vmaxvq_u32(max_r); -#else - uint32x2_t max32x2_r = vmax_u32(vget_low_u32(max_r), vget_high_u32(max_r)); - max32x2_r = vpmax_u32(max32x2_r, max32x2_r); - uint32_t maximum = vget_lane_u32(max32x2_r, 0); -#endif - - return (int32_t)maximum; -} - -static inline void PreShiftW32toW16Neon(int32_t* inre, - int32_t* inim, - int16_t* outre, - int16_t* outim, - int32_t sh) { - int k; - int32x4_t sh32x4 = vdupq_n_s32(sh); - for (k = 0; k < FRAMESAMPLES/2; k += 16) { - int32x4x4_t inre32x4x4 = vld4q_s32(inre); - int32x4x4_t inim32x4x4 = vld4q_s32(inim); - inre += 16; - inim += 16; - inre32x4x4.val[0] = vrshlq_s32(inre32x4x4.val[0], sh32x4); - inre32x4x4.val[1] = vrshlq_s32(inre32x4x4.val[1], sh32x4); - inre32x4x4.val[2] = vrshlq_s32(inre32x4x4.val[2], sh32x4); - inre32x4x4.val[3] = vrshlq_s32(inre32x4x4.val[3], sh32x4); - inim32x4x4.val[0] = vrshlq_s32(inim32x4x4.val[0], sh32x4); - inim32x4x4.val[1] = vrshlq_s32(inim32x4x4.val[1], sh32x4); - inim32x4x4.val[2] = vrshlq_s32(inim32x4x4.val[2], sh32x4); - inim32x4x4.val[3] = vrshlq_s32(inim32x4x4.val[3], sh32x4); - int16x4x4_t outre16x4x4; - int16x4x4_t outim16x4x4; - outre16x4x4.val[0] = vmovn_s32(inre32x4x4.val[0]); - outre16x4x4.val[1] = vmovn_s32(inre32x4x4.val[1]); - outre16x4x4.val[2] = vmovn_s32(inre32x4x4.val[2]); - outre16x4x4.val[3] = vmovn_s32(inre32x4x4.val[3]); - outim16x4x4.val[0] = vmovn_s32(inim32x4x4.val[0]); - outim16x4x4.val[1] = vmovn_s32(inim32x4x4.val[1]); - outim16x4x4.val[2] = vmovn_s32(inim32x4x4.val[2]); - outim16x4x4.val[3] = vmovn_s32(inim32x4x4.val[3]); - vst4_s16(outre, outre16x4x4); - vst4_s16(outim, outim16x4x4); - outre += 16; - outim += 16; - } -} - -static inline void PostShiftAndSeparateNeon(int16_t* inre, - int16_t* inim, - int16_t* outre, - int16_t* outim, - int32_t sh) { - int k; - int16_t* inre1 = inre; - int16_t* inre2 = &inre[FRAMESAMPLES/2 - 4]; - int16_t* inim1 = inim; - int16_t* inim2 = &inim[FRAMESAMPLES/2 - 4]; - int16_t* outre1 = outre; - int16_t* outre2 = &outre[FRAMESAMPLES/2 - 4]; - int16_t* outim1 = outim; - int16_t* outim2 = &outim[FRAMESAMPLES/2 - 4]; - const int16_t* kSinTab1 = &WebRtcIsacfix_kSinTab2[0]; - const int16_t* kSinTab2 = &WebRtcIsacfix_kSinTab2[FRAMESAMPLES/4 -4]; - // By vshl, we effectively did "<< (-sh - 23)", instead of "<< (-sh)", - // ">> 14" and then ">> 9" as in the C code. 
- int32x4_t shift = vdupq_n_s32(-sh - 23); - - for (k = 0; k < FRAMESAMPLES/4; k += 4) { - int16x4_t tmpi = vld1_s16(kSinTab1); - kSinTab1 += 4; - int16x4_t tmpr = vld1_s16(kSinTab2); - kSinTab2 -= 4; - int16x4_t inre_0 = vld1_s16(inre1); - inre1 += 4; - int16x4_t inre_1 = vld1_s16(inre2); - inre2 -= 4; - int16x4_t inim_0 = vld1_s16(inim1); - inim1 += 4; - int16x4_t inim_1 = vld1_s16(inim2); - inim2 -= 4; - tmpr = vneg_s16(tmpr); - inre_1 = vrev64_s16(inre_1); - inim_1 = vrev64_s16(inim_1); - tmpr = vrev64_s16(tmpr); - - int16x4_t xr = vqadd_s16(inre_0, inre_1); - int16x4_t xi = vqsub_s16(inim_0, inim_1); - int16x4_t yr = vqadd_s16(inim_0, inim_1); - int16x4_t yi = vqsub_s16(inre_1, inre_0); - - int32x4_t outr0 = vmull_s16(tmpr, xr); - int32x4_t outi0 = vmull_s16(tmpi, xr); - int32x4_t outr1 = vmull_s16(tmpi, yr); - int32x4_t outi1 = vmull_s16(tmpi, yi); - outr0 = vmlsl_s16(outr0, tmpi, xi); - outi0 = vmlal_s16(outi0, tmpr, xi); - outr1 = vmlal_s16(outr1, tmpr, yi); - outi1 = vmlsl_s16(outi1, tmpr, yr); - - outr0 = vshlq_s32(outr0, shift); - outi0 = vshlq_s32(outi0, shift); - outr1 = vshlq_s32(outr1, shift); - outi1 = vshlq_s32(outi1, shift); - outr1 = vnegq_s32(outr1); - - int16x4_t outre_0 = vmovn_s32(outr0); - int16x4_t outim_0 = vmovn_s32(outi0); - int16x4_t outre_1 = vmovn_s32(outr1); - int16x4_t outim_1 = vmovn_s32(outi1); - outre_1 = vrev64_s16(outre_1); - outim_1 = vrev64_s16(outim_1); - - vst1_s16(outre1, outre_0); - outre1 += 4; - vst1_s16(outim1, outim_0); - outim1 += 4; - vst1_s16(outre2, outre_1); - outre2 -= 4; - vst1_s16(outim2, outim_1); - outim2 -= 4; - } -} - -void WebRtcIsacfix_Time2SpecNeon(int16_t* inre1Q9, - int16_t* inre2Q9, - int16_t* outreQ7, - int16_t* outimQ7) { - int32_t tmpreQ16[FRAMESAMPLES/2], tmpimQ16[FRAMESAMPLES/2]; - int32_t max; - int32_t sh; - - // Multiply with complex exponentials and combine into one complex vector. - // And find the maximum. - max = ComplexMulAndFindMaxNeon(inre1Q9, inre2Q9, tmpreQ16, tmpimQ16); - - sh = (int32_t)WebRtcSpl_NormW32(max); - sh = sh - 24; - - // If sh becomes >= 0, then we should shift sh steps to the left, - // and the domain will become Q(16 + sh). - // If sh becomes < 0, then we should shift -sh steps to the right, - // and the domain will become Q(16 + sh). - PreShiftW32toW16Neon(tmpreQ16, tmpimQ16, inre1Q9, inre2Q9, sh); - - // Get DFT. - WebRtcIsacfix_FftRadix16Fastest(inre1Q9, inre2Q9, -1); - - // If sh >= 0, shift sh steps to the right, - // If sh < 0, shift -sh steps to the left. - // Use symmetry to separate into two complex vectors - // and center frames in time around zero. - PostShiftAndSeparateNeon(inre1Q9, inre2Q9, outreQ7, outimQ7, sh); -} - -static inline int32_t TransformAndFindMaxNeon(int16_t* inre, - int16_t* inim, - int32_t* outre, - int32_t* outim) { - int k; - int16_t* inre1 = inre; - int16_t* inre2 = &inre[FRAMESAMPLES/2 - 4]; - int16_t* inim1 = inim; - int16_t* inim2 = &inim[FRAMESAMPLES/2 - 4]; - int32_t* outre1 = outre; - int32_t* outre2 = &outre[FRAMESAMPLES/2 - 4]; - int32_t* outim1 = outim; - int32_t* outim2 = &outim[FRAMESAMPLES/2 - 4]; - const int16_t* kSinTab1 = &WebRtcIsacfix_kSinTab2[0]; - const int16_t* kSinTab2 = &WebRtcIsacfix_kSinTab2[FRAMESAMPLES/4 - 4]; - uint32x4_t max_r = vdupq_n_u32(0); - uint32x4_t max_i = vdupq_n_u32(0); - - // Use ">> 5", instead of "<< 9" and then ">> 14" as in the C code. 
- for (k = 0; k < FRAMESAMPLES/4; k += 4) { - int16x4_t tmpi = vld1_s16(kSinTab1); - kSinTab1 += 4; - int16x4_t tmpr = vld1_s16(kSinTab2); - kSinTab2 -= 4; - int16x4_t inre_0 = vld1_s16(inre1); - inre1 += 4; - int16x4_t inre_1 = vld1_s16(inre2); - inre2 -= 4; - int16x4_t inim_0 = vld1_s16(inim1); - inim1 += 4; - int16x4_t inim_1 = vld1_s16(inim2); - inim2 -= 4; - tmpr = vneg_s16(tmpr); - inre_1 = vrev64_s16(inre_1); - inim_1 = vrev64_s16(inim_1); - tmpr = vrev64_s16(tmpr); - - int32x4_t xr = vmull_s16(tmpr, inre_0); - int32x4_t xi = vmull_s16(tmpr, inim_0); - int32x4_t yr = vmull_s16(tmpr, inim_1); - int32x4_t yi = vmull_s16(tmpi, inim_1); - xr = vmlal_s16(xr, tmpi, inim_0); - xi = vmlsl_s16(xi, tmpi, inre_0); - yr = vmlal_s16(yr, tmpi, inre_1); - yi = vmlsl_s16(yi, tmpr, inre_1); - yr = vnegq_s32(yr); - - xr = vshrq_n_s32(xr, 5); - xi = vshrq_n_s32(xi, 5); - yr = vshrq_n_s32(yr, 5); - yi = vshrq_n_s32(yi, 5); - - int32x4_t outr0 = vsubq_s32(xr, yi); - int32x4_t outr1 = vaddq_s32(xr, yi); - int32x4_t outi0 = vaddq_s32(xi, yr); - int32x4_t outi1 = vsubq_s32(yr, xi); - - // Find the absolute maximum in the vectors. - int32x4_t tmp0 = vabsq_s32(outr0); - int32x4_t tmp1 = vabsq_s32(outr1); - int32x4_t tmp2 = vabsq_s32(outi0); - int32x4_t tmp3 = vabsq_s32(outi1); - // vabs doesn't change the value of 0x80000000. - // Use u32 so we don't lose the value 0x80000000. - max_r = vmaxq_u32(max_r, vreinterpretq_u32_s32(tmp0)); - max_i = vmaxq_u32(max_i, vreinterpretq_u32_s32(tmp2)); - max_r = vmaxq_u32(max_r, vreinterpretq_u32_s32(tmp1)); - max_i = vmaxq_u32(max_i, vreinterpretq_u32_s32(tmp3)); - - // Store the vectors. - outr1 = vrev64q_s32(outr1); - outi1 = vrev64q_s32(outi1); - int32x4_t outr_1 = vcombine_s32(vget_high_s32(outr1), vget_low_s32(outr1)); - int32x4_t outi_1 = vcombine_s32(vget_high_s32(outi1), vget_low_s32(outi1)); - - vst1q_s32(outre1, outr0); - outre1 += 4; - vst1q_s32(outim1, outi0); - outim1 += 4; - vst1q_s32(outre2, outr_1); - outre2 -= 4; - vst1q_s32(outim2, outi_1); - outim2 -= 4; - } - - max_r = vmaxq_u32(max_r, max_i); -#if defined(WEBRTC_ARCH_ARM64) - uint32_t maximum = vmaxvq_u32(max_r); -#else - uint32x2_t max32x2_r = vmax_u32(vget_low_u32(max_r), vget_high_u32(max_r)); - max32x2_r = vpmax_u32(max32x2_r, max32x2_r); - uint32_t maximum = vget_lane_u32(max32x2_r, 0); -#endif - - return (int32_t)maximum; -} - -static inline void PostShiftAndDivideAndDemodulateNeon(int16_t* inre, - int16_t* inim, - int32_t* outre1, - int32_t* outre2, - int32_t sh) { - int k; - int16_t* p_inre = inre; - int16_t* p_inim = inim; - int32_t* p_outre1 = outre1; - int32_t* p_outre2 = outre2; - const int16_t* kCosTab = &WebRtcIsacfix_kCosTab1[0]; - const int16_t* kSinTab = &WebRtcIsacfix_kSinTab1[0]; - int32x4_t shift = vdupq_n_s32(-sh - 16); - // Divide through by the normalizing constant: - // scale all values with 1/240, i.e. with 273 in Q16. - // 273/65536 ~= 0.0041656 - // 1/240 ~= 0.0041666 - int16x8_t scale = vdupq_n_s16(273); - // Sqrt(240) in Q11 is round(15.49193338482967 * 2048) = 31727. - int factQ19 = 31727 << 16; - int32x4_t fact = vdupq_n_s32(factQ19); - - for (k = 0; k < FRAMESAMPLES/2; k += 8) { - int16x8_t inre16x8 = vld1q_s16(p_inre); - int16x8_t inim16x8 = vld1q_s16(p_inim); - p_inre += 8; - p_inim += 8; - int16x8_t tmpr = vld1q_s16(kCosTab); - int16x8_t tmpi = vld1q_s16(kSinTab); - kCosTab += 8; - kSinTab += 8; - // By vshl and vmull, we effectively did "<< (-sh - 16)", - // instead of "<< (-sh)" and ">> 16" as in the C code. 
- int32x4_t outre1_0 = vmull_s16(vget_low_s16(inre16x8), vget_low_s16(scale)); - int32x4_t outre2_0 = vmull_s16(vget_low_s16(inim16x8), vget_low_s16(scale)); -#if defined(WEBRTC_ARCH_ARM64) - int32x4_t outre1_1 = vmull_high_s16(inre16x8, scale); - int32x4_t outre2_1 = vmull_high_s16(inim16x8, scale); -#else - int32x4_t outre1_1 = vmull_s16(vget_high_s16(inre16x8), - vget_high_s16(scale)); - int32x4_t outre2_1 = vmull_s16(vget_high_s16(inim16x8), - vget_high_s16(scale)); -#endif - - outre1_0 = vshlq_s32(outre1_0, shift); - outre1_1 = vshlq_s32(outre1_1, shift); - outre2_0 = vshlq_s32(outre2_0, shift); - outre2_1 = vshlq_s32(outre2_1, shift); - - // Demodulate and separate. - int32x4_t tmpr_0 = vmovl_s16(vget_low_s16(tmpr)); - int32x4_t tmpi_0 = vmovl_s16(vget_low_s16(tmpi)); -#if defined(WEBRTC_ARCH_ARM64) - int32x4_t tmpr_1 = vmovl_high_s16(tmpr); - int32x4_t tmpi_1 = vmovl_high_s16(tmpi); -#else - int32x4_t tmpr_1 = vmovl_s16(vget_high_s16(tmpr)); - int32x4_t tmpi_1 = vmovl_s16(vget_high_s16(tmpi)); -#endif - - int64x2_t xr0 = vmull_s32(vget_low_s32(tmpr_0), vget_low_s32(outre1_0)); - int64x2_t xi0 = vmull_s32(vget_low_s32(tmpr_0), vget_low_s32(outre2_0)); - int64x2_t xr2 = vmull_s32(vget_low_s32(tmpr_1), vget_low_s32(outre1_1)); - int64x2_t xi2 = vmull_s32(vget_low_s32(tmpr_1), vget_low_s32(outre2_1)); - xr0 = vmlsl_s32(xr0, vget_low_s32(tmpi_0), vget_low_s32(outre2_0)); - xi0 = vmlal_s32(xi0, vget_low_s32(tmpi_0), vget_low_s32(outre1_0)); - xr2 = vmlsl_s32(xr2, vget_low_s32(tmpi_1), vget_low_s32(outre2_1)); - xi2 = vmlal_s32(xi2, vget_low_s32(tmpi_1), vget_low_s32(outre1_1)); - -#if defined(WEBRTC_ARCH_ARM64) - int64x2_t xr1 = vmull_high_s32(tmpr_0, outre1_0); - int64x2_t xi1 = vmull_high_s32(tmpr_0, outre2_0); - int64x2_t xr3 = vmull_high_s32(tmpr_1, outre1_1); - int64x2_t xi3 = vmull_high_s32(tmpr_1, outre2_1); - xr1 = vmlsl_high_s32(xr1, tmpi_0, outre2_0); - xi1 = vmlal_high_s32(xi1, tmpi_0, outre1_0); - xr3 = vmlsl_high_s32(xr3, tmpi_1, outre2_1); - xi3 = vmlal_high_s32(xi3, tmpi_1, outre1_1); -#else - int64x2_t xr1 = vmull_s32(vget_high_s32(tmpr_0), vget_high_s32(outre1_0)); - int64x2_t xi1 = vmull_s32(vget_high_s32(tmpr_0), vget_high_s32(outre2_0)); - int64x2_t xr3 = vmull_s32(vget_high_s32(tmpr_1), vget_high_s32(outre1_1)); - int64x2_t xi3 = vmull_s32(vget_high_s32(tmpr_1), vget_high_s32(outre2_1)); - xr1 = vmlsl_s32(xr1, vget_high_s32(tmpi_0), vget_high_s32(outre2_0)); - xi1 = vmlal_s32(xi1, vget_high_s32(tmpi_0), vget_high_s32(outre1_0)); - xr3 = vmlsl_s32(xr3, vget_high_s32(tmpi_1), vget_high_s32(outre2_1)); - xi3 = vmlal_s32(xi3, vget_high_s32(tmpi_1), vget_high_s32(outre1_1)); -#endif - - outre1_0 = vcombine_s32(vrshrn_n_s64(xr0, 10), vrshrn_n_s64(xr1, 10)); - outre2_0 = vcombine_s32(vrshrn_n_s64(xi0, 10), vrshrn_n_s64(xi1, 10)); - outre1_1 = vcombine_s32(vrshrn_n_s64(xr2, 10), vrshrn_n_s64(xr3, 10)); - outre2_1 = vcombine_s32(vrshrn_n_s64(xi2, 10), vrshrn_n_s64(xi3, 10)); - outre1_0 = vqdmulhq_s32(outre1_0, fact); - outre2_0 = vqdmulhq_s32(outre2_0, fact); - outre1_1 = vqdmulhq_s32(outre1_1, fact); - outre2_1 = vqdmulhq_s32(outre2_1, fact); - - vst1q_s32(p_outre1, outre1_0); - p_outre1 += 4; - vst1q_s32(p_outre1, outre1_1); - p_outre1 += 4; - vst1q_s32(p_outre2, outre2_0); - p_outre2 += 4; - vst1q_s32(p_outre2, outre2_1); - p_outre2 += 4; - } -} - -void WebRtcIsacfix_Spec2TimeNeon(int16_t* inreQ7, - int16_t* inimQ7, - int32_t* outre1Q16, - int32_t* outre2Q16) { - int32_t max; - int32_t sh; - - max = TransformAndFindMaxNeon(inreQ7, inimQ7, outre1Q16, outre2Q16); - - - sh = 
(int32_t)WebRtcSpl_NormW32(max); - sh = sh - 24; - // If sh becomes >= 0, then we should shift sh steps to the left, - // and the domain will become Q(16 + sh). - // If sh becomes < 0, then we should shift -sh steps to the right, - // and the domain will become Q(16 + sh). - - // "Fastest" vectors. - PreShiftW32toW16Neon(outre1Q16, outre2Q16, inreQ7, inimQ7, sh); - - // Get IDFT. - WebRtcIsacfix_FftRadix16Fastest(inreQ7, inimQ7, 1); - - PostShiftAndDivideAndDemodulateNeon(inreQ7, inimQ7, outre1Q16, outre2Q16, sh); -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/transform_tables.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/transform_tables.c deleted file mode 100644 index e661effddef9..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/transform_tables.c +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * This file contains trigonometric functions look-up tables used in - * transform functions WebRtcIsacfix_Time2Spec and WebRtcIsacfix_Spec2Time. - */ - -#include - -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" - -/* Cosine table 1 in Q14. */ -const int16_t WebRtcIsacfix_kCosTab1[FRAMESAMPLES/2] = { - 16384, 16383, 16378, 16371, 16362, 16349, 16333, 16315, 16294, 16270, - 16244, 16214, 16182, 16147, 16110, 16069, 16026, 15980, 15931, 15880, - 15826, 15769, 15709, 15647, 15582, 15515, 15444, 15371, 15296, 15218, - 15137, 15053, 14968, 14879, 14788, 14694, 14598, 14500, 14399, 14295, - 14189, 14081, 13970, 13856, 13741, 13623, 13502, 13380, 13255, 13128, - 12998, 12867, 12733, 12597, 12458, 12318, 12176, 12031, 11885, 11736, - 11585, 11433, 11278, 11121, 10963, 10803, 10641, 10477, 10311, 10143, - 9974, 9803, 9630, 9456, 9280, 9102, 8923, 8743, 8561, 8377, - 8192, 8006, 7818, 7629, 7438, 7246, 7053, 6859, 6664, 6467, - 6270, 6071, 5872, 5671, 5469, 5266, 5063, 4859, 4653, 4447, - 4240, 4033, 3825, 3616, 3406, 3196, 2986, 2775, 2563, 2351, - 2139, 1926, 1713, 1499, 1285, 1072, 857, 643, 429, 214, - 0, -214, -429, -643, -857, -1072, -1285, -1499, -1713, -1926, - -2139, -2351, -2563, -2775, -2986, -3196, -3406, -3616, -3825, -4033, - -4240, -4447, -4653, -4859, -5063, -5266, -5469, -5671, -5872, -6071, - -6270, -6467, -6664, -6859, -7053, -7246, -7438, -7629, -7818, -8006, - -8192, -8377, -8561, -8743, -8923, -9102, -9280, -9456, -9630, -9803, - -9974, -10143, -10311, -10477, -10641, -10803, -10963, -11121, -11278, -11433, - -11585, -11736, -11885, -12031, -12176, -12318, -12458, -12597, -12733, - -12867, -12998, -13128, -13255, -13380, -13502, -13623, -13741, -13856, - -13970, -14081, -14189, -14295, -14399, -14500, -14598, -14694, -14788, - -14879, -14968, -15053, -15137, -15218, -15296, -15371, -15444, -15515, - -15582, -15647, -15709, -15769, -15826, -15880, -15931, -15980, -16026, - -16069, -16110, -16147, -16182, -16214, -16244, -16270, -16294, -16315, - -16333, -16349, -16362, -16371, -16378, -16383 -}; - -/* Sine table 1 in Q14. 
*/ -const int16_t WebRtcIsacfix_kSinTab1[FRAMESAMPLES/2] = { - 0, 214, 429, 643, 857, 1072, 1285, 1499, 1713, 1926, - 2139, 2351, 2563, 2775, 2986, 3196, 3406, 3616, 3825, 4033, - 4240, 4447, 4653, 4859, 5063, 5266, 5469, 5671, 5872, 6071, - 6270, 6467, 6664, 6859, 7053, 7246, 7438, 7629, 7818, 8006, - 8192, 8377, 8561, 8743, 8923, 9102, 9280, 9456, 9630, 9803, - 9974, 10143, 10311, 10477, 10641, 10803, 10963, 11121, 11278, 11433, - 11585, 11736, 11885, 12031, 12176, 12318, 12458, 12597, 12733, 12867, - 12998, 13128, 13255, 13380, 13502, 13623, 13741, 13856, 13970, 14081, - 14189, 14295, 14399, 14500, 14598, 14694, 14788, 14879, 14968, 15053, - 15137, 15218, 15296, 15371, 15444, 15515, 15582, 15647, 15709, 15769, - 15826, 15880, 15931, 15980, 16026, 16069, 16110, 16147, 16182, 16214, - 16244, 16270, 16294, 16315, 16333, 16349, 16362, 16371, 16378, 16383, - 16384, 16383, 16378, 16371, 16362, 16349, 16333, 16315, 16294, 16270, - 16244, 16214, 16182, 16147, 16110, 16069, 16026, 15980, 15931, 15880, - 15826, 15769, 15709, 15647, 15582, 15515, 15444, 15371, 15296, 15218, - 15137, 15053, 14968, 14879, 14788, 14694, 14598, 14500, 14399, 14295, - 14189, 14081, 13970, 13856, 13741, 13623, 13502, 13380, 13255, 13128, - 12998, 12867, 12733, 12597, 12458, 12318, 12176, 12031, 11885, 11736, - 11585, 11433, 11278, 11121, 10963, 10803, 10641, 10477, 10311, 10143, - 9974, 9803, 9630, 9456, 9280, 9102, 8923, 8743, 8561, 8377, - 8192, 8006, 7818, 7629, 7438, 7246, 7053, 6859, 6664, 6467, - 6270, 6071, 5872, 5671, 5469, 5266, 5063, 4859, 4653, 4447, - 4240, 4033, 3825, 3616, 3406, 3196, 2986, 2775, 2563, 2351, - 2139, 1926, 1713, 1499, 1285, 1072, 857, 643, 429, 214 -}; - - -/* Sine table 2 in Q14. */ -const int16_t WebRtcIsacfix_kSinTab2[FRAMESAMPLES/4] = { - 16384, -16381, 16375, -16367, 16356, -16342, 16325, -16305, 16283, -16257, - 16229, -16199, 16165, -16129, 16090, -16048, 16003, -15956, 15906, -15853, - 15798, -15739, 15679, -15615, 15549, -15480, 15408, -15334, 15257, -15178, - 15095, -15011, 14924, -14834, 14741, -14647, 14549, -14449, 14347, -14242, - 14135, -14025, 13913, -13799, 13682, -13563, 13441, -13318, 13192, -13063, - 12933, -12800, 12665, -12528, 12389, -12247, 12104, -11958, 11810, -11661, - 11509, -11356, 11200, -11042, 10883, -10722, 10559, -10394, 10227, -10059, - 9889, -9717, 9543, -9368, 9191, -9013, 8833, -8652, 8469, -8285, - 8099, -7912, 7723, -7534, 7342, -7150, 6957, -6762, 6566, -6369, - 6171, -5971, 5771, -5570, 5368, -5165, 4961, -4756, 4550, -4344, - 4137, -3929, 3720, -3511, 3301, -3091, 2880, -2669, 2457, -2245, - 2032, -1819, 1606, -1392, 1179, -965, 750, -536, 322, -107 -}; - -#if defined(MIPS32_LE) -/* Cosine table 2 in Q14. Used only on MIPS platforms. 
*/ -const int16_t WebRtcIsacfix_kCosTab2[FRAMESAMPLES/4] = { - 107, -322, 536, -750, 965, -1179, 1392, -1606, 1819, -2032, - 2245, -2457, 2669, -2880, 3091, -3301, 3511, -3720, 3929, -4137, - 4344, -4550, 4756, -4961, 5165, -5368, 5570, -5771, 5971, -6171, - 6369, -6566, 6762, -6957, 7150, -7342, 7534, -7723, 7912, -8099, - 8285, -8469, 8652, -8833, 9013, -9191, 9368, -9543, 9717, -9889, - 10059, -10227, 10394, -10559, 10722, -10883, 11042, -11200, 11356, -11509, - 11661, -11810, 11958, -12104, 12247, -12389, 12528, -12665, 12800, -12933, - 13063, -13192, 13318, -13441, 13563, -13682, 13799, -13913, 14025, -14135, - 14242, -14347, 14449, -14549, 14647, -14741, 14834, -14924, 15011, -15095, - 15178, -15257, 15334, -15408, 15480, -15549, 15615, -15679, 15739, -15798, - 15853, -15906, 15956, -16003, 16048, -16090, 16129, -16165, 16199, -16229, - 16257, -16283, 16305, -16325, 16342, -16356, 16367, -16375, 16381, -16384 -}; -#endif diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/transform_unittest.cc b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/transform_unittest.cc deleted file mode 100644 index 433ec534feac..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/transform_unittest.cc +++ /dev/null @@ -1,199 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_coding/codecs/isac/fix/source/codec.h" -#include "system_wrappers/include/cpu_features_wrapper.h" -#include "test/gtest.h" - -static const int kSamples = FRAMESAMPLES / 2; -static const int32_t spec2time_out_expected_1[kSamples] = { - -3366470, -2285227, -3415765, -2310215, -3118030, -2222470, -3030254, - -2192091, -3423170, -2216041, -3305541, -2171936, -3195767, -2095779, - -3153304, -2157560, -3071167, -2032108, -3101190, -1972016, -3103824, - -2089118, -3139811, -1898337, -3102801, -2055082, -3029665, -1854140, - -2962586, -1966454, -3071167, -1894588, -2851743, -1917315, -2848087, - -1594932, -2799242, -1462184, -2845887, -1437599, -2691776, -1329637, - -2770659, -1268491, -2625161, -1578991, -2460299, -1186385, -2365613, - -1039354, -2322608, -958518, -2271749, -789860, -2254538, -850308, - -2384436, -850959, -2133734, -587678, -2093316, -495115, -1973364, - -475177, -1801282, -173507, -1848516, -158015, -1792018, -62648, - -1643313, 214746, -1500758, 267077, -1450193, 560521, -1521579, - 675283, -1345408, 857559, -1300822, 1116332, -1294533, 1241117, - -1070027, 1263503, -983816, 1529821, -1019586, 1910421, -955420, - 2073688, -836459, 2401105, -653905, 2690474, -731425, 2930131, - -935234, 3299500, -875978, 3523432, -878906, 3924822, -1081630, - 4561267, -1203023, 5105274, -1510983, 6052762, -2294646, 7021597, - -3108053, 8826736, -4935222, 11678789, -8442713, 18725700, -21526692, - 25420577, 19589811, -28108666, 12634054, -14483066, 6263217, -9979706, - 3665661, -7909736, 2531530, -6434896, 1700772, -5525393, 1479473, - -4894262, 1231760, -4353044, 1032940, -3786590, 941152, -3331614, - 665090, -2851619, 830696, -2762201, 958007, -2483118, 788233, - -2184965, 804825, -1967306, 1007255, -1862474, 920889, -1457506, - 755406, -1405841, 890230, -1302124, 
1161599, -701867, 1154163, - -1083366, 1204743, -513581, 1547264, -650636, 1493384, -285543, - 1771863, -277906, 1841343, -9078, 1751863, 230222, 1819578, - 207170, 1978972, 398137, 2106468, 552155, 1997624, 685213, - 2129520, 601078, 2238736, 944591, 2441879, 1194178, 2355280, - 986124, 2393328, 1049005, 2417944, 1208368, 2489516, 1352023, - 2572118, 1445283, 2856081, 1532997, 2742279, 1615877, 2915274, - 1808036, 2856871, 1806936, 3241747, 1622461, 2978558, 1841297, - 3010378, 1923666, 3271367, 2126700, 3070935, 1956958, 3107588, - 2128405, 3288872, 2114911, 3315952, 2406651, 3344038, 2370199, - 3368980, 2144361, 3305030, 2183803, 3401450, 2523102, 3405463, - 2452475, 3463355, 2421678, 3551968, 2431949, 3477251, 2148125, - 3244489, 2174090}; -static const int32_t spec2time_out_expected_2[kSamples] = { - 1691694, -2499988, -2035547, 1060469, 988634, -2044502, -306271, - 2041000, 201454, -2289456, 93694, 2129427, -369152, -1887834, - 860796, 2089102, -929424, -1673956, 1395291, 1785651, -1619673, - -1380109, 1963449, 1093311, -2111007, -840456, 2372786, 578119, - -2242702, 89774, 2463304, -132717, -2121480, 643634, 2277636, - -1125999, -1995858, 1543748, 2227861, -1483779, -1495491, 2102642, - 1833876, -1920568, -958378, 2485101, 772261, -2454257, -24942, - 2918714, 136838, -2500453, 816118, 3039735, -746560, -2365815, - 1586396, 2714951, -1511696, -1942334, 2571792, 2182827, -2325335, - -1311543, 3055970, 1367220, -2737182, -110626, 3889222, 631008, - -3280879, 853066, 4122279, -706638, -3334449, 2148311, 3993512, - -1846301, -3004894, 3426779, 3329522, -3165264, -2242423, 4756866, - 2557711, -4131280, -805259, 5702711, 1120592, -4852821, 743664, - 6476444, -621186, -5465828, 2815787, 6768835, -3017442, -5338409, - 5658126, 6838454, -5492288, -4682382, 8874947, 6153814, -8832561, - -2649251, 12817398, 4237692, -13000247, 1190661, 18986363, -115738, - -19693978, 9908367, 30660381, -10632635, -37962068, 47022884, 89744622, - -42087632, 40279224, -88869341, -47542383, 38572364, 10441576, -30339718, - -9926740, 19896578, 28009, -18886612, -1124047, 13232498, -4150304, - -12770551, 2637074, 9051831, -6162211, -8713972, 4557937, 5489716, - -6862312, -5532349, 5415449, 2791310, -6999367, -2790102, 5375806, - 546222, -6486452, -821261, 4994973, -1278840, -5645501, 1060484, - 3996285, -2503954, -4653629, 2220549, 3036977, -3282133, -3318585, - 2780636, 1789880, -4004589, -2041031, 3105373, 574819, -3992722, - -971004, 3001703, -676739, -3841508, 417284, 2897970, -1427018, - -3058480, 1189948, 2210960, -2268992, -2603272, 1949785, 1576172, - -2720404, -1891738, 2309456, 769178, -2975646, -707150, 2424652, - -88039, -2966660, -65452, 2320780, -957557, -2798978, 744640, - 1879794, -1672081, -2365319, 1253309, 1366383, -2204082, -1544367, - 1801452, 613828, -2531994, -983847, 2064842, 118326, -2613790, - -203220, 2219635, -730341, -2641861, 563557, 1765434, -1329916, - -2272927, 1037138, 1266725, -1939220, -1588643, 1754528, 816552, - -2376303, -1099167, 1864999, 122477, -2422762, -400027, 1889228, - -579916, -2490353, 287139, 2011318, -1176657, -2502978, 812896, - 1116502, -1940211}; -static const int16_t time2spec_out_expected_1[kSamples] = { - 20342, 23889, -10063, -9419, 3242, 7280, -2012, -5029, 332, 4478, - -97, -3244, -891, 3117, 773, -2204, -1335, 2009, 1236, -1469, - -1562, 1277, 1366, -815, -1619, 599, 1449, -177, -1507, 116, - 1294, 263, -1338, -244, 1059, 553, -1045, -549, 829, 826, - -731, -755, 516, 909, -427, -853, 189, 1004, -184, -828, - -108, 888, 72, -700, -280, 717, 342, -611, 
-534, 601, - 534, -374, -646, 399, 567, -171, -720, 234, 645, -11, - -712, -26, 593, 215, -643, -172, 536, 361, -527, -403, - 388, 550, -361, -480, 208, 623, -206, -585, 41, 578, - 12, -504, -182, 583, 218, -437, -339, 499, 263, -354, - -450, 347, 456, -193, -524, 212, 475, -74, -566, 94, - 511, 112, -577, -201, 408, 217, -546, -295, 338, 387, - -13, 4, -46, 2, -76, 103, -83, 108, -55, 100, - -150, 131, -156, 141, -171, 179, -190, 128, -227, 172, - -214, 215, -189, 265, -244, 322, -335, 337, -352, 358, - -368, 362, -355, 366, -381, 403, -395, 411, -392, 446, - -458, 504, -449, 507, -464, 452, -491, 481, -534, 486, - -516, 560, -535, 525, -537, 559, -554, 570, -616, 591, - -585, 627, -509, 588, -584, 547, -610, 580, -614, 635, - -620, 655, -554, 546, -591, 642, -590, 660, -656, 629, - -604, 620, -580, 617, -645, 648, -573, 612, -604, 584, - -571, 597, -562, 627, -550, 560, -606, 529, -584, 568, - -503, 532, -463, 512, -440, 399, -457, 437, -349, 278, - -317, 257, -220, 163, -8, -61, 18, -161, 367, -1306}; -static const int16_t time2spec_out_expected_2[kSamples] = { - 14283, -11552, -15335, 6626, 7554, -2150, -6309, 1307, 4523, -4, - -3908, -314, 3001, 914, -2715, -1042, 2094, 1272, -1715, -1399, - 1263, 1508, -1021, -1534, 735, 1595, -439, -1447, 155, 1433, - 22, -1325, -268, 1205, 424, -1030, -608, 950, 643, -733, - -787, 661, 861, -502, -888, 331, 852, -144, -849, 19, - 833, 99, -826, -154, 771, 368, -735, -459, 645, 513, - -491, -604, 431, 630, -314, -598, 183, 622, -78, -612, - -48, 641, 154, -645, -257, 610, 281, -529, -444, 450, - 441, -327, -506, 274, 476, -232, -570, 117, 554, -86, - -531, -21, 572, 151, -606, -221, 496, 322, -407, -388, - 407, 394, -268, -428, 280, 505, -115, -588, 19, 513, - -29, -539, -109, 468, 173, -501, -242, 442, 278, -478, - -680, 656, -659, 656, -669, 602, -688, 612, -667, 612, - -642, 627, -648, 653, -676, 596, -680, 655, -649, 678, - -672, 587, -608, 637, -645, 637, -620, 556, -580, 553, - -635, 518, -599, 583, -501, 536, -544, 473, -552, 583, - -511, 541, -532, 563, -486, 461, -453, 486, -388, 424, - -416, 432, -374, 399, -462, 364, -346, 293, -329, 331, - -313, 281, -247, 309, -337, 241, -190, 207, -194, 179, - -163, 155, -156, 117, -135, 107, -126, 29, -22, 81, - -8, 17, -61, -10, 8, -37, 80, -44, 72, -88, - 65, -89, 130, -114, 181, -215, 189, -245, 260, -288, - 294, -339, 344, -396, 407, -429, 438, -439, 485, -556, - 629, -612, 637, -645, 661, -737, 829, -830, 831, -1041}; - -class TransformTest : public ::testing::Test { - protected: - // Pass a function pointer to the Tester function. - void Time2SpecTester(Time2Spec Time2SpecFunction) { - // WebRtcIsacfix_Time2Spec functions hard coded the buffer lengths. It's a - // large buffer but we have to test it here. - int16_t data_in_1[kSamples] = {0}; - int16_t data_in_2[kSamples] = {0}; - int16_t data_out_1[kSamples] = {0}; - int16_t data_out_2[kSamples] = {0}; - - for (int i = 0; i < kSamples; i++) { - data_in_1[i] = i * i + 1777; - data_in_2[i] = WEBRTC_SPL_WORD16_MAX / (i + 1) + 17; - } - - Time2SpecFunction(data_in_1, data_in_2, data_out_1, data_out_2); - - for (int i = 0; i < kSamples; i++) { - // We don't require bit-exact for ARM assembly code. - EXPECT_LE(abs(time2spec_out_expected_1[i] - data_out_1[i]), 1); - EXPECT_LE(abs(time2spec_out_expected_2[i] - data_out_2[i]), 1); - } - } - - // Pass a function pointer to the Tester function. - void Spec2TimeTester(Spec2Time Spec2TimeFunction) { - // WebRtcIsacfix_Spec2Time functions hard coded the buffer lengths. 
It's a - // large buffer but we have to test it here. - int16_t data_in_1[kSamples] = {0}; - int16_t data_in_2[kSamples] = {0}; - int32_t data_out_1[kSamples] = {0}; - int32_t data_out_2[kSamples] = {0}; - for (int i = 0; i < kSamples; i++) { - data_in_1[i] = i * i + 1777; - data_in_2[i] = WEBRTC_SPL_WORD16_MAX / (i + 1) + 17; - } - - Spec2TimeFunction(data_in_1, data_in_2, data_out_1, data_out_2); - - for (int i = 0; i < kSamples; i++) { - // We don't require bit-exact for ARM assembly code. - EXPECT_LE(abs(spec2time_out_expected_1[i] - data_out_1[i]), 16); - EXPECT_LE(abs(spec2time_out_expected_2[i] - data_out_2[i]), 16); - } - } -}; - -TEST_F(TransformTest, Time2SpecTest) { - Time2SpecTester(WebRtcIsacfix_Time2SpecC); -#if defined(WEBRTC_HAS_NEON) - Time2SpecTester(WebRtcIsacfix_Time2SpecNeon); -#endif -} - -TEST_F(TransformTest, Spec2TimeTest) { - Spec2TimeTester(WebRtcIsacfix_Spec2TimeC); -#if defined(WEBRTC_HAS_NEON) - Spec2TimeTester(WebRtcIsacfix_Spec2TimeNeon); -#endif -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/test/isac_speed_test.cc b/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/test/isac_speed_test.cc deleted file mode 100644 index 903ac64affde..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/test/isac_speed_test.cc +++ /dev/null @@ -1,120 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_coding/codecs/isac/fix/include/isacfix.h" -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" -#include "modules/audio_coding/codecs/tools/audio_codec_speed_test.h" -#include "rtc_base/checks.h" - -using std::string; - -namespace webrtc { - -static const int kIsacBlockDurationMs = 30; -static const int kIsacInputSamplingKhz = 16; -static const int kIsacOutputSamplingKhz = 16; - -class IsacSpeedTest : public AudioCodecSpeedTest { - protected: - IsacSpeedTest(); - void SetUp() override; - void TearDown() override; - float EncodeABlock(int16_t* in_data, - uint8_t* bit_stream, - size_t max_bytes, - size_t* encoded_bytes) override; - float DecodeABlock(const uint8_t* bit_stream, - size_t encoded_bytes, - int16_t* out_data) override; - ISACFIX_MainStruct* ISACFIX_main_inst_; -}; - -IsacSpeedTest::IsacSpeedTest() - : AudioCodecSpeedTest(kIsacBlockDurationMs, - kIsacInputSamplingKhz, - kIsacOutputSamplingKhz), - ISACFIX_main_inst_(NULL) {} - -void IsacSpeedTest::SetUp() { - AudioCodecSpeedTest::SetUp(); - - // Check whether the allocated buffer for the bit stream is large enough. - EXPECT_GE(max_bytes_, static_cast(STREAM_MAXW16_60MS)); - - // Create encoder memory. - EXPECT_EQ(0, WebRtcIsacfix_Create(&ISACFIX_main_inst_)); - EXPECT_EQ(0, WebRtcIsacfix_EncoderInit(ISACFIX_main_inst_, 1)); - WebRtcIsacfix_DecoderInit(ISACFIX_main_inst_); - // Set bitrate and block length. - EXPECT_EQ(0, WebRtcIsacfix_Control(ISACFIX_main_inst_, bit_rate_, - block_duration_ms_)); -} - -void IsacSpeedTest::TearDown() { - AudioCodecSpeedTest::TearDown(); - // Free memory. 
- EXPECT_EQ(0, WebRtcIsacfix_Free(ISACFIX_main_inst_)); -} - -float IsacSpeedTest::EncodeABlock(int16_t* in_data, - uint8_t* bit_stream, - size_t max_bytes, - size_t* encoded_bytes) { - // ISAC takes 10 ms everycall - const int subblocks = block_duration_ms_ / 10; - const int subblock_length = 10 * input_sampling_khz_; - int value = 0; - - clock_t clocks = clock(); - size_t pointer = 0; - for (int idx = 0; idx < subblocks; idx++, pointer += subblock_length) { - value = - WebRtcIsacfix_Encode(ISACFIX_main_inst_, &in_data[pointer], bit_stream); - if (idx == subblocks - 1) - EXPECT_GT(value, 0); - else - EXPECT_EQ(0, value); - } - clocks = clock() - clocks; - *encoded_bytes = static_cast(value); - RTC_DCHECK_LE(*encoded_bytes, max_bytes); - return 1000.0 * clocks / CLOCKS_PER_SEC; -} - -float IsacSpeedTest::DecodeABlock(const uint8_t* bit_stream, - size_t encoded_bytes, - int16_t* out_data) { - int value; - int16_t audio_type; - clock_t clocks = clock(); - value = WebRtcIsacfix_Decode(ISACFIX_main_inst_, bit_stream, encoded_bytes, - out_data, &audio_type); - clocks = clock() - clocks; - EXPECT_EQ(output_length_sample_, static_cast(value)); - return 1000.0 * clocks / CLOCKS_PER_SEC; -} - -TEST_P(IsacSpeedTest, IsacEncodeDecodeTest) { - size_t kDurationSec = 400; // Test audio length in second. - EncodeDecode(kDurationSec); -} - -const coding_param param_set[] = { - std::make_tuple(1, - 32000, - string("audio_coding/speech_mono_16kHz"), - string("pcm"), - true)}; - -INSTANTIATE_TEST_SUITE_P(AllTest, - IsacSpeedTest, - ::testing::ValuesIn(param_set)); - -} // namespace webrtc diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/isac_webrtc_api_test.cc b/third_party/libwebrtc/modules/audio_coding/codecs/isac/isac_webrtc_api_test.cc deleted file mode 100644 index cafca75e46a2..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/isac_webrtc_api_test.cc +++ /dev/null @@ -1,346 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include -#include -#include -#include - -#include "absl/strings/string_view.h" -#include "api/array_view.h" -#include "api/audio_codecs/isac/audio_decoder_isac_fix.h" -#include "api/audio_codecs/isac/audio_decoder_isac_float.h" -#include "api/audio_codecs/isac/audio_encoder_isac_fix.h" -#include "api/audio_codecs/isac/audio_encoder_isac_float.h" -#include "modules/audio_coding/test/PCMFile.h" -#include "rtc_base/checks.h" -#include "rtc_base/strings/string_builder.h" -#include "test/gtest.h" -#include "test/testsupport/file_utils.h" - -namespace webrtc { -namespace { - -constexpr int kPayloadType = 42; - -enum class IsacImpl { kFixed, kFloat }; - -absl::string_view IsacImplToString(IsacImpl impl) { - switch (impl) { - case IsacImpl::kFixed: - return "fixed"; - case IsacImpl::kFloat: - return "float"; - } -} - -std::unique_ptr GetPcmTestFileReader(int sample_rate_hz) { - std::string filename; - switch (sample_rate_hz) { - case 16000: - filename = test::ResourcePath("audio_coding/testfile16kHz", "pcm"); - break; - case 32000: - filename = test::ResourcePath("audio_coding/testfile32kHz", "pcm"); - break; - default: - RTC_DCHECK_NOTREACHED() - << "No test file available for " << sample_rate_hz << " Hz."; - } - auto pcm_file = std::make_unique(); - pcm_file->ReadStereo(false); - pcm_file->Open(filename, sample_rate_hz, "rb", /*auto_rewind=*/true); - pcm_file->FastForward(/*num_10ms_blocks=*/100); // Skip initial silence. - RTC_CHECK(!pcm_file->EndOfFile()); - return pcm_file; -} - -// Returns a view to the interleaved samples of an AudioFrame object. -rtc::ArrayView AudioFrameToView(const AudioFrame& audio_frame) { - return {audio_frame.data(), - audio_frame.samples_per_channel() * audio_frame.num_channels()}; -} - -std::unique_ptr CreateEncoder(IsacImpl impl, - int sample_rate_hz, - int frame_size_ms, - int bitrate_bps) { - RTC_CHECK(sample_rate_hz == 16000 || sample_rate_hz == 32000); - RTC_CHECK(frame_size_ms == 30 || frame_size_ms == 60); - RTC_CHECK_GT(bitrate_bps, 0); - switch (impl) { - case IsacImpl::kFixed: { - AudioEncoderIsacFix::Config config; - config.bit_rate = bitrate_bps; - config.frame_size_ms = frame_size_ms; - RTC_CHECK_EQ(16000, sample_rate_hz); - return AudioEncoderIsacFix::MakeAudioEncoder(config, kPayloadType); - } - case IsacImpl::kFloat: { - AudioEncoderIsacFloat::Config config; - config.bit_rate = bitrate_bps; - config.frame_size_ms = frame_size_ms; - config.sample_rate_hz = sample_rate_hz; - return AudioEncoderIsacFloat::MakeAudioEncoder(config, kPayloadType); - } - } -} - -std::unique_ptr CreateDecoder(IsacImpl impl, int sample_rate_hz) { - RTC_CHECK(sample_rate_hz == 16000 || sample_rate_hz == 32000); - switch (impl) { - case IsacImpl::kFixed: { - webrtc::AudioDecoderIsacFix::Config config; - RTC_CHECK_EQ(16000, sample_rate_hz); - return webrtc::AudioDecoderIsacFix::MakeAudioDecoder(config); - } - case IsacImpl::kFloat: { - webrtc::AudioDecoderIsacFloat::Config config; - config.sample_rate_hz = sample_rate_hz; - return webrtc::AudioDecoderIsacFloat::MakeAudioDecoder(config); - } - } -} - -struct EncoderTestParams { - IsacImpl impl; - int sample_rate_hz; - int frame_size_ms; -}; - -class EncoderTest : public testing::TestWithParam { - protected: - EncoderTest() = default; - IsacImpl GetIsacImpl() const { return GetParam().impl; } - int GetSampleRateHz() const { return GetParam().sample_rate_hz; } - int GetFrameSizeMs() const { return GetParam().frame_size_ms; } -}; - -TEST_P(EncoderTest, TestConfig) { - for (int bitrate_bps : {10000, 21000, 32000}) { - 
SCOPED_TRACE(bitrate_bps); - auto encoder = CreateEncoder(GetIsacImpl(), GetSampleRateHz(), - GetFrameSizeMs(), bitrate_bps); - EXPECT_EQ(GetSampleRateHz(), encoder->SampleRateHz()); - EXPECT_EQ(size_t{1}, encoder->NumChannels()); - EXPECT_EQ(bitrate_bps, encoder->GetTargetBitrate()); - } -} - -// Encodes an input audio sequence with a low and a high target bitrate and -// checks that the number of produces bytes in the first case is less than that -// of the second case. -TEST_P(EncoderTest, TestDifferentBitrates) { - auto pcm_file = GetPcmTestFileReader(GetSampleRateHz()); - constexpr int kLowBps = 20000; - constexpr int kHighBps = 25000; - auto encoder_low = CreateEncoder(GetIsacImpl(), GetSampleRateHz(), - GetFrameSizeMs(), kLowBps); - auto encoder_high = CreateEncoder(GetIsacImpl(), GetSampleRateHz(), - GetFrameSizeMs(), kHighBps); - int num_bytes_low = 0; - int num_bytes_high = 0; - constexpr int kNumFrames = 12; - for (int i = 0; i < kNumFrames; ++i) { - AudioFrame in; - pcm_file->Read10MsData(in); - rtc::Buffer low, high; - encoder_low->Encode(/*rtp_timestamp=*/0, AudioFrameToView(in), &low); - encoder_high->Encode(/*rtp_timestamp=*/0, AudioFrameToView(in), &high); - num_bytes_low += low.size(); - num_bytes_high += high.size(); - } - EXPECT_LT(num_bytes_low, num_bytes_high); -} - -// Encodes an input audio sequence first with a low, then with a high target -// bitrate *using the same encoder* and checks that the number of emitted bytes -// in the first case is less than in the second case. -TEST_P(EncoderTest, TestDynamicBitrateChange) { - constexpr int kLowBps = 20000; - constexpr int kHighBps = 25000; - constexpr int kStartBps = 30000; - auto encoder = CreateEncoder(GetIsacImpl(), GetSampleRateHz(), - GetFrameSizeMs(), kStartBps); - std::map num_bytes; - constexpr int kNumFrames = 200; // 2 seconds. - for (int bitrate_bps : {kLowBps, kHighBps}) { - auto pcm_file = GetPcmTestFileReader(GetSampleRateHz()); - encoder->OnReceivedTargetAudioBitrate(bitrate_bps); - for (int i = 0; i < kNumFrames; ++i) { - AudioFrame in; - pcm_file->Read10MsData(in); - rtc::Buffer buf; - encoder->Encode(/*rtp_timestamp=*/0, AudioFrameToView(in), &buf); - num_bytes[bitrate_bps] += buf.size(); - } - } - // kHighBps / kLowBps == 1.25, so require the high-bitrate run to produce at - // least 1.195 times the number of bytes. - EXPECT_LT(1.195 * num_bytes[kLowBps], num_bytes[kHighBps]); -} - -// Checks that, given a target bitrate, the encoder does not overshoot too much. -TEST_P(EncoderTest, DoNotOvershootTargetBitrate) { - for (int bitrate_bps : {10000, 15000, 20000, 26000, 32000}) { - SCOPED_TRACE(bitrate_bps); - auto pcm_file = GetPcmTestFileReader(GetSampleRateHz()); - auto e = CreateEncoder(GetIsacImpl(), GetSampleRateHz(), GetFrameSizeMs(), - bitrate_bps); - int num_bytes = 0; - constexpr int kNumFrames = 200; // 2 seconds. - for (int i = 0; i < kNumFrames; ++i) { - AudioFrame in; - pcm_file->Read10MsData(in); - rtc::Buffer encoded; - e->Encode(/*rtp_timestamp=*/0, AudioFrameToView(in), &encoded); - num_bytes += encoded.size(); - } - // Inverse of the duration of `kNumFrames` 10 ms frames (unit: seconds^-1). - constexpr float kAudioDurationInv = 100.f / kNumFrames; - const int measured_bitrate_bps = 8 * num_bytes * kAudioDurationInv; - EXPECT_LT(measured_bitrate_bps, bitrate_bps + 2250); // Max 2250 bps extra. - } -} - -// Creates tests for different encoder configurations and implementations. 
-INSTANTIATE_TEST_SUITE_P( - IsacApiTest, - EncoderTest, - ::testing::ValuesIn([] { - std::vector cases; - for (IsacImpl impl : {IsacImpl::kFloat, IsacImpl::kFixed}) { - for (int frame_size_ms : {30, 60}) { - cases.push_back({impl, 16000, frame_size_ms}); - } - } - cases.push_back({IsacImpl::kFloat, 32000, 30}); - return cases; - }()), - [](const ::testing::TestParamInfo& info) { - rtc::StringBuilder b; - const auto& p = info.param; - b << IsacImplToString(p.impl) << "_" << p.sample_rate_hz << "_" - << p.frame_size_ms; - return b.Release(); - }); - -struct DecoderTestParams { - IsacImpl impl; - int sample_rate_hz; -}; - -class DecoderTest : public testing::TestWithParam { - protected: - DecoderTest() = default; - IsacImpl GetIsacImpl() const { return GetParam().impl; } - int GetSampleRateHz() const { return GetParam().sample_rate_hz; } -}; - -TEST_P(DecoderTest, TestConfig) { - auto decoder = CreateDecoder(GetIsacImpl(), GetSampleRateHz()); - EXPECT_EQ(GetSampleRateHz(), decoder->SampleRateHz()); - EXPECT_EQ(size_t{1}, decoder->Channels()); -} - -// Creates tests for different decoder configurations and implementations. -INSTANTIATE_TEST_SUITE_P( - IsacApiTest, - DecoderTest, - ::testing::ValuesIn({DecoderTestParams{IsacImpl::kFixed, 16000}, - DecoderTestParams{IsacImpl::kFloat, 16000}, - DecoderTestParams{IsacImpl::kFloat, 32000}}), - [](const ::testing::TestParamInfo& info) { - const auto& p = info.param; - return (rtc::StringBuilder() - << IsacImplToString(p.impl) << "_" << p.sample_rate_hz) - .Release(); - }); - -struct EncoderDecoderPairTestParams { - int sample_rate_hz; - int frame_size_ms; - IsacImpl encoder_impl; - IsacImpl decoder_impl; -}; - -class EncoderDecoderPairTest - : public testing::TestWithParam { - protected: - EncoderDecoderPairTest() = default; - int GetSampleRateHz() const { return GetParam().sample_rate_hz; } - int GetEncoderFrameSizeMs() const { return GetParam().frame_size_ms; } - IsacImpl GetEncoderIsacImpl() const { return GetParam().encoder_impl; } - IsacImpl GetDecoderIsacImpl() const { return GetParam().decoder_impl; } - int GetEncoderFrameSize() const { - return GetEncoderFrameSizeMs() * GetSampleRateHz() / 1000; - } -}; - -// Checks that the number of encoded and decoded samples match. -TEST_P(EncoderDecoderPairTest, EncodeDecode) { - auto pcm_file = GetPcmTestFileReader(GetSampleRateHz()); - auto encoder = CreateEncoder(GetEncoderIsacImpl(), GetSampleRateHz(), - GetEncoderFrameSizeMs(), /*bitrate_bps=*/20000); - auto decoder = CreateDecoder(GetDecoderIsacImpl(), GetSampleRateHz()); - const int encoder_frame_size = GetEncoderFrameSize(); - std::vector out(encoder_frame_size); - size_t num_encoded_samples = 0; - size_t num_decoded_samples = 0; - constexpr int kNumFrames = 12; - for (int i = 0; i < kNumFrames; ++i) { - AudioFrame in; - pcm_file->Read10MsData(in); - rtc::Buffer encoded; - encoder->Encode(/*rtp_timestamp=*/0, AudioFrameToView(in), &encoded); - num_encoded_samples += in.samples_per_channel(); - if (encoded.empty()) { - continue; - } - // Decode. 
- const std::vector parse_result = - decoder->ParsePayload(std::move(encoded), /*timestamp=*/0); - EXPECT_EQ(parse_result.size(), size_t{1}); - auto decode_result = parse_result[0].frame->Decode(out); - EXPECT_TRUE(decode_result.has_value()); - EXPECT_EQ(out.size(), decode_result->num_decoded_samples); - num_decoded_samples += decode_result->num_decoded_samples; - } - EXPECT_EQ(num_encoded_samples, num_decoded_samples); -} - -// Creates tests for different encoder frame sizes and different -// encoder/decoder implementations. -INSTANTIATE_TEST_SUITE_P( - IsacApiTest, - EncoderDecoderPairTest, - ::testing::ValuesIn([] { - std::vector cases; - for (int frame_size_ms : {30, 60}) { - for (IsacImpl enc : {IsacImpl::kFloat, IsacImpl::kFixed}) { - for (IsacImpl dec : {IsacImpl::kFloat, IsacImpl::kFixed}) { - cases.push_back({16000, frame_size_ms, enc, dec}); - } - } - } - cases.push_back({32000, 30, IsacImpl::kFloat, IsacImpl::kFloat}); - return cases; - }()), - [](const ::testing::TestParamInfo& info) { - rtc::StringBuilder b; - const auto& p = info.param; - b << p.sample_rate_hz << "_" << p.frame_size_ms << "_" - << IsacImplToString(p.encoder_impl) << "_" - << IsacImplToString(p.decoder_impl); - return b.Release(); - }); - -} // namespace -} // namespace webrtc diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/include/audio_decoder_isac.h b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/include/audio_decoder_isac.h deleted file mode 100644 index fae2f3d4a78e..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/include/audio_decoder_isac.h +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_INCLUDE_AUDIO_DECODER_ISAC_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_INCLUDE_AUDIO_DECODER_ISAC_H_ - -#include "modules/audio_coding/codecs/isac/audio_decoder_isac_t.h" -#include "modules/audio_coding/codecs/isac/main/source/isac_float_type.h" - -namespace webrtc { - -using AudioDecoderIsacFloatImpl = AudioDecoderIsacT; - -} // namespace webrtc -#endif // MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_INCLUDE_AUDIO_ENCODER_ISAC_H_ diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/include/audio_encoder_isac.h b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/include/audio_encoder_isac.h deleted file mode 100644 index dc32bcdde692..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/include/audio_encoder_isac.h +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_INCLUDE_AUDIO_ENCODER_ISAC_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_INCLUDE_AUDIO_ENCODER_ISAC_H_ - -#include "modules/audio_coding/codecs/isac/audio_encoder_isac_t.h" -#include "modules/audio_coding/codecs/isac/main/source/isac_float_type.h" - -namespace webrtc { - -using AudioEncoderIsacFloatImpl = AudioEncoderIsacT; - -} // namespace webrtc -#endif // MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_INCLUDE_AUDIO_ENCODER_ISAC_H_ diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/include/isac.h b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/include/isac.h deleted file mode 100644 index 3b05a8bcda6e..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/include/isac.h +++ /dev/null @@ -1,617 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_INCLUDE_ISAC_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_INCLUDE_ISAC_H_ - -#include - -#include "modules/audio_coding/codecs/isac/bandwidth_info.h" - -typedef struct WebRtcISACStruct ISACStruct; - -#if defined(__cplusplus) -extern "C" { -#endif - -/****************************************************************************** - * WebRtcIsac_Create(...) - * - * This function creates an ISAC instance, which will contain the state - * information for one coding/decoding channel. - * - * Input: - * - *ISAC_main_inst : a pointer to the coder instance. - * - * Return value : 0 - Ok - * -1 - Error - */ - -int16_t WebRtcIsac_Create(ISACStruct** ISAC_main_inst); - -/****************************************************************************** - * WebRtcIsac_Free(...) - * - * This function frees the ISAC instance created at the beginning. - * - * Input: - * - ISAC_main_inst : an ISAC instance. - * - * Return value : 0 - Ok - * -1 - Error - */ - -int16_t WebRtcIsac_Free(ISACStruct* ISAC_main_inst); - -/****************************************************************************** - * WebRtcIsac_EncoderInit(...) - * - * This function initializes an ISAC instance prior to the encoder calls. - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - CodingMode : 0 -> Bit rate and frame length are - * automatically adjusted to available bandwidth - * on transmission channel, just valid if codec - * is created to work in wideband mode. - * 1 -> User sets a frame length and a target bit - * rate which is taken as the maximum - * short-term average bit rate. - * - * Return value : 0 - Ok - * -1 - Error - */ - -int16_t WebRtcIsac_EncoderInit(ISACStruct* ISAC_main_inst, int16_t CodingMode); - -/****************************************************************************** - * WebRtcIsac_Encode(...) - * - * This function encodes 10ms audio blocks and inserts it into a package. - * Input speech length has 160 samples if operating at 16 kHz sampling - * rate, or 320 if operating at 32 kHz sampling rate. The encoder buffers the - * input audio until the whole frame is buffered then proceeds with encoding. - * - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - speechIn : input speech vector. 
- * - * Output: - * - encoded : the encoded data vector - * - * Return value: - * : >0 - Length (in bytes) of coded data - * : 0 - The buffer didn't reach the chosen - * frame-size so it keeps buffering speech - * samples. - * : -1 - Error - */ - -int WebRtcIsac_Encode(ISACStruct* ISAC_main_inst, - const int16_t* speechIn, - uint8_t* encoded); - -/****************************************************************************** - * WebRtcIsac_DecoderInit(...) - * - * This function initializes an ISAC instance prior to the decoder calls. - * - * Input: - * - ISAC_main_inst : ISAC instance. - */ - -void WebRtcIsac_DecoderInit(ISACStruct* ISAC_main_inst); - -/****************************************************************************** - * WebRtcIsac_UpdateBwEstimate(...) - * - * This function updates the estimate of the bandwidth. - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - encoded : encoded ISAC frame(s). - * - packet_size : size of the packet. - * - rtp_seq_number : the RTP number of the packet. - * - send_ts : the RTP send timestamp, given in samples - * - arr_ts : the arrival time of the packet (from NetEq) - * in samples. - * - * Return value : 0 - Ok - * -1 - Error - */ - -int16_t WebRtcIsac_UpdateBwEstimate(ISACStruct* ISAC_main_inst, - const uint8_t* encoded, - size_t packet_size, - uint16_t rtp_seq_number, - uint32_t send_ts, - uint32_t arr_ts); - -/****************************************************************************** - * WebRtcIsac_Decode(...) - * - * This function decodes an ISAC frame. At 16 kHz sampling rate, the length - * of the output audio could be either 480 or 960 samples, equivalent to - * 30 or 60 ms respectively. At 32 kHz sampling rate, the length of the - * output audio is 960 samples, which is 30 ms. - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - encoded : encoded ISAC frame(s). - * - len : bytes in encoded vector. - * - * Output: - * - decoded : The decoded vector. - * - * Return value : >0 - number of samples in decoded vector. - * -1 - Error. - */ - -int WebRtcIsac_Decode(ISACStruct* ISAC_main_inst, - const uint8_t* encoded, - size_t len, - int16_t* decoded, - int16_t* speechType); - -/****************************************************************************** - * WebRtcIsac_DecodePlc(...) - * - * This function conducts PLC for ISAC frame(s). Output speech length - * will be a multiple of frames, i.e. multiples of 30 ms audio. Therefore, - * the output is multiple of 480 samples if operating at 16 kHz and multiple - * of 960 if operating at 32 kHz. - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - noOfLostFrames : Number of PLC frames to produce. - * - * Output: - * - decoded : The decoded vector. - * - * Return value : Number of samples in decoded PLC vector - */ - -size_t WebRtcIsac_DecodePlc(ISACStruct* ISAC_main_inst, - int16_t* decoded, - size_t noOfLostFrames); - -/****************************************************************************** - * WebRtcIsac_Control(...) - * - * This function sets the limit on the short-term average bit-rate and the - * frame length. Should be used only in Instantaneous mode. At 16 kHz sampling - * rate, an average bit-rate between 10000 to 32000 bps is valid and a - * frame-size of 30 or 60 ms is acceptable. At 32 kHz, an average bit-rate - * between 10000 to 56000 is acceptable, and the valid frame-size is 30 ms. - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - rate : limit on the short-term average bit rate, - * in bits/second. 
- * - framesize : frame-size in millisecond. - * - * Return value : 0 - ok - * -1 - Error - */ - -int16_t WebRtcIsac_Control(ISACStruct* ISAC_main_inst, - int32_t rate, - int framesize); - -void WebRtcIsac_SetInitialBweBottleneck(ISACStruct* ISAC_main_inst, - int bottleneck_bits_per_second); - -/****************************************************************************** - * WebRtcIsac_ControlBwe(...) - * - * This function sets the initial values of bottleneck and frame-size if - * iSAC is used in channel-adaptive mode. Therefore, this API is not - * applicable if the codec is created to operate in super-wideband mode. - * - * Through this API, users can enforce a frame-size for all values of - * bottleneck. Then iSAC will not automatically change the frame-size. - * - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - rateBPS : initial value of bottleneck in bits/second - * 10000 <= rateBPS <= 56000 is accepted - * For default bottleneck set rateBPS = 0 - * - frameSizeMs : number of milliseconds per frame (30 or 60) - * - enforceFrameSize : 1 to enforce the given frame-size through - * out the adaptation process, 0 to let iSAC - * change the frame-size if required. - * - * Return value : 0 - ok - * -1 - Error - */ - -int16_t WebRtcIsac_ControlBwe(ISACStruct* ISAC_main_inst, - int32_t rateBPS, - int frameSizeMs, - int16_t enforceFrameSize); - -/****************************************************************************** - * WebRtcIsac_ReadFrameLen(...) - * - * This function returns the length of the frame represented in the packet. - * - * Input: - * - encoded : Encoded bit-stream - * - * Output: - * - frameLength : Length of frame in packet (in samples) - * - */ - -int16_t WebRtcIsac_ReadFrameLen(const ISACStruct* ISAC_main_inst, - const uint8_t* encoded, - int16_t* frameLength); - -/****************************************************************************** - * WebRtcIsac_version(...) - * - * This function returns the version number. - * - * Output: - * - version : Pointer to character string - * - */ - -void WebRtcIsac_version(char* version); - -/****************************************************************************** - * WebRtcIsac_GetErrorCode(...) - * - * This function can be used to check the error code of an iSAC instance. When - * a function returns -1 a error code will be set for that instance. The - * function below extract the code of the last error that occurred in the - * specified instance. - * - * Input: - * - ISAC_main_inst : ISAC instance - * - * Return value : Error code - */ - -int16_t WebRtcIsac_GetErrorCode(ISACStruct* ISAC_main_inst); - -/**************************************************************************** - * WebRtcIsac_GetUplinkBw(...) - * - * This function outputs the target bottleneck of the codec. In - * channel-adaptive mode, the target bottleneck is specified through in-band - * signalling retreived by bandwidth estimator. - * In channel-independent, also called instantaneous mode, the target - * bottleneck is provided to the encoder by calling xxx_control(...). If - * xxx_control is never called the default values is returned. The default - * value for bottleneck at 16 kHz encoder sampling rate is 32000 bits/sec, - * and it is 56000 bits/sec for 32 kHz sampling rate. - * Note that the output is the iSAC internal operating bottleneck which might - * differ slightly from the one provided through xxx_control(). 
- * - * Input: - * - ISAC_main_inst : iSAC instance - * - * Output: - * - *bottleneck : bottleneck in bits/sec - * - * Return value : -1 if error happens - * 0 bit-rates computed correctly. - */ - -int16_t WebRtcIsac_GetUplinkBw(ISACStruct* ISAC_main_inst, int32_t* bottleneck); - -/****************************************************************************** - * WebRtcIsac_SetMaxPayloadSize(...) - * - * This function sets a limit for the maximum payload size of iSAC. The same - * value is used both for 30 and 60 ms packets. If the encoder sampling rate - * is 16 kHz the maximum payload size is between 120 and 400 bytes. If the - * encoder sampling rate is 32 kHz the maximum payload size is between 120 - * and 600 bytes. - * - * If an out of range limit is used, the function returns -1, but the closest - * valid value will be applied. - * - * --------------- - * IMPORTANT NOTES - * --------------- - * The size of a packet is limited to the minimum of 'max-payload-size' and - * 'max-rate.' For instance, let's assume the max-payload-size is set to - * 170 bytes, and max-rate is set to 40 kbps. Note that a limit of 40 kbps - * translates to 150 bytes for 30ms frame-size & 300 bytes for 60ms - * frame-size. Then a packet with a frame-size of 30 ms is limited to 150, - * i.e. min(170, 150), and a packet with 60 ms frame-size is limited to - * 170 bytes, i.e. min(170, 300). - * - * Input: - * - ISAC_main_inst : iSAC instance - * - maxPayloadBytes : maximum size of the payload in bytes - * valid values are between 120 and 400 bytes - * if encoder sampling rate is 16 kHz. For - * 32 kHz encoder sampling rate valid values - * are between 120 and 600 bytes. - * - * Return value : 0 if successful - * -1 if error happens - */ - -int16_t WebRtcIsac_SetMaxPayloadSize(ISACStruct* ISAC_main_inst, - int16_t maxPayloadBytes); - -/****************************************************************************** - * WebRtcIsac_SetMaxRate(...) - * - * This function sets the maximum rate which the codec may not exceed for - * any signal packet. The maximum rate is defined and payload-size per - * frame-size in bits per second. - * - * The codec has a maximum rate of 53400 bits per second (200 bytes per 30 - * ms) if the encoder sampling rate is 16kHz, and 160 kbps (600 bytes/30 ms) - * if the encoder sampling rate is 32 kHz. - * - * It is possible to set a maximum rate between 32000 and 53400 bits/sec - * in wideband mode, and 32000 to 160000 bits/sec in super-wideband mode. - * - * If an out of range limit is used, the function returns -1, but the closest - * valid value will be applied. - * - * --------------- - * IMPORTANT NOTES - * --------------- - * The size of a packet is limited to the minimum of 'max-payload-size' and - * 'max-rate.' For instance, let's assume the max-payload-size is set to - * 170 bytes, and max-rate is set to 40 kbps. Note that a limit of 40 kbps - * translates to 150 bytes for 30ms frame-size & 300 bytes for 60ms - * frame-size. Then a packet with a frame-size of 30 ms is limited to 150, - * i.e. min(170, 150), and a packet with 60 ms frame-size is limited to - * 170 bytes, min(170, 300). - * - * Input: - * - ISAC_main_inst : iSAC instance - * - maxRate : maximum rate in bits per second, - * valid values are 32000 to 53400 bits/sec in - * wideband mode, and 32000 to 160000 bits/sec in - * super-wideband mode. 
- * - * Return value : 0 if successful - * -1 if error happens - */ - -int16_t WebRtcIsac_SetMaxRate(ISACStruct* ISAC_main_inst, int32_t maxRate); - -/****************************************************************************** - * WebRtcIsac_DecSampRate() - * Return the sampling rate of the decoded audio. - * - * Input: - * - ISAC_main_inst : iSAC instance - * - * Return value : sampling frequency in Hertz. - * - */ - -uint16_t WebRtcIsac_DecSampRate(ISACStruct* ISAC_main_inst); - -/****************************************************************************** - * WebRtcIsac_EncSampRate() - * - * Input: - * - ISAC_main_inst : iSAC instance - * - * Return value : sampling rate in Hertz. - * - */ - -uint16_t WebRtcIsac_EncSampRate(ISACStruct* ISAC_main_inst); - -/****************************************************************************** - * WebRtcIsac_SetDecSampRate() - * Set the sampling rate of the decoder. Initialization of the decoder WILL - * NOT overwrite the sampling rate of the encoder. The default value is 16 kHz - * which is set when the instance is created. - * - * Input: - * - ISAC_main_inst : iSAC instance - * - sampRate : sampling rate in Hertz. - * - * Return value : 0 if successful - * -1 if failed. - */ - -int16_t WebRtcIsac_SetDecSampRate(ISACStruct* ISAC_main_inst, - uint16_t samp_rate_hz); - -/****************************************************************************** - * WebRtcIsac_SetEncSampRate() - * Set the sampling rate of the encoder. Initialization of the encoder WILL - * NOT overwrite the sampling rate of the encoder. The default value is 16 kHz - * which is set when the instance is created. The encoding-mode and the - * bottleneck remain unchanged by this call, however, the maximum rate and - * maximum payload-size will reset to their default value. - * - * Input: - * - ISAC_main_inst : iSAC instance - * - sampRate : sampling rate in Hertz. - * - * Return value : 0 if successful - * -1 if failed. - */ - -int16_t WebRtcIsac_SetEncSampRate(ISACStruct* ISAC_main_inst, - uint16_t sample_rate_hz); - -/****************************************************************************** - * WebRtcIsac_GetNewBitStream(...) - * - * This function returns encoded data, with the received bwe-index in the - * stream. If the rate is set to a value less than bottleneck of codec - * the new bistream will be re-encoded with the given target rate. - * It should always return a complete packet, i.e. only called once - * even for 60 msec frames. - * - * NOTE 1! This function does not write in the ISACStruct, it is not allowed. - * NOTE 2! Currently not implemented for SWB mode. - * NOTE 3! Rates larger than the bottleneck of the codec will be limited - * to the current bottleneck. - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - bweIndex : Index of bandwidth estimate to put in new - * bitstream - * - rate : target rate of the transcoder is bits/sec. - * Valid values are the accepted rate in iSAC, - * i.e. 10000 to 56000. - * - isRCU : if the new bit-stream is an RCU - * stream. Note that the rate parameter always indicates the target rate of the - * main payload, regardless of 'isRCU' value. - * - * Output: - * - encoded : The encoded data vector - * - * Return value : >0 - Length (in bytes) of coded data - * -1 - Error or called in SWB mode - * NOTE! No error code is written to - * the struct since it is only allowed to read - * the struct. 
- */ -int16_t WebRtcIsac_GetNewBitStream(ISACStruct* ISAC_main_inst, - int16_t bweIndex, - int16_t jitterInfo, - int32_t rate, - uint8_t* encoded, - int16_t isRCU); - -/**************************************************************************** - * WebRtcIsac_GetDownLinkBwIndex(...) - * - * This function returns index representing the Bandwidth estimate from - * other side to this side. - * - * Input: - * - ISAC_main_inst : iSAC struct - * - * Output: - * - bweIndex : Bandwidth estimate to transmit to other side. - * - */ - -int16_t WebRtcIsac_GetDownLinkBwIndex(ISACStruct* ISAC_main_inst, - int16_t* bweIndex, - int16_t* jitterInfo); - -/**************************************************************************** - * WebRtcIsac_UpdateUplinkBw(...) - * - * This function takes an index representing the Bandwidth estimate from - * this side to other side and updates BWE. - * - * Input: - * - ISAC_main_inst : iSAC struct - * - bweIndex : Bandwidth estimate from other side. - * - */ - -int16_t WebRtcIsac_UpdateUplinkBw(ISACStruct* ISAC_main_inst, int16_t bweIndex); - -/**************************************************************************** - * WebRtcIsac_ReadBwIndex(...) - * - * This function returns the index of the Bandwidth estimate from the bitstream. - * - * Input: - * - encoded : Encoded bitstream - * - * Output: - * - frameLength : Length of frame in packet (in samples) - * - bweIndex : Bandwidth estimate in bitstream - * - */ - -int16_t WebRtcIsac_ReadBwIndex(const uint8_t* encoded, int16_t* bweIndex); - -/******************************************************************************* - * WebRtcIsac_GetNewFrameLen(...) - * - * returns the frame lenght (in samples) of the next packet. In the case of - * channel-adaptive mode, iSAC decides on its frame lenght based on the - * estimated bottleneck this allows a user to prepare for the next packet (at - * the encoder) - * - * The primary usage is in CE to make the iSAC works in channel-adaptive mode - * - * Input: - * - ISAC_main_inst : iSAC struct - * - * Return Value : frame lenght in samples - * - */ - -int16_t WebRtcIsac_GetNewFrameLen(ISACStruct* ISAC_main_inst); - -/**************************************************************************** - * WebRtcIsac_GetRedPayload(...) - * - * Populates "encoded" with the redundant payload of the recently encoded - * frame. This function has to be called once that WebRtcIsac_Encode(...) - * returns a positive value. Regardless of the frame-size this function will - * be called only once after encoding is completed. - * - * Input: - * - ISAC_main_inst : iSAC struct - * - * Output: - * - encoded : the encoded data vector - * - * - * Return value: - * : >0 - Length (in bytes) of coded data - * : -1 - Error - * - * - */ -int16_t WebRtcIsac_GetRedPayload(ISACStruct* ISAC_main_inst, uint8_t* encoded); - -/**************************************************************************** - * WebRtcIsac_DecodeRcu(...) - * - * This function decodes a redundant (RCU) iSAC frame. Function is called in - * NetEq with a stored RCU payload i case of packet loss. Output speech length - * will be a multiple of 480 samples: 480 or 960 samples, - * depending on the framesize (30 or 60 ms). - * - * Input: - * - ISAC_main_inst : ISAC instance. 
- * - encoded : encoded ISAC RCU frame(s) - * - len : bytes in encoded vector - * - * Output: - * - decoded : The decoded vector - * - * Return value : >0 - number of samples in decoded vector - * -1 - Error - */ -int WebRtcIsac_DecodeRcu(ISACStruct* ISAC_main_inst, - const uint8_t* encoded, - size_t len, - int16_t* decoded, - int16_t* speechType); - -/* If `inst` is a decoder but not an encoder: tell it what sample rate the - encoder is using, for bandwidth estimation purposes. */ -void WebRtcIsac_SetEncSampRateInDecoder(ISACStruct* inst, int sample_rate_hz); - -#if defined(__cplusplus) -} -#endif - -#endif /* MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_INCLUDE_ISAC_H_ */ diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/arith_routines.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/arith_routines.c deleted file mode 100644 index 9d5c6930b1bb..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/arith_routines.c +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_coding/codecs/isac/main/source/arith_routines.h" -#include "modules/audio_coding/codecs/isac/main/source/settings.h" - - -/* - * terminate and return byte stream; - * returns the number of bytes in the stream - */ -int WebRtcIsac_EncTerminate(Bitstr *streamdata) /* in-/output struct containing bitstream */ -{ - uint8_t *stream_ptr; - - - /* point to the right place in the stream buffer */ - stream_ptr = streamdata->stream + streamdata->stream_index; - - /* find minimum length (determined by current interval width) */ - if ( streamdata->W_upper > 0x01FFFFFF ) - { - streamdata->streamval += 0x01000000; - /* add carry to buffer */ - if (streamdata->streamval < 0x01000000) - { - /* propagate carry */ - while ( !(++(*--stream_ptr)) ); - /* put pointer back to the old value */ - stream_ptr = streamdata->stream + streamdata->stream_index; - } - /* write remaining data to bitstream */ - *stream_ptr++ = (uint8_t) (streamdata->streamval >> 24); - } - else - { - streamdata->streamval += 0x00010000; - /* add carry to buffer */ - if (streamdata->streamval < 0x00010000) - { - /* propagate carry */ - while ( !(++(*--stream_ptr)) ); - /* put pointer back to the old value */ - stream_ptr = streamdata->stream + streamdata->stream_index; - } - /* write remaining data to bitstream */ - *stream_ptr++ = (uint8_t) (streamdata->streamval >> 24); - *stream_ptr++ = (uint8_t) ((streamdata->streamval >> 16) & 0x00FF); - } - - /* calculate stream length */ - return (int)(stream_ptr - streamdata->stream); -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/arith_routines.h b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/arith_routines.h deleted file mode 100644 index 3f9f6de7bb15..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/arith_routines.h +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * arith_routines.h - * - * Functions for arithmetic coding. - * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_ARITH_ROUTINES_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_ARITH_ROUTINES_H_ - -#include "modules/audio_coding/codecs/isac/main/source/structs.h" - -int WebRtcIsac_EncLogisticMulti2( - Bitstr* streamdata, /* in-/output struct containing bitstream */ - int16_t* dataQ7, /* input: data vector */ - const uint16_t* - env, /* input: side info vector defining the width of the pdf */ - int N, /* input: data vector length */ - int16_t isSWB12kHz); /* if the codec is working in 12kHz bandwidth */ - -/* returns the number of bytes in the stream */ -int WebRtcIsac_EncTerminate( - Bitstr* streamdata); /* in-/output struct containing bitstream */ - -/* returns the number of bytes in the stream so far */ -int WebRtcIsac_DecLogisticMulti2( - int16_t* data, /* output: data vector */ - Bitstr* streamdata, /* in-/output struct containing bitstream */ - const uint16_t* - env, /* input: side info vector defining the width of the pdf */ - const int16_t* dither, /* input: dither vector */ - int N, /* input: data vector length */ - int16_t isSWB12kHz); /* if the codec is working in 12kHz bandwidth */ - -void WebRtcIsac_EncHistMulti( - Bitstr* streamdata, /* in-/output struct containing bitstream */ - const int* data, /* input: data vector */ - const uint16_t* const* cdf, /* input: array of cdf arrays */ - int N); /* input: data vector length */ - -int WebRtcIsac_DecHistBisectMulti( - int* data, /* output: data vector */ - Bitstr* streamdata, /* in-/output struct containing bitstream */ - const uint16_t* const* cdf, /* input: array of cdf arrays */ - const uint16_t* - cdf_size, /* input: array of cdf table sizes+1 (power of two: 2^k) */ - int N); /* input: data vector length */ - -int WebRtcIsac_DecHistOneStepMulti( - int* data, /* output: data vector */ - Bitstr* streamdata, /* in-/output struct containing bitstream */ - const uint16_t* const* cdf, /* input: array of cdf arrays */ - const uint16_t* - init_index, /* input: vector of initial cdf table search entries */ - int N); /* input: data vector length */ - -#endif /* MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_ARITH_ROUTINES_H_ */ diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/arith_routines_hist.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/arith_routines_hist.c deleted file mode 100644 index e948979fd74f..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/arith_routines_hist.c +++ /dev/null @@ -1,291 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/audio_coding/codecs/isac/main/source/settings.h" -#include "modules/audio_coding/codecs/isac/main/source/arith_routines.h" - - -/* - * code symbols into arithmetic bytestream - */ -void WebRtcIsac_EncHistMulti(Bitstr *streamdata, /* in-/output struct containing bitstream */ - const int *data, /* input: data vector */ - const uint16_t *const *cdf, /* input: array of cdf arrays */ - const int N) /* input: data vector length */ -{ - uint32_t W_lower, W_upper; - uint32_t W_upper_LSB, W_upper_MSB; - uint8_t *stream_ptr; - uint8_t *stream_ptr_carry; - uint32_t cdf_lo, cdf_hi; - int k; - - - /* point to beginning of stream buffer */ - stream_ptr = streamdata->stream + streamdata->stream_index; - W_upper = streamdata->W_upper; - - for (k=N; k>0; k--) - { - /* fetch cdf_lower and cdf_upper from cdf tables */ - cdf_lo = (uint32_t) *(*cdf + *data); - cdf_hi = (uint32_t) *(*cdf++ + *data++ + 1); - - /* update interval */ - W_upper_LSB = W_upper & 0x0000FFFF; - W_upper_MSB = W_upper >> 16; - W_lower = W_upper_MSB * cdf_lo; - W_lower += (W_upper_LSB * cdf_lo) >> 16; - W_upper = W_upper_MSB * cdf_hi; - W_upper += (W_upper_LSB * cdf_hi) >> 16; - - /* shift interval such that it begins at zero */ - W_upper -= ++W_lower; - - /* add integer to bitstream */ - streamdata->streamval += W_lower; - - /* handle carry */ - if (streamdata->streamval < W_lower) - { - /* propagate carry */ - stream_ptr_carry = stream_ptr; - while (!(++(*--stream_ptr_carry))); - } - - /* renormalize interval, store most significant byte of streamval and update streamval */ - while ( !(W_upper & 0xFF000000) ) /* W_upper < 2^24 */ - { - W_upper <<= 8; - *stream_ptr++ = (uint8_t) (streamdata->streamval >> 24); - streamdata->streamval <<= 8; - } - } - - /* calculate new stream_index */ - streamdata->stream_index = (int)(stream_ptr - streamdata->stream); - streamdata->W_upper = W_upper; - - return; -} - - - -/* - * function to decode more symbols from the arithmetic bytestream, using method of bisection - * cdf tables should be of size 2^k-1 (which corresponds to an alphabet size of 2^k-2) - */ -int WebRtcIsac_DecHistBisectMulti(int *data, /* output: data vector */ - Bitstr *streamdata, /* in-/output struct containing bitstream */ - const uint16_t *const *cdf, /* input: array of cdf arrays */ - const uint16_t *cdf_size, /* input: array of cdf table sizes+1 (power of two: 2^k) */ - const int N) /* input: data vector length */ -{ - uint32_t W_lower, W_upper; - uint32_t W_tmp; - uint32_t W_upper_LSB, W_upper_MSB; - uint32_t streamval; - const uint8_t *stream_ptr; - const uint16_t *cdf_ptr; - int size_tmp; - int k; - - W_lower = 0; //to remove warning -DH - stream_ptr = streamdata->stream + streamdata->stream_index; - W_upper = streamdata->W_upper; - if (W_upper == 0) - /* Should not be possible in normal operation */ - return -2; - - if (streamdata->stream_index == 0) /* first time decoder is called for this stream */ - { - /* read first word from bytestream */ - streamval = *stream_ptr << 24; - streamval |= *++stream_ptr << 16; - streamval |= *++stream_ptr << 8; - streamval |= *++stream_ptr; - } else { - streamval = streamdata->streamval; - } - - for (k=N; k>0; k--) - { - /* find the integer *data for which streamval lies in [W_lower+1, W_upper] */ - W_upper_LSB = W_upper & 0x0000FFFF; - W_upper_MSB = W_upper >> 16; - - /* start halfway the cdf range */ - size_tmp = *cdf_size++ >> 1; - cdf_ptr = *cdf + (size_tmp - 1); - - /* method of bisection */ - for ( ;; ) - { - W_tmp = W_upper_MSB * *cdf_ptr; - W_tmp += 
(W_upper_LSB * *cdf_ptr) >> 16; - size_tmp >>= 1; - if (size_tmp == 0) break; - if (streamval > W_tmp) - { - W_lower = W_tmp; - cdf_ptr += size_tmp; - } else { - W_upper = W_tmp; - cdf_ptr -= size_tmp; - } - } - if (streamval > W_tmp) - { - W_lower = W_tmp; - *data++ = (int)(cdf_ptr - *cdf++); - } else { - W_upper = W_tmp; - *data++ = (int)(cdf_ptr - *cdf++ - 1); - } - - /* shift interval to start at zero */ - W_upper -= ++W_lower; - - /* add integer to bitstream */ - streamval -= W_lower; - - /* renormalize interval and update streamval */ - while ( !(W_upper & 0xFF000000) ) /* W_upper < 2^24 */ - { - /* read next byte from stream */ - streamval = (streamval << 8) | *++stream_ptr; - W_upper <<= 8; - } - - if (W_upper == 0) - /* Should not be possible in normal operation */ - return -2; - - - } - - streamdata->stream_index = (int)(stream_ptr - streamdata->stream); - streamdata->W_upper = W_upper; - streamdata->streamval = streamval; - - - /* find number of bytes in original stream (determined by current interval width) */ - if ( W_upper > 0x01FFFFFF ) - return streamdata->stream_index - 2; - else - return streamdata->stream_index - 1; -} - - - -/* - * function to decode more symbols from the arithmetic bytestream, taking single step up or - * down at a time - * cdf tables can be of arbitrary size, but large tables may take a lot of iterations - */ -int WebRtcIsac_DecHistOneStepMulti(int *data, /* output: data vector */ - Bitstr *streamdata, /* in-/output struct containing bitstream */ - const uint16_t *const *cdf, /* input: array of cdf arrays */ - const uint16_t *init_index, /* input: vector of initial cdf table search entries */ - const int N) /* input: data vector length */ -{ - uint32_t W_lower, W_upper; - uint32_t W_tmp; - uint32_t W_upper_LSB, W_upper_MSB; - uint32_t streamval; - const uint8_t *stream_ptr; - const uint16_t *cdf_ptr; - int k; - - - stream_ptr = streamdata->stream + streamdata->stream_index; - W_upper = streamdata->W_upper; - if (W_upper == 0) - /* Should not be possible in normal operation */ - return -2; - - if (streamdata->stream_index == 0) /* first time decoder is called for this stream */ - { - /* read first word from bytestream */ - streamval = (uint32_t)(*stream_ptr) << 24; - streamval |= (uint32_t)(*++stream_ptr) << 16; - streamval |= (uint32_t)(*++stream_ptr) << 8; - streamval |= (uint32_t)(*++stream_ptr); - } else { - streamval = streamdata->streamval; - } - - - for (k=N; k>0; k--) - { - /* find the integer *data for which streamval lies in [W_lower+1, W_upper] */ - W_upper_LSB = W_upper & 0x0000FFFF; - W_upper_MSB = W_upper >> 16; - - /* start at the specified table entry */ - cdf_ptr = *cdf + (*init_index++); - W_tmp = W_upper_MSB * *cdf_ptr; - W_tmp += (W_upper_LSB * *cdf_ptr) >> 16; - if (streamval > W_tmp) - { - for ( ;; ) - { - W_lower = W_tmp; - if (cdf_ptr[0]==65535) - /* range check */ - return -3; - W_tmp = W_upper_MSB * *++cdf_ptr; - W_tmp += (W_upper_LSB * *cdf_ptr) >> 16; - if (streamval <= W_tmp) break; - } - W_upper = W_tmp; - *data++ = (int)(cdf_ptr - *cdf++ - 1); - } else { - for ( ;; ) - { - W_upper = W_tmp; - --cdf_ptr; - if (cdf_ptr<*cdf) { - /* range check */ - return -3; - } - W_tmp = W_upper_MSB * *cdf_ptr; - W_tmp += (W_upper_LSB * *cdf_ptr) >> 16; - if (streamval > W_tmp) break; - } - W_lower = W_tmp; - *data++ = (int)(cdf_ptr - *cdf++); - } - - /* shift interval to start at zero */ - W_upper -= ++W_lower; - /* add integer to bitstream */ - streamval -= W_lower; - - /* renormalize interval and update streamval */ - while ( !(W_upper 
& 0xFF000000) ) /* W_upper < 2^24 */ - { - /* read next byte from stream */ - streamval = (streamval << 8) | *++stream_ptr; - W_upper <<= 8; - } - } - - streamdata->stream_index = (int)(stream_ptr - streamdata->stream); - streamdata->W_upper = W_upper; - streamdata->streamval = streamval; - - - /* find number of bytes in original stream (determined by current interval width) */ - if ( W_upper > 0x01FFFFFF ) - return streamdata->stream_index - 2; - else - return streamdata->stream_index - 1; -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/arith_routines_logist.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/arith_routines_logist.c deleted file mode 100644 index 777780f54fa7..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/arith_routines_logist.c +++ /dev/null @@ -1,303 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * arith_routines.h - * - * This file contains functions for arithmatically encoding and - * decoding DFT coefficients. - * - */ - - -#include "modules/audio_coding/codecs/isac/main/source/arith_routines.h" - - - -static const int32_t kHistEdgesQ15[51] = { - -327680, -314573, -301466, -288359, -275252, -262144, -249037, -235930, -222823, -209716, - -196608, -183501, -170394, -157287, -144180, -131072, -117965, -104858, -91751, -78644, - -65536, -52429, -39322, -26215, -13108, 0, 13107, 26214, 39321, 52428, - 65536, 78643, 91750, 104857, 117964, 131072, 144179, 157286, 170393, 183500, - 196608, 209715, 222822, 235929, 249036, 262144, 275251, 288358, 301465, 314572, - 327680}; - - -static const int kCdfSlopeQ0[51] = { /* Q0 */ - 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, - 5, 5, 13, 23, 47, 87, 154, 315, 700, 1088, - 2471, 6064, 14221, 21463, 36634, 36924, 19750, 13270, 5806, 2312, - 1095, 660, 316, 145, 86, 41, 32, 5, 5, 5, - 5, 5, 5, 5, 5, 5, 5, 5, 5, 2, 0}; - - -static const int kCdfQ16[51] = { /* Q16 */ - 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, - 20, 22, 24, 29, 38, 57, 92, 153, 279, 559, - 994, 1983, 4408, 10097, 18682, 33336, 48105, 56005, 61313, 63636, - 64560, 64998, 65262, 65389, 65447, 65481, 65497, 65510, 65512, 65514, - 65516, 65518, 65520, 65522, 65524, 65526, 65528, 65530, 65532, 65534, - 65535}; - - - -/* function to be converted to fixed point */ -static __inline uint32_t piecewise(int32_t xinQ15) { - - int32_t ind, qtmp1, qtmp2, qtmp3; - uint32_t tmpUW32; - - - qtmp2 = xinQ15; - - if (qtmp2 < kHistEdgesQ15[0]) { - qtmp2 = kHistEdgesQ15[0]; - } - if (qtmp2 > kHistEdgesQ15[50]) { - qtmp2 = kHistEdgesQ15[50]; - } - - qtmp1 = qtmp2 - kHistEdgesQ15[0]; /* Q15 - Q15 = Q15 */ - ind = (qtmp1 * 5) >> 16; /* 2^16 / 5 = 0.4 in Q15 */ - /* Q15 -> Q0 */ - qtmp1 = qtmp2 - kHistEdgesQ15[ind]; /* Q15 - Q15 = Q15 */ - qtmp2 = kCdfSlopeQ0[ind] * qtmp1; /* Q0 * Q15 = Q15 */ - qtmp3 = qtmp2>>15; /* Q15 -> Q0 */ - - tmpUW32 = kCdfQ16[ind] + qtmp3; /* Q0 + Q0 = Q0 */ - return tmpUW32; -} - - - -int WebRtcIsac_EncLogisticMulti2( - Bitstr *streamdata, /* in-/output struct containing bitstream */ - int16_t *dataQ7, /* input: data vector */ - const uint16_t *envQ8, /* input: side info vector defining the width of the pdf */ - const int N, 
/* input: data vector length / 2 */ - const int16_t isSWB12kHz) -{ - uint32_t W_lower, W_upper; - uint32_t W_upper_LSB, W_upper_MSB; - uint8_t *stream_ptr; - uint8_t *maxStreamPtr; - uint8_t *stream_ptr_carry; - uint32_t cdf_lo, cdf_hi; - int k; - - /* point to beginning of stream buffer */ - stream_ptr = streamdata->stream + streamdata->stream_index; - W_upper = streamdata->W_upper; - - maxStreamPtr = streamdata->stream + STREAM_SIZE_MAX_60 - 1; - for (k = 0; k < N; k++) - { - /* compute cdf_lower and cdf_upper by evaluating the piecewise linear cdf */ - cdf_lo = piecewise((*dataQ7 - 64) * *envQ8); - cdf_hi = piecewise((*dataQ7 + 64) * *envQ8); - - /* test and clip if probability gets too small */ - while (cdf_lo+1 >= cdf_hi) { - /* clip */ - if (*dataQ7 > 0) { - *dataQ7 -= 128; - cdf_hi = cdf_lo; - cdf_lo = piecewise((*dataQ7 - 64) * *envQ8); - } else { - *dataQ7 += 128; - cdf_lo = cdf_hi; - cdf_hi = piecewise((*dataQ7 + 64) * *envQ8); - } - } - - dataQ7++; - // increment only once per 4 iterations for SWB-16kHz or WB - // increment only once per 2 iterations for SWB-12kHz - envQ8 += (isSWB12kHz)? (k & 1):((k & 1) & (k >> 1)); - - - /* update interval */ - W_upper_LSB = W_upper & 0x0000FFFF; - W_upper_MSB = W_upper >> 16; - W_lower = W_upper_MSB * cdf_lo; - W_lower += (W_upper_LSB * cdf_lo) >> 16; - W_upper = W_upper_MSB * cdf_hi; - W_upper += (W_upper_LSB * cdf_hi) >> 16; - - /* shift interval such that it begins at zero */ - W_upper -= ++W_lower; - - /* add integer to bitstream */ - streamdata->streamval += W_lower; - - /* handle carry */ - if (streamdata->streamval < W_lower) - { - /* propagate carry */ - stream_ptr_carry = stream_ptr; - while (!(++(*--stream_ptr_carry))); - } - - /* renormalize interval, store most significant byte of streamval and update streamval */ - while ( !(W_upper & 0xFF000000) ) /* W_upper < 2^24 */ - { - W_upper <<= 8; - *stream_ptr++ = (uint8_t) (streamdata->streamval >> 24); - - if(stream_ptr > maxStreamPtr) - { - return -ISAC_DISALLOWED_BITSTREAM_LENGTH; - } - streamdata->streamval <<= 8; - } - } - - /* calculate new stream_index */ - streamdata->stream_index = (int)(stream_ptr - streamdata->stream); - streamdata->W_upper = W_upper; - - return 0; -} - - - -int WebRtcIsac_DecLogisticMulti2( - int16_t *dataQ7, /* output: data vector */ - Bitstr *streamdata, /* in-/output struct containing bitstream */ - const uint16_t *envQ8, /* input: side info vector defining the width of the pdf */ - const int16_t *ditherQ7,/* input: dither vector */ - const int N, /* input: data vector length */ - const int16_t isSWB12kHz) -{ - uint32_t W_lower, W_upper; - uint32_t W_tmp; - uint32_t W_upper_LSB, W_upper_MSB; - uint32_t streamval; - const uint8_t *stream_ptr; - uint32_t cdf_tmp; - int16_t candQ7; - int k; - - // Position just past the end of the stream. STREAM_SIZE_MAX_60 instead of - // STREAM_SIZE_MAX (which is the size of the allocated buffer) because that's - // the limit to how much data is filled in. - const uint8_t* const stream_end = streamdata->stream + STREAM_SIZE_MAX_60; - - stream_ptr = streamdata->stream + streamdata->stream_index; - W_upper = streamdata->W_upper; - if (streamdata->stream_index == 0) /* first time decoder is called for this stream */ - { - /* read first word from bytestream */ - if (stream_ptr + 3 >= stream_end) - return -1; // Would read out of bounds. Malformed input? 
- streamval = *stream_ptr << 24; - streamval |= *++stream_ptr << 16; - streamval |= *++stream_ptr << 8; - streamval |= *++stream_ptr; - } else { - streamval = streamdata->streamval; - } - - - for (k = 0; k < N; k++) - { - /* find the integer *data for which streamval lies in [W_lower+1, W_upper] */ - W_upper_LSB = W_upper & 0x0000FFFF; - W_upper_MSB = W_upper >> 16; - - /* find first candidate by inverting the logistic cdf */ - candQ7 = - *ditherQ7 + 64; - cdf_tmp = piecewise(candQ7 * *envQ8); - - W_tmp = W_upper_MSB * cdf_tmp; - W_tmp += (W_upper_LSB * cdf_tmp) >> 16; - if (streamval > W_tmp) - { - W_lower = W_tmp; - candQ7 += 128; - cdf_tmp = piecewise(candQ7 * *envQ8); - - W_tmp = W_upper_MSB * cdf_tmp; - W_tmp += (W_upper_LSB * cdf_tmp) >> 16; - while (streamval > W_tmp) - { - W_lower = W_tmp; - candQ7 += 128; - cdf_tmp = piecewise(candQ7 * *envQ8); - - W_tmp = W_upper_MSB * cdf_tmp; - W_tmp += (W_upper_LSB * cdf_tmp) >> 16; - - /* error check */ - if (W_lower == W_tmp) return -1; - } - W_upper = W_tmp; - - /* another sample decoded */ - *dataQ7 = candQ7 - 64; - } - else - { - W_upper = W_tmp; - candQ7 -= 128; - cdf_tmp = piecewise(candQ7 * *envQ8); - - W_tmp = W_upper_MSB * cdf_tmp; - W_tmp += (W_upper_LSB * cdf_tmp) >> 16; - while ( !(streamval > W_tmp) ) - { - W_upper = W_tmp; - candQ7 -= 128; - cdf_tmp = piecewise(candQ7 * *envQ8); - - W_tmp = W_upper_MSB * cdf_tmp; - W_tmp += (W_upper_LSB * cdf_tmp) >> 16; - - /* error check */ - if (W_upper == W_tmp) return -1; - } - W_lower = W_tmp; - - /* another sample decoded */ - *dataQ7 = candQ7 + 64; - } - ditherQ7++; - dataQ7++; - // increment only once per 4 iterations for SWB-16kHz or WB - // increment only once per 2 iterations for SWB-12kHz - envQ8 += (isSWB12kHz)? (k & 1):((k & 1) & (k >> 1)); - - /* shift interval to start at zero */ - W_upper -= ++W_lower; - - /* add integer to bitstream */ - streamval -= W_lower; - - /* renormalize interval and update streamval */ - while ( !(W_upper & 0xFF000000) ) /* W_upper < 2^24 */ - { - /* read next byte from stream */ - if (stream_ptr + 1 >= stream_end) - return -1; // Would read out of bounds. Malformed input? - streamval = (streamval << 8) | *++stream_ptr; - W_upper <<= 8; - } - } - - streamdata->stream_index = (int)(stream_ptr - streamdata->stream); - streamdata->W_upper = W_upper; - streamdata->streamval = streamval; - - /* find number of bytes in original stream (determined by current interval width) */ - if ( W_upper > 0x01FFFFFF ) - return streamdata->stream_index - 2; - else - return streamdata->stream_index - 1; -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/audio_decoder_isac.cc b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/audio_decoder_isac.cc deleted file mode 100644 index b671002e1eef..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/audio_decoder_isac.cc +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/audio_coding/codecs/isac/main/include/audio_decoder_isac.h" - -#include "modules/audio_coding/codecs/isac/audio_decoder_isac_t_impl.h" - -namespace webrtc { - -// Explicit instantiation: -template class AudioDecoderIsacT; - -} // namespace webrtc diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/audio_encoder_isac.cc b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/audio_encoder_isac.cc deleted file mode 100644 index b7f2c0b1afec..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/audio_encoder_isac.cc +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_coding/codecs/isac/main/include/audio_encoder_isac.h" - -#include "modules/audio_coding/codecs/isac/audio_encoder_isac_t_impl.h" - -namespace webrtc { - -// Explicit instantiation: -template class AudioEncoderIsacT; - -} // namespace webrtc diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/audio_encoder_isac_unittest.cc b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/audio_encoder_isac_unittest.cc deleted file mode 100644 index 07bab055e149..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/audio_encoder_isac_unittest.cc +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_coding/codecs/isac/main/include/audio_encoder_isac.h" - -#include - -#include "test/gtest.h" - -namespace webrtc { - -namespace { - -void TestBadConfig(const AudioEncoderIsacFloatImpl::Config& config) { - EXPECT_FALSE(config.IsOk()); -} - -void TestGoodConfig(const AudioEncoderIsacFloatImpl::Config& config) { - EXPECT_TRUE(config.IsOk()); - AudioEncoderIsacFloatImpl aei(config); -} - -// Wrap subroutine calls that test things in this, so that the error messages -// will be accompanied by stack traces that make it possible to tell which -// subroutine invocation caused the failure. -#define S(x) \ - do { \ - SCOPED_TRACE(#x); \ - x; \ - } while (0) - -} // namespace - -TEST(AudioEncoderIsacTest, TestConfigBitrate) { - AudioEncoderIsacFloatImpl::Config config; - - // The default value is some real, positive value. - EXPECT_GT(config.bit_rate, 1); - S(TestGoodConfig(config)); - - // 0 is another way to ask for the default value. - config.bit_rate = 0; - S(TestGoodConfig(config)); - - // Try some unreasonable values and watch them fail. 
- config.bit_rate = -1; - S(TestBadConfig(config)); - config.bit_rate = 1; - S(TestBadConfig(config)); - config.bit_rate = std::numeric_limits::max(); - S(TestBadConfig(config)); -} - -} // namespace webrtc diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.c deleted file mode 100644 index 486cd959141b..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.c +++ /dev/null @@ -1,1013 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * BwEstimator.c - * - * This file contains the code for the Bandwidth Estimator designed - * for iSAC. - * - */ - -#include -#include - -#include "modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.h" -#include "modules/audio_coding/codecs/isac/main/source/settings.h" -#include "modules/audio_coding/codecs/isac/main/include/isac.h" -#include "rtc_base/checks.h" - -/* array of quantization levels for bottle neck info; Matlab code: */ -/* sprintf('%4.1ff, ', logspace(log10(5000), log10(40000), 12)) */ -static const float kQRateTableWb[12] = -{ - 10000.0f, 11115.3f, 12355.1f, 13733.1f, 15264.8f, 16967.3f, - 18859.8f, 20963.3f, 23301.4f, 25900.3f, 28789.0f, 32000.0f}; - - -static const float kQRateTableSwb[24] = -{ - 10000.0f, 11115.3f, 12355.1f, 13733.1f, 15264.8f, 16967.3f, - 18859.8f, 20963.3f, 23153.1f, 25342.9f, 27532.7f, 29722.5f, - 31912.3f, 34102.1f, 36291.9f, 38481.7f, 40671.4f, 42861.2f, - 45051.0f, 47240.8f, 49430.6f, 51620.4f, 53810.2f, 56000.0f, -}; - - - - -int32_t WebRtcIsac_InitBandwidthEstimator( - BwEstimatorstr* bwest_str, - enum IsacSamplingRate encoderSampRate, - enum IsacSamplingRate decoderSampRate) -{ - switch(encoderSampRate) - { - case kIsacWideband: - { - bwest_str->send_bw_avg = INIT_BN_EST_WB; - break; - } - case kIsacSuperWideband: - { - bwest_str->send_bw_avg = INIT_BN_EST_SWB; - break; - } - } - - switch(decoderSampRate) - { - case kIsacWideband: - { - bwest_str->prev_frame_length = INIT_FRAME_LEN_WB; - bwest_str->rec_bw_inv = 1.0f / - (INIT_BN_EST_WB + INIT_HDR_RATE_WB); - bwest_str->rec_bw = (int32_t)INIT_BN_EST_WB; - bwest_str->rec_bw_avg_Q = INIT_BN_EST_WB; - bwest_str->rec_bw_avg = INIT_BN_EST_WB + INIT_HDR_RATE_WB; - bwest_str->rec_header_rate = INIT_HDR_RATE_WB; - break; - } - case kIsacSuperWideband: - { - bwest_str->prev_frame_length = INIT_FRAME_LEN_SWB; - bwest_str->rec_bw_inv = 1.0f / - (INIT_BN_EST_SWB + INIT_HDR_RATE_SWB); - bwest_str->rec_bw = (int32_t)INIT_BN_EST_SWB; - bwest_str->rec_bw_avg_Q = INIT_BN_EST_SWB; - bwest_str->rec_bw_avg = INIT_BN_EST_SWB + INIT_HDR_RATE_SWB; - bwest_str->rec_header_rate = INIT_HDR_RATE_SWB; - break; - } - } - - bwest_str->prev_rec_rtp_number = 0; - bwest_str->prev_rec_arr_ts = 0; - bwest_str->prev_rec_send_ts = 0; - bwest_str->prev_rec_rtp_rate = 1.0f; - bwest_str->last_update_ts = 0; - bwest_str->last_reduction_ts = 0; - bwest_str->count_tot_updates_rec = -9; - bwest_str->rec_jitter = 10.0f; - bwest_str->rec_jitter_short_term = 0.0f; - bwest_str->rec_jitter_short_term_abs = 5.0f; - bwest_str->rec_max_delay = 
10.0f; - bwest_str->rec_max_delay_avg_Q = 10.0f; - bwest_str->num_pkts_rec = 0; - - bwest_str->send_max_delay_avg = 10.0f; - - bwest_str->hsn_detect_rec = 0; - - bwest_str->num_consec_rec_pkts_over_30k = 0; - - bwest_str->hsn_detect_snd = 0; - - bwest_str->num_consec_snt_pkts_over_30k = 0; - - bwest_str->in_wait_period = 0; - - bwest_str->change_to_WB = 0; - - bwest_str->numConsecLatePkts = 0; - bwest_str->consecLatency = 0; - bwest_str->inWaitLatePkts = 0; - bwest_str->senderTimestamp = 0; - bwest_str->receiverTimestamp = 0; - - bwest_str->external_bw_info.in_use = 0; - - return 0; -} - -/* This function updates both bottle neck rates */ -/* Parameters: */ -/* rtp_number - value from RTP packet, from NetEq */ -/* frame length - length of signal frame in ms, from iSAC decoder */ -/* send_ts - value in RTP header giving send time in samples */ -/* arr_ts - value given by timeGetTime() time of arrival in samples of packet from NetEq */ -/* pksize - size of packet in bytes, from NetEq */ -/* Index - integer (range 0...23) indicating bottle neck & jitter as estimated by other side */ -/* returns 0 if everything went fine, -1 otherwise */ -int16_t WebRtcIsac_UpdateBandwidthEstimator( - BwEstimatorstr* bwest_str, - const uint16_t rtp_number, - const int32_t frame_length, - const uint32_t send_ts, - const uint32_t arr_ts, - const size_t pksize - /*, const uint16_t Index*/) -{ - float weight = 0.0f; - float curr_bw_inv = 0.0f; - float rec_rtp_rate; - float t_diff_proj; - float arr_ts_diff; - float send_ts_diff; - float arr_time_noise; - float arr_time_noise_abs; - - float delay_correction_factor = 1; - float late_diff = 0.0f; - int immediate_set = 0; - int num_pkts_expected; - - RTC_DCHECK(!bwest_str->external_bw_info.in_use); - - // We have to adjust the header-rate if the first packet has a - // frame-size different than the initialized value. - if ( frame_length != bwest_str->prev_frame_length ) - { - bwest_str->rec_header_rate = (float)HEADER_SIZE * 8.0f * - 1000.0f / (float)frame_length; /* bits/s */ - } - - /* UPDATE ESTIMATES ON THIS SIDE */ - /* compute far-side transmission rate */ - rec_rtp_rate = ((float)pksize * 8.0f * 1000.0f / (float)frame_length) + - bwest_str->rec_header_rate; - // rec_rtp_rate packet bits/s + header bits/s - - /* check for timer wrap-around */ - if (arr_ts < bwest_str->prev_rec_arr_ts) - { - bwest_str->prev_rec_arr_ts = arr_ts; - bwest_str->last_update_ts = arr_ts; - bwest_str->last_reduction_ts = arr_ts + 3*FS; - bwest_str->num_pkts_rec = 0; - - /* store frame length */ - bwest_str->prev_frame_length = frame_length; - - /* store far-side transmission rate */ - bwest_str->prev_rec_rtp_rate = rec_rtp_rate; - - /* store far-side RTP time stamp */ - bwest_str->prev_rec_rtp_number = rtp_number; - - return 0; - } - - bwest_str->num_pkts_rec++; - - /* check that it's not one of the first 9 packets */ - if ( bwest_str->count_tot_updates_rec > 0 ) - { - if(bwest_str->in_wait_period > 0 ) - { - bwest_str->in_wait_period--; - } - - bwest_str->inWaitLatePkts -= ((bwest_str->inWaitLatePkts > 0)? 
1:0); - send_ts_diff = (float)(send_ts - bwest_str->prev_rec_send_ts); - - if (send_ts_diff <= (16 * frame_length)*2) - //doesn't allow for a dropped packet, not sure necessary to be - // that strict -DH - { - /* if not been updated for a long time, reduce the BN estimate */ - if((uint32_t)(arr_ts - bwest_str->last_update_ts) * - 1000.0f / FS > 3000) - { - //how many frames should have been received since the last - // update if too many have been dropped or there have been - // big delays won't allow this reduction may no longer need - // the send_ts_diff here - num_pkts_expected = (int)(((float)(arr_ts - - bwest_str->last_update_ts) * 1000.0f /(float) FS) / - (float)frame_length); - - if(((float)bwest_str->num_pkts_rec/(float)num_pkts_expected) > - 0.9) - { - float inv_bitrate = (float) pow( 0.99995, - (double)((uint32_t)(arr_ts - - bwest_str->last_reduction_ts)*1000.0f/FS) ); - - if ( inv_bitrate ) - { - bwest_str->rec_bw_inv /= inv_bitrate; - - //precautionary, likely never necessary - if (bwest_str->hsn_detect_snd && - bwest_str->hsn_detect_rec) - { - if (bwest_str->rec_bw_inv > 0.000066f) - { - bwest_str->rec_bw_inv = 0.000066f; - } - } - } - else - { - bwest_str->rec_bw_inv = 1.0f / - (INIT_BN_EST_WB + INIT_HDR_RATE_WB); - } - /* reset time-since-update counter */ - bwest_str->last_reduction_ts = arr_ts; - } - else - //reset here? - { - bwest_str->last_reduction_ts = arr_ts + 3*FS; - bwest_str->last_update_ts = arr_ts; - bwest_str->num_pkts_rec = 0; - } - } - } - else - { - bwest_str->last_reduction_ts = arr_ts + 3*FS; - bwest_str->last_update_ts = arr_ts; - bwest_str->num_pkts_rec = 0; - } - - - /* temporarily speed up adaptation if frame length has changed */ - if ( frame_length != bwest_str->prev_frame_length ) - { - bwest_str->count_tot_updates_rec = 10; - bwest_str->rec_header_rate = (float)HEADER_SIZE * 8.0f * - 1000.0f / (float)frame_length; /* bits/s */ - - bwest_str->rec_bw_inv = 1.0f /((float)bwest_str->rec_bw + - bwest_str->rec_header_rate); - } - - //////////////////////// - arr_ts_diff = (float)(arr_ts - bwest_str->prev_rec_arr_ts); - - if (send_ts_diff > 0 ) - { - late_diff = arr_ts_diff - send_ts_diff; - } - else - { - late_diff = arr_ts_diff - (float)(16 * frame_length); - } - - if((late_diff > 0) && !bwest_str->inWaitLatePkts) - { - bwest_str->numConsecLatePkts++; - bwest_str->consecLatency += late_diff; - } - else - { - bwest_str->numConsecLatePkts = 0; - bwest_str->consecLatency = 0; - } - if(bwest_str->numConsecLatePkts > 50) - { - float latencyMs = bwest_str->consecLatency/(FS/1000); - float averageLatencyMs = latencyMs / bwest_str->numConsecLatePkts; - delay_correction_factor = frame_length / (frame_length + averageLatencyMs); - immediate_set = 1; - bwest_str->inWaitLatePkts = (int16_t)((bwest_str->consecLatency/(FS/1000)) / 30);// + 150; - bwest_str->start_wait_period = arr_ts; - } - /////////////////////////////////////////////// - - - - /* update only if previous packet was not lost */ - if ( rtp_number == bwest_str->prev_rec_rtp_number + 1 ) - { - - - if (!(bwest_str->hsn_detect_snd && bwest_str->hsn_detect_rec)) - { - if ((arr_ts_diff > (float)(16 * frame_length))) - { - //1/2 second - if ((late_diff > 8000.0f) && !bwest_str->in_wait_period) - { - delay_correction_factor = 0.7f; - bwest_str->in_wait_period = 55; - bwest_str->start_wait_period = arr_ts; - immediate_set = 1; - } - //320 ms - else if (late_diff > 5120.0f && !bwest_str->in_wait_period) - { - delay_correction_factor = 0.8f; - immediate_set = 1; - bwest_str->in_wait_period = 44; - 
bwest_str->start_wait_period = arr_ts; - } - } - } - - - if ((bwest_str->prev_rec_rtp_rate > bwest_str->rec_bw_avg) && - (rec_rtp_rate > bwest_str->rec_bw_avg) && - !bwest_str->in_wait_period) - { - /* test if still in initiation period and increment counter */ - if (bwest_str->count_tot_updates_rec++ > 99) - { - /* constant weight after initiation part */ - weight = 0.01f; - } - else - { - /* weight decreases with number of updates */ - weight = 1.0f / (float) bwest_str->count_tot_updates_rec; - } - /* Bottle Neck Estimation */ - - /* limit outliers */ - /* if more than 25 ms too much */ - if (arr_ts_diff > frame_length * FS/1000 + 400.0f) - { - // in samples, why 25ms?? - arr_ts_diff = frame_length * FS/1000 + 400.0f; - } - if(arr_ts_diff < (frame_length * FS/1000) - 160.0f) - { - /* don't allow it to be less than frame rate - 10 ms */ - arr_ts_diff = (float)frame_length * FS/1000 - 160.0f; - } - - /* compute inverse receiving rate for last packet */ - curr_bw_inv = arr_ts_diff / ((float)(pksize + HEADER_SIZE) * - 8.0f * FS); // (180+35)*8*16000 = 27.5 Mbit.... - - - if(curr_bw_inv < - (1.0f / (MAX_ISAC_BW + bwest_str->rec_header_rate))) - { - // don't allow inv rate to be larger than MAX - curr_bw_inv = (1.0f / - (MAX_ISAC_BW + bwest_str->rec_header_rate)); - } - - /* update bottle neck rate estimate */ - bwest_str->rec_bw_inv = weight * curr_bw_inv + - (1.0f - weight) * bwest_str->rec_bw_inv; - - /* reset time-since-update counter */ - bwest_str->last_update_ts = arr_ts; - bwest_str->last_reduction_ts = arr_ts + 3 * FS; - bwest_str->num_pkts_rec = 0; - - /* Jitter Estimation */ - /* projected difference between arrival times */ - t_diff_proj = ((float)(pksize + HEADER_SIZE) * 8.0f * - 1000.0f) / bwest_str->rec_bw_avg; - - - // difference between projected and actual - // arrival time differences - arr_time_noise = (float)(arr_ts_diff*1000.0f/FS) - - t_diff_proj; - arr_time_noise_abs = (float) fabs( arr_time_noise ); - - /* long term averaged absolute jitter */ - bwest_str->rec_jitter = weight * arr_time_noise_abs + - (1.0f - weight) * bwest_str->rec_jitter; - if (bwest_str->rec_jitter > 10.0f) - { - bwest_str->rec_jitter = 10.0f; - } - /* short term averaged absolute jitter */ - bwest_str->rec_jitter_short_term_abs = 0.05f * - arr_time_noise_abs + 0.95f * - bwest_str->rec_jitter_short_term_abs; - - /* short term averaged jitter */ - bwest_str->rec_jitter_short_term = 0.05f * arr_time_noise + - 0.95f * bwest_str->rec_jitter_short_term; - } - } - } - else - { - // reset time-since-update counter when - // receiving the first 9 packets - bwest_str->last_update_ts = arr_ts; - bwest_str->last_reduction_ts = arr_ts + 3*FS; - bwest_str->num_pkts_rec = 0; - - bwest_str->count_tot_updates_rec++; - } - - /* limit minimum bottle neck rate */ - if (bwest_str->rec_bw_inv > 1.0f / ((float)MIN_ISAC_BW + - bwest_str->rec_header_rate)) - { - bwest_str->rec_bw_inv = 1.0f / ((float)MIN_ISAC_BW + - bwest_str->rec_header_rate); - } - - // limit maximum bitrate - if (bwest_str->rec_bw_inv < 1.0f / ((float)MAX_ISAC_BW + - bwest_str->rec_header_rate)) - { - bwest_str->rec_bw_inv = 1.0f / ((float)MAX_ISAC_BW + - bwest_str->rec_header_rate); - } - - /* store frame length */ - bwest_str->prev_frame_length = frame_length; - - /* store far-side transmission rate */ - bwest_str->prev_rec_rtp_rate = rec_rtp_rate; - - /* store far-side RTP time stamp */ - bwest_str->prev_rec_rtp_number = rtp_number; - - // Replace bwest_str->rec_max_delay by the new - // value (atomic operation) - bwest_str->rec_max_delay = 3.0f * 
bwest_str->rec_jitter; - - /* store send and arrival time stamp */ - bwest_str->prev_rec_arr_ts = arr_ts ; - bwest_str->prev_rec_send_ts = send_ts; - - /* Replace bwest_str->rec_bw by the new value (atomic operation) */ - bwest_str->rec_bw = (int32_t)(1.0f / bwest_str->rec_bw_inv - - bwest_str->rec_header_rate); - - if (immediate_set) - { - bwest_str->rec_bw = (int32_t) (delay_correction_factor * - (float) bwest_str->rec_bw); - - if (bwest_str->rec_bw < (int32_t) MIN_ISAC_BW) - { - bwest_str->rec_bw = (int32_t) MIN_ISAC_BW; - } - - bwest_str->rec_bw_avg = bwest_str->rec_bw + - bwest_str->rec_header_rate; - - bwest_str->rec_bw_avg_Q = (float) bwest_str->rec_bw; - - bwest_str->rec_jitter_short_term = 0.0f; - - bwest_str->rec_bw_inv = 1.0f / (bwest_str->rec_bw + - bwest_str->rec_header_rate); - - bwest_str->count_tot_updates_rec = 1; - - immediate_set = 0; - bwest_str->consecLatency = 0; - bwest_str->numConsecLatePkts = 0; - } - - return 0; -} - - -/* This function updates the send bottle neck rate */ -/* Index - integer (range 0...23) indicating bottle neck & jitter as estimated by other side */ -/* returns 0 if everything went fine, -1 otherwise */ -int16_t WebRtcIsac_UpdateUplinkBwImpl( - BwEstimatorstr* bwest_str, - int16_t index, - enum IsacSamplingRate encoderSamplingFreq) -{ - RTC_DCHECK(!bwest_str->external_bw_info.in_use); - - if((index < 0) || (index > 23)) - { - return -ISAC_RANGE_ERROR_BW_ESTIMATOR; - } - - /* UPDATE ESTIMATES FROM OTHER SIDE */ - if(encoderSamplingFreq == kIsacWideband) - { - if(index > 11) - { - index -= 12; - /* compute the jitter estimate as decoded on the other side */ - bwest_str->send_max_delay_avg = 0.9f * bwest_str->send_max_delay_avg + - 0.1f * (float)MAX_ISAC_MD; - } - else - { - /* compute the jitter estimate as decoded on the other side */ - bwest_str->send_max_delay_avg = 0.9f * bwest_str->send_max_delay_avg + - 0.1f * (float)MIN_ISAC_MD; - } - - /* compute the BN estimate as decoded on the other side */ - bwest_str->send_bw_avg = 0.9f * bwest_str->send_bw_avg + - 0.1f * kQRateTableWb[index]; - } - else - { - /* compute the BN estimate as decoded on the other side */ - bwest_str->send_bw_avg = 0.9f * bwest_str->send_bw_avg + - 0.1f * kQRateTableSwb[index]; - } - - if (bwest_str->send_bw_avg > (float) 28000 && !bwest_str->hsn_detect_snd) - { - bwest_str->num_consec_snt_pkts_over_30k++; - - if (bwest_str->num_consec_snt_pkts_over_30k >= 66) - { - //approx 2 seconds with 30ms frames - bwest_str->hsn_detect_snd = 1; - } - } - else if (!bwest_str->hsn_detect_snd) - { - bwest_str->num_consec_snt_pkts_over_30k = 0; - } - return 0; -} - -// called when there is upper-band bit-stream to update jitter -// statistics. 
-int16_t WebRtcIsac_UpdateUplinkJitter( - BwEstimatorstr* bwest_str, - int32_t index) -{ - RTC_DCHECK(!bwest_str->external_bw_info.in_use); - - if((index < 0) || (index > 23)) - { - return -ISAC_RANGE_ERROR_BW_ESTIMATOR; - } - - if(index > 0) - { - /* compute the jitter estimate as decoded on the other side */ - bwest_str->send_max_delay_avg = 0.9f * bwest_str->send_max_delay_avg + - 0.1f * (float)MAX_ISAC_MD; - } - else - { - /* compute the jitter estimate as decoded on the other side */ - bwest_str->send_max_delay_avg = 0.9f * bwest_str->send_max_delay_avg + - 0.1f * (float)MIN_ISAC_MD; - } - - return 0; -} - - - -// Returns the bandwidth/jitter estimation code (integer 0...23) -// to put in the sending iSAC payload -void -WebRtcIsac_GetDownlinkBwJitIndexImpl( - BwEstimatorstr* bwest_str, - int16_t* bottleneckIndex, - int16_t* jitterInfo, - enum IsacSamplingRate decoderSamplingFreq) -{ - float MaxDelay; - //uint16_t MaxDelayBit; - - float rate; - float r; - float e1, e2; - const float weight = 0.1f; - const float* ptrQuantizationTable; - int16_t addJitterInfo; - int16_t minInd; - int16_t maxInd; - int16_t midInd; - - if (bwest_str->external_bw_info.in_use) { - *bottleneckIndex = bwest_str->external_bw_info.bottleneck_idx; - *jitterInfo = bwest_str->external_bw_info.jitter_info; - return; - } - - /* Get Max Delay Bit */ - /* get unquantized max delay */ - MaxDelay = (float)WebRtcIsac_GetDownlinkMaxDelay(bwest_str); - - if ( ((1.f - weight) * bwest_str->rec_max_delay_avg_Q + weight * - MAX_ISAC_MD - MaxDelay) > (MaxDelay - (1.f-weight) * - bwest_str->rec_max_delay_avg_Q - weight * MIN_ISAC_MD) ) - { - jitterInfo[0] = 0; - /* update quantized average */ - bwest_str->rec_max_delay_avg_Q = - (1.f - weight) * bwest_str->rec_max_delay_avg_Q + weight * - (float)MIN_ISAC_MD; - } - else - { - jitterInfo[0] = 1; - /* update quantized average */ - bwest_str->rec_max_delay_avg_Q = - (1.f-weight) * bwest_str->rec_max_delay_avg_Q + weight * - (float)MAX_ISAC_MD; - } - - // Get unquantized rate. - rate = (float)WebRtcIsac_GetDownlinkBandwidth(bwest_str); - - /* Get Rate Index */ - if(decoderSamplingFreq == kIsacWideband) - { - ptrQuantizationTable = kQRateTableWb; - addJitterInfo = 1; - maxInd = 11; - } - else - { - ptrQuantizationTable = kQRateTableSwb; - addJitterInfo = 0; - maxInd = 23; - } - - minInd = 0; - while(maxInd > minInd + 1) - { - midInd = (maxInd + minInd) >> 1; - if(rate > ptrQuantizationTable[midInd]) - { - minInd = midInd; - } - else - { - maxInd = midInd; - } - } - // Chose the index which gives results an average which is closest - // to rate - r = (1 - weight) * bwest_str->rec_bw_avg_Q - rate; - e1 = weight * ptrQuantizationTable[minInd] + r; - e2 = weight * ptrQuantizationTable[maxInd] + r; - e1 = (e1 > 0)? e1:-e1; - e2 = (e2 > 0)? 
e2:-e2; - if(e1 < e2) - { - bottleneckIndex[0] = minInd; - } - else - { - bottleneckIndex[0] = maxInd; - } - - bwest_str->rec_bw_avg_Q = (1 - weight) * bwest_str->rec_bw_avg_Q + - weight * ptrQuantizationTable[bottleneckIndex[0]]; - bottleneckIndex[0] += jitterInfo[0] * 12 * addJitterInfo; - - bwest_str->rec_bw_avg = (1 - weight) * bwest_str->rec_bw_avg + weight * - (rate + bwest_str->rec_header_rate); -} - - - -/* get the bottle neck rate from far side to here, as estimated on this side */ -int32_t WebRtcIsac_GetDownlinkBandwidth( const BwEstimatorstr *bwest_str) -{ - int32_t rec_bw; - float jitter_sign; - float bw_adjust; - - RTC_DCHECK(!bwest_str->external_bw_info.in_use); - - /* create a value between -1.0 and 1.0 indicating "average sign" of jitter */ - jitter_sign = bwest_str->rec_jitter_short_term / - bwest_str->rec_jitter_short_term_abs; - - /* adjust bw proportionally to negative average jitter sign */ - bw_adjust = 1.0f - jitter_sign * (0.15f + 0.15f * jitter_sign * jitter_sign); - - /* adjust Rate if jitter sign is mostly constant */ - rec_bw = (int32_t)(bwest_str->rec_bw * bw_adjust); - - /* limit range of bottle neck rate */ - if (rec_bw < MIN_ISAC_BW) - { - rec_bw = MIN_ISAC_BW; - } - else if (rec_bw > MAX_ISAC_BW) - { - rec_bw = MAX_ISAC_BW; - } - return rec_bw; -} - -/* Returns the max delay (in ms) */ -int32_t -WebRtcIsac_GetDownlinkMaxDelay(const BwEstimatorstr *bwest_str) -{ - int32_t rec_max_delay; - - RTC_DCHECK(!bwest_str->external_bw_info.in_use); - - rec_max_delay = (int32_t)(bwest_str->rec_max_delay); - - /* limit range of jitter estimate */ - if (rec_max_delay < MIN_ISAC_MD) - { - rec_max_delay = MIN_ISAC_MD; - } - else if (rec_max_delay > MAX_ISAC_MD) - { - rec_max_delay = MAX_ISAC_MD; - } - return rec_max_delay; -} - -/* Clamp val to the closed interval [min,max]. */ -static int32_t clamp(int32_t val, int32_t min, int32_t max) { - RTC_DCHECK_LE(min, max); - return val < min ? min : (val > max ? max : val); -} - -int32_t WebRtcIsac_GetUplinkBandwidth(const BwEstimatorstr* bwest_str) { - return bwest_str->external_bw_info.in_use - ? bwest_str->external_bw_info.send_bw_avg - : clamp(bwest_str->send_bw_avg, MIN_ISAC_BW, MAX_ISAC_BW); -} - -int32_t WebRtcIsac_GetUplinkMaxDelay(const BwEstimatorstr* bwest_str) { - return bwest_str->external_bw_info.in_use - ? 
bwest_str->external_bw_info.send_max_delay_avg - : clamp(bwest_str->send_max_delay_avg, MIN_ISAC_MD, MAX_ISAC_MD); -} - -/* - * update long-term average bitrate and amount of data in buffer - * returns minimum payload size (bytes) - */ -int WebRtcIsac_GetMinBytes( - RateModel* State, - int StreamSize, /* bytes in bitstream */ - const int FrameSamples, /* samples per frame */ - const double BottleNeck, /* bottle neck rate; excl headers (bps) */ - const double DelayBuildUp, /* max delay from bottleneck buffering (ms) */ - enum ISACBandwidth bandwidth - /*,int16_t frequentLargePackets*/) -{ - double MinRate = 0.0; - int MinBytes; - double TransmissionTime; - int burstInterval = BURST_INTERVAL; - - // first 10 packets @ low rate, then INIT_BURST_LEN packets @ - // fixed rate of INIT_RATE bps - if (State->InitCounter > 0) - { - if (State->InitCounter-- <= INIT_BURST_LEN) - { - if(bandwidth == isac8kHz) - { - MinRate = INIT_RATE_WB; - } - else - { - MinRate = INIT_RATE_SWB; - } - } - else - { - MinRate = 0; - } - } - else - { - /* handle burst */ - if (State->BurstCounter) - { - if (State->StillBuffered < (1.0 - 1.0/BURST_LEN) * DelayBuildUp) - { - /* max bps derived from BottleNeck and DelayBuildUp values */ - MinRate = (1.0 + (FS/1000) * DelayBuildUp / - (double)(BURST_LEN * FrameSamples)) * BottleNeck; - } - else - { - // max bps derived from StillBuffered and DelayBuildUp - // values - MinRate = (1.0 + (FS/1000) * (DelayBuildUp - - State->StillBuffered) / (double)FrameSamples) * BottleNeck; - if (MinRate < 1.04 * BottleNeck) - { - MinRate = 1.04 * BottleNeck; - } - } - State->BurstCounter--; - } - } - - - /* convert rate from bits/second to bytes/packet */ - MinBytes = (int) (MinRate * FrameSamples / (8.0 * FS)); - - /* StreamSize will be adjusted if less than MinBytes */ - if (StreamSize < MinBytes) - { - StreamSize = MinBytes; - } - - /* keep track of when bottle neck was last exceeded by at least 1% */ - if (StreamSize * 8.0 * FS / FrameSamples > 1.01 * BottleNeck) { - if (State->PrevExceed) { - /* bottle_neck exceded twice in a row, decrease ExceedAgo */ - State->ExceedAgo -= /*BURST_INTERVAL*/ burstInterval / (BURST_LEN - 1); - if (State->ExceedAgo < 0) - State->ExceedAgo = 0; - } - else - { - State->ExceedAgo += (FrameSamples * 1000) / FS; /* ms */ - State->PrevExceed = 1; - } - } - else - { - State->PrevExceed = 0; - State->ExceedAgo += (FrameSamples * 1000) / FS; /* ms */ - } - - /* set burst flag if bottle neck not exceeded for long time */ - if ((State->ExceedAgo > burstInterval) && - (State->BurstCounter == 0)) - { - if (State->PrevExceed) - { - State->BurstCounter = BURST_LEN - 1; - } - else - { - State->BurstCounter = BURST_LEN; - } - } - - - /* Update buffer delay */ - TransmissionTime = StreamSize * 8.0 * 1000.0 / BottleNeck; /* ms */ - State->StillBuffered += TransmissionTime; - State->StillBuffered -= (FrameSamples * 1000) / FS; /* ms */ - if (State->StillBuffered < 0.0) - { - State->StillBuffered = 0.0; - } - - return MinBytes; -} - - -/* - * update long-term average bitrate and amount of data in buffer - */ -void WebRtcIsac_UpdateRateModel( - RateModel *State, - int StreamSize, /* bytes in bitstream */ - const int FrameSamples, /* samples per frame */ - const double BottleNeck) /* bottle neck rate; excl headers (bps) */ -{ - double TransmissionTime; - - /* avoid the initial "high-rate" burst */ - State->InitCounter = 0; - - /* Update buffer delay */ - TransmissionTime = StreamSize * 8.0 * 1000.0 / BottleNeck; /* ms */ - State->StillBuffered += TransmissionTime; - 
State->StillBuffered -= (FrameSamples * 1000) / FS; /* ms */ - if (State->StillBuffered < 0.0) - State->StillBuffered = 0.0; - -} - - -void WebRtcIsac_InitRateModel( - RateModel *State) -{ - State->PrevExceed = 0; /* boolean */ - State->ExceedAgo = 0; /* ms */ - State->BurstCounter = 0; /* packets */ - State->InitCounter = INIT_BURST_LEN + 10; /* packets */ - State->StillBuffered = 1.0; /* ms */ -} - -int WebRtcIsac_GetNewFrameLength( - double bottle_neck, - int current_framesamples) -{ - int new_framesamples; - - const int Thld_20_30 = 20000; - - //const int Thld_30_20 = 30000; - const int Thld_30_20 = 1000000; // disable 20 ms frames - - const int Thld_30_60 = 18000; - //const int Thld_30_60 = 0; // disable 60 ms frames - - const int Thld_60_30 = 27000; - - - new_framesamples = current_framesamples; - - /* find new framelength */ - switch(current_framesamples) { - case 320: - if (bottle_neck < Thld_20_30) - new_framesamples = 480; - break; - case 480: - if (bottle_neck < Thld_30_60) - new_framesamples = 960; - else if (bottle_neck > Thld_30_20) - new_framesamples = 320; - break; - case 960: - if (bottle_neck >= Thld_60_30) - new_framesamples = 480; - break; - } - - return new_framesamples; -} - -double WebRtcIsac_GetSnr( - double bottle_neck, - int framesamples) -{ - double s2nr; - - const double a_20 = -30.0; - const double b_20 = 0.8; - const double c_20 = 0.0; - - const double a_30 = -23.0; - const double b_30 = 0.48; - const double c_30 = 0.0; - - const double a_60 = -23.0; - const double b_60 = 0.53; - const double c_60 = 0.0; - - - /* find new SNR value */ - switch(framesamples) { - case 320: - s2nr = a_20 + b_20 * bottle_neck * 0.001 + c_20 * bottle_neck * - bottle_neck * 0.000001; - break; - case 480: - s2nr = a_30 + b_30 * bottle_neck * 0.001 + c_30 * bottle_neck * - bottle_neck * 0.000001; - break; - case 960: - s2nr = a_60 + b_60 * bottle_neck * 0.001 + c_60 * bottle_neck * - bottle_neck * 0.000001; - break; - default: - s2nr = 0; - } - - return s2nr; - -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.h b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.h deleted file mode 100644 index 5f4550a3a526..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.h +++ /dev/null @@ -1,165 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * bandwidth_estimator.h - * - * This header file contains the API for the Bandwidth Estimator - * designed for iSAC. 
- * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_BANDWIDTH_ESTIMATOR_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_BANDWIDTH_ESTIMATOR_H_ - -#include - -#include "modules/audio_coding/codecs/isac/main/source/settings.h" -#include "modules/audio_coding/codecs/isac/main/source/structs.h" - -#define MIN_ISAC_BW 10000 -#define MIN_ISAC_BW_LB 10000 -#define MIN_ISAC_BW_UB 25000 - -#define MAX_ISAC_BW 56000 -#define MAX_ISAC_BW_UB 32000 -#define MAX_ISAC_BW_LB 32000 - -#define MIN_ISAC_MD 5 -#define MAX_ISAC_MD 25 - -// assumed header size, in bytes; we don't know the exact number -// (header compression may be used) -#define HEADER_SIZE 35 - -// Initial Frame-Size, in ms, for Wideband & Super-Wideband Mode -#define INIT_FRAME_LEN_WB 60 -#define INIT_FRAME_LEN_SWB 30 - -// Initial Bottleneck Estimate, in bits/sec, for -// Wideband & Super-wideband mode -#define INIT_BN_EST_WB 20e3f -#define INIT_BN_EST_SWB 56e3f - -// Initial Header rate (header rate depends on frame-size), -// in bits/sec, for Wideband & Super-Wideband mode. -#define INIT_HDR_RATE_WB \ - ((float)HEADER_SIZE * 8.0f * 1000.0f / (float)INIT_FRAME_LEN_WB) -#define INIT_HDR_RATE_SWB \ - ((float)HEADER_SIZE * 8.0f * 1000.0f / (float)INIT_FRAME_LEN_SWB) - -// number of packets in a row for a high rate burst -#define BURST_LEN 3 - -// ms, max time between two full bursts -#define BURST_INTERVAL 500 - -// number of packets in a row for initial high rate burst -#define INIT_BURST_LEN 5 - -// bits/s, rate for the first BURST_LEN packets -#define INIT_RATE_WB INIT_BN_EST_WB -#define INIT_RATE_SWB INIT_BN_EST_SWB - -#if defined(__cplusplus) -extern "C" { -#endif - -/* This function initializes the struct */ -/* to be called before using the struct for anything else */ -/* returns 0 if everything went fine, -1 otherwise */ -int32_t WebRtcIsac_InitBandwidthEstimator( - BwEstimatorstr* bwest_str, - enum IsacSamplingRate encoderSampRate, - enum IsacSamplingRate decoderSampRate); - -/* This function updates the receiving estimate */ -/* Parameters: */ -/* rtp_number - value from RTP packet, from NetEq */ -/* frame length - length of signal frame in ms, from iSAC decoder */ -/* send_ts - value in RTP header giving send time in samples */ -/* arr_ts - value given by timeGetTime() time of arrival in samples of - * packet from NetEq */ -/* pksize - size of packet in bytes, from NetEq */ -/* Index - integer (range 0...23) indicating bottle neck & jitter as - * estimated by other side */ -/* returns 0 if everything went fine, -1 otherwise */ -int16_t WebRtcIsac_UpdateBandwidthEstimator(BwEstimatorstr* bwest_str, - uint16_t rtp_number, - int32_t frame_length, - uint32_t send_ts, - uint32_t arr_ts, - size_t pksize); - -/* Update receiving estimates. Used when we only receive BWE index, no iSAC data - * packet. 
*/ -int16_t WebRtcIsac_UpdateUplinkBwImpl( - BwEstimatorstr* bwest_str, - int16_t Index, - enum IsacSamplingRate encoderSamplingFreq); - -/* Returns the bandwidth/jitter estimation code (integer 0...23) to put in the - * sending iSAC payload */ -void WebRtcIsac_GetDownlinkBwJitIndexImpl( - BwEstimatorstr* bwest_str, - int16_t* bottleneckIndex, - int16_t* jitterInfo, - enum IsacSamplingRate decoderSamplingFreq); - -/* Returns the bandwidth estimation (in bps) */ -int32_t WebRtcIsac_GetDownlinkBandwidth(const BwEstimatorstr* bwest_str); - -/* Returns the max delay (in ms) */ -int32_t WebRtcIsac_GetDownlinkMaxDelay(const BwEstimatorstr* bwest_str); - -/* Returns the bandwidth that iSAC should send with in bps */ -int32_t WebRtcIsac_GetUplinkBandwidth(const BwEstimatorstr* bwest_str); - -/* Returns the max delay value from the other side in ms */ -int32_t WebRtcIsac_GetUplinkMaxDelay(const BwEstimatorstr* bwest_str); - -/* - * update amount of data in bottle neck buffer and burst handling - * returns minimum payload size (bytes) - */ -int WebRtcIsac_GetMinBytes( - RateModel* State, - int StreamSize, /* bytes in bitstream */ - int FrameLen, /* ms per frame */ - double BottleNeck, /* bottle neck rate; excl headers (bps) */ - double DelayBuildUp, /* max delay from bottleneck buffering (ms) */ - enum ISACBandwidth bandwidth - /*,int16_t frequentLargePackets*/); - -/* - * update long-term average bitrate and amount of data in buffer - */ -void WebRtcIsac_UpdateRateModel( - RateModel* State, - int StreamSize, /* bytes in bitstream */ - int FrameSamples, /* samples per frame */ - double BottleNeck); /* bottle neck rate; excl headers (bps) */ - -void WebRtcIsac_InitRateModel(RateModel* State); - -/* Returns the new framelength value (input argument: bottle_neck) */ -int WebRtcIsac_GetNewFrameLength(double bottle_neck, int current_framelength); - -/* Returns the new SNR value (input argument: bottle_neck) */ -double WebRtcIsac_GetSnr(double bottle_neck, int new_framelength); - -int16_t WebRtcIsac_UpdateUplinkJitter(BwEstimatorstr* bwest_str, int32_t index); - -#if defined(__cplusplus) -} -#endif - -#endif /* MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_BANDWIDTH_ESTIMATOR_H_ \ - */ diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/codec.h b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/codec.h deleted file mode 100644 index a7c7ddc14a62..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/codec.h +++ /dev/null @@ -1,223 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * codec.h - * - * This header file contains the calls to the internal encoder - * and decoder functions. 
- * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_CODEC_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_CODEC_H_ - -#include - -#include "modules/audio_coding/codecs/isac/main/source/structs.h" -#include "modules/third_party/fft/fft.h" - -void WebRtcIsac_ResetBitstream(Bitstr* bit_stream); - -int WebRtcIsac_EstimateBandwidth(BwEstimatorstr* bwest_str, - Bitstr* streamdata, - size_t packet_size, - uint16_t rtp_seq_number, - uint32_t send_ts, - uint32_t arr_ts, - enum IsacSamplingRate encoderSampRate, - enum IsacSamplingRate decoderSampRate); - -int WebRtcIsac_DecodeLb(const TransformTables* transform_tables, - float* signal_out, - ISACLBDecStruct* ISACdec_obj, - int16_t* current_framesamples, - int16_t isRCUPayload); - -int WebRtcIsac_DecodeRcuLb(float* signal_out, - ISACLBDecStruct* ISACdec_obj, - int16_t* current_framesamples); - -int WebRtcIsac_EncodeLb(const TransformTables* transform_tables, - float* in, - ISACLBEncStruct* ISACencLB_obj, - int16_t codingMode, - int16_t bottleneckIndex); - -int WebRtcIsac_EncodeStoredDataLb(const IsacSaveEncoderData* ISACSavedEnc_obj, - Bitstr* ISACBitStr_obj, - int BWnumber, - float scale); - -int WebRtcIsac_EncodeStoredDataUb( - const ISACUBSaveEncDataStruct* ISACSavedEnc_obj, - Bitstr* bitStream, - int32_t jitterInfo, - float scale, - enum ISACBandwidth bandwidth); - -int16_t WebRtcIsac_GetRedPayloadUb( - const ISACUBSaveEncDataStruct* ISACSavedEncObj, - Bitstr* bitStreamObj, - enum ISACBandwidth bandwidth); - -/****************************************************************************** - * WebRtcIsac_RateAllocation() - * Internal function to perform a rate-allocation for upper and lower-band, - * given a total rate. - * - * Input: - * - inRateBitPerSec : a total bit-rate in bits/sec. - * - * Output: - * - rateLBBitPerSec : a bit-rate allocated to the lower-band - * in bits/sec. - * - rateUBBitPerSec : a bit-rate allocated to the upper-band - * in bits/sec. - * - * Return value : 0 if rate allocation has been successful. - * -1 if failed to allocate rates. - */ - -int16_t WebRtcIsac_RateAllocation(int32_t inRateBitPerSec, - double* rateLBBitPerSec, - double* rateUBBitPerSec, - enum ISACBandwidth* bandwidthKHz); - -/****************************************************************************** - * WebRtcIsac_DecodeUb16() - * - * Decode the upper-band if the codec is in 0-16 kHz mode. - * - * Input/Output: - * -ISACdec_obj : pointer to the upper-band decoder object. The - * bit-stream is stored inside the decoder object. - * - * Output: - * -signal_out : decoded audio, 480 samples 30 ms. - * - * Return value : >0 number of decoded bytes. - * <0 if an error occurred. - */ -int WebRtcIsac_DecodeUb16(const TransformTables* transform_tables, - float* signal_out, - ISACUBDecStruct* ISACdec_obj, - int16_t isRCUPayload); - -/****************************************************************************** - * WebRtcIsac_DecodeUb12() - * - * Decode the upper-band if the codec is in 0-12 kHz mode. - * - * Input/Output: - * -ISACdec_obj : pointer to the upper-band decoder object. The - * bit-stream is stored inside the decoder object. - * - * Output: - * -signal_out : decoded audio, 480 samples 30 ms. - * - * Return value : >0 number of decoded bytes. - * <0 if an error occurred. 
- */ -int WebRtcIsac_DecodeUb12(const TransformTables* transform_tables, - float* signal_out, - ISACUBDecStruct* ISACdec_obj, - int16_t isRCUPayload); - -/****************************************************************************** - * WebRtcIsac_EncodeUb16() - * - * Encode the upper-band if the codec is in 0-16 kHz mode. - * - * Input: - * -in : upper-band audio, 160 samples (10 ms). - * - * Input/Output: - * -ISACdec_obj : pointer to the upper-band encoder object. The - * bit-stream is stored inside the encoder object. - * - * Return value : >0 number of encoded bytes. - * <0 if an error occurred. - */ -int WebRtcIsac_EncodeUb16(const TransformTables* transform_tables, - float* in, - ISACUBEncStruct* ISACenc_obj, - int32_t jitterInfo); - -/****************************************************************************** - * WebRtcIsac_EncodeUb12() - * - * Encode the upper-band if the codec is in 0-12 kHz mode. - * - * Input: - * -in : upper-band audio, 160 samples (10 ms). - * - * Input/Output: - * -ISACdec_obj : pointer to the upper-band encoder object. The - * bit-stream is stored inside the encoder object. - * - * Return value : >0 number of encoded bytes. - * <0 if an error occurred. - */ -int WebRtcIsac_EncodeUb12(const TransformTables* transform_tables, - float* in, - ISACUBEncStruct* ISACenc_obj, - int32_t jitterInfo); - -/************************** initialization functions *************************/ - -void WebRtcIsac_InitMasking(MaskFiltstr* maskdata); - -void WebRtcIsac_InitPostFilterbank(PostFiltBankstr* postfiltdata); - -/**************************** transform functions ****************************/ - -void WebRtcIsac_InitTransform(TransformTables* tables); - -void WebRtcIsac_Time2Spec(const TransformTables* tables, - double* inre1, - double* inre2, - int16_t* outre, - int16_t* outim, - FFTstr* fftstr_obj); - -void WebRtcIsac_Spec2time(const TransformTables* tables, - double* inre, - double* inim, - double* outre1, - double* outre2, - FFTstr* fftstr_obj); - -/***************************** filterbank functions **************************/ - -void WebRtcIsac_FilterAndCombineFloat(float* InLP, - float* InHP, - float* Out, - PostFiltBankstr* postfiltdata); - -/************************* normalized lattice filters ************************/ - -void WebRtcIsac_NormLatticeFilterMa(int orderCoef, - float* stateF, - float* stateG, - float* lat_in, - double* filtcoeflo, - double* lat_out); - -void WebRtcIsac_NormLatticeFilterAr(int orderCoef, - float* stateF, - float* stateG, - double* lat_in, - double* lo_filt_coef, - float* lat_out); - -void WebRtcIsac_Dir2Lat(double* a, int orderCoef, float* sth, float* cth); - -#endif /* MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_CODEC_H_ */ diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/crc.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/crc.c deleted file mode 100644 index 1bb0827031c4..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/crc.c +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include - -#include "modules/audio_coding/codecs/isac/main/source/crc.h" -#include "common_audio/signal_processing/include/signal_processing_library.h" - -#define POLYNOMIAL 0x04c11db7L - - -static const uint32_t kCrcTable[256] = { - 0, 0x4c11db7, 0x9823b6e, 0xd4326d9, 0x130476dc, 0x17c56b6b, - 0x1a864db2, 0x1e475005, 0x2608edb8, 0x22c9f00f, 0x2f8ad6d6, 0x2b4bcb61, - 0x350c9b64, 0x31cd86d3, 0x3c8ea00a, 0x384fbdbd, 0x4c11db70, 0x48d0c6c7, - 0x4593e01e, 0x4152fda9, 0x5f15adac, 0x5bd4b01b, 0x569796c2, 0x52568b75, - 0x6a1936c8, 0x6ed82b7f, 0x639b0da6, 0x675a1011, 0x791d4014, 0x7ddc5da3, - 0x709f7b7a, 0x745e66cd, 0x9823b6e0, 0x9ce2ab57, 0x91a18d8e, 0x95609039, - 0x8b27c03c, 0x8fe6dd8b, 0x82a5fb52, 0x8664e6e5, 0xbe2b5b58, 0xbaea46ef, - 0xb7a96036, 0xb3687d81, 0xad2f2d84, 0xa9ee3033, 0xa4ad16ea, 0xa06c0b5d, - 0xd4326d90, 0xd0f37027, 0xddb056fe, 0xd9714b49, 0xc7361b4c, 0xc3f706fb, - 0xceb42022, 0xca753d95, 0xf23a8028, 0xf6fb9d9f, 0xfbb8bb46, 0xff79a6f1, - 0xe13ef6f4, 0xe5ffeb43, 0xe8bccd9a, 0xec7dd02d, 0x34867077, 0x30476dc0, - 0x3d044b19, 0x39c556ae, 0x278206ab, 0x23431b1c, 0x2e003dc5, 0x2ac12072, - 0x128e9dcf, 0x164f8078, 0x1b0ca6a1, 0x1fcdbb16, 0x18aeb13, 0x54bf6a4, - 0x808d07d, 0xcc9cdca, 0x7897ab07, 0x7c56b6b0, 0x71159069, 0x75d48dde, - 0x6b93dddb, 0x6f52c06c, 0x6211e6b5, 0x66d0fb02, 0x5e9f46bf, 0x5a5e5b08, - 0x571d7dd1, 0x53dc6066, 0x4d9b3063, 0x495a2dd4, 0x44190b0d, 0x40d816ba, - 0xaca5c697, 0xa864db20, 0xa527fdf9, 0xa1e6e04e, 0xbfa1b04b, 0xbb60adfc, - 0xb6238b25, 0xb2e29692, 0x8aad2b2f, 0x8e6c3698, 0x832f1041, 0x87ee0df6, - 0x99a95df3, 0x9d684044, 0x902b669d, 0x94ea7b2a, 0xe0b41de7, 0xe4750050, - 0xe9362689, 0xedf73b3e, 0xf3b06b3b, 0xf771768c, 0xfa325055, 0xfef34de2, - 0xc6bcf05f, 0xc27dede8, 0xcf3ecb31, 0xcbffd686, 0xd5b88683, 0xd1799b34, - 0xdc3abded, 0xd8fba05a, 0x690ce0ee, 0x6dcdfd59, 0x608edb80, 0x644fc637, - 0x7a089632, 0x7ec98b85, 0x738aad5c, 0x774bb0eb, 0x4f040d56, 0x4bc510e1, - 0x46863638, 0x42472b8f, 0x5c007b8a, 0x58c1663d, 0x558240e4, 0x51435d53, - 0x251d3b9e, 0x21dc2629, 0x2c9f00f0, 0x285e1d47, 0x36194d42, 0x32d850f5, - 0x3f9b762c, 0x3b5a6b9b, 0x315d626, 0x7d4cb91, 0xa97ed48, 0xe56f0ff, - 0x1011a0fa, 0x14d0bd4d, 0x19939b94, 0x1d528623, 0xf12f560e, 0xf5ee4bb9, - 0xf8ad6d60, 0xfc6c70d7, 0xe22b20d2, 0xe6ea3d65, 0xeba91bbc, 0xef68060b, - 0xd727bbb6, 0xd3e6a601, 0xdea580d8, 0xda649d6f, 0xc423cd6a, 0xc0e2d0dd, - 0xcda1f604, 0xc960ebb3, 0xbd3e8d7e, 0xb9ff90c9, 0xb4bcb610, 0xb07daba7, - 0xae3afba2, 0xaafbe615, 0xa7b8c0cc, 0xa379dd7b, 0x9b3660c6, 0x9ff77d71, - 0x92b45ba8, 0x9675461f, 0x8832161a, 0x8cf30bad, 0x81b02d74, 0x857130c3, - 0x5d8a9099, 0x594b8d2e, 0x5408abf7, 0x50c9b640, 0x4e8ee645, 0x4a4ffbf2, - 0x470cdd2b, 0x43cdc09c, 0x7b827d21, 0x7f436096, 0x7200464f, 0x76c15bf8, - 0x68860bfd, 0x6c47164a, 0x61043093, 0x65c52d24, 0x119b4be9, 0x155a565e, - 0x18197087, 0x1cd86d30, 0x29f3d35, 0x65e2082, 0xb1d065b, 0xfdc1bec, - 0x3793a651, 0x3352bbe6, 0x3e119d3f, 0x3ad08088, 0x2497d08d, 0x2056cd3a, - 0x2d15ebe3, 0x29d4f654, 0xc5a92679, 0xc1683bce, 0xcc2b1d17, 0xc8ea00a0, - 0xd6ad50a5, 0xd26c4d12, 0xdf2f6bcb, 0xdbee767c, 0xe3a1cbc1, 0xe760d676, - 0xea23f0af, 0xeee2ed18, 0xf0a5bd1d, 0xf464a0aa, 0xf9278673, 0xfde69bc4, - 0x89b8fd09, 0x8d79e0be, 0x803ac667, 0x84fbdbd0, 0x9abc8bd5, 0x9e7d9662, - 0x933eb0bb, 0x97ffad0c, 0xafb010b1, 0xab710d06, 0xa6322bdf, 0xa2f33668, - 0xbcb4666d, 0xb8757bda, 0xb5365d03, 0xb1f740b4 -}; - - - - -/**************************************************************************** - * WebRtcIsac_GetCrc(...) 
- * - * This function returns a 32 bit CRC checksum of a bit stream - * - * Input: - * - bitstream : payload bitstream - * - len_bitstream_in_bytes : number of 8-bit words in the bit stream - * - * Output: - * - crc : checksum - * - * Return value : 0 - Ok - * -1 - Error - */ - -int WebRtcIsac_GetCrc(const int16_t* bitstream, - int len_bitstream_in_bytes, - uint32_t* crc) -{ - uint8_t* bitstream_ptr_uw8; - uint32_t crc_state; - int byte_cntr; - int crc_tbl_indx; - - /* Sanity Check. */ - if (bitstream == NULL) { - return -1; - } - /* cast to UWord8 pointer */ - bitstream_ptr_uw8 = (uint8_t *)bitstream; - - /* initialize */ - crc_state = 0xFFFFFFFF; - - for (byte_cntr = 0; byte_cntr < len_bitstream_in_bytes; byte_cntr++) { - crc_tbl_indx = (WEBRTC_SPL_RSHIFT_U32(crc_state, 24) ^ - bitstream_ptr_uw8[byte_cntr]) & 0xFF; - crc_state = (crc_state << 8) ^ kCrcTable[crc_tbl_indx]; - } - - *crc = ~crc_state; - return 0; -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/crc.h b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/crc.h deleted file mode 100644 index f031019ed37f..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/crc.h +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * crc.h - * - * Checksum functions - * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_CRC_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_CRC_H_ - -#include - -/**************************************************************************** - * WebRtcIsac_GetCrc(...) - * - * This function returns a 32 bit CRC checksum of a bit stream - * - * Input: - * - encoded : payload bit stream - * - no_of_word8s : number of 8-bit words in the bit stream - * - * Output: - * - crc : checksum - * - * Return value : 0 - Ok - * -1 - Error - */ - -int WebRtcIsac_GetCrc(const int16_t* encoded, int no_of_word8s, uint32_t* crc); - -#endif /* MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_CRC_H_ */ diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/decode.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/decode.c deleted file mode 100644 index 6e114e4a2be8..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/decode.c +++ /dev/null @@ -1,303 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * decode_B.c - * - * This file contains definition of funtions for decoding. - * Decoding of lower-band, including normal-decoding and RCU decoding. - * Decoding of upper-band, including 8-12 kHz, when the bandwidth is - * 0-12 kHz, and 8-16 kHz, when the bandwidth is 0-16 kHz. 
- * - */ - -#include -#include -#include - -#include "modules/audio_coding/codecs/isac/main/source/codec.h" -#include "modules/audio_coding/codecs/isac/main/source/entropy_coding.h" -#include "modules/audio_coding/codecs/isac/main/source/pitch_estimator.h" -#include "modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.h" -#include "modules/audio_coding/codecs/isac/main/source/structs.h" -#include "modules/audio_coding/codecs/isac/main/source/settings.h" -#include "modules/audio_coding/codecs/isac/main/source/pitch_filter.h" - -/* - * function to decode the bitstream - * returns the total number of bytes in the stream - */ -int WebRtcIsac_DecodeLb(const TransformTables* transform_tables, - float* signal_out, ISACLBDecStruct* ISACdecLB_obj, - int16_t* current_framesamples, - int16_t isRCUPayload) { - int k; - int len, err; - int16_t bandwidthInd; - - float LP_dec_float[FRAMESAMPLES_HALF]; - float HP_dec_float[FRAMESAMPLES_HALF]; - - double LPw[FRAMESAMPLES_HALF]; - double HPw[FRAMESAMPLES_HALF]; - double LPw_pf[FRAMESAMPLES_HALF]; - - double lo_filt_coef[(ORDERLO + 1)*SUBFRAMES]; - double hi_filt_coef[(ORDERHI + 1)*SUBFRAMES]; - - double real_f[FRAMESAMPLES_HALF]; - double imag_f[FRAMESAMPLES_HALF]; - - double PitchLags[4]; - double PitchGains[4]; - double AvgPitchGain; - int16_t PitchGains_Q12[4]; - int16_t AvgPitchGain_Q12; - - float gain; - - int frame_nb; /* counter */ - int frame_mode; /* 0 30ms, 1 for 60ms */ - /* Processed_samples: 480 (30, 60 ms). Cannot take other values. */ - - WebRtcIsac_ResetBitstream(&(ISACdecLB_obj->bitstr_obj)); - - len = 0; - - /* Decode framelength and BW estimation - not used, - only for stream pointer*/ - err = WebRtcIsac_DecodeFrameLen(&ISACdecLB_obj->bitstr_obj, - current_framesamples); - if (err < 0) { - return err; - } - - /* Frame_mode: - * 0: indicates 30 ms frame (480 samples) - * 1: indicates 60 ms frame (960 samples) */ - frame_mode = *current_framesamples / MAX_FRAMESAMPLES; - - err = WebRtcIsac_DecodeSendBW(&ISACdecLB_obj->bitstr_obj, &bandwidthInd); - if (err < 0) { - return err; - } - - /* One loop if it's one frame (20 or 30ms), 2 loops if 2 frames - bundled together (60ms). */ - for (frame_nb = 0; frame_nb <= frame_mode; frame_nb++) { - /* Decode & de-quantize pitch parameters */ - err = WebRtcIsac_DecodePitchGain(&ISACdecLB_obj->bitstr_obj, - PitchGains_Q12); - if (err < 0) { - return err; - } - - err = WebRtcIsac_DecodePitchLag(&ISACdecLB_obj->bitstr_obj, PitchGains_Q12, - PitchLags); - if (err < 0) { - return err; - } - - AvgPitchGain_Q12 = (PitchGains_Q12[0] + PitchGains_Q12[1] + - PitchGains_Q12[2] + PitchGains_Q12[3]) >> 2; - - /* Decode & de-quantize filter coefficients. */ - err = WebRtcIsac_DecodeLpc(&ISACdecLB_obj->bitstr_obj, lo_filt_coef, - hi_filt_coef); - if (err < 0) { - return err; - } - /* Decode & de-quantize spectrum. */ - len = WebRtcIsac_DecodeSpec(&ISACdecLB_obj->bitstr_obj, AvgPitchGain_Q12, - kIsacLowerBand, real_f, imag_f); - if (len < 0) { - return len; - } - - /* Inverse transform. */ - WebRtcIsac_Spec2time(transform_tables, real_f, imag_f, LPw, HPw, - &ISACdecLB_obj->fftstr_obj); - - /* Convert PitchGains back to float for pitchfilter_post */ - for (k = 0; k < 4; k++) { - PitchGains[k] = ((float)PitchGains_Q12[k]) / 4096; - } - if (isRCUPayload) { - for (k = 0; k < 240; k++) { - LPw[k] *= RCU_TRANSCODING_SCALE_INVERSE; - HPw[k] *= RCU_TRANSCODING_SCALE_INVERSE; - } - } - - /* Inverse pitch filter. 
*/ - WebRtcIsac_PitchfilterPost(LPw, LPw_pf, &ISACdecLB_obj->pitchfiltstr_obj, - PitchLags, PitchGains); - /* Convert AvgPitchGain back to float for computation of gain. */ - AvgPitchGain = ((float)AvgPitchGain_Q12) / 4096; - gain = 1.0f - 0.45f * (float)AvgPitchGain; - - for (k = 0; k < FRAMESAMPLES_HALF; k++) { - /* Reduce gain to compensate for pitch enhancer. */ - LPw_pf[k] *= gain; - } - - if (isRCUPayload) { - for (k = 0; k < FRAMESAMPLES_HALF; k++) { - /* Compensation for transcoding gain changes. */ - LPw_pf[k] *= RCU_TRANSCODING_SCALE; - HPw[k] *= RCU_TRANSCODING_SCALE; - } - } - /* Perceptual post-filtering (using normalized lattice filter). */ - WebRtcIsac_NormLatticeFilterAr( - ORDERLO, ISACdecLB_obj->maskfiltstr_obj.PostStateLoF, - (ISACdecLB_obj->maskfiltstr_obj).PostStateLoG, LPw_pf, lo_filt_coef, - LP_dec_float); - WebRtcIsac_NormLatticeFilterAr( - ORDERHI, ISACdecLB_obj->maskfiltstr_obj.PostStateHiF, - (ISACdecLB_obj->maskfiltstr_obj).PostStateHiG, HPw, hi_filt_coef, - HP_dec_float); - - /* Recombine the 2 bands. */ - WebRtcIsac_FilterAndCombineFloat(LP_dec_float, HP_dec_float, - signal_out + frame_nb * FRAMESAMPLES, - &ISACdecLB_obj->postfiltbankstr_obj); - } - return len; -} - - -/* - * This decode function is called when the codec is operating in 16 kHz - * bandwidth to decode the upperband, i.e. 8-16 kHz. - * - * Contrary to lower-band, the upper-band (8-16 kHz) is not split in - * frequency, but split to 12 sub-frames, i.e. twice as lower-band. - */ -int WebRtcIsac_DecodeUb16(const TransformTables* transform_tables, - float* signal_out, ISACUBDecStruct* ISACdecUB_obj, - int16_t isRCUPayload) { - int len, err; - - double halfFrameFirst[FRAMESAMPLES_HALF]; - double halfFrameSecond[FRAMESAMPLES_HALF]; - - double percepFilterParam[(UB_LPC_ORDER + 1) * (SUBFRAMES << 1) + - (UB_LPC_ORDER + 1)]; - - double real_f[FRAMESAMPLES_HALF]; - double imag_f[FRAMESAMPLES_HALF]; - const int16_t kAveragePitchGain = 0; /* No pitch-gain for upper-band. */ - len = 0; - - /* Decode & de-quantize filter coefficients. */ - memset(percepFilterParam, 0, sizeof(percepFilterParam)); - err = WebRtcIsac_DecodeInterpolLpcUb(&ISACdecUB_obj->bitstr_obj, - percepFilterParam, isac16kHz); - if (err < 0) { - return err; - } - - /* Decode & de-quantize spectrum. */ - len = WebRtcIsac_DecodeSpec(&ISACdecUB_obj->bitstr_obj, kAveragePitchGain, - kIsacUpperBand16, real_f, imag_f); - if (len < 0) { - return len; - } - if (isRCUPayload) { - int n; - for (n = 0; n < 240; n++) { - real_f[n] *= RCU_TRANSCODING_SCALE_UB_INVERSE; - imag_f[n] *= RCU_TRANSCODING_SCALE_UB_INVERSE; - } - } - /* Inverse transform. */ - WebRtcIsac_Spec2time(transform_tables, - real_f, imag_f, halfFrameFirst, halfFrameSecond, - &ISACdecUB_obj->fftstr_obj); - - /* Perceptual post-filtering (using normalized lattice filter). */ - WebRtcIsac_NormLatticeFilterAr( - UB_LPC_ORDER, ISACdecUB_obj->maskfiltstr_obj.PostStateLoF, - (ISACdecUB_obj->maskfiltstr_obj).PostStateLoG, halfFrameFirst, - &percepFilterParam[(UB_LPC_ORDER + 1)], signal_out); - - WebRtcIsac_NormLatticeFilterAr( - UB_LPC_ORDER, ISACdecUB_obj->maskfiltstr_obj.PostStateLoF, - (ISACdecUB_obj->maskfiltstr_obj).PostStateLoG, halfFrameSecond, - &percepFilterParam[(UB_LPC_ORDER + 1) * SUBFRAMES + (UB_LPC_ORDER + 1)], - &signal_out[FRAMESAMPLES_HALF]); - - return len; -} - -/* - * This decode function is called when the codec operates at 0-12 kHz - * bandwidth to decode the upperband, i.e. 8-12 kHz. 
- * - * At the encoder the upper-band is split into two band, 8-12 kHz & 12-16 - * kHz, and only 8-12 kHz is encoded. At the decoder, 8-12 kHz band is - * reconstructed and 12-16 kHz replaced with zeros. Then two bands - * are combined, to reconstruct the upperband 8-16 kHz. - */ -int WebRtcIsac_DecodeUb12(const TransformTables* transform_tables, - float* signal_out, ISACUBDecStruct* ISACdecUB_obj, - int16_t isRCUPayload) { - int len, err; - - float LP_dec_float[FRAMESAMPLES_HALF]; - float HP_dec_float[FRAMESAMPLES_HALF]; - - double LPw[FRAMESAMPLES_HALF]; - double HPw[FRAMESAMPLES_HALF]; - - double percepFilterParam[(UB_LPC_ORDER + 1)*SUBFRAMES]; - - double real_f[FRAMESAMPLES_HALF]; - double imag_f[FRAMESAMPLES_HALF]; - const int16_t kAveragePitchGain = 0; /* No pitch-gain for upper-band. */ - len = 0; - - /* Decode & dequantize filter coefficients. */ - err = WebRtcIsac_DecodeInterpolLpcUb(&ISACdecUB_obj->bitstr_obj, - percepFilterParam, isac12kHz); - if (err < 0) { - return err; - } - - /* Decode & de-quantize spectrum. */ - len = WebRtcIsac_DecodeSpec(&ISACdecUB_obj->bitstr_obj, kAveragePitchGain, - kIsacUpperBand12, real_f, imag_f); - if (len < 0) { - return len; - } - - if (isRCUPayload) { - int n; - for (n = 0; n < 240; n++) { - real_f[n] *= RCU_TRANSCODING_SCALE_UB_INVERSE; - imag_f[n] *= RCU_TRANSCODING_SCALE_UB_INVERSE; - } - } - /* Inverse transform. */ - WebRtcIsac_Spec2time(transform_tables, - real_f, imag_f, LPw, HPw, &ISACdecUB_obj->fftstr_obj); - /* perceptual post-filtering (using normalized lattice filter) */ - WebRtcIsac_NormLatticeFilterAr(UB_LPC_ORDER, - ISACdecUB_obj->maskfiltstr_obj.PostStateLoF, - (ISACdecUB_obj->maskfiltstr_obj).PostStateLoG, - LPw, percepFilterParam, LP_dec_float); - /* Zero for 12-16 kHz. */ - memset(HP_dec_float, 0, sizeof(float) * (FRAMESAMPLES_HALF)); - /* Recombine the 2 bands. */ - WebRtcIsac_FilterAndCombineFloat(HP_dec_float, LP_dec_float, signal_out, - &ISACdecUB_obj->postfiltbankstr_obj); - return len; -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/decode_bwe.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/decode_bwe.c deleted file mode 100644 index 89d970fc75e7..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/decode_bwe.c +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/audio_coding/codecs/isac/main/source/structs.h" -#include "modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.h" -#include "modules/audio_coding/codecs/isac/main/source/entropy_coding.h" -#include "modules/audio_coding/codecs/isac/main/source/codec.h" - - -int -WebRtcIsac_EstimateBandwidth( - BwEstimatorstr* bwest_str, - Bitstr* streamdata, - size_t packet_size, - uint16_t rtp_seq_number, - uint32_t send_ts, - uint32_t arr_ts, - enum IsacSamplingRate encoderSampRate, - enum IsacSamplingRate decoderSampRate) -{ - int16_t index; - int16_t frame_samples; - uint32_t sendTimestampIn16kHz; - uint32_t arrivalTimestampIn16kHz; - uint32_t diffSendTime; - uint32_t diffArrivalTime; - int err; - - /* decode framelength and BW estimation */ - err = WebRtcIsac_DecodeFrameLen(streamdata, &frame_samples); - if(err < 0) // error check - { - return err; - } - err = WebRtcIsac_DecodeSendBW(streamdata, &index); - if(err < 0) // error check - { - return err; - } - - /* UPDATE ESTIMATES FROM OTHER SIDE */ - err = WebRtcIsac_UpdateUplinkBwImpl(bwest_str, index, encoderSampRate); - if(err < 0) - { - return err; - } - - // We like BWE to work at 16 kHz sampling rate, - // therefore, we have to change the timestamps accordingly. - // translate the send timestamp if required - diffSendTime = (uint32_t)((uint32_t)send_ts - - (uint32_t)bwest_str->senderTimestamp); - bwest_str->senderTimestamp = send_ts; - - diffArrivalTime = (uint32_t)((uint32_t)arr_ts - - (uint32_t)bwest_str->receiverTimestamp); - bwest_str->receiverTimestamp = arr_ts; - - if(decoderSampRate == kIsacSuperWideband) - { - diffArrivalTime = (uint32_t)diffArrivalTime >> 1; - diffSendTime = (uint32_t)diffSendTime >> 1; - } - - // arrival timestamp in 16 kHz - arrivalTimestampIn16kHz = (uint32_t)((uint32_t) - bwest_str->prev_rec_arr_ts + (uint32_t)diffArrivalTime); - // send timestamp in 16 kHz - sendTimestampIn16kHz = (uint32_t)((uint32_t) - bwest_str->prev_rec_send_ts + (uint32_t)diffSendTime); - - err = WebRtcIsac_UpdateBandwidthEstimator(bwest_str, rtp_seq_number, - (frame_samples * 1000) / FS, sendTimestampIn16kHz, - arrivalTimestampIn16kHz, packet_size); - // error check - if(err < 0) - { - return err; - } - - return 0; -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/encode.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/encode.c deleted file mode 100644 index bf92d02c533c..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/encode.c +++ /dev/null @@ -1,1260 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * encode.c - * - * This file contains definition of funtions for encoding. - * Decoding of upper-band, including 8-12 kHz, when the bandwidth is - * 0-12 kHz, and 8-16 kHz, when the bandwidth is 0-16 kHz. 
- * - */ - -#include -#include -#include - -#include "modules/audio_coding/codecs/isac/main/source/structs.h" -#include "modules/audio_coding/codecs/isac/main/source/codec.h" -#include "modules/audio_coding/codecs/isac/main/source/pitch_estimator.h" -#include "modules/audio_coding/codecs/isac/main/source/entropy_coding.h" -#include "modules/audio_coding/codecs/isac/main/source/arith_routines.h" -#include "modules/audio_coding/codecs/isac/main/source/pitch_gain_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/pitch_lag_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/lpc_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/lpc_analysis.h" -#include "modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.h" -#include "modules/audio_coding/codecs/isac/main/source/lpc_shape_swb12_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/lpc_shape_swb16_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/lpc_gain_swb_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/isac_vad.h" -#include "modules/audio_coding/codecs/isac/main/source/pitch_filter.h" - - -#define UB_LOOKAHEAD 24 - - -/* - Rate allocation tables of lower and upper-band bottleneck for - 12kHz & 16kHz bandwidth. - - 12 kHz bandwidth - ----------------- - The overall bottleneck of the coder is between 38 kbps and 45 kbps. We have - considered 7 enteries, uniformly distributed in this interval, i.e. 38, - 39.17, 40.33, 41.5, 42.67, 43.83 and 45. For every entery, the lower-band - and the upper-band bottlenecks are specified in - 'kLowerBandBitRate12' and 'kUpperBandBitRate12' - tables, respectively. E.g. the overall rate of 41.5 kbps corresponts to a - bottleneck of 31 kbps for lower-band and 27 kbps for upper-band. Given an - overall bottleneck of the codec, we use linear interpolation to get - lower-band and upper-band bottlenecks. - - 16 kHz bandwidth - ----------------- - The overall bottleneck of the coder is between 50 kbps and 56 kbps. We have - considered 7 enteries, uniformly distributed in this interval, i.e. 50, 51.2, - 52.4, 53.6, 54.8 and 56. For every entery, the lower-band and the upper-band - bottlenecks are specified in 'kLowerBandBitRate16' and - 'kUpperBandBitRate16' tables, respectively. E.g. the overall rate - of 53.6 kbps corresponts to a bottleneck of 32 kbps for lower-band and 30 - kbps for upper-band. Given an overall bottleneck of the codec, we use linear - interpolation to get lower-band and upper-band bottlenecks. - - */ - -/* 38 39.17 40.33 41.5 42.67 43.83 45 */ -static const int16_t kLowerBandBitRate12[7] = { - 29000, 30000, 30000, 31000, 31000, 32000, 32000 }; -static const int16_t kUpperBandBitRate12[7] = { - 25000, 25000, 27000, 27000, 29000, 29000, 32000 }; - -/* 50 51.2 52.4 53.6 54.8 56 */ -static const int16_t kLowerBandBitRate16[6] = { - 31000, 31000, 32000, 32000, 32000, 32000 }; -static const int16_t kUpperBandBitRate16[6] = { - 28000, 29000, 29000, 30000, 31000, 32000 }; - -/****************************************************************************** - * WebRtcIsac_RateAllocation() - * Internal function to perform a rate-allocation for upper and lower-band, - * given a total rate. - * - * Input: - * - inRateBitPerSec : a total bottleneck in bits/sec. - * - * Output: - * - rateLBBitPerSec : a bottleneck allocated to the lower-band - * in bits/sec. 
- * - rateUBBitPerSec : a bottleneck allocated to the upper-band - * in bits/sec. - * - * Return value : 0 if rate allocation has been successful. - * -1 if failed to allocate rates. - */ - -int16_t WebRtcIsac_RateAllocation(int32_t inRateBitPerSec, - double* rateLBBitPerSec, - double* rateUBBitPerSec, - enum ISACBandwidth* bandwidthKHz) { - int16_t idx; - double idxD; - double idxErr; - if (inRateBitPerSec < 38000) { - /* If the given overall bottleneck is less than 38000 then - * then codec has to operate in wideband mode, i.e. 8 kHz - * bandwidth. */ - *rateLBBitPerSec = (int16_t)((inRateBitPerSec > 32000) ? - 32000 : inRateBitPerSec); - *rateUBBitPerSec = 0; - *bandwidthKHz = isac8kHz; - } else if ((inRateBitPerSec >= 38000) && (inRateBitPerSec < 50000)) { - /* At a bottleneck between 38 and 50 kbps the codec is operating - * at 12 kHz bandwidth. Using xxxBandBitRate12[] to calculates - * upper/lower bottleneck */ - - /* Find the bottlenecks by linear interpolation, - * step is (45000 - 38000)/6.0 we use the inverse of it. */ - const double stepSizeInv = 8.5714286e-4; - idxD = (inRateBitPerSec - 38000) * stepSizeInv; - idx = (idxD >= 6) ? 6 : ((int16_t)idxD); - idxErr = idxD - idx; - *rateLBBitPerSec = kLowerBandBitRate12[idx]; - *rateUBBitPerSec = kUpperBandBitRate12[idx]; - - if (idx < 6) { - *rateLBBitPerSec += (int16_t)( - idxErr * (kLowerBandBitRate12[idx + 1] - kLowerBandBitRate12[idx])); - *rateUBBitPerSec += (int16_t)( - idxErr * (kUpperBandBitRate12[idx + 1] - kUpperBandBitRate12[idx])); - } - *bandwidthKHz = isac12kHz; - } else if ((inRateBitPerSec >= 50000) && (inRateBitPerSec <= 56000)) { - /* A bottleneck between 50 and 56 kbps corresponds to bandwidth - * of 16 kHz. Using xxxBandBitRate16[] to calculates - * upper/lower bottleneck. */ - - /* Find the bottlenecks by linear interpolation - * step is (56000 - 50000)/5 we use the inverse of it. */ - const double stepSizeInv = 8.3333333e-4; - idxD = (inRateBitPerSec - 50000) * stepSizeInv; - idx = (idxD >= 5) ? 5 : ((int16_t)idxD); - idxErr = idxD - idx; - *rateLBBitPerSec = kLowerBandBitRate16[idx]; - *rateUBBitPerSec = kUpperBandBitRate16[idx]; - - if (idx < 5) { - *rateLBBitPerSec += (int16_t)(idxErr * - (kLowerBandBitRate16[idx + 1] - - kLowerBandBitRate16[idx])); - - *rateUBBitPerSec += (int16_t)(idxErr * - (kUpperBandBitRate16[idx + 1] - - kUpperBandBitRate16[idx])); - } - *bandwidthKHz = isac16kHz; - } else { - /* Out-of-range botlteneck value. */ - return -1; - } - - /* limit the values. */ - *rateLBBitPerSec = (*rateLBBitPerSec > 32000) ? 32000 : *rateLBBitPerSec; - *rateUBBitPerSec = (*rateUBBitPerSec > 32000) ? 
32000 : *rateUBBitPerSec; - return 0; -} - - -void WebRtcIsac_ResetBitstream(Bitstr* bit_stream) { - bit_stream->W_upper = 0xFFFFFFFF; - bit_stream->stream_index = 0; - bit_stream->streamval = 0; -} - -int WebRtcIsac_EncodeLb(const TransformTables* transform_tables, - float* in, ISACLBEncStruct* ISACencLB_obj, - int16_t codingMode, - int16_t bottleneckIndex) { - int stream_length = 0; - int err; - int k; - int iterCntr; - - double lofilt_coef[(ORDERLO + 1)*SUBFRAMES]; - double hifilt_coef[(ORDERHI + 1)*SUBFRAMES]; - float LP[FRAMESAMPLES_HALF]; - float HP[FRAMESAMPLES_HALF]; - - double LP_lookahead[FRAMESAMPLES_HALF]; - double HP_lookahead[FRAMESAMPLES_HALF]; - double LP_lookahead_pf[FRAMESAMPLES_HALF + QLOOKAHEAD]; - double LPw[FRAMESAMPLES_HALF]; - - double HPw[FRAMESAMPLES_HALF]; - double LPw_pf[FRAMESAMPLES_HALF]; - int16_t fre[FRAMESAMPLES_HALF]; /* Q7 */ - int16_t fim[FRAMESAMPLES_HALF]; /* Q7 */ - - double PitchLags[4]; - double PitchGains[4]; - int16_t PitchGains_Q12[4]; - int16_t AvgPitchGain_Q12; - - int frame_mode; /* 0 for 30ms, 1 for 60ms */ - int status = 0; - int my_index; - transcode_obj transcodingParam; - double bytesLeftSpecCoding; - uint16_t payloadLimitBytes; - - /* Copy new frame-length and bottleneck rate only for the first 10 ms data */ - if (ISACencLB_obj->buffer_index == 0) { - /* Set the framelength for the next packet. */ - ISACencLB_obj->current_framesamples = ISACencLB_obj->new_framelength; - } - /* 'frame_mode' is 0 (30 ms) or 1 (60 ms). */ - frame_mode = ISACencLB_obj->current_framesamples / MAX_FRAMESAMPLES; - - /* buffer speech samples (by 10ms packet) until the frame-length */ - /* is reached (30 or 60 ms). */ - /*****************************************************************/ - - /* fill the buffer with 10ms input data */ - for (k = 0; k < FRAMESAMPLES_10ms; k++) { - ISACencLB_obj->data_buffer_float[k + ISACencLB_obj->buffer_index] = in[k]; - } - - /* If buffersize is not equal to current framesize then increase index - * and return. We do no encoding untill we have enough audio. */ - if (ISACencLB_obj->buffer_index + FRAMESAMPLES_10ms != FRAMESAMPLES) { - ISACencLB_obj->buffer_index += FRAMESAMPLES_10ms; - return 0; - } - /* If buffer reached the right size, reset index and continue with - * encoding the frame. */ - ISACencLB_obj->buffer_index = 0; - - /* End of buffer function. */ - /**************************/ - - /* Encoding */ - /************/ - - if (frame_mode == 0 || ISACencLB_obj->frame_nb == 0) { - /* This is to avoid Linux warnings until we change 'int' to 'Word32' - * at all places. */ - int intVar; - /* reset bitstream */ - WebRtcIsac_ResetBitstream(&(ISACencLB_obj->bitstr_obj)); - - if ((codingMode == 0) && (frame_mode == 0) && - (ISACencLB_obj->enforceFrameSize == 0)) { - ISACencLB_obj->new_framelength = WebRtcIsac_GetNewFrameLength( - ISACencLB_obj->bottleneck, ISACencLB_obj->current_framesamples); - } - - ISACencLB_obj->s2nr = WebRtcIsac_GetSnr( - ISACencLB_obj->bottleneck, ISACencLB_obj->current_framesamples); - - /* Encode frame length. */ - status = WebRtcIsac_EncodeFrameLen( - ISACencLB_obj->current_framesamples, &ISACencLB_obj->bitstr_obj); - if (status < 0) { - /* Wrong frame size. */ - return status; - } - /* Save framelength for multiple packets memory. */ - ISACencLB_obj->SaveEnc_obj.framelength = - ISACencLB_obj->current_framesamples; - - /* To be used for Redundant Coding. 
*/ - ISACencLB_obj->lastBWIdx = bottleneckIndex; - intVar = (int)bottleneckIndex; - WebRtcIsac_EncodeReceiveBw(&intVar, &ISACencLB_obj->bitstr_obj); - } - - /* Split signal in two bands. */ - WebRtcIsac_SplitAndFilterFloat(ISACencLB_obj->data_buffer_float, LP, HP, - LP_lookahead, HP_lookahead, - &ISACencLB_obj->prefiltbankstr_obj); - - /* estimate pitch parameters and pitch-filter lookahead signal */ - WebRtcIsac_PitchAnalysis(LP_lookahead, LP_lookahead_pf, - &ISACencLB_obj->pitchanalysisstr_obj, PitchLags, - PitchGains); - - /* Encode in FIX Q12. */ - - /* Convert PitchGain to Fixed point. */ - for (k = 0; k < PITCH_SUBFRAMES; k++) { - PitchGains_Q12[k] = (int16_t)(PitchGains[k] * 4096.0); - } - - /* Set where to store data in multiple packets memory. */ - if (frame_mode == 0 || ISACencLB_obj->frame_nb == 0) { - ISACencLB_obj->SaveEnc_obj.startIdx = 0; - } else { - ISACencLB_obj->SaveEnc_obj.startIdx = 1; - } - - /* Quantize & encode pitch parameters. */ - WebRtcIsac_EncodePitchGain(PitchGains_Q12, &ISACencLB_obj->bitstr_obj, - &ISACencLB_obj->SaveEnc_obj); - WebRtcIsac_EncodePitchLag(PitchLags, PitchGains_Q12, - &ISACencLB_obj->bitstr_obj, - &ISACencLB_obj->SaveEnc_obj); - - AvgPitchGain_Q12 = (PitchGains_Q12[0] + PitchGains_Q12[1] + - PitchGains_Q12[2] + PitchGains_Q12[3]) >> 2; - - /* Find coefficients for perceptual pre-filters. */ - WebRtcIsac_GetLpcCoefLb(LP_lookahead_pf, HP_lookahead, - &ISACencLB_obj->maskfiltstr_obj, ISACencLB_obj->s2nr, - PitchGains_Q12, lofilt_coef, hifilt_coef); - - /* Code LPC model and shape - gains not quantized yet. */ - WebRtcIsac_EncodeLpcLb(lofilt_coef, hifilt_coef, &ISACencLB_obj->bitstr_obj, - &ISACencLB_obj->SaveEnc_obj); - - /* Convert PitchGains back to FLOAT for pitchfilter_pre. */ - for (k = 0; k < 4; k++) { - PitchGains[k] = ((float)PitchGains_Q12[k]) / 4096; - } - - /* Store the state of arithmetic coder before coding LPC gains. */ - transcodingParam.W_upper = ISACencLB_obj->bitstr_obj.W_upper; - transcodingParam.stream_index = ISACencLB_obj->bitstr_obj.stream_index; - transcodingParam.streamval = ISACencLB_obj->bitstr_obj.streamval; - transcodingParam.stream[0] = - ISACencLB_obj->bitstr_obj.stream[ISACencLB_obj->bitstr_obj.stream_index - - 2]; - transcodingParam.stream[1] = - ISACencLB_obj->bitstr_obj.stream[ISACencLB_obj->bitstr_obj.stream_index - - 1]; - transcodingParam.stream[2] = - ISACencLB_obj->bitstr_obj.stream[ISACencLB_obj->bitstr_obj.stream_index]; - - /* Store LPC Gains before encoding them. */ - for (k = 0; k < SUBFRAMES; k++) { - transcodingParam.loFiltGain[k] = lofilt_coef[(LPC_LOBAND_ORDER + 1) * k]; - transcodingParam.hiFiltGain[k] = hifilt_coef[(LPC_HIBAND_ORDER + 1) * k]; - } - - /* Code gains */ - WebRtcIsac_EncodeLpcGainLb(lofilt_coef, hifilt_coef, - &ISACencLB_obj->bitstr_obj, - &ISACencLB_obj->SaveEnc_obj); - - /* Get the correct value for the payload limit and calculate the - * number of bytes left for coding the spectrum. */ - if ((frame_mode == 1) && (ISACencLB_obj->frame_nb == 0)) { - /* It is a 60ms and we are in the first 30ms then the limit at - * this point should be half of the assigned value. */ - payloadLimitBytes = ISACencLB_obj->payloadLimitBytes60 >> 1; - } else if (frame_mode == 0) { - /* It is a 30ms frame */ - /* Subract 3 because termination process may add 3 bytes. */ - payloadLimitBytes = ISACencLB_obj->payloadLimitBytes30 - 3; - } else { - /* This is the second half of a 60ms frame. */ - /* Subract 3 because termination process may add 3 bytes. 
*/ - payloadLimitBytes = ISACencLB_obj->payloadLimitBytes60 - 3; - } - bytesLeftSpecCoding = payloadLimitBytes - transcodingParam.stream_index; - - /* Perceptual pre-filtering (using normalized lattice filter). */ - /* Low-band filtering. */ - WebRtcIsac_NormLatticeFilterMa(ORDERLO, - ISACencLB_obj->maskfiltstr_obj.PreStateLoF, - ISACencLB_obj->maskfiltstr_obj.PreStateLoG, - LP, lofilt_coef, LPw); - /* High-band filtering. */ - WebRtcIsac_NormLatticeFilterMa(ORDERHI, - ISACencLB_obj->maskfiltstr_obj.PreStateHiF, - ISACencLB_obj->maskfiltstr_obj.PreStateHiG, - HP, hifilt_coef, HPw); - /* Pitch filter. */ - WebRtcIsac_PitchfilterPre(LPw, LPw_pf, &ISACencLB_obj->pitchfiltstr_obj, - PitchLags, PitchGains); - /* Transform */ - WebRtcIsac_Time2Spec(transform_tables, - LPw_pf, HPw, fre, fim, &ISACencLB_obj->fftstr_obj); - - /* Save data for multiple packets memory. */ - my_index = ISACencLB_obj->SaveEnc_obj.startIdx * FRAMESAMPLES_HALF; - memcpy(&ISACencLB_obj->SaveEnc_obj.fre[my_index], fre, sizeof(fre)); - memcpy(&ISACencLB_obj->SaveEnc_obj.fim[my_index], fim, sizeof(fim)); - - ISACencLB_obj->SaveEnc_obj.AvgPitchGain[ISACencLB_obj->SaveEnc_obj.startIdx] = - AvgPitchGain_Q12; - - /* Quantization and loss-less coding. */ - err = WebRtcIsac_EncodeSpec(fre, fim, AvgPitchGain_Q12, kIsacLowerBand, - &ISACencLB_obj->bitstr_obj); - if ((err < 0) && (err != -ISAC_DISALLOWED_BITSTREAM_LENGTH)) { - /* There has been an error but it was not too large payload - (we can cure too large payload). */ - if (frame_mode == 1 && ISACencLB_obj->frame_nb == 1) { - /* If this is the second 30ms of a 60ms frame reset - this such that in the next call encoder starts fresh. */ - ISACencLB_obj->frame_nb = 0; - } - return err; - } - iterCntr = 0; - while ((ISACencLB_obj->bitstr_obj.stream_index > payloadLimitBytes) || - (err == -ISAC_DISALLOWED_BITSTREAM_LENGTH)) { - double bytesSpecCoderUsed; - double transcodeScale; - - if (iterCntr >= MAX_PAYLOAD_LIMIT_ITERATION) { - /* We were not able to limit the payload size */ - if ((frame_mode == 1) && (ISACencLB_obj->frame_nb == 0)) { - /* This was the first 30ms of a 60ms frame. Although - the payload is larger than it should be but we let - the second 30ms be encoded. Maybe together we - won't exceed the limit. */ - ISACencLB_obj->frame_nb = 1; - return 0; - } else if ((frame_mode == 1) && (ISACencLB_obj->frame_nb == 1)) { - ISACencLB_obj->frame_nb = 0; - } - - if (err != -ISAC_DISALLOWED_BITSTREAM_LENGTH) { - return -ISAC_PAYLOAD_LARGER_THAN_LIMIT; - } else { - return status; - } - } - - if (err == -ISAC_DISALLOWED_BITSTREAM_LENGTH) { - bytesSpecCoderUsed = STREAM_SIZE_MAX; - /* Being conservative */ - transcodeScale = bytesLeftSpecCoding / bytesSpecCoderUsed * 0.5; - } else { - bytesSpecCoderUsed = ISACencLB_obj->bitstr_obj.stream_index - - transcodingParam.stream_index; - transcodeScale = bytesLeftSpecCoding / bytesSpecCoderUsed; - } - - /* To be safe, we reduce the scale depending on - the number of iterations. */ - transcodeScale *= (1.0 - (0.9 * (double)iterCntr / - (double)MAX_PAYLOAD_LIMIT_ITERATION)); - - /* Scale the LPC Gains. */ - for (k = 0; k < SUBFRAMES; k++) { - lofilt_coef[(LPC_LOBAND_ORDER + 1) * k] = - transcodingParam.loFiltGain[k] * transcodeScale; - hifilt_coef[(LPC_HIBAND_ORDER + 1) * k] = - transcodingParam.hiFiltGain[k] * transcodeScale; - transcodingParam.loFiltGain[k] = lofilt_coef[(LPC_LOBAND_ORDER + 1) * k]; - transcodingParam.hiFiltGain[k] = hifilt_coef[(LPC_HIBAND_ORDER + 1) * k]; - } - - /* Scale DFT coefficients. 
*/ - for (k = 0; k < FRAMESAMPLES_HALF; k++) { - fre[k] = (int16_t)(fre[k] * transcodeScale); - fim[k] = (int16_t)(fim[k] * transcodeScale); - } - - /* Save data for multiple packets memory. */ - my_index = ISACencLB_obj->SaveEnc_obj.startIdx * FRAMESAMPLES_HALF; - memcpy(&ISACencLB_obj->SaveEnc_obj.fre[my_index], fre, sizeof(fre)); - memcpy(&ISACencLB_obj->SaveEnc_obj.fim[my_index], fim, sizeof(fim)); - - /* Re-store the state of arithmetic coder before coding LPC gains. */ - ISACencLB_obj->bitstr_obj.W_upper = transcodingParam.W_upper; - ISACencLB_obj->bitstr_obj.stream_index = transcodingParam.stream_index; - ISACencLB_obj->bitstr_obj.streamval = transcodingParam.streamval; - ISACencLB_obj->bitstr_obj.stream[transcodingParam.stream_index - 2] = - transcodingParam.stream[0]; - ISACencLB_obj->bitstr_obj.stream[transcodingParam.stream_index - 1] = - transcodingParam.stream[1]; - ISACencLB_obj->bitstr_obj.stream[transcodingParam.stream_index] = - transcodingParam.stream[2]; - - /* Code gains. */ - WebRtcIsac_EncodeLpcGainLb(lofilt_coef, hifilt_coef, - &ISACencLB_obj->bitstr_obj, - &ISACencLB_obj->SaveEnc_obj); - - /* Update the number of bytes left for encoding the spectrum. */ - bytesLeftSpecCoding = payloadLimitBytes - transcodingParam.stream_index; - - /* Encode the spectrum. */ - err = WebRtcIsac_EncodeSpec(fre, fim, AvgPitchGain_Q12, kIsacLowerBand, - &ISACencLB_obj->bitstr_obj); - - if ((err < 0) && (err != -ISAC_DISALLOWED_BITSTREAM_LENGTH)) { - /* There has been an error but it was not too large - payload (we can cure too large payload). */ - if (frame_mode == 1 && ISACencLB_obj->frame_nb == 1) { - /* If this is the second 30 ms of a 60 ms frame reset - this such that in the next call encoder starts fresh. */ - ISACencLB_obj->frame_nb = 0; - } - return err; - } - iterCntr++; - } - - /* If 60 ms frame-size and just processed the first 30 ms, */ - /* go back to main function to buffer the other 30 ms speech frame. */ - if (frame_mode == 1) { - if (ISACencLB_obj->frame_nb == 0) { - ISACencLB_obj->frame_nb = 1; - return 0; - } else if (ISACencLB_obj->frame_nb == 1) { - ISACencLB_obj->frame_nb = 0; - /* Also update the frame-length for next packet, - in Adaptive mode only. */ - if (codingMode == 0 && (ISACencLB_obj->enforceFrameSize == 0)) { - ISACencLB_obj->new_framelength = - WebRtcIsac_GetNewFrameLength(ISACencLB_obj->bottleneck, - ISACencLB_obj->current_framesamples); - } - } - } else { - ISACencLB_obj->frame_nb = 0; - } - - /* Complete arithmetic coding. */ - stream_length = WebRtcIsac_EncTerminate(&ISACencLB_obj->bitstr_obj); - return stream_length; -} - - - -static int LimitPayloadUb(ISACUBEncStruct* ISACencUB_obj, - uint16_t payloadLimitBytes, - double bytesLeftSpecCoding, - transcode_obj* transcodingParam, - int16_t* fre, int16_t* fim, - double* lpcGains, enum ISACBand band, int status) { - - int iterCntr = 0; - int k; - double bytesSpecCoderUsed; - double transcodeScale; - const int16_t kAveragePitchGain = 0.0; - - do { - if (iterCntr >= MAX_PAYLOAD_LIMIT_ITERATION) { - /* We were not able to limit the payload size. */ - return -ISAC_PAYLOAD_LARGER_THAN_LIMIT; - } - - if (status == -ISAC_DISALLOWED_BITSTREAM_LENGTH) { - bytesSpecCoderUsed = STREAM_SIZE_MAX; - /* Being conservative. 
*/ - transcodeScale = bytesLeftSpecCoding / bytesSpecCoderUsed * 0.5; - } else { - bytesSpecCoderUsed = ISACencUB_obj->bitstr_obj.stream_index - - transcodingParam->stream_index; - transcodeScale = bytesLeftSpecCoding / bytesSpecCoderUsed; - } - - /* To be safe, we reduce the scale depending on the - number of iterations. */ - transcodeScale *= (1.0 - (0.9 * (double)iterCntr / - (double)MAX_PAYLOAD_LIMIT_ITERATION)); - - /* Scale the LPC Gains. */ - if (band == kIsacUpperBand16) { - /* Two sets of coefficients if 16 kHz. */ - for (k = 0; k < SUBFRAMES; k++) { - transcodingParam->loFiltGain[k] *= transcodeScale; - transcodingParam->hiFiltGain[k] *= transcodeScale; - } - } else { - /* One sets of coefficients if 12 kHz. */ - for (k = 0; k < SUBFRAMES; k++) { - transcodingParam->loFiltGain[k] *= transcodeScale; - } - } - - /* Scale DFT coefficients. */ - for (k = 0; k < FRAMESAMPLES_HALF; k++) { - fre[k] = (int16_t)(fre[k] * transcodeScale + 0.5); - fim[k] = (int16_t)(fim[k] * transcodeScale + 0.5); - } - /* Store FFT coefficients for multiple encoding. */ - memcpy(ISACencUB_obj->SaveEnc_obj.realFFT, fre, - sizeof(ISACencUB_obj->SaveEnc_obj.realFFT)); - memcpy(ISACencUB_obj->SaveEnc_obj.imagFFT, fim, - sizeof(ISACencUB_obj->SaveEnc_obj.imagFFT)); - - /* Store the state of arithmetic coder before coding LPC gains */ - ISACencUB_obj->bitstr_obj.W_upper = transcodingParam->W_upper; - ISACencUB_obj->bitstr_obj.stream_index = transcodingParam->stream_index; - ISACencUB_obj->bitstr_obj.streamval = transcodingParam->streamval; - ISACencUB_obj->bitstr_obj.stream[transcodingParam->stream_index - 2] = - transcodingParam->stream[0]; - ISACencUB_obj->bitstr_obj.stream[transcodingParam->stream_index - 1] = - transcodingParam->stream[1]; - ISACencUB_obj->bitstr_obj.stream[transcodingParam->stream_index] = - transcodingParam->stream[2]; - - /* Store the gains for multiple encoding. */ - memcpy(ISACencUB_obj->SaveEnc_obj.lpcGain, lpcGains, - SUBFRAMES * sizeof(double)); - /* Entropy Code lpc-gains, indices are stored for a later use.*/ - WebRtcIsac_EncodeLpcGainUb(transcodingParam->loFiltGain, - &ISACencUB_obj->bitstr_obj, - ISACencUB_obj->SaveEnc_obj.lpcGainIndex); - - /* If 16kHz should do one more set. */ - if (band == kIsacUpperBand16) { - /* Store the gains for multiple encoding. */ - memcpy(&ISACencUB_obj->SaveEnc_obj.lpcGain[SUBFRAMES], - &lpcGains[SUBFRAMES], SUBFRAMES * sizeof(double)); - /* Entropy Code lpc-gains, indices are stored for a later use.*/ - WebRtcIsac_EncodeLpcGainUb( - transcodingParam->hiFiltGain, &ISACencUB_obj->bitstr_obj, - &ISACencUB_obj->SaveEnc_obj.lpcGainIndex[SUBFRAMES]); - } - - /* Update the number of bytes left for encoding the spectrum. */ - bytesLeftSpecCoding = payloadLimitBytes - - ISACencUB_obj->bitstr_obj.stream_index; - - /* Save the bit-stream object at this point for FEC. */ - memcpy(&ISACencUB_obj->SaveEnc_obj.bitStreamObj, - &ISACencUB_obj->bitstr_obj, sizeof(Bitstr)); - - /* Encode the spectrum. */ - status = WebRtcIsac_EncodeSpec(fre, fim, kAveragePitchGain, - band, &ISACencUB_obj->bitstr_obj); - if ((status < 0) && (status != -ISAC_DISALLOWED_BITSTREAM_LENGTH)) { - /* There has been an error but it was not too large payload - (we can cure too large payload). 
*/ - return status; - } - iterCntr++; - } while ((ISACencUB_obj->bitstr_obj.stream_index > payloadLimitBytes) || - (status == -ISAC_DISALLOWED_BITSTREAM_LENGTH)); - return 0; -} - -int WebRtcIsac_EncodeUb16(const TransformTables* transform_tables, - float* in, ISACUBEncStruct* ISACencUB_obj, - int32_t jitterInfo) { - int err; - int k; - - double lpcVecs[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME]; - double percepFilterParams[(1 + UB_LPC_ORDER) * (SUBFRAMES << 1) + - (1 + UB_LPC_ORDER)]; - - double LP_lookahead[FRAMESAMPLES]; - int16_t fre[FRAMESAMPLES_HALF]; /* Q7 */ - int16_t fim[FRAMESAMPLES_HALF]; /* Q7 */ - - int status = 0; - - double varscale[2]; - double corr[SUBFRAMES << 1][UB_LPC_ORDER + 1]; - double lpcGains[SUBFRAMES << 1]; - transcode_obj transcodingParam; - uint16_t payloadLimitBytes; - double s2nr; - const int16_t kAveragePitchGain = 0.0; - int bytesLeftSpecCoding; - - /* Buffer speech samples (by 10ms packet) until the frame-length is */ - /* reached (30 ms). */ - /*********************************************************************/ - - /* fill the buffer with 10ms input data */ - memcpy(&ISACencUB_obj->data_buffer_float[ISACencUB_obj->buffer_index], in, - FRAMESAMPLES_10ms * sizeof(float)); - - /* If buffer size is not equal to current frame-size, and end of file is - * not reached yet, we don't do encoding unless we have the whole frame. */ - if (ISACencUB_obj->buffer_index + FRAMESAMPLES_10ms < FRAMESAMPLES) { - ISACencUB_obj->buffer_index += FRAMESAMPLES_10ms; - return 0; - } - - /* End of buffer function. */ - /**************************/ - - /* Encoding */ - /************/ - - /* Reset bit-stream */ - WebRtcIsac_ResetBitstream(&(ISACencUB_obj->bitstr_obj)); - - /* Encoding of bandwidth information. */ - WebRtcIsac_EncodeJitterInfo(jitterInfo, &ISACencUB_obj->bitstr_obj); - - status = WebRtcIsac_EncodeBandwidth(isac16kHz, &ISACencUB_obj->bitstr_obj); - if (status < 0) { - return status; - } - - s2nr = WebRtcIsac_GetSnr(ISACencUB_obj->bottleneck, FRAMESAMPLES); - - memcpy(lpcVecs, ISACencUB_obj->lastLPCVec, UB_LPC_ORDER * sizeof(double)); - - for (k = 0; k < FRAMESAMPLES; k++) { - LP_lookahead[k] = ISACencUB_obj->data_buffer_float[UB_LOOKAHEAD + k]; - } - - /* Find coefficients for perceptual pre-filters. */ - WebRtcIsac_GetLpcCoefUb(LP_lookahead, &ISACencUB_obj->maskfiltstr_obj, - &lpcVecs[UB_LPC_ORDER], corr, varscale, isac16kHz); - - memcpy(ISACencUB_obj->lastLPCVec, - &lpcVecs[(UB16_LPC_VEC_PER_FRAME - 1) * (UB_LPC_ORDER)], - sizeof(double) * UB_LPC_ORDER); - - /* Code LPC model and shape - gains not quantized yet. */ - WebRtcIsac_EncodeLpcUB(lpcVecs, &ISACencUB_obj->bitstr_obj, - percepFilterParams, isac16kHz, - &ISACencUB_obj->SaveEnc_obj); - - /* the first set of lpc parameters are from the last sub-frame of - * the previous frame. so we don't care about them. 
*/ - WebRtcIsac_GetLpcGain(s2nr, &percepFilterParams[UB_LPC_ORDER + 1], - (SUBFRAMES << 1), lpcGains, corr, varscale); - - /* Store the state of arithmetic coder before coding LPC gains */ - transcodingParam.stream_index = ISACencUB_obj->bitstr_obj.stream_index; - transcodingParam.W_upper = ISACencUB_obj->bitstr_obj.W_upper; - transcodingParam.streamval = ISACencUB_obj->bitstr_obj.streamval; - transcodingParam.stream[0] = - ISACencUB_obj->bitstr_obj.stream[ISACencUB_obj->bitstr_obj.stream_index - - 2]; - transcodingParam.stream[1] = - ISACencUB_obj->bitstr_obj.stream[ISACencUB_obj->bitstr_obj.stream_index - - 1]; - transcodingParam.stream[2] = - ISACencUB_obj->bitstr_obj.stream[ISACencUB_obj->bitstr_obj.stream_index]; - - /* Store LPC Gains before encoding them. */ - for (k = 0; k < SUBFRAMES; k++) { - transcodingParam.loFiltGain[k] = lpcGains[k]; - transcodingParam.hiFiltGain[k] = lpcGains[SUBFRAMES + k]; - } - - /* Store the gains for multiple encoding. */ - memcpy(ISACencUB_obj->SaveEnc_obj.lpcGain, lpcGains, - (SUBFRAMES << 1) * sizeof(double)); - - WebRtcIsac_EncodeLpcGainUb(lpcGains, &ISACencUB_obj->bitstr_obj, - ISACencUB_obj->SaveEnc_obj.lpcGainIndex); - WebRtcIsac_EncodeLpcGainUb( - &lpcGains[SUBFRAMES], &ISACencUB_obj->bitstr_obj, - &ISACencUB_obj->SaveEnc_obj.lpcGainIndex[SUBFRAMES]); - - /* Get the correct value for the payload limit and calculate the number of - bytes left for coding the spectrum. It is a 30ms frame - Subract 3 because termination process may add 3 bytes */ - payloadLimitBytes = ISACencUB_obj->maxPayloadSizeBytes - - ISACencUB_obj->numBytesUsed - 3; - bytesLeftSpecCoding = payloadLimitBytes - - ISACencUB_obj->bitstr_obj.stream_index; - - for (k = 0; k < (SUBFRAMES << 1); k++) { - percepFilterParams[k * (UB_LPC_ORDER + 1) + (UB_LPC_ORDER + 1)] = - lpcGains[k]; - } - - /* LPC filtering (using normalized lattice filter), */ - /* first half-frame. */ - WebRtcIsac_NormLatticeFilterMa(UB_LPC_ORDER, - ISACencUB_obj->maskfiltstr_obj.PreStateLoF, - ISACencUB_obj->maskfiltstr_obj.PreStateLoG, - &ISACencUB_obj->data_buffer_float[0], - &percepFilterParams[UB_LPC_ORDER + 1], - &LP_lookahead[0]); - - /* Second half-frame filtering. */ - WebRtcIsac_NormLatticeFilterMa( - UB_LPC_ORDER, ISACencUB_obj->maskfiltstr_obj.PreStateLoF, - ISACencUB_obj->maskfiltstr_obj.PreStateLoG, - &ISACencUB_obj->data_buffer_float[FRAMESAMPLES_HALF], - &percepFilterParams[(UB_LPC_ORDER + 1) + SUBFRAMES * (UB_LPC_ORDER + 1)], - &LP_lookahead[FRAMESAMPLES_HALF]); - - WebRtcIsac_Time2Spec(transform_tables, - &LP_lookahead[0], &LP_lookahead[FRAMESAMPLES_HALF], - fre, fim, &ISACencUB_obj->fftstr_obj); - - /* Store FFT coefficients for multiple encoding. */ - memcpy(ISACencUB_obj->SaveEnc_obj.realFFT, fre, sizeof(fre)); - memcpy(ISACencUB_obj->SaveEnc_obj.imagFFT, fim, sizeof(fim)); - - /* Prepare the audio buffer for the next packet - * move the last 3 ms to the beginning of the buffer. */ - memcpy(ISACencUB_obj->data_buffer_float, - &ISACencUB_obj->data_buffer_float[FRAMESAMPLES], - LB_TOTAL_DELAY_SAMPLES * sizeof(float)); - /* start writing with 3 ms delay to compensate for the delay - * of the lower-band. */ - ISACencUB_obj->buffer_index = LB_TOTAL_DELAY_SAMPLES; - - /* Save the bit-stream object at this point for FEC. */ - memcpy(&ISACencUB_obj->SaveEnc_obj.bitStreamObj, &ISACencUB_obj->bitstr_obj, - sizeof(Bitstr)); - - /* Qantization and lossless coding */ - /* Note that there is no pitch-gain for this band so kAveragePitchGain = 0 - * is passed to the function. 
In fact, the function ignores the 3rd parameter - * for this band. */ - err = WebRtcIsac_EncodeSpec(fre, fim, kAveragePitchGain, kIsacUpperBand16, - &ISACencUB_obj->bitstr_obj); - if ((err < 0) && (err != -ISAC_DISALLOWED_BITSTREAM_LENGTH)) { - return err; - } - - if ((ISACencUB_obj->bitstr_obj.stream_index > payloadLimitBytes) || - (err == -ISAC_DISALLOWED_BITSTREAM_LENGTH)) { - err = LimitPayloadUb(ISACencUB_obj, payloadLimitBytes, bytesLeftSpecCoding, - &transcodingParam, fre, fim, lpcGains, - kIsacUpperBand16, err); - } - if (err < 0) { - return err; - } - /* Complete arithmetic coding. */ - return WebRtcIsac_EncTerminate(&ISACencUB_obj->bitstr_obj); -} - - -int WebRtcIsac_EncodeUb12(const TransformTables* transform_tables, - float* in, ISACUBEncStruct* ISACencUB_obj, - int32_t jitterInfo) { - int err; - int k; - - double lpcVecs[UB_LPC_ORDER * UB_LPC_VEC_PER_FRAME]; - - double percepFilterParams[(1 + UB_LPC_ORDER) * SUBFRAMES]; - float LP[FRAMESAMPLES_HALF]; - float HP[FRAMESAMPLES_HALF]; - - double LP_lookahead[FRAMESAMPLES_HALF]; - double HP_lookahead[FRAMESAMPLES_HALF]; - double LPw[FRAMESAMPLES_HALF]; - - double HPw[FRAMESAMPLES_HALF]; - int16_t fre[FRAMESAMPLES_HALF]; /* Q7 */ - int16_t fim[FRAMESAMPLES_HALF]; /* Q7 */ - - int status = 0; - - double varscale[1]; - - double corr[UB_LPC_GAIN_DIM][UB_LPC_ORDER + 1]; - double lpcGains[SUBFRAMES]; - transcode_obj transcodingParam; - uint16_t payloadLimitBytes; - double s2nr; - const int16_t kAveragePitchGain = 0.0; - double bytesLeftSpecCoding; - - /* Buffer speech samples (by 10ms packet) until the framelength is */ - /* reached (30 ms). */ - /********************************************************************/ - - /* Fill the buffer with 10ms input data. */ - memcpy(&ISACencUB_obj->data_buffer_float[ISACencUB_obj->buffer_index], in, - FRAMESAMPLES_10ms * sizeof(float)); - - /* if buffer-size is not equal to current frame-size then increase the - index and return. We do the encoding when we have enough audio. */ - if (ISACencUB_obj->buffer_index + FRAMESAMPLES_10ms < FRAMESAMPLES) { - ISACencUB_obj->buffer_index += FRAMESAMPLES_10ms; - return 0; - } - /* If buffer reached the right size, reset index and continue - with encoding the frame */ - ISACencUB_obj->buffer_index = 0; - - /* End of buffer function */ - /**************************/ - - /* Encoding */ - /************/ - - /* Reset bit-stream. */ - WebRtcIsac_ResetBitstream(&(ISACencUB_obj->bitstr_obj)); - - /* Encoding bandwidth information. */ - WebRtcIsac_EncodeJitterInfo(jitterInfo, &ISACencUB_obj->bitstr_obj); - status = WebRtcIsac_EncodeBandwidth(isac12kHz, &ISACencUB_obj->bitstr_obj); - if (status < 0) { - return status; - } - - s2nr = WebRtcIsac_GetSnr(ISACencUB_obj->bottleneck, FRAMESAMPLES); - - /* Split signal in two bands. */ - WebRtcIsac_SplitAndFilterFloat(ISACencUB_obj->data_buffer_float, HP, LP, - HP_lookahead, LP_lookahead, - &ISACencUB_obj->prefiltbankstr_obj); - - /* Find coefficients for perceptual pre-filters. */ - WebRtcIsac_GetLpcCoefUb(LP_lookahead, &ISACencUB_obj->maskfiltstr_obj, - lpcVecs, corr, varscale, isac12kHz); - - /* Code LPC model and shape - gains not quantized yet. */ - WebRtcIsac_EncodeLpcUB(lpcVecs, &ISACencUB_obj->bitstr_obj, - percepFilterParams, isac12kHz, - &ISACencUB_obj->SaveEnc_obj); - - WebRtcIsac_GetLpcGain(s2nr, percepFilterParams, SUBFRAMES, lpcGains, corr, - varscale); - - /* Store the state of arithmetic coder before coding LPC gains. 
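The snapshot mentioned just above is what makes the later payload limiting possible: the encoder records the arithmetic coder's interval, value, write index and the last few stream bytes before coding the LPC gains, so LimitPayloadUb can rewind to this point and re-encode with scaled-down gains if the coded spectrum overruns the payload limit. A rough sketch of that save/restore pattern with a simplified, illustrative state struct (not the real Bitstr/transcode_obj layout):

#include <stdint.h>
#include <string.h>

typedef struct {
  uint32_t W_upper;      /* arithmetic-coder interval width */
  uint32_t streamval;    /* arithmetic-coder value */
  uint32_t stream_index; /* next byte to be written */
  uint8_t stream[600];   /* encoded bytes */
} Coder;

typedef struct {
  uint32_t W_upper;
  uint32_t streamval;
  uint32_t stream_index;
  uint8_t tail[3];       /* the last bytes that may still change */
} CoderSnapshot;

/* Assumes at least two bytes have already been written to c->stream. */
static void SaveSnapshot(const Coder* c, CoderSnapshot* s) {
  s->W_upper = c->W_upper;
  s->streamval = c->streamval;
  s->stream_index = c->stream_index;
  memcpy(s->tail, &c->stream[c->stream_index - 2], 3);
}

static void RestoreSnapshot(Coder* c, const CoderSnapshot* s) {
  c->W_upper = s->W_upper;
  c->streamval = s->streamval;
  c->stream_index = s->stream_index;
  memcpy(&c->stream[c->stream_index - 2], s->tail, 3);
}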
*/ - transcodingParam.W_upper = ISACencUB_obj->bitstr_obj.W_upper; - transcodingParam.stream_index = ISACencUB_obj->bitstr_obj.stream_index; - transcodingParam.streamval = ISACencUB_obj->bitstr_obj.streamval; - transcodingParam.stream[0] = - ISACencUB_obj->bitstr_obj.stream[ISACencUB_obj->bitstr_obj.stream_index - - 2]; - transcodingParam.stream[1] = - ISACencUB_obj->bitstr_obj.stream[ISACencUB_obj->bitstr_obj.stream_index - - 1]; - transcodingParam.stream[2] = - ISACencUB_obj->bitstr_obj.stream[ISACencUB_obj->bitstr_obj.stream_index]; - - /* Store LPC Gains before encoding them. */ - for (k = 0; k < SUBFRAMES; k++) { - transcodingParam.loFiltGain[k] = lpcGains[k]; - } - - /* Store the gains for multiple encoding. */ - memcpy(ISACencUB_obj->SaveEnc_obj.lpcGain, lpcGains, SUBFRAMES * - sizeof(double)); - - WebRtcIsac_EncodeLpcGainUb(lpcGains, &ISACencUB_obj->bitstr_obj, - ISACencUB_obj->SaveEnc_obj.lpcGainIndex); - - for (k = 0; k < SUBFRAMES; k++) { - percepFilterParams[k * (UB_LPC_ORDER + 1)] = lpcGains[k]; - } - - /* perceptual pre-filtering (using normalized lattice filter) */ - /* low-band filtering */ - WebRtcIsac_NormLatticeFilterMa(UB_LPC_ORDER, - ISACencUB_obj->maskfiltstr_obj.PreStateLoF, - ISACencUB_obj->maskfiltstr_obj.PreStateLoG, LP, - percepFilterParams, LPw); - - /* Get the correct value for the payload limit and calculate the number - of bytes left for coding the spectrum. It is a 30ms frame Subract 3 - because termination process may add 3 bytes */ - payloadLimitBytes = ISACencUB_obj->maxPayloadSizeBytes - - ISACencUB_obj->numBytesUsed - 3; - bytesLeftSpecCoding = payloadLimitBytes - - ISACencUB_obj->bitstr_obj.stream_index; - - memset(HPw, 0, sizeof(HPw)); - - /* Transform */ - WebRtcIsac_Time2Spec(transform_tables, - LPw, HPw, fre, fim, &ISACencUB_obj->fftstr_obj); - - /* Store FFT coefficients for multiple encoding. */ - memcpy(ISACencUB_obj->SaveEnc_obj.realFFT, fre, - sizeof(ISACencUB_obj->SaveEnc_obj.realFFT)); - memcpy(ISACencUB_obj->SaveEnc_obj.imagFFT, fim, - sizeof(ISACencUB_obj->SaveEnc_obj.imagFFT)); - - /* Save the bit-stream object at this point for FEC. */ - memcpy(&ISACencUB_obj->SaveEnc_obj.bitStreamObj, - &ISACencUB_obj->bitstr_obj, sizeof(Bitstr)); - - /* Quantization and loss-less coding */ - /* The 4th parameter to this function is pitch-gain, which is only used - * when encoding 0-8 kHz band, and irrelevant in this function, therefore, - * we insert zero here. */ - err = WebRtcIsac_EncodeSpec(fre, fim, kAveragePitchGain, kIsacUpperBand12, - &ISACencUB_obj->bitstr_obj); - if ((err < 0) && (err != -ISAC_DISALLOWED_BITSTREAM_LENGTH)) { - /* There has been an error but it was not too large - payload (we can cure too large payload) */ - return err; - } - - if ((ISACencUB_obj->bitstr_obj.stream_index > payloadLimitBytes) || - (err == -ISAC_DISALLOWED_BITSTREAM_LENGTH)) { - err = LimitPayloadUb(ISACencUB_obj, payloadLimitBytes, bytesLeftSpecCoding, - &transcodingParam, fre, fim, lpcGains, - kIsacUpperBand12, err); - } - if (err < 0) { - return err; - } - /* Complete arithmetic coding. */ - return WebRtcIsac_EncTerminate(&ISACencUB_obj->bitstr_obj); -} - - - - - - -/* This function is used to create a new bit-stream with new BWE. - The same data as previously encoded with the function WebRtcIsac_Encoder(). - The data needed is taken from the structure, where it was stored - when calling the encoder. 
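The stored-data encoders that follow implement a simple transcoding trick: given a scale factor strictly between 0 and 1, the saved LPC gains and DFT coefficients are multiplied down before being entropy-coded again, which yields a lower-rate bitstream from the same analysis. A minimal sketch of the spectral part of that scaling (function name and types illustrative):

#include <stdint.h>
#include <stddef.h>

/* Scale saved spectral coefficients before re-encoding them at a lower rate.
 * A scale outside (0, 1) means "re-encode unchanged". */
static void ScaleStoredSpectrum(const int16_t* saved_re, const int16_t* saved_im,
                                int16_t* out_re, int16_t* out_im,
                                size_t len, float scale) {
  size_t i;
  if (scale <= 0.0f || scale >= 1.0f) {
    for (i = 0; i < len; i++) {
      out_re[i] = saved_re[i];
      out_im[i] = saved_im[i];
    }
    return;
  }
  for (i = 0; i < len; i++) {
    out_re[i] = (int16_t)(scale * (float)saved_re[i]);
    out_im[i] = (int16_t)(scale * (float)saved_im[i]);
  }
}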
*/ - -int WebRtcIsac_EncodeStoredDataLb(const IsacSaveEncoderData* ISACSavedEnc_obj, - Bitstr* ISACBitStr_obj, int BWnumber, - float scale) { - int ii; - int status; - int BWno = BWnumber; - - const uint16_t* WebRtcIsac_kQPitchGainCdf_ptr[1]; - const uint16_t** cdf; - - double tmpLPCcoeffs_lo[(ORDERLO + 1)*SUBFRAMES * 2]; - double tmpLPCcoeffs_hi[(ORDERHI + 1)*SUBFRAMES * 2]; - int tmpLPCindex_g[12 * 2]; - int16_t tmp_fre[FRAMESAMPLES], tmp_fim[FRAMESAMPLES]; - const int kModel = 0; - - /* Sanity Check - possible values for BWnumber is 0 - 23. */ - if ((BWnumber < 0) || (BWnumber > 23)) { - return -ISAC_RANGE_ERROR_BW_ESTIMATOR; - } - - /* Reset bit-stream. */ - WebRtcIsac_ResetBitstream(ISACBitStr_obj); - - /* Encode frame length */ - status = WebRtcIsac_EncodeFrameLen(ISACSavedEnc_obj->framelength, - ISACBitStr_obj); - if (status < 0) { - /* Wrong frame size. */ - return status; - } - - /* Transcoding */ - if ((scale > 0.0) && (scale < 1.0)) { - /* Compensate LPC gain. */ - for (ii = 0; - ii < ((ORDERLO + 1)* SUBFRAMES * (1 + ISACSavedEnc_obj->startIdx)); - ii++) { - tmpLPCcoeffs_lo[ii] = scale * ISACSavedEnc_obj->LPCcoeffs_lo[ii]; - } - for (ii = 0; - ii < ((ORDERHI + 1) * SUBFRAMES * (1 + ISACSavedEnc_obj->startIdx)); - ii++) { - tmpLPCcoeffs_hi[ii] = scale * ISACSavedEnc_obj->LPCcoeffs_hi[ii]; - } - /* Scale DFT. */ - for (ii = 0; - ii < (FRAMESAMPLES_HALF * (1 + ISACSavedEnc_obj->startIdx)); - ii++) { - tmp_fre[ii] = (int16_t)((scale) * (float)ISACSavedEnc_obj->fre[ii]); - tmp_fim[ii] = (int16_t)((scale) * (float)ISACSavedEnc_obj->fim[ii]); - } - } else { - for (ii = 0; - ii < (KLT_ORDER_GAIN * (1 + ISACSavedEnc_obj->startIdx)); - ii++) { - tmpLPCindex_g[ii] = ISACSavedEnc_obj->LPCindex_g[ii]; - } - for (ii = 0; - ii < (FRAMESAMPLES_HALF * (1 + ISACSavedEnc_obj->startIdx)); - ii++) { - tmp_fre[ii] = ISACSavedEnc_obj->fre[ii]; - tmp_fim[ii] = ISACSavedEnc_obj->fim[ii]; - } - } - - /* Encode bandwidth estimate. */ - WebRtcIsac_EncodeReceiveBw(&BWno, ISACBitStr_obj); - - /* Loop over number of 30 msec */ - for (ii = 0; ii <= ISACSavedEnc_obj->startIdx; ii++) { - /* Encode pitch gains. */ - *WebRtcIsac_kQPitchGainCdf_ptr = WebRtcIsac_kQPitchGainCdf; - WebRtcIsac_EncHistMulti(ISACBitStr_obj, - &ISACSavedEnc_obj->pitchGain_index[ii], - WebRtcIsac_kQPitchGainCdf_ptr, 1); - - /* Entropy coding of quantization pitch lags */ - /* Voicing classification. */ - if (ISACSavedEnc_obj->meanGain[ii] < 0.2) { - cdf = WebRtcIsac_kQPitchLagCdfPtrLo; - } else if (ISACSavedEnc_obj->meanGain[ii] < 0.4) { - cdf = WebRtcIsac_kQPitchLagCdfPtrMid; - } else { - cdf = WebRtcIsac_kQPitchLagCdfPtrHi; - } - WebRtcIsac_EncHistMulti(ISACBitStr_obj, - &ISACSavedEnc_obj->pitchIndex[PITCH_SUBFRAMES * ii], - cdf, PITCH_SUBFRAMES); - - /* LPC */ - /* Only one model exists. The entropy coding is done only for backward - * compatibility. */ - WebRtcIsac_EncHistMulti(ISACBitStr_obj, &kModel, - WebRtcIsac_kQKltModelCdfPtr, 1); - /* Entropy coding of quantization indices - LPC shape only. */ - WebRtcIsac_EncHistMulti(ISACBitStr_obj, - &ISACSavedEnc_obj->LPCindex_s[KLT_ORDER_SHAPE * ii], - WebRtcIsac_kQKltCdfPtrShape, - KLT_ORDER_SHAPE); - - /* If transcoding, get new LPC gain indices */ - if (scale < 1.0) { - WebRtcIsac_TranscodeLPCCoef( - &tmpLPCcoeffs_lo[(ORDERLO + 1) * SUBFRAMES * ii], - &tmpLPCcoeffs_hi[(ORDERHI + 1)*SUBFRAMES * ii], - &tmpLPCindex_g[KLT_ORDER_GAIN * ii]); - } - - /* Entropy coding of quantization indices - LPC gain. 
*/ - WebRtcIsac_EncHistMulti(ISACBitStr_obj, &tmpLPCindex_g[KLT_ORDER_GAIN * ii], - WebRtcIsac_kQKltCdfPtrGain, KLT_ORDER_GAIN); - - /* Quantization and loss-less coding. */ - status = WebRtcIsac_EncodeSpec(&tmp_fre[ii * FRAMESAMPLES_HALF], - &tmp_fim[ii * FRAMESAMPLES_HALF], - ISACSavedEnc_obj->AvgPitchGain[ii], - kIsacLowerBand, ISACBitStr_obj); - if (status < 0) { - return status; - } - } - /* Complete arithmetic coding. */ - return WebRtcIsac_EncTerminate(ISACBitStr_obj); -} - - -int WebRtcIsac_EncodeStoredDataUb( - const ISACUBSaveEncDataStruct* ISACSavedEnc_obj, - Bitstr* bitStream, - int32_t jitterInfo, - float scale, - enum ISACBandwidth bandwidth) { - int n; - int err; - double lpcGain[SUBFRAMES]; - int16_t realFFT[FRAMESAMPLES_HALF]; - int16_t imagFFT[FRAMESAMPLES_HALF]; - const uint16_t** shape_cdf; - int shape_len; - const int16_t kAveragePitchGain = 0.0; - enum ISACBand band; - /* Reset bitstream. */ - WebRtcIsac_ResetBitstream(bitStream); - - /* Encode jitter index. */ - WebRtcIsac_EncodeJitterInfo(jitterInfo, bitStream); - - err = WebRtcIsac_EncodeBandwidth(bandwidth, bitStream); - if (err < 0) { - return err; - } - - /* Encode LPC-shape. */ - if (bandwidth == isac12kHz) { - shape_cdf = WebRtcIsac_kLpcShapeCdfMatUb12; - shape_len = UB_LPC_ORDER * UB_LPC_VEC_PER_FRAME; - band = kIsacUpperBand12; - } else { - shape_cdf = WebRtcIsac_kLpcShapeCdfMatUb16; - shape_len = UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME; - band = kIsacUpperBand16; - } - WebRtcIsac_EncHistMulti(bitStream, ISACSavedEnc_obj->indexLPCShape, - shape_cdf, shape_len); - - if ((scale <= 0.0) || (scale >= 1.0)) { - /* We only consider scales between zero and one. */ - WebRtcIsac_EncHistMulti(bitStream, ISACSavedEnc_obj->lpcGainIndex, - WebRtcIsac_kLpcGainCdfMat, UB_LPC_GAIN_DIM); - if (bandwidth == isac16kHz) { - /* Store gain indices of the second half. */ - WebRtcIsac_EncHistMulti(bitStream, - &ISACSavedEnc_obj->lpcGainIndex[SUBFRAMES], - WebRtcIsac_kLpcGainCdfMat, UB_LPC_GAIN_DIM); - } - /* Store FFT coefficients. */ - err = WebRtcIsac_EncodeSpec(ISACSavedEnc_obj->realFFT, - ISACSavedEnc_obj->imagFFT, kAveragePitchGain, - band, bitStream); - } else { - /* Scale LPC gain and FFT coefficients. */ - for (n = 0; n < SUBFRAMES; n++) { - lpcGain[n] = scale * ISACSavedEnc_obj->lpcGain[n]; - } - /* Store LPC gains. */ - WebRtcIsac_StoreLpcGainUb(lpcGain, bitStream); - - if (bandwidth == isac16kHz) { - /* Scale and code the gains of the second half of the frame, if 16kHz. */ - for (n = 0; n < SUBFRAMES; n++) { - lpcGain[n] = scale * ISACSavedEnc_obj->lpcGain[n + SUBFRAMES]; - } - WebRtcIsac_StoreLpcGainUb(lpcGain, bitStream); - } - - for (n = 0; n < FRAMESAMPLES_HALF; n++) { - realFFT[n] = (int16_t)(scale * (float)ISACSavedEnc_obj->realFFT[n] + - 0.5f); - imagFFT[n] = (int16_t)(scale * (float)ISACSavedEnc_obj->imagFFT[n] + - 0.5f); - } - /* Store FFT coefficients. */ - err = WebRtcIsac_EncodeSpec(realFFT, imagFFT, kAveragePitchGain, - band, bitStream); - } - if (err < 0) { - /* Error happened while encoding FFT coefficients. */ - return err; - } - - /* Complete arithmetic coding. */ - return WebRtcIsac_EncTerminate(bitStream); -} - -int16_t WebRtcIsac_GetRedPayloadUb( - const ISACUBSaveEncDataStruct* ISACSavedEncObj, - Bitstr* bitStreamObj, - enum ISACBandwidth bandwidth) { - int n; - int16_t status; - int16_t realFFT[FRAMESAMPLES_HALF]; - int16_t imagFFT[FRAMESAMPLES_HALF]; - enum ISACBand band; - const int16_t kAveragePitchGain = 0.0; - /* Store bit-stream object. 
*/ - memcpy(bitStreamObj, &ISACSavedEncObj->bitStreamObj, sizeof(Bitstr)); - - /* Scale FFT coefficients. */ - for (n = 0; n < FRAMESAMPLES_HALF; n++) { - realFFT[n] = (int16_t)((float)ISACSavedEncObj->realFFT[n] * - RCU_TRANSCODING_SCALE_UB + 0.5); - imagFFT[n] = (int16_t)((float)ISACSavedEncObj->imagFFT[n] * - RCU_TRANSCODING_SCALE_UB + 0.5); - } - - band = (bandwidth == isac12kHz) ? kIsacUpperBand12 : kIsacUpperBand16; - status = WebRtcIsac_EncodeSpec(realFFT, imagFFT, kAveragePitchGain, band, - bitStreamObj); - if (status < 0) { - return status; - } else { - /* Terminate entropy coding */ - return WebRtcIsac_EncTerminate(bitStreamObj); - } -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/encode_lpc_swb.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/encode_lpc_swb.c deleted file mode 100644 index 7b02e64a0148..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/encode_lpc_swb.c +++ /dev/null @@ -1,706 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * code_LPC_UB.c - * - * This file contains definition of functions used to - * encode LPC parameters (Shape & gain) of the upper band. - * - */ - -#include -#include -#include - -#include "modules/audio_coding/codecs/isac/main/source/encode_lpc_swb.h" -#include "modules/audio_coding/codecs/isac/main/source/lpc_gain_swb_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/lpc_shape_swb12_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/lpc_shape_swb16_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/settings.h" - -/****************************************************************************** - * WebRtcIsac_RemoveLarMean() - * - * Remove the means from LAR coefficients. - * - * Input: - * -lar : pointer to lar vectors. LAR vectors are - * concatenated. - * -bandwidth : indicates if the given LAR vectors belong - * to SWB-12kHz or SWB-16kHz. - * - * Output: - * -lar : pointer to mean-removed LAR:s. - * - * - */ -int16_t -WebRtcIsac_RemoveLarMean( - double* lar, - int16_t bandwidth) -{ - int16_t coeffCntr; - int16_t vecCntr; - int16_t numVec; - const double* meanLAR; - switch(bandwidth) - { - case isac12kHz: - { - numVec = UB_LPC_VEC_PER_FRAME; - meanLAR = WebRtcIsac_kMeanLarUb12; - break; - } - case isac16kHz: - { - numVec = UB16_LPC_VEC_PER_FRAME; - meanLAR = WebRtcIsac_kMeanLarUb16; - break; - } - default: - return -1; - } - - for(vecCntr = 0; vecCntr < numVec; vecCntr++) - { - for(coeffCntr = 0; coeffCntr < UB_LPC_ORDER; coeffCntr++) - { - // REMOVE MEAN - *lar++ -= meanLAR[coeffCntr]; - } - } - return 0; -} - -/****************************************************************************** - * WebRtcIsac_DecorrelateIntraVec() - * - * Remove the correlation amonge the components of LAR vectors. If LAR vectors - * of one frame are put in a matrix where each column is a LAR vector of a - * sub-frame, then this is equivalent to multiplying the LAR matrix with - * a decorrelting mtrix from left. - * - * Input: - * -inLar : pointer to mean-removed LAR vecrtors. 
- * -bandwidth : indicates if the given LAR vectors belong - * to SWB-12kHz or SWB-16kHz. - * - * Output: - * -out : decorrelated LAR vectors. - */ -int16_t -WebRtcIsac_DecorrelateIntraVec( - const double* data, - double* out, - int16_t bandwidth) -{ - const double* ptrData; - const double* ptrRow; - int16_t rowCntr; - int16_t colCntr; - int16_t larVecCntr; - int16_t numVec; - const double* decorrMat; - switch(bandwidth) - { - case isac12kHz: - { - decorrMat = &WebRtcIsac_kIntraVecDecorrMatUb12[0][0]; - numVec = UB_LPC_VEC_PER_FRAME; - break; - } - case isac16kHz: - { - decorrMat = &WebRtcIsac_kIintraVecDecorrMatUb16[0][0]; - numVec = UB16_LPC_VEC_PER_FRAME; - break; - } - default: - return -1; - } - - // - // decorrMat * data - // - // data is assumed to contain 'numVec' of LAR - // vectors (mean removed) each of dimension 'UB_LPC_ORDER' - // concatenated one after the other. - // - - ptrData = data; - for(larVecCntr = 0; larVecCntr < numVec; larVecCntr++) - { - for(rowCntr = 0; rowCntr < UB_LPC_ORDER; rowCntr++) - { - ptrRow = &decorrMat[rowCntr * UB_LPC_ORDER]; - *out = 0; - for(colCntr = 0; colCntr < UB_LPC_ORDER; colCntr++) - { - *out += ptrData[colCntr] * ptrRow[colCntr]; - } - out++; - } - ptrData += UB_LPC_ORDER; - } - return 0; -} - -/****************************************************************************** - * WebRtcIsac_DecorrelateInterVec() - * - * Remover the correlation among mean-removed LAR vectors. If LAR vectors - * of one frame are put in a matrix where each column is a LAR vector of a - * sub-frame, then this is equivalent to multiplying the LAR matrix with - * a decorrelting mtrix from right. - * - * Input: - * -data : pointer to matrix of LAR vectors. The matrix - * is stored column-wise. - * -bandwidth : indicates if the given LAR vectors belong - * to SWB-12kHz or SWB-16kHz. - * - * Output: - * -out : decorrelated LAR vectors. - */ -int16_t -WebRtcIsac_DecorrelateInterVec( - const double* data, - double* out, - int16_t bandwidth) -{ - int16_t coeffCntr; - int16_t rowCntr; - int16_t colCntr; - const double* decorrMat; - int16_t interVecDim; - - switch(bandwidth) - { - case isac12kHz: - { - decorrMat = &WebRtcIsac_kInterVecDecorrMatUb12[0][0]; - interVecDim = UB_LPC_VEC_PER_FRAME; - break; - } - case isac16kHz: - { - decorrMat = &WebRtcIsac_kInterVecDecorrMatUb16[0][0]; - interVecDim = UB16_LPC_VEC_PER_FRAME; - break; - } - default: - return -1; - } - - // - // data * decorrMat - // - // data is of size 'interVecDim' * 'UB_LPC_ORDER' - // That is 'interVecDim' of LAR vectors (mean removed) - // in columns each of dimension 'UB_LPC_ORDER'. - // matrix is stored column-wise. - // - - for(coeffCntr = 0; coeffCntr < UB_LPC_ORDER; coeffCntr++) - { - for(colCntr = 0; colCntr < interVecDim; colCntr++) - { - out[coeffCntr + colCntr * UB_LPC_ORDER] = 0; - for(rowCntr = 0; rowCntr < interVecDim; rowCntr++) - { - out[coeffCntr + colCntr * UB_LPC_ORDER] += - data[coeffCntr + rowCntr * UB_LPC_ORDER] * - decorrMat[rowCntr * interVecDim + colCntr]; - } - } - } - return 0; -} - -/****************************************************************************** - * WebRtcIsac_QuantizeUncorrLar() - * - * Quantize the uncorrelated parameters. - * - * Input: - * -data : uncorrelated LAR vectors. - * -bandwidth : indicates if the given LAR vectors belong - * to SWB-12kHz or SWB-16kHz. - * - * Output: - * -data : quantized version of the input. - * -idx : pointer to quantization indices. 
- */ -double -WebRtcIsac_QuantizeUncorrLar( - double* data, - int* recIdx, - int16_t bandwidth) -{ - int16_t cntr; - int32_t idx; - int16_t interVecDim; - const double* leftRecPoint; - double quantizationStepSize; - const int16_t* numQuantCell; - switch(bandwidth) - { - case isac12kHz: - { - leftRecPoint = WebRtcIsac_kLpcShapeLeftRecPointUb12; - quantizationStepSize = WebRtcIsac_kLpcShapeQStepSizeUb12; - numQuantCell = WebRtcIsac_kLpcShapeNumRecPointUb12; - interVecDim = UB_LPC_VEC_PER_FRAME; - break; - } - case isac16kHz: - { - leftRecPoint = WebRtcIsac_kLpcShapeLeftRecPointUb16; - quantizationStepSize = WebRtcIsac_kLpcShapeQStepSizeUb16; - numQuantCell = WebRtcIsac_kLpcShapeNumRecPointUb16; - interVecDim = UB16_LPC_VEC_PER_FRAME; - break; - } - default: - return -1; - } - - // - // Quantize the parametrs. - // - for(cntr = 0; cntr < UB_LPC_ORDER * interVecDim; cntr++) - { - idx = (int32_t)floor((*data - leftRecPoint[cntr]) / - quantizationStepSize + 0.5); - if(idx < 0) - { - idx = 0; - } - else if(idx >= numQuantCell[cntr]) - { - idx = numQuantCell[cntr] - 1; - } - - *data++ = leftRecPoint[cntr] + idx * quantizationStepSize; - *recIdx++ = idx; - } - return 0; -} - - -/****************************************************************************** - * WebRtcIsac_DequantizeLpcParam() - * - * Get the quantized value of uncorrelated LARs given the quantization indices. - * - * Input: - * -idx : pointer to quantiztion indices. - * -bandwidth : indicates if the given LAR vectors belong - * to SWB-12kHz or SWB-16kHz. - * - * Output: - * -out : pointer to quantized values. - */ -int16_t -WebRtcIsac_DequantizeLpcParam( - const int* idx, - double* out, - int16_t bandwidth) -{ - int16_t cntr; - int16_t interVecDim; - const double* leftRecPoint; - double quantizationStepSize; - - switch(bandwidth) - { - case isac12kHz: - { - leftRecPoint = WebRtcIsac_kLpcShapeLeftRecPointUb12; - quantizationStepSize = WebRtcIsac_kLpcShapeQStepSizeUb12; - interVecDim = UB_LPC_VEC_PER_FRAME; - break; - } - case isac16kHz: - { - leftRecPoint = WebRtcIsac_kLpcShapeLeftRecPointUb16; - quantizationStepSize = WebRtcIsac_kLpcShapeQStepSizeUb16; - interVecDim = UB16_LPC_VEC_PER_FRAME; - break; - } - default: - return -1; - } - - // - // Dequantize given the quantization indices - // - - for(cntr = 0; cntr < UB_LPC_ORDER * interVecDim; cntr++) - { - *out++ = leftRecPoint[cntr] + *idx++ * quantizationStepSize; - } - return 0; -} - - -/****************************************************************************** - * WebRtcIsac_CorrelateIntraVec() - * - * This is the inverse of WebRtcIsac_DecorrelateIntraVec(). - * - * Input: - * -data : uncorrelated parameters. - * -bandwidth : indicates if the given LAR vectors belong - * to SWB-12kHz or SWB-16kHz. - * - * Output: - * -out : correlated parametrs. 
- */ -int16_t -WebRtcIsac_CorrelateIntraVec( - const double* data, - double* out, - int16_t bandwidth) -{ - int16_t vecCntr; - int16_t rowCntr; - int16_t colCntr; - int16_t numVec; - const double* ptrData; - const double* intraVecDecorrMat; - - switch(bandwidth) - { - case isac12kHz: - { - numVec = UB_LPC_VEC_PER_FRAME; - intraVecDecorrMat = &WebRtcIsac_kIntraVecDecorrMatUb12[0][0]; - break; - } - case isac16kHz: - { - numVec = UB16_LPC_VEC_PER_FRAME; - intraVecDecorrMat = &WebRtcIsac_kIintraVecDecorrMatUb16[0][0]; - break; - } - default: - return -1; - } - - - ptrData = data; - for(vecCntr = 0; vecCntr < numVec; vecCntr++) - { - for(colCntr = 0; colCntr < UB_LPC_ORDER; colCntr++) - { - *out = 0; - for(rowCntr = 0; rowCntr < UB_LPC_ORDER; rowCntr++) - { - *out += ptrData[rowCntr] * - intraVecDecorrMat[rowCntr * UB_LPC_ORDER + colCntr]; - } - out++; - } - ptrData += UB_LPC_ORDER; - } - return 0; -} - -/****************************************************************************** - * WebRtcIsac_CorrelateInterVec() - * - * This is the inverse of WebRtcIsac_DecorrelateInterVec(). - * - * Input: - * -data - * -bandwidth : indicates if the given LAR vectors belong - * to SWB-12kHz or SWB-16kHz. - * - * Output: - * -out : correlated parametrs. - */ -int16_t -WebRtcIsac_CorrelateInterVec( - const double* data, - double* out, - int16_t bandwidth) -{ - int16_t coeffCntr; - int16_t rowCntr; - int16_t colCntr; - int16_t interVecDim; - double myVec[UB16_LPC_VEC_PER_FRAME] = {0.0}; - const double* interVecDecorrMat; - - switch(bandwidth) - { - case isac12kHz: - { - interVecDim = UB_LPC_VEC_PER_FRAME; - interVecDecorrMat = &WebRtcIsac_kInterVecDecorrMatUb12[0][0]; - break; - } - case isac16kHz: - { - interVecDim = UB16_LPC_VEC_PER_FRAME; - interVecDecorrMat = &WebRtcIsac_kInterVecDecorrMatUb16[0][0]; - break; - } - default: - return -1; - } - - for(coeffCntr = 0; coeffCntr < UB_LPC_ORDER; coeffCntr++) - { - for(rowCntr = 0; rowCntr < interVecDim; rowCntr++) - { - myVec[rowCntr] = 0; - for(colCntr = 0; colCntr < interVecDim; colCntr++) - { - myVec[rowCntr] += data[coeffCntr + colCntr * UB_LPC_ORDER] * //*ptrData * - interVecDecorrMat[rowCntr * interVecDim + colCntr]; - //ptrData += UB_LPC_ORDER; - } - } - - for(rowCntr = 0; rowCntr < interVecDim; rowCntr++) - { - out[coeffCntr + rowCntr * UB_LPC_ORDER] = myVec[rowCntr]; - } - } - return 0; -} - -/****************************************************************************** - * WebRtcIsac_AddLarMean() - * - * This is the inverse of WebRtcIsac_RemoveLarMean() - * - * Input: - * -data : pointer to mean-removed LAR:s. - * -bandwidth : indicates if the given LAR vectors belong - * to SWB-12kHz or SWB-16kHz. - * - * Output: - * -data : pointer to LARs. - */ -int16_t -WebRtcIsac_AddLarMean( - double* data, - int16_t bandwidth) -{ - int16_t coeffCntr; - int16_t vecCntr; - int16_t numVec; - const double* meanLAR; - - switch(bandwidth) - { - case isac12kHz: - { - numVec = UB_LPC_VEC_PER_FRAME; - meanLAR = WebRtcIsac_kMeanLarUb12; - break; - } - case isac16kHz: - { - numVec = UB16_LPC_VEC_PER_FRAME; - meanLAR = WebRtcIsac_kMeanLarUb16; - break; - } - default: - return -1; - } - - for(vecCntr = 0; vecCntr < numVec; vecCntr++) - { - for(coeffCntr = 0; coeffCntr < UB_LPC_ORDER; coeffCntr++) - { - *data++ += meanLAR[coeffCntr]; - } - } - return 0; -} - -/****************************************************************************** - * WebRtcIsac_ToLogDomainRemoveMean() - * - * Transform the LPC gain to log domain then remove the mean value. 
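The gain path below works in the log domain: each gain is log-transformed and mean-removed before decorrelation and quantization, and the decoder applies the exact inverse. A small self-contained version of that pair of transforms, with six gains per frame as in the deleted code and an illustrative constant standing in for WebRtcIsac_kMeanLpcGain:

#include <math.h>

#define GAIN_DIM 6                       /* six LPC gains per frame */
static const double kMeanLogGain = 3.0;  /* illustrative mean, not the real table value */

static void ToLogDomainRemoveMean(double* gains) {
  int k;
  for (k = 0; k < GAIN_DIM; k++)
    gains[k] = log(gains[k]) - kMeanLogGain;
}

static void AddMeanToLinearDomain(double* gains) {
  int k;
  for (k = 0; k < GAIN_DIM; k++)
    gains[k] = exp(gains[k] + kMeanLogGain);
}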
- * - * Input: - * -lpcGain : pointer to LPC Gain, expecting 6 LPC gains - * - * Output: - * -lpcGain : mean-removed in log domain. - */ -int16_t -WebRtcIsac_ToLogDomainRemoveMean( - double* data) -{ - int16_t coeffCntr; - for(coeffCntr = 0; coeffCntr < UB_LPC_GAIN_DIM; coeffCntr++) - { - data[coeffCntr] = log(data[coeffCntr]) - WebRtcIsac_kMeanLpcGain; - } - return 0; -} - - -/****************************************************************************** - * WebRtcIsac_DecorrelateLPGain() - * - * Decorrelate LPC gains. There are 6 LPC Gains per frame. This is like - * multiplying gain vector with decorrelating matrix. - * - * Input: - * -data : LPC gain in log-domain with mean removed. - * - * Output: - * -out : decorrelated parameters. - */ -int16_t WebRtcIsac_DecorrelateLPGain( - const double* data, - double* out) -{ - int16_t rowCntr; - int16_t colCntr; - - for(colCntr = 0; colCntr < UB_LPC_GAIN_DIM; colCntr++) - { - *out = 0; - for(rowCntr = 0; rowCntr < UB_LPC_GAIN_DIM; rowCntr++) - { - *out += data[rowCntr] * WebRtcIsac_kLpcGainDecorrMat[rowCntr][colCntr]; - } - out++; - } - return 0; -} - -/****************************************************************************** - * WebRtcIsac_QuantizeLpcGain() - * - * Quantize the decorrelated log-domain gains. - * - * Input: - * -lpcGain : uncorrelated LPC gains. - * - * Output: - * -idx : quantization indices - * -lpcGain : quantized value of the inpt. - */ -double WebRtcIsac_QuantizeLpcGain( - double* data, - int* idx) -{ - int16_t coeffCntr; - for(coeffCntr = 0; coeffCntr < UB_LPC_GAIN_DIM; coeffCntr++) - { - *idx = (int)floor((*data - WebRtcIsac_kLeftRecPointLpcGain[coeffCntr]) / - WebRtcIsac_kQSizeLpcGain + 0.5); - - if(*idx < 0) - { - *idx = 0; - } - else if(*idx >= WebRtcIsac_kNumQCellLpcGain[coeffCntr]) - { - *idx = WebRtcIsac_kNumQCellLpcGain[coeffCntr] - 1; - } - *data = WebRtcIsac_kLeftRecPointLpcGain[coeffCntr] + *idx * - WebRtcIsac_kQSizeLpcGain; - - data++; - idx++; - } - return 0; -} - -/****************************************************************************** - * WebRtcIsac_DequantizeLpcGain() - * - * Get the quantized values given the quantization indices. - * - * Input: - * -idx : pointer to quantization indices. - * - * Output: - * -lpcGains : quantized values of the given parametes. - */ -int16_t WebRtcIsac_DequantizeLpcGain( - const int* idx, - double* out) -{ - int16_t coeffCntr; - for(coeffCntr = 0; coeffCntr < UB_LPC_GAIN_DIM; coeffCntr++) - { - *out = WebRtcIsac_kLeftRecPointLpcGain[coeffCntr] + *idx * - WebRtcIsac_kQSizeLpcGain; - out++; - idx++; - } - return 0; -} - -/****************************************************************************** - * WebRtcIsac_CorrelateLpcGain() - * - * This is the inverse of WebRtcIsac_DecorrelateLPGain(). - * - * Input: - * -data : decorrelated parameters. - * - * Output: - * -out : correlated parameters. - */ -int16_t WebRtcIsac_CorrelateLpcGain( - const double* data, - double* out) -{ - int16_t rowCntr; - int16_t colCntr; - - for(rowCntr = 0; rowCntr < UB_LPC_GAIN_DIM; rowCntr++) - { - *out = 0; - for(colCntr = 0; colCntr < UB_LPC_GAIN_DIM; colCntr++) - { - *out += WebRtcIsac_kLpcGainDecorrMat[rowCntr][colCntr] * data[colCntr]; - } - out++; - } - - return 0; -} - - -/****************************************************************************** - * WebRtcIsac_AddMeanToLinearDomain() - * - * This is the inverse of WebRtcIsac_ToLogDomainRemoveMean(). - * - * Input: - * -lpcGain : LPC gain in log-domain & mean removed - * - * Output: - * -lpcGain : LPC gain in normal domain. 
- */ -int16_t WebRtcIsac_AddMeanToLinearDomain( - double* lpcGains) -{ - int16_t coeffCntr; - for(coeffCntr = 0; coeffCntr < UB_LPC_GAIN_DIM; coeffCntr++) - { - lpcGains[coeffCntr] = exp(lpcGains[coeffCntr] + WebRtcIsac_kMeanLpcGain); - } - return 0; -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/encode_lpc_swb.h b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/encode_lpc_swb.h deleted file mode 100644 index 8bc3d752c30d..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/encode_lpc_swb.h +++ /dev/null @@ -1,246 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * encode_lpc_swb.h - * - * This file contains declaration of functions used to - * encode LPC parameters (Shape & gain) of the upper band. - * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_ENCODE_LPC_SWB_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_ENCODE_LPC_SWB_H_ - -#include "modules/audio_coding/codecs/isac/main/source/settings.h" -#include "modules/audio_coding/codecs/isac/main/source/structs.h" - -/****************************************************************************** - * WebRtcIsac_RemoveLarMean() - * - * Remove the means from LAR coefficients. - * - * Input: - * -lar : pointer to lar vectors. LAR vectors are - * concatenated. - * -bandwidth : indicates if the given LAR vectors belong - * to SWB-12kHz or SWB-16kHz. - * - * Output: - * -lar : pointer to mean-removed LAR:s. - * - * - */ -int16_t WebRtcIsac_RemoveLarMean(double* lar, int16_t bandwidth); - -/****************************************************************************** - * WebRtcIsac_DecorrelateIntraVec() - * - * Remove the correlation amonge the components of LAR vectors. If LAR vectors - * of one frame are put in a matrix where each column is a LAR vector of a - * sub-frame, then this is equivalent to multiplying the LAR matrix with - * a decorrelting mtrix from left. - * - * Input: - * -inLar : pointer to mean-removed LAR vecrtors. - * -bandwidth : indicates if the given LAR vectors belong - * to SWB-12kHz or SWB-16kHz. - * - * Output: - * -out : decorrelated LAR vectors. - */ -int16_t WebRtcIsac_DecorrelateIntraVec(const double* inLAR, - double* out, - int16_t bandwidth); - -/****************************************************************************** - * WebRtcIsac_DecorrelateInterVec() - * - * Remover the correlation among mean-removed LAR vectors. If LAR vectors - * of one frame are put in a matrix where each column is a LAR vector of a - * sub-frame, then this is equivalent to multiplying the LAR matrix with - * a decorrelting mtrix from right. - * - * Input: - * -data : pointer to matrix of LAR vectors. The matrix - * is stored column-wise. - * -bandwidth : indicates if the given LAR vectors belong - * to SWB-12kHz or SWB-16kHz. - * - * Output: - * -out : decorrelated LAR vectors. - */ -int16_t WebRtcIsac_DecorrelateInterVec(const double* data, - double* out, - int16_t bandwidth); - -/****************************************************************************** - * WebRtcIsac_QuantizeUncorrLar() - * - * Quantize the uncorrelated parameters. 
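All of the shape and gain quantizers declared in this header use the same uniform scalar scheme: map the value to the nearest grid point defined by a left reconstruction point and a fixed step size, clamp the index to the number of quantization cells, and reconstruct as leftRecPoint + idx * step. A standalone sketch of that scheme (parameters illustrative, not the deleted tables):

#include <math.h>

/* Quantize one value on a uniform grid; returns the reconstructed value. */
static double QuantizeUniform(double x, double left_rec_point, double step,
                              int num_cells, int* idx_out) {
  int idx = (int)floor((x - left_rec_point) / step + 0.5);
  if (idx < 0)
    idx = 0;
  else if (idx >= num_cells)
    idx = num_cells - 1;
  *idx_out = idx;
  return left_rec_point + idx * step;
}

static double DequantizeUniform(int idx, double left_rec_point, double step) {
  return left_rec_point + idx * step;
}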
- * - * Input: - * -data : uncorrelated LAR vectors. - * -bandwidth : indicates if the given LAR vectors belong - * to SWB-12kHz or SWB-16kHz. - * - * Output: - * -data : quantized version of the input. - * -idx : pointer to quantization indices. - */ -double WebRtcIsac_QuantizeUncorrLar(double* data, int* idx, int16_t bandwidth); - -/****************************************************************************** - * WebRtcIsac_CorrelateIntraVec() - * - * This is the inverse of WebRtcIsac_DecorrelateIntraVec(). - * - * Input: - * -data : uncorrelated parameters. - * -bandwidth : indicates if the given LAR vectors belong - * to SWB-12kHz or SWB-16kHz. - * - * Output: - * -out : correlated parametrs. - */ -int16_t WebRtcIsac_CorrelateIntraVec(const double* data, - double* out, - int16_t bandwidth); - -/****************************************************************************** - * WebRtcIsac_CorrelateInterVec() - * - * This is the inverse of WebRtcIsac_DecorrelateInterVec(). - * - * Input: - * -data - * -bandwidth : indicates if the given LAR vectors belong - * to SWB-12kHz or SWB-16kHz. - * - * Output: - * -out : correlated parametrs. - */ -int16_t WebRtcIsac_CorrelateInterVec(const double* data, - double* out, - int16_t bandwidth); - -/****************************************************************************** - * WebRtcIsac_AddLarMean() - * - * This is the inverse of WebRtcIsac_RemoveLarMean() - * - * Input: - * -data : pointer to mean-removed LAR:s. - * -bandwidth : indicates if the given LAR vectors belong - * to SWB-12kHz or SWB-16kHz. - * - * Output: - * -data : pointer to LARs. - */ -int16_t WebRtcIsac_AddLarMean(double* data, int16_t bandwidth); - -/****************************************************************************** - * WebRtcIsac_DequantizeLpcParam() - * - * Get the quantized value of uncorrelated LARs given the quantization indices. - * - * Input: - * -idx : pointer to quantiztion indices. - * -bandwidth : indicates if the given LAR vectors belong - * to SWB-12kHz or SWB-16kHz. - * - * Output: - * -out : pointer to quantized values. - */ -int16_t WebRtcIsac_DequantizeLpcParam(const int* idx, - double* out, - int16_t bandwidth); - -/****************************************************************************** - * WebRtcIsac_ToLogDomainRemoveMean() - * - * Transform the LPC gain to log domain then remove the mean value. - * - * Input: - * -lpcGain : pointer to LPC Gain, expecting 6 LPC gains - * - * Output: - * -lpcGain : mean-removed in log domain. - */ -int16_t WebRtcIsac_ToLogDomainRemoveMean(double* lpGains); - -/****************************************************************************** - * WebRtcIsac_DecorrelateLPGain() - * - * Decorrelate LPC gains. There are 6 LPC Gains per frame. This is like - * multiplying gain vector with decorrelating matrix. - * - * Input: - * -data : LPC gain in log-domain with mean removed. - * - * Output: - * -out : decorrelated parameters. - */ -int16_t WebRtcIsac_DecorrelateLPGain(const double* data, double* out); - -/****************************************************************************** - * WebRtcIsac_QuantizeLpcGain() - * - * Quantize the decorrelated log-domain gains. - * - * Input: - * -lpcGain : uncorrelated LPC gains. - * - * Output: - * -idx : quantization indices - * -lpcGain : quantized value of the inpt. 
- */ -double WebRtcIsac_QuantizeLpcGain(double* lpGains, int* idx); - -/****************************************************************************** - * WebRtcIsac_DequantizeLpcGain() - * - * Get the quantized values given the quantization indices. - * - * Input: - * -idx : pointer to quantization indices. - * - * Output: - * -lpcGains : quantized values of the given parametes. - */ -int16_t WebRtcIsac_DequantizeLpcGain(const int* idx, double* lpGains); - -/****************************************************************************** - * WebRtcIsac_CorrelateLpcGain() - * - * This is the inverse of WebRtcIsac_DecorrelateLPGain(). - * - * Input: - * -data : decorrelated parameters. - * - * Output: - * -out : correlated parameters. - */ -int16_t WebRtcIsac_CorrelateLpcGain(const double* data, double* out); - -/****************************************************************************** - * WebRtcIsac_AddMeanToLinearDomain() - * - * This is the inverse of WebRtcIsac_ToLogDomainRemoveMean(). - * - * Input: - * -lpcGain : LPC gain in log-domain & mean removed - * - * Output: - * -lpcGain : LPC gain in normal domain. - */ -int16_t WebRtcIsac_AddMeanToLinearDomain(double* lpcGains); - -#endif // MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_ENCODE_LPC_SWB_H_ diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/entropy_coding.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/entropy_coding.c deleted file mode 100644 index 188c8f6b86da..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/entropy_coding.c +++ /dev/null @@ -1,2066 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * entropy_coding.c - * - * This header file defines all of the functions used to arithmetically - * encode the iSAC bistream - * - */ - - -#include "common_audio/signal_processing/include/signal_processing_library.h" -#include "modules/audio_coding/codecs/isac/main/source/entropy_coding.h" -#include "modules/audio_coding/codecs/isac/main/source/settings.h" -#include "modules/audio_coding/codecs/isac/main/source/arith_routines.h" -#include "modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/lpc_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/pitch_gain_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/pitch_lag_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/encode_lpc_swb.h" -#include "modules/audio_coding/codecs/isac/main/source/lpc_shape_swb12_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/lpc_shape_swb16_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/lpc_gain_swb_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/os_specific_inline.h" - -#include -#include - -static const uint16_t kLpcVecPerSegmentUb12 = 5; -static const uint16_t kLpcVecPerSegmentUb16 = 4; - -/* CDF array for encoder bandwidth (12 vs 16 kHz) indicator. */ -static const uint16_t kOneBitEqualProbCdf[3] = { - 0, 32768, 65535 }; - -/* Pointer to cdf array for encoder bandwidth (12 vs 16 kHz) indicator. 
*/ -static const uint16_t* const kOneBitEqualProbCdf_ptr[1] = { - kOneBitEqualProbCdf }; - -/* - * Initial cdf index for decoder of encoded bandwidth - * (12 vs 16 kHz) indicator. - */ -static const uint16_t kOneBitEqualProbInitIndex[1] = { 1 }; - - -static const int kIsSWB12 = 1; - -/* compute correlation from power spectrum */ -static void FindCorrelation(int32_t* PSpecQ12, int32_t* CorrQ7) { - int32_t summ[FRAMESAMPLES / 8]; - int32_t diff[FRAMESAMPLES / 8]; - const int16_t* CS_ptrQ9; - int32_t sum; - int k, n; - - for (k = 0; k < FRAMESAMPLES / 8; k++) { - summ[k] = (PSpecQ12[k] + PSpecQ12[FRAMESAMPLES_QUARTER - 1 - k] + 16) >> 5; - diff[k] = (PSpecQ12[k] - PSpecQ12[FRAMESAMPLES_QUARTER - 1 - k] + 16) >> 5; - } - - sum = 2; - for (n = 0; n < FRAMESAMPLES / 8; n++) { - sum += summ[n]; - } - CorrQ7[0] = sum; - - for (k = 0; k < AR_ORDER; k += 2) { - sum = 0; - CS_ptrQ9 = WebRtcIsac_kCos[k]; - for (n = 0; n < FRAMESAMPLES / 8; n++) - sum += (CS_ptrQ9[n] * diff[n] + 256) >> 9; - CorrQ7[k + 1] = sum; - } - - for (k = 1; k < AR_ORDER; k += 2) { - sum = 0; - CS_ptrQ9 = WebRtcIsac_kCos[k]; - for (n = 0; n < FRAMESAMPLES / 8; n++) - sum += (CS_ptrQ9[n] * summ[n] + 256) >> 9; - CorrQ7[k + 1] = sum; - } -} - -/* compute inverse AR power spectrum */ -/* Changed to the function used in iSAC FIX for compatibility reasons */ -static void FindInvArSpec(const int16_t* ARCoefQ12, - const int32_t gainQ10, - int32_t* CurveQ16) { - int32_t CorrQ11[AR_ORDER + 1]; - int64_t sum, tmpGain; - int32_t diffQ16[FRAMESAMPLES / 8]; - const int16_t* CS_ptrQ9; - int k, n; - int16_t round, shftVal = 0, sh; - - sum = 0; - for (n = 0; n < AR_ORDER + 1; n++) { - sum += WEBRTC_SPL_MUL(ARCoefQ12[n], ARCoefQ12[n]); /* Q24 */ - } - sum = ((sum >> 6) * 65 + 32768) >> 16; /* Q8 */ - CorrQ11[0] = (sum * gainQ10 + 256) >> 9; - - /* To avoid overflow, we shift down gainQ10 if it is large. - * We will not lose any precision */ - if (gainQ10 > 400000) { - tmpGain = gainQ10 >> 3; - round = 32; - shftVal = 6; - } else { - tmpGain = gainQ10; - round = 256; - shftVal = 9; - } - - for (k = 1; k < AR_ORDER + 1; k++) { - sum = 16384; - for (n = k; n < AR_ORDER + 1; n++) - sum += WEBRTC_SPL_MUL(ARCoefQ12[n - k], ARCoefQ12[n]); /* Q24 */ - sum >>= 15; - CorrQ11[k] = (sum * tmpGain + round) >> shftVal; - } - sum = CorrQ11[0] << 7; - for (n = 0; n < FRAMESAMPLES / 8; n++) { - CurveQ16[n] = sum; - } - for (k = 1; k < AR_ORDER; k += 2) { - for (n = 0; n < FRAMESAMPLES / 8; n++) { - CurveQ16[n] += (WebRtcIsac_kCos[k][n] * CorrQ11[k + 1] + 2) >> 2; - } - } - - CS_ptrQ9 = WebRtcIsac_kCos[0]; - - /* If CorrQ11[1] too large we avoid getting overflow in the - * calculation by shifting */ - sh = WebRtcSpl_NormW32(CorrQ11[1]); - if (CorrQ11[1] == 0) { /* Use next correlation */ - sh = WebRtcSpl_NormW32(CorrQ11[2]); - } - if (sh < 9) { - shftVal = 9 - sh; - } else { - shftVal = 0; - } - for (n = 0; n < FRAMESAMPLES / 8; n++) { - diffQ16[n] = (CS_ptrQ9[n] * (CorrQ11[1] >> shftVal) + 2) >> 2; - } - for (k = 2; k < AR_ORDER; k += 2) { - CS_ptrQ9 = WebRtcIsac_kCos[k]; - for (n = 0; n < FRAMESAMPLES / 8; n++) { - diffQ16[n] += (CS_ptrQ9[n] * (CorrQ11[k + 1] >> shftVal) + 2) >> 2; - } - } - - for (k = 0; k < FRAMESAMPLES / 8; k++) { - int32_t diff_q16_shifted = (int32_t)((uint32_t)(diffQ16[k]) << shftVal); - CurveQ16[FRAMESAMPLES_QUARTER - 1 - k] = CurveQ16[k] - diff_q16_shifted; - CurveQ16[k] += diff_q16_shifted; - } -} - -/* Generate array of dither samples in Q7. 
*/ -static void GenerateDitherQ7Lb(int16_t* bufQ7, uint32_t seed, - int length, int16_t AvgPitchGain_Q12) { - int k, shft; - int16_t dither1_Q7, dither2_Q7, dither_gain_Q14; - - /* This threshold should be equal to that in decode_spec(). */ - if (AvgPitchGain_Q12 < 614) { - for (k = 0; k < length - 2; k += 3) { - /* New random unsigned int. */ - seed = (seed * 196314165) + 907633515; - - /* Fixed-point dither sample between -64 and 64 (Q7). */ - /* dither = seed * 128 / 4294967295 */ - dither1_Q7 = (int16_t)(((int32_t)(seed + 16777216)) >> 25); - - /* New random unsigned int. */ - seed = (seed * 196314165) + 907633515; - - /* Fixed-point dither sample between -64 and 64. */ - dither2_Q7 = (int16_t)(((int32_t)(seed + 16777216)) >> 25); - - shft = (seed >> 25) & 15; - if (shft < 5) { - bufQ7[k] = dither1_Q7; - bufQ7[k + 1] = dither2_Q7; - bufQ7[k + 2] = 0; - } else if (shft < 10) { - bufQ7[k] = dither1_Q7; - bufQ7[k + 1] = 0; - bufQ7[k + 2] = dither2_Q7; - } else { - bufQ7[k] = 0; - bufQ7[k + 1] = dither1_Q7; - bufQ7[k + 2] = dither2_Q7; - } - } - } else { - dither_gain_Q14 = (int16_t)(22528 - 10 * AvgPitchGain_Q12); - - /* Dither on half of the coefficients. */ - for (k = 0; k < length - 1; k += 2) { - /* New random unsigned int */ - seed = (seed * 196314165) + 907633515; - - /* Fixed-point dither sample between -64 and 64. */ - dither1_Q7 = (int16_t)(((int32_t)(seed + 16777216)) >> 25); - - /* Dither sample is placed in either even or odd index. */ - shft = (seed >> 25) & 1; /* Either 0 or 1 */ - - bufQ7[k + shft] = (((dither_gain_Q14 * dither1_Q7) + 8192) >> 14); - bufQ7[k + 1 - shft] = 0; - } - } -} - - - -/****************************************************************************** - * GenerateDitherQ7LbUB() - * - * generate array of dither samples in Q7 There are less zeros in dither - * vector compared to GenerateDitherQ7Lb. - * - * A uniform random number generator with the range of [-64 64] is employed - * but the generated dithers are scaled by 0.35, a heuristic scaling. - * - * Input: - * -seed : the initial seed for the random number generator. - * -length : the number of dither values to be generated. - * - * Output: - * -bufQ7 : pointer to a buffer where dithers are written to. - */ -static void GenerateDitherQ7LbUB( - int16_t* bufQ7, - uint32_t seed, - int length) { - int k; - for (k = 0; k < length; k++) { - /* new random unsigned int */ - seed = (seed * 196314165) + 907633515; - - /* Fixed-point dither sample between -64 and 64 (Q7). */ - /* bufQ7 = seed * 128 / 4294967295 */ - bufQ7[k] = (int16_t)(((int32_t)(seed + 16777216)) >> 25); - - /* Scale by 0.35. */ - bufQ7[k] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(bufQ7[k], 2048, 13); - } -} - -/* - * Function to decode the complex spectrum from the bit stream - * returns the total number of bytes in the stream. - */ -int WebRtcIsac_DecodeSpec(Bitstr* streamdata, int16_t AvgPitchGain_Q12, - enum ISACBand band, double* fr, double* fi) { - int16_t DitherQ7[FRAMESAMPLES]; - int16_t data[FRAMESAMPLES]; - int32_t invARSpec2_Q16[FRAMESAMPLES_QUARTER]; - uint16_t invARSpecQ8[FRAMESAMPLES_QUARTER]; - int16_t ARCoefQ12[AR_ORDER + 1]; - int16_t RCQ15[AR_ORDER]; - int16_t gainQ10; - int32_t gain2_Q10, res; - int32_t in_sqrt; - int32_t newRes; - int k, len, i; - int is_12khz = !kIsSWB12; - int num_dft_coeff = FRAMESAMPLES; - /* Create dither signal. 
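The dither generators above draw samples from a plain linear congruential generator and map the top bits of the state to a Q7 value in roughly [-64, 64]; the upper-band variant then attenuates each sample with a fixed multiply-and-shift. A minimal sketch of that draw using the same recurrence as the deleted code:

#include <stdint.h>

/* One LCG step plus mapping of the state to a Q7 dither sample in [-64, 64]. */
static int16_t NextDitherQ7(uint32_t* seed) {
  *seed = (*seed * 196314165u) + 907633515u;
  return (int16_t)(((int32_t)(*seed + 16777216u)) >> 25);
}

/* Upper-band variant: heuristic attenuation, same multiply-and-shift
 * as the deleted code. */
static int16_t NextDitherQ7Ub(uint32_t* seed) {
  int32_t d = NextDitherQ7(seed);
  return (int16_t)((d * 2048) >> 13);
}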
*/ - if (band == kIsacLowerBand) { - GenerateDitherQ7Lb(DitherQ7, streamdata->W_upper, FRAMESAMPLES, - AvgPitchGain_Q12); - } else { - GenerateDitherQ7LbUB(DitherQ7, streamdata->W_upper, FRAMESAMPLES); - if (band == kIsacUpperBand12) { - is_12khz = kIsSWB12; - num_dft_coeff = FRAMESAMPLES_HALF; - } - } - - /* Decode model parameters. */ - if (WebRtcIsac_DecodeRc(streamdata, RCQ15) < 0) - return -ISAC_RANGE_ERROR_DECODE_SPECTRUM; - - WebRtcSpl_ReflCoefToLpc(RCQ15, AR_ORDER, ARCoefQ12); - - if (WebRtcIsac_DecodeGain2(streamdata, &gain2_Q10) < 0) - return -ISAC_RANGE_ERROR_DECODE_SPECTRUM; - - /* Compute inverse AR power spectrum. */ - FindInvArSpec(ARCoefQ12, gain2_Q10, invARSpec2_Q16); - - /* Convert to magnitude spectrum, - * by doing square-roots (modified from SPLIB). */ - res = 1 << (WebRtcSpl_GetSizeInBits(invARSpec2_Q16[0]) >> 1); - for (k = 0; k < FRAMESAMPLES_QUARTER; k++) { - in_sqrt = invARSpec2_Q16[k]; - i = 10; - - /* Negative values make no sense for a real sqrt-function. */ - if (in_sqrt < 0) - in_sqrt = -in_sqrt; - - newRes = (in_sqrt / res + res) >> 1; - do { - res = newRes; - newRes = (in_sqrt / res + res) >> 1; - } while (newRes != res && i-- > 0); - - invARSpecQ8[k] = (int16_t)newRes; - } - - len = WebRtcIsac_DecLogisticMulti2(data, streamdata, invARSpecQ8, DitherQ7, - num_dft_coeff, is_12khz); - /* Arithmetic decoding of spectrum. */ - if (len < 1) { - return -ISAC_RANGE_ERROR_DECODE_SPECTRUM; - } - - switch (band) { - case kIsacLowerBand: { - /* Scale down spectral samples with low SNR. */ - int32_t p1; - int32_t p2; - if (AvgPitchGain_Q12 <= 614) { - p1 = 30 << 10; - p2 = 32768 + (33 << 16); - } else { - p1 = 36 << 10; - p2 = 32768 + (40 << 16); - } - for (k = 0; k < FRAMESAMPLES; k += 4) { - gainQ10 = WebRtcSpl_DivW32W16ResW16(p1, (int16_t)( - (invARSpec2_Q16[k >> 2] + p2) >> 16)); - *fr++ = (double)((data[ k ] * gainQ10 + 512) >> 10) / 128.0; - *fi++ = (double)((data[k + 1] * gainQ10 + 512) >> 10) / 128.0; - *fr++ = (double)((data[k + 2] * gainQ10 + 512) >> 10) / 128.0; - *fi++ = (double)((data[k + 3] * gainQ10 + 512) >> 10) / 128.0; - } - break; - } - case kIsacUpperBand12: { - for (k = 0, i = 0; k < FRAMESAMPLES_HALF; k += 4) { - fr[i] = (double)data[ k ] / 128.0; - fi[i] = (double)data[k + 1] / 128.0; - i++; - fr[i] = (double)data[k + 2] / 128.0; - fi[i] = (double)data[k + 3] / 128.0; - i++; - } - /* The second half of real and imaginary coefficients is zero. This is - * due to using the old FFT module which requires two signals as input - * while in 0-12 kHz mode we only have 8-12 kHz band, and the second - * signal is set to zero. 
*/ - memset(&fr[FRAMESAMPLES_QUARTER], 0, FRAMESAMPLES_QUARTER * - sizeof(double)); - memset(&fi[FRAMESAMPLES_QUARTER], 0, FRAMESAMPLES_QUARTER * - sizeof(double)); - break; - } - case kIsacUpperBand16: { - for (i = 0, k = 0; k < FRAMESAMPLES; k += 4, i++) { - fr[i] = (double)data[ k ] / 128.0; - fi[i] = (double)data[k + 1] / 128.0; - fr[(FRAMESAMPLES_HALF) - 1 - i] = (double)data[k + 2] / 128.0; - fi[(FRAMESAMPLES_HALF) - 1 - i] = (double)data[k + 3] / 128.0; - } - break; - } - } - return len; -} - - -int WebRtcIsac_EncodeSpec(const int16_t* fr, const int16_t* fi, - int16_t AvgPitchGain_Q12, enum ISACBand band, - Bitstr* streamdata) { - int16_t ditherQ7[FRAMESAMPLES]; - int16_t dataQ7[FRAMESAMPLES]; - int32_t PSpec[FRAMESAMPLES_QUARTER]; - int32_t invARSpec2_Q16[FRAMESAMPLES_QUARTER]; - uint16_t invARSpecQ8[FRAMESAMPLES_QUARTER]; - int32_t CorrQ7[AR_ORDER + 1]; - int32_t CorrQ7_norm[AR_ORDER + 1]; - int16_t RCQ15[AR_ORDER]; - int16_t ARCoefQ12[AR_ORDER + 1]; - int32_t gain2_Q10; - int16_t val; - int32_t nrg, res; - uint32_t sum; - int32_t in_sqrt; - int32_t newRes; - int16_t err; - uint32_t nrg_u32; - int shift_var; - int k, n, j, i; - int is_12khz = !kIsSWB12; - int num_dft_coeff = FRAMESAMPLES; - - /* Create dither signal. */ - if (band == kIsacLowerBand) { - GenerateDitherQ7Lb(ditherQ7, streamdata->W_upper, FRAMESAMPLES, - AvgPitchGain_Q12); - } else { - GenerateDitherQ7LbUB(ditherQ7, streamdata->W_upper, FRAMESAMPLES); - if (band == kIsacUpperBand12) { - is_12khz = kIsSWB12; - num_dft_coeff = FRAMESAMPLES_HALF; - } - } - - /* add dither and quantize, and compute power spectrum */ - switch (band) { - case kIsacLowerBand: { - for (k = 0; k < FRAMESAMPLES; k += 4) { - val = ((*fr++ + ditherQ7[k] + 64) & 0xFF80) - ditherQ7[k]; - dataQ7[k] = val; - sum = val * val; - - val = ((*fi++ + ditherQ7[k + 1] + 64) & 0xFF80) - ditherQ7[k + 1]; - dataQ7[k + 1] = val; - sum += val * val; - - val = ((*fr++ + ditherQ7[k + 2] + 64) & 0xFF80) - ditherQ7[k + 2]; - dataQ7[k + 2] = val; - sum += val * val; - - val = ((*fi++ + ditherQ7[k + 3] + 64) & 0xFF80) - ditherQ7[k + 3]; - dataQ7[k + 3] = val; - sum += val * val; - - PSpec[k >> 2] = sum >> 2; - } - break; - } - case kIsacUpperBand12: { - for (k = 0, j = 0; k < FRAMESAMPLES_HALF; k += 4) { - val = ((*fr++ + ditherQ7[k] + 64) & 0xFF80) - ditherQ7[k]; - dataQ7[k] = val; - sum = val * val; - - val = ((*fi++ + ditherQ7[k + 1] + 64) & 0xFF80) - ditherQ7[k + 1]; - dataQ7[k + 1] = val; - sum += val * val; - - PSpec[j++] = sum >> 1; - - val = ((*fr++ + ditherQ7[k + 2] + 64) & 0xFF80) - ditherQ7[k + 2]; - dataQ7[k + 2] = val; - sum = val * val; - - val = ((*fi++ + ditherQ7[k + 3] + 64) & 0xFF80) - ditherQ7[k + 3]; - dataQ7[k + 3] = val; - sum += val * val; - - PSpec[j++] = sum >> 1; - } - break; - } - case kIsacUpperBand16: { - for (j = 0, k = 0; k < FRAMESAMPLES; k += 4, j++) { - val = ((fr[j] + ditherQ7[k] + 64) & 0xFF80) - ditherQ7[k]; - dataQ7[k] = val; - sum = val * val; - - val = ((fi[j] + ditherQ7[k + 1] + 64) & 0xFF80) - ditherQ7[k + 1]; - dataQ7[k + 1] = val; - sum += val * val; - - val = ((fr[(FRAMESAMPLES_HALF) - 1 - j] + ditherQ7[k + 2] + 64) & - 0xFF80) - ditherQ7[k + 2]; - dataQ7[k + 2] = val; - sum += val * val; - - val = ((fi[(FRAMESAMPLES_HALF) - 1 - j] + ditherQ7[k + 3] + 64) & - 0xFF80) - ditherQ7[k + 3]; - dataQ7[k + 3] = val; - sum += val * val; - - PSpec[k >> 2] = sum >> 2; - } - break; - } - } - - /* compute correlation from power spectrum */ - FindCorrelation(PSpec, CorrQ7); - - /* Find AR coefficients */ - /* Aumber of bit shifts to 
14-bit normalize CorrQ7[0] - * (leaving room for sign) */ - shift_var = WebRtcSpl_NormW32(CorrQ7[0]) - 18; - - if (shift_var > 0) { - for (k = 0; k < AR_ORDER + 1; k++) { - CorrQ7_norm[k] = CorrQ7[k] << shift_var; - } - } else { - for (k = 0; k < AR_ORDER + 1; k++) { - CorrQ7_norm[k] = CorrQ7[k] >> (-shift_var); - } - } - - /* Find RC coefficients. */ - WebRtcSpl_AutoCorrToReflCoef(CorrQ7_norm, AR_ORDER, RCQ15); - - /* Quantize & code RC Coefficient. */ - WebRtcIsac_EncodeRc(RCQ15, streamdata); - - /* RC -> AR coefficients */ - WebRtcSpl_ReflCoefToLpc(RCQ15, AR_ORDER, ARCoefQ12); - - /* Compute ARCoef' * Corr * ARCoef in Q19. */ - nrg = 0; - for (j = 0; j <= AR_ORDER; j++) { - for (n = 0; n <= j; n++) { - nrg += (ARCoefQ12[j] * ((CorrQ7_norm[j - n] * ARCoefQ12[n] + 256) >> 9) + - 4) >> 3; - } - for (n = j + 1; n <= AR_ORDER; n++) { - nrg += (ARCoefQ12[j] * ((CorrQ7_norm[n - j] * ARCoefQ12[n] + 256) >> 9) + - 4) >> 3; - } - } - - nrg_u32 = (uint32_t)nrg; - if (shift_var > 0) { - nrg_u32 = nrg_u32 >> shift_var; - } else { - nrg_u32 = nrg_u32 << (-shift_var); - } - if (nrg_u32 > 0x7FFFFFFF) { - nrg = 0x7FFFFFFF; - } else { - nrg = (int32_t)nrg_u32; - } - /* Also shifts 31 bits to the left! */ - gain2_Q10 = WebRtcSpl_DivResultInQ31(FRAMESAMPLES_QUARTER, nrg); - - /* Quantize & code gain2_Q10. */ - if (WebRtcIsac_EncodeGain2(&gain2_Q10, streamdata)) { - return -1; - } - - /* Compute inverse AR power spectrum. */ - FindInvArSpec(ARCoefQ12, gain2_Q10, invARSpec2_Q16); - /* Convert to magnitude spectrum, by doing square-roots - * (modified from SPLIB). */ - res = 1 << (WebRtcSpl_GetSizeInBits(invARSpec2_Q16[0]) >> 1); - for (k = 0; k < FRAMESAMPLES_QUARTER; k++) { - in_sqrt = invARSpec2_Q16[k]; - i = 10; - /* Negative values make no sense for a real sqrt-function. 
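The magnitude spectrum is recovered from the inverse AR power spectrum with a small fixed-iteration Newton (Babylonian) integer square root rather than a library call. A standalone sketch of that iteration, seeded from the operand's bit width as in the deleted code (SizeInBits is an illustrative stand-in for WebRtcSpl_GetSizeInBits):

#include <stdint.h>

/* Number of bits needed to represent v. */
static int SizeInBits(uint32_t v) {
  int bits = 0;
  while (v) {
    bits++;
    v >>= 1;
  }
  return bits;
}

/* Integer square root by at most 10 Newton iterations. */
static int32_t NewtonSqrt(int32_t x) {
  int32_t res, next;
  int iter = 10;
  if (x < 0)
    x = -x;  /* negative input makes no sense for a real sqrt */
  if (x == 0)
    return 0;
  res = 1 << (SizeInBits((uint32_t)x) >> 1);  /* initial guess */
  next = (x / res + res) >> 1;
  do {
    res = next;
    next = (x / res + res) >> 1;
  } while (next != res && iter-- > 0);
  return next;
}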
*/ - if (in_sqrt < 0) { - in_sqrt = -in_sqrt; - } - newRes = (in_sqrt / res + res) >> 1; - do { - res = newRes; - newRes = (in_sqrt / res + res) >> 1; - } while (newRes != res && i-- > 0); - - invARSpecQ8[k] = (int16_t)newRes; - } - /* arithmetic coding of spectrum */ - err = WebRtcIsac_EncLogisticMulti2(streamdata, dataQ7, invARSpecQ8, - num_dft_coeff, is_12khz); - if (err < 0) { - return (err); - } - return 0; -} - - -/* step-up */ -void WebRtcIsac_Rc2Poly(double* RC, int N, double* a) { - int m, k; - double tmp[MAX_AR_MODEL_ORDER]; - - a[0] = 1.0; - tmp[0] = 1.0; - for (m = 1; m <= N; m++) { - /* copy */ - memcpy(&tmp[1], &a[1], (m - 1) * sizeof(double)); - a[m] = RC[m - 1]; - for (k = 1; k < m; k++) { - a[k] += RC[m - 1] * tmp[m - k]; - } - } - return; -} - -/* step-down */ -void WebRtcIsac_Poly2Rc(double* a, int N, double* RC) { - int m, k; - double tmp[MAX_AR_MODEL_ORDER]; - double tmp_inv; - - RC[N - 1] = a[N]; - for (m = N - 1; m > 0; m--) { - tmp_inv = 1.0 / (1.0 - RC[m] * RC[m]); - for (k = 1; k <= m; k++) { - tmp[k] = (a[k] - RC[m] * a[m - k + 1]) * tmp_inv; - } - - memcpy(&a[1], &tmp[1], (m - 1) * sizeof(double)); - RC[m - 1] = tmp[m]; - } - return; -} - - -#define MAX_ORDER 100 - -/* Matlab's LAR definition */ -void WebRtcIsac_Rc2Lar(const double* refc, double* lar, int order) { - int k; - for (k = 0; k < order; k++) { - lar[k] = log((1 + refc[k]) / (1 - refc[k])); - } -} - - -void WebRtcIsac_Lar2Rc(const double* lar, double* refc, int order) { - int k; - double tmp; - - for (k = 0; k < order; k++) { - tmp = exp(lar[k]); - refc[k] = (tmp - 1) / (tmp + 1); - } -} - -void WebRtcIsac_Poly2Lar(double* lowband, int orderLo, double* hiband, - int orderHi, int Nsub, double* lars) { - int k; - double rc[MAX_ORDER], *inpl, *inph, *outp; - - inpl = lowband; - inph = hiband; - outp = lars; - for (k = 0; k < Nsub; k++) { - /* gains */ - outp[0] = inpl[0]; - outp[1] = inph[0]; - outp += 2; - - /* Low band */ - inpl[0] = 1.0; - WebRtcIsac_Poly2Rc(inpl, orderLo, rc); - WebRtcIsac_Rc2Lar(rc, outp, orderLo); - outp += orderLo; - - /* High band */ - inph[0] = 1.0; - WebRtcIsac_Poly2Rc(inph, orderHi, rc); - WebRtcIsac_Rc2Lar(rc, outp, orderHi); - outp += orderHi; - - inpl += orderLo + 1; - inph += orderHi + 1; - } -} - - -int16_t WebRtcIsac_Poly2LarUB(double* lpcVecs, int16_t bandwidth) { - double poly[MAX_ORDER]; - double rc[MAX_ORDER]; - double* ptrIO; - int16_t vecCntr; - int16_t vecSize; - int16_t numVec; - - vecSize = UB_LPC_ORDER; - switch (bandwidth) { - case isac12kHz: { - numVec = UB_LPC_VEC_PER_FRAME; - break; - } - case isac16kHz: { - numVec = UB16_LPC_VEC_PER_FRAME; - break; - } - default: - return -1; - } - - ptrIO = lpcVecs; - poly[0] = 1.0; - for (vecCntr = 0; vecCntr < numVec; vecCntr++) { - memcpy(&poly[1], ptrIO, sizeof(double) * vecSize); - WebRtcIsac_Poly2Rc(poly, vecSize, rc); - WebRtcIsac_Rc2Lar(rc, ptrIO, vecSize); - ptrIO += vecSize; - } - return 0; -} - - -void WebRtcIsac_Lar2Poly(double* lars, double* lowband, int orderLo, - double* hiband, int orderHi, int Nsub) { - int k, orderTot; - double rc[MAX_ORDER], *outpl, *outph, *inp; - - orderTot = (orderLo + orderHi + 2); - outpl = lowband; - outph = hiband; - /* First two elements of 'inp' store gains*/ - inp = lars; - for (k = 0; k < Nsub; k++) { - /* Low band */ - WebRtcIsac_Lar2Rc(&inp[2], rc, orderLo); - WebRtcIsac_Rc2Poly(rc, orderLo, outpl); - - /* High band */ - WebRtcIsac_Lar2Rc(&inp[orderLo + 2], rc, orderHi); - WebRtcIsac_Rc2Poly(rc, orderHi, outph); - - /* gains */ - outpl[0] = inp[0]; - outph[0] = inp[1]; - - 
outpl += orderLo + 1; - outph += orderHi + 1; - inp += orderTot; - } -} - -/* - * assumes 2 LAR vectors interpolates to 'numPolyVec' A-polynomials - * Note: 'numPolyVecs' includes the first and the last point of the interval - */ -void WebRtcIsac_Lar2PolyInterpolUB(double* larVecs, double* percepFilterParams, - int numPolyVecs) { - int polyCntr, coeffCntr; - double larInterpol[UB_LPC_ORDER]; - double rc[UB_LPC_ORDER]; - double delta[UB_LPC_ORDER]; - - /* calculate the step-size for linear interpolation coefficients */ - for (coeffCntr = 0; coeffCntr < UB_LPC_ORDER; coeffCntr++) { - delta[coeffCntr] = (larVecs[UB_LPC_ORDER + coeffCntr] - - larVecs[coeffCntr]) / (numPolyVecs - 1); - } - - for (polyCntr = 0; polyCntr < numPolyVecs; polyCntr++) { - for (coeffCntr = 0; coeffCntr < UB_LPC_ORDER; coeffCntr++) { - larInterpol[coeffCntr] = larVecs[coeffCntr] + - delta[coeffCntr] * polyCntr; - } - WebRtcIsac_Lar2Rc(larInterpol, rc, UB_LPC_ORDER); - - /* convert to A-polynomial, the following function returns A[0] = 1; - * which is written where gains had to be written. Then we write the - * gain (outside this function). This way we say a memcpy. */ - WebRtcIsac_Rc2Poly(rc, UB_LPC_ORDER, percepFilterParams); - percepFilterParams += (UB_LPC_ORDER + 1); - } -} - -int WebRtcIsac_DecodeLpc(Bitstr* streamdata, double* LPCCoef_lo, - double* LPCCoef_hi) { - double lars[KLT_ORDER_GAIN + KLT_ORDER_SHAPE]; - int err; - - err = WebRtcIsac_DecodeLpcCoef(streamdata, lars); - if (err < 0) { - return -ISAC_RANGE_ERROR_DECODE_LPC; - } - WebRtcIsac_Lar2Poly(lars, LPCCoef_lo, ORDERLO, LPCCoef_hi, ORDERHI, - SUBFRAMES); - return 0; -} - -int16_t WebRtcIsac_DecodeInterpolLpcUb(Bitstr* streamdata, - double* percepFilterParams, - int16_t bandwidth) { - double lpcCoeff[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME]; - int err; - int interpolCntr; - int subframeCntr; - int16_t numSegments; - int16_t numVecPerSegment; - int16_t numGains; - - double percepFilterGains[SUBFRAMES << 1]; - double* ptrOutParam = percepFilterParams; - - err = WebRtcIsac_DecodeLpcCoefUB(streamdata, lpcCoeff, percepFilterGains, - bandwidth); - if (err < 0) { - return -ISAC_RANGE_ERROR_DECODE_LPC; - } - - switch (bandwidth) { - case isac12kHz: { - numGains = SUBFRAMES; - numSegments = UB_LPC_VEC_PER_FRAME - 1; - numVecPerSegment = kLpcVecPerSegmentUb12; - break; - } - case isac16kHz: { - numGains = SUBFRAMES << 1; - numSegments = UB16_LPC_VEC_PER_FRAME - 1; - numVecPerSegment = kLpcVecPerSegmentUb16; - break; - } - default: - return -1; - } - - for (interpolCntr = 0; interpolCntr < numSegments; interpolCntr++) { - WebRtcIsac_Lar2PolyInterpolUB(&lpcCoeff[interpolCntr * UB_LPC_ORDER], - ptrOutParam, numVecPerSegment + 1); - ptrOutParam += (numVecPerSegment * (UB_LPC_ORDER + 1)); - } - - ptrOutParam = percepFilterParams; - - if (bandwidth == isac16kHz) { - ptrOutParam += (1 + UB_LPC_ORDER); - } - - for (subframeCntr = 0; subframeCntr < numGains; subframeCntr++) { - *ptrOutParam = percepFilterGains[subframeCntr]; - ptrOutParam += (1 + UB_LPC_ORDER); - } - return 0; -} - - -/* decode & dequantize LPC Coef */ -int WebRtcIsac_DecodeLpcCoef(Bitstr* streamdata, double* LPCCoef) { - int j, k, n, pos, pos2, posg, poss, offsg, offss, offs2; - int index_g[KLT_ORDER_GAIN], index_s[KLT_ORDER_SHAPE]; - double tmpcoeffs_g[KLT_ORDER_GAIN], tmpcoeffs_s[KLT_ORDER_SHAPE]; - double tmpcoeffs2_g[KLT_ORDER_GAIN], tmpcoeffs2_s[KLT_ORDER_SHAPE]; - double sum; - int err; - int model = 1; - - /* entropy decoding of model number */ - /* We are keeping this for backward compatibility of 
bit-streams. */ - err = WebRtcIsac_DecHistOneStepMulti(&model, streamdata, - WebRtcIsac_kQKltModelCdfPtr, - WebRtcIsac_kQKltModelInitIndex, 1); - if (err < 0) { - return err; - } - /* Only accepted value of model is 0. It is kept in bit-stream for backward - * compatibility. */ - if (model != 0) { - return -ISAC_DISALLOWED_LPC_MODEL; - } - - /* entropy decoding of quantization indices */ - err = WebRtcIsac_DecHistOneStepMulti( - index_s, streamdata, WebRtcIsac_kQKltCdfPtrShape, - WebRtcIsac_kQKltInitIndexShape, KLT_ORDER_SHAPE); - if (err < 0) { - return err; - } - err = WebRtcIsac_DecHistOneStepMulti( - index_g, streamdata, WebRtcIsac_kQKltCdfPtrGain, - WebRtcIsac_kQKltInitIndexGain, KLT_ORDER_GAIN); - if (err < 0) { - return err; - } - - /* find quantization levels for coefficients */ - for (k = 0; k < KLT_ORDER_SHAPE; k++) { - tmpcoeffs_s[k] = - WebRtcIsac_kQKltLevelsShape[WebRtcIsac_kQKltOffsetShape[k] + - index_s[k]]; - } - for (k = 0; k < KLT_ORDER_GAIN; k++) { - tmpcoeffs_g[k] = WebRtcIsac_kQKltLevelsGain[WebRtcIsac_kQKltOffsetGain[k] + - index_g[k]]; - } - - /* Inverse KLT */ - - /* Left transform, transpose matrix! */ - offsg = 0; - offss = 0; - posg = 0; - poss = 0; - for (j = 0; j < SUBFRAMES; j++) { - offs2 = 0; - for (k = 0; k < LPC_GAIN_ORDER; k++) { - sum = 0; - pos = offsg; - pos2 = offs2; - for (n = 0; n < LPC_GAIN_ORDER; n++) { - sum += tmpcoeffs_g[pos++] * WebRtcIsac_kKltT1Gain[pos2++]; - } - tmpcoeffs2_g[posg++] = sum; - offs2 += LPC_GAIN_ORDER; - } - offs2 = 0; - for (k = 0; k < LPC_SHAPE_ORDER; k++) { - sum = 0; - pos = offss; - pos2 = offs2; - for (n = 0; n < LPC_SHAPE_ORDER; n++) { - sum += tmpcoeffs_s[pos++] * WebRtcIsac_kKltT1Shape[pos2++]; - } - tmpcoeffs2_s[poss++] = sum; - offs2 += LPC_SHAPE_ORDER; - } - offsg += LPC_GAIN_ORDER; - offss += LPC_SHAPE_ORDER; - } - - /* Right transform, transpose matrix */ - offsg = 0; - offss = 0; - posg = 0; - poss = 0; - for (j = 0; j < SUBFRAMES; j++) { - posg = offsg; - for (k = 0; k < LPC_GAIN_ORDER; k++) { - sum = 0; - pos = k; - pos2 = j; - for (n = 0; n < SUBFRAMES; n++) { - sum += tmpcoeffs2_g[pos] * WebRtcIsac_kKltT2Gain[pos2]; - pos += LPC_GAIN_ORDER; - pos2 += SUBFRAMES; - - } - tmpcoeffs_g[posg++] = sum; - } - poss = offss; - for (k = 0; k < LPC_SHAPE_ORDER; k++) { - sum = 0; - pos = k; - pos2 = j; - for (n = 0; n < SUBFRAMES; n++) { - sum += tmpcoeffs2_s[pos] * WebRtcIsac_kKltT2Shape[pos2]; - pos += LPC_SHAPE_ORDER; - pos2 += SUBFRAMES; - } - tmpcoeffs_s[poss++] = sum; - } - offsg += LPC_GAIN_ORDER; - offss += LPC_SHAPE_ORDER; - } - - /* scaling, mean addition, and gain restoration */ - posg = 0; - poss = 0; - pos = 0; - for (k = 0; k < SUBFRAMES; k++) { - /* log gains */ - LPCCoef[pos] = tmpcoeffs_g[posg] / LPC_GAIN_SCALE; - LPCCoef[pos] += WebRtcIsac_kLpcMeansGain[posg]; - LPCCoef[pos] = exp(LPCCoef[pos]); - pos++; - posg++; - LPCCoef[pos] = tmpcoeffs_g[posg] / LPC_GAIN_SCALE; - LPCCoef[pos] += WebRtcIsac_kLpcMeansGain[posg]; - LPCCoef[pos] = exp(LPCCoef[pos]); - pos++; - posg++; - - /* Low-band LAR coefficients. */ - for (n = 0; n < LPC_LOBAND_ORDER; n++, pos++, poss++) { - LPCCoef[pos] = tmpcoeffs_s[poss] / LPC_LOBAND_SCALE; - LPCCoef[pos] += WebRtcIsac_kLpcMeansShape[poss]; - } - - /* High-band LAR coefficients. */ - for (n = 0; n < LPC_HIBAND_ORDER; n++, pos++, poss++) { - LPCCoef[pos] = tmpcoeffs_s[poss] / LPC_HIBAND_SCALE; - LPCCoef[pos] += WebRtcIsac_kLpcMeansShape[poss]; - } - } - return 0; -} - -/* Encode LPC in LAR domain. 
*/ -void WebRtcIsac_EncodeLar(double* LPCCoef, Bitstr* streamdata, - IsacSaveEncoderData* encData) { - int j, k, n, pos, pos2, poss, offss, offs2; - int index_s[KLT_ORDER_SHAPE]; - int index_ovr_s[KLT_ORDER_SHAPE]; - double tmpcoeffs_s[KLT_ORDER_SHAPE]; - double tmpcoeffs2_s[KLT_ORDER_SHAPE]; - double sum; - const int kModel = 0; - - /* Mean removal and scaling. */ - poss = 0; - pos = 0; - for (k = 0; k < SUBFRAMES; k++) { - /* First two element are gains, move over them. */ - pos += 2; - - /* Low-band LAR coefficients. */ - for (n = 0; n < LPC_LOBAND_ORDER; n++, poss++, pos++) { - tmpcoeffs_s[poss] = LPCCoef[pos] - WebRtcIsac_kLpcMeansShape[poss]; - tmpcoeffs_s[poss] *= LPC_LOBAND_SCALE; - } - - /* High-band LAR coefficients. */ - for (n = 0; n < LPC_HIBAND_ORDER; n++, poss++, pos++) { - tmpcoeffs_s[poss] = LPCCoef[pos] - WebRtcIsac_kLpcMeansShape[poss]; - tmpcoeffs_s[poss] *= LPC_HIBAND_SCALE; - } - } - - /* KLT */ - - /* Left transform. */ - offss = 0; - for (j = 0; j < SUBFRAMES; j++) { - poss = offss; - for (k = 0; k < LPC_SHAPE_ORDER; k++) { - sum = 0; - pos = offss; - pos2 = k; - for (n = 0; n < LPC_SHAPE_ORDER; n++) { - sum += tmpcoeffs_s[pos++] * WebRtcIsac_kKltT1Shape[pos2]; - pos2 += LPC_SHAPE_ORDER; - } - tmpcoeffs2_s[poss++] = sum; - } - offss += LPC_SHAPE_ORDER; - } - - /* Right transform. */ - offss = 0; - offs2 = 0; - for (j = 0; j < SUBFRAMES; j++) { - poss = offss; - for (k = 0; k < LPC_SHAPE_ORDER; k++) { - sum = 0; - pos = k; - pos2 = offs2; - for (n = 0; n < SUBFRAMES; n++) { - sum += tmpcoeffs2_s[pos] * WebRtcIsac_kKltT2Shape[pos2++]; - pos += LPC_SHAPE_ORDER; - } - tmpcoeffs_s[poss++] = sum; - } - offs2 += SUBFRAMES; - offss += LPC_SHAPE_ORDER; - } - - /* Quantize coefficients. */ - for (k = 0; k < KLT_ORDER_SHAPE; k++) { - index_s[k] = (WebRtcIsac_lrint(tmpcoeffs_s[k] / KLT_STEPSIZE)) + - WebRtcIsac_kQKltQuantMinShape[k]; - if (index_s[k] < 0) { - index_s[k] = 0; - } else if (index_s[k] > WebRtcIsac_kQKltMaxIndShape[k]) { - index_s[k] = WebRtcIsac_kQKltMaxIndShape[k]; - } - index_ovr_s[k] = WebRtcIsac_kQKltOffsetShape[k] + index_s[k]; - } - - - /* Only one model remains in this version of the code, kModel = 0. We - * are keeping for bit-streams to be backward compatible. */ - /* entropy coding of model number */ - WebRtcIsac_EncHistMulti(streamdata, &kModel, WebRtcIsac_kQKltModelCdfPtr, 1); - - /* Save data for creation of multiple bit streams */ - /* Entropy coding of quantization indices - shape only. */ - WebRtcIsac_EncHistMulti(streamdata, index_s, WebRtcIsac_kQKltCdfPtrShape, - KLT_ORDER_SHAPE); - - /* Save data for creation of multiple bit streams. */ - for (k = 0; k < KLT_ORDER_SHAPE; k++) { - encData->LPCindex_s[KLT_ORDER_SHAPE * encData->startIdx + k] = index_s[k]; - } - - /* Find quantization levels for shape coefficients. */ - for (k = 0; k < KLT_ORDER_SHAPE; k++) { - tmpcoeffs_s[k] = WebRtcIsac_kQKltLevelsShape[index_ovr_s[k]]; - } - /* Inverse KLT. */ - /* Left transform, transpose matrix.! 
*/ - offss = 0; - poss = 0; - for (j = 0; j < SUBFRAMES; j++) { - offs2 = 0; - for (k = 0; k < LPC_SHAPE_ORDER; k++) { - sum = 0; - pos = offss; - pos2 = offs2; - for (n = 0; n < LPC_SHAPE_ORDER; n++) { - sum += tmpcoeffs_s[pos++] * WebRtcIsac_kKltT1Shape[pos2++]; - } - tmpcoeffs2_s[poss++] = sum; - offs2 += LPC_SHAPE_ORDER; - } - offss += LPC_SHAPE_ORDER; - } - - /* Right transform, Transpose matrix */ - offss = 0; - poss = 0; - for (j = 0; j < SUBFRAMES; j++) { - poss = offss; - for (k = 0; k < LPC_SHAPE_ORDER; k++) { - sum = 0; - pos = k; - pos2 = j; - for (n = 0; n < SUBFRAMES; n++) { - sum += tmpcoeffs2_s[pos] * WebRtcIsac_kKltT2Shape[pos2]; - pos += LPC_SHAPE_ORDER; - pos2 += SUBFRAMES; - } - tmpcoeffs_s[poss++] = sum; - } - offss += LPC_SHAPE_ORDER; - } - - /* Scaling, mean addition, and gain restoration. */ - poss = 0; - pos = 0; - for (k = 0; k < SUBFRAMES; k++) { - /* Ignore gains. */ - pos += 2; - - /* Low band LAR coefficients. */ - for (n = 0; n < LPC_LOBAND_ORDER; n++, pos++, poss++) { - LPCCoef[pos] = tmpcoeffs_s[poss] / LPC_LOBAND_SCALE; - LPCCoef[pos] += WebRtcIsac_kLpcMeansShape[poss]; - } - - /* High band LAR coefficients. */ - for (n = 0; n < LPC_HIBAND_ORDER; n++, pos++, poss++) { - LPCCoef[pos] = tmpcoeffs_s[poss] / LPC_HIBAND_SCALE; - LPCCoef[pos] += WebRtcIsac_kLpcMeansShape[poss]; - } - } -} - - -void WebRtcIsac_EncodeLpcLb(double* LPCCoef_lo, double* LPCCoef_hi, - Bitstr* streamdata, IsacSaveEncoderData* encData) { - double lars[KLT_ORDER_GAIN + KLT_ORDER_SHAPE]; - int k; - - WebRtcIsac_Poly2Lar(LPCCoef_lo, ORDERLO, LPCCoef_hi, ORDERHI, SUBFRAMES, - lars); - WebRtcIsac_EncodeLar(lars, streamdata, encData); - WebRtcIsac_Lar2Poly(lars, LPCCoef_lo, ORDERLO, LPCCoef_hi, ORDERHI, - SUBFRAMES); - /* Save data for creation of multiple bit streams (and transcoding). */ - for (k = 0; k < (ORDERLO + 1)*SUBFRAMES; k++) { - encData->LPCcoeffs_lo[(ORDERLO + 1)*SUBFRAMES * encData->startIdx + k] = - LPCCoef_lo[k]; - } - for (k = 0; k < (ORDERHI + 1)*SUBFRAMES; k++) { - encData->LPCcoeffs_hi[(ORDERHI + 1)*SUBFRAMES * encData->startIdx + k] = - LPCCoef_hi[k]; - } -} - - -int16_t WebRtcIsac_EncodeLpcUB(double* lpcVecs, Bitstr* streamdata, - double* interpolLPCCoeff, - int16_t bandwidth, - ISACUBSaveEncDataStruct* encData) { - double U[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME]; - int idx[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME]; - int interpolCntr; - - WebRtcIsac_Poly2LarUB(lpcVecs, bandwidth); - WebRtcIsac_RemoveLarMean(lpcVecs, bandwidth); - WebRtcIsac_DecorrelateIntraVec(lpcVecs, U, bandwidth); - WebRtcIsac_DecorrelateInterVec(U, lpcVecs, bandwidth); - WebRtcIsac_QuantizeUncorrLar(lpcVecs, idx, bandwidth); - - WebRtcIsac_CorrelateInterVec(lpcVecs, U, bandwidth); - WebRtcIsac_CorrelateIntraVec(U, lpcVecs, bandwidth); - WebRtcIsac_AddLarMean(lpcVecs, bandwidth); - - switch (bandwidth) { - case isac12kHz: { - /* Store the indices to be used for multiple encoding. */ - memcpy(encData->indexLPCShape, idx, UB_LPC_ORDER * - UB_LPC_VEC_PER_FRAME * sizeof(int)); - WebRtcIsac_EncHistMulti(streamdata, idx, WebRtcIsac_kLpcShapeCdfMatUb12, - UB_LPC_ORDER * UB_LPC_VEC_PER_FRAME); - for (interpolCntr = 0; interpolCntr < UB_INTERPOL_SEGMENTS; - interpolCntr++) { - WebRtcIsac_Lar2PolyInterpolUB(lpcVecs, interpolLPCCoeff, - kLpcVecPerSegmentUb12 + 1); - lpcVecs += UB_LPC_ORDER; - interpolLPCCoeff += (kLpcVecPerSegmentUb12 * (UB_LPC_ORDER + 1)); - } - break; - } - case isac16kHz: { - /* Store the indices to be used for multiple encoding. 
*/ - memcpy(encData->indexLPCShape, idx, UB_LPC_ORDER * - UB16_LPC_VEC_PER_FRAME * sizeof(int)); - WebRtcIsac_EncHistMulti(streamdata, idx, WebRtcIsac_kLpcShapeCdfMatUb16, - UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME); - for (interpolCntr = 0; interpolCntr < UB16_INTERPOL_SEGMENTS; - interpolCntr++) { - WebRtcIsac_Lar2PolyInterpolUB(lpcVecs, interpolLPCCoeff, - kLpcVecPerSegmentUb16 + 1); - lpcVecs += UB_LPC_ORDER; - interpolLPCCoeff += (kLpcVecPerSegmentUb16 * (UB_LPC_ORDER + 1)); - } - break; - } - default: - return -1; - } - return 0; -} - -void WebRtcIsac_EncodeLpcGainLb(double* LPCCoef_lo, double* LPCCoef_hi, - Bitstr* streamdata, - IsacSaveEncoderData* encData) { - int j, k, n, pos, pos2, posg, offsg, offs2; - int index_g[KLT_ORDER_GAIN]; - int index_ovr_g[KLT_ORDER_GAIN]; - double tmpcoeffs_g[KLT_ORDER_GAIN]; - double tmpcoeffs2_g[KLT_ORDER_GAIN]; - double sum; - /* log gains, mean removal and scaling */ - posg = 0; - for (k = 0; k < SUBFRAMES; k++) { - tmpcoeffs_g[posg] = log(LPCCoef_lo[(LPC_LOBAND_ORDER + 1) * k]); - tmpcoeffs_g[posg] -= WebRtcIsac_kLpcMeansGain[posg]; - tmpcoeffs_g[posg] *= LPC_GAIN_SCALE; - posg++; - tmpcoeffs_g[posg] = log(LPCCoef_hi[(LPC_HIBAND_ORDER + 1) * k]); - tmpcoeffs_g[posg] -= WebRtcIsac_kLpcMeansGain[posg]; - tmpcoeffs_g[posg] *= LPC_GAIN_SCALE; - posg++; - } - - /* KLT */ - - /* Left transform. */ - offsg = 0; - for (j = 0; j < SUBFRAMES; j++) { - posg = offsg; - for (k = 0; k < LPC_GAIN_ORDER; k++) { - sum = 0; - pos = offsg; - pos2 = k; - for (n = 0; n < LPC_GAIN_ORDER; n++) { - sum += tmpcoeffs_g[pos++] * WebRtcIsac_kKltT1Gain[pos2]; - pos2 += LPC_GAIN_ORDER; - } - tmpcoeffs2_g[posg++] = sum; - } - offsg += LPC_GAIN_ORDER; - } - - /* Right transform. */ - offsg = 0; - offs2 = 0; - for (j = 0; j < SUBFRAMES; j++) { - posg = offsg; - for (k = 0; k < LPC_GAIN_ORDER; k++) { - sum = 0; - pos = k; - pos2 = offs2; - for (n = 0; n < SUBFRAMES; n++) { - sum += tmpcoeffs2_g[pos] * WebRtcIsac_kKltT2Gain[pos2++]; - pos += LPC_GAIN_ORDER; - } - tmpcoeffs_g[posg++] = sum; - } - offs2 += SUBFRAMES; - offsg += LPC_GAIN_ORDER; - } - - /* Quantize coefficients. */ - for (k = 0; k < KLT_ORDER_GAIN; k++) { - /* Get index. */ - pos2 = WebRtcIsac_lrint(tmpcoeffs_g[k] / KLT_STEPSIZE); - index_g[k] = (pos2) + WebRtcIsac_kQKltQuantMinGain[k]; - if (index_g[k] < 0) { - index_g[k] = 0; - } else if (index_g[k] > WebRtcIsac_kQKltMaxIndGain[k]) { - index_g[k] = WebRtcIsac_kQKltMaxIndGain[k]; - } - index_ovr_g[k] = WebRtcIsac_kQKltOffsetGain[k] + index_g[k]; - - /* Find quantization levels for coefficients. */ - tmpcoeffs_g[k] = WebRtcIsac_kQKltLevelsGain[index_ovr_g[k]]; - - /* Save data for creation of multiple bit streams. */ - encData->LPCindex_g[KLT_ORDER_GAIN * encData->startIdx + k] = index_g[k]; - } - - /* Entropy coding of quantization indices - gain. */ - WebRtcIsac_EncHistMulti(streamdata, index_g, WebRtcIsac_kQKltCdfPtrGain, - KLT_ORDER_GAIN); - - /* Find quantization levels for coefficients. */ - /* Left transform. */ - offsg = 0; - posg = 0; - for (j = 0; j < SUBFRAMES; j++) { - offs2 = 0; - for (k = 0; k < LPC_GAIN_ORDER; k++) { - sum = 0; - pos = offsg; - pos2 = offs2; - for (n = 0; n < LPC_GAIN_ORDER; n++) - sum += tmpcoeffs_g[pos++] * WebRtcIsac_kKltT1Gain[pos2++]; - tmpcoeffs2_g[posg++] = sum; - offs2 += LPC_GAIN_ORDER; - } - offsg += LPC_GAIN_ORDER; - } - - /* Right transform, transpose matrix. 
*/ - offsg = 0; - posg = 0; - for (j = 0; j < SUBFRAMES; j++) { - posg = offsg; - for (k = 0; k < LPC_GAIN_ORDER; k++) { - sum = 0; - pos = k; - pos2 = j; - for (n = 0; n < SUBFRAMES; n++) { - sum += tmpcoeffs2_g[pos] * WebRtcIsac_kKltT2Gain[pos2]; - pos += LPC_GAIN_ORDER; - pos2 += SUBFRAMES; - } - tmpcoeffs_g[posg++] = sum; - } - offsg += LPC_GAIN_ORDER; - } - - - /* Scaling, mean addition, and gain restoration. */ - posg = 0; - for (k = 0; k < SUBFRAMES; k++) { - sum = tmpcoeffs_g[posg] / LPC_GAIN_SCALE; - sum += WebRtcIsac_kLpcMeansGain[posg]; - LPCCoef_lo[k * (LPC_LOBAND_ORDER + 1)] = exp(sum); - pos++; - posg++; - sum = tmpcoeffs_g[posg] / LPC_GAIN_SCALE; - sum += WebRtcIsac_kLpcMeansGain[posg]; - LPCCoef_hi[k * (LPC_HIBAND_ORDER + 1)] = exp(sum); - pos++; - posg++; - } - -} - -void WebRtcIsac_EncodeLpcGainUb(double* lpGains, Bitstr* streamdata, - int* lpcGainIndex) { - double U[UB_LPC_GAIN_DIM]; - int idx[UB_LPC_GAIN_DIM]; - WebRtcIsac_ToLogDomainRemoveMean(lpGains); - WebRtcIsac_DecorrelateLPGain(lpGains, U); - WebRtcIsac_QuantizeLpcGain(U, idx); - /* Store the index for re-encoding for FEC. */ - memcpy(lpcGainIndex, idx, UB_LPC_GAIN_DIM * sizeof(int)); - WebRtcIsac_CorrelateLpcGain(U, lpGains); - WebRtcIsac_AddMeanToLinearDomain(lpGains); - WebRtcIsac_EncHistMulti(streamdata, idx, WebRtcIsac_kLpcGainCdfMat, - UB_LPC_GAIN_DIM); -} - - -void WebRtcIsac_StoreLpcGainUb(double* lpGains, Bitstr* streamdata) { - double U[UB_LPC_GAIN_DIM]; - int idx[UB_LPC_GAIN_DIM]; - WebRtcIsac_ToLogDomainRemoveMean(lpGains); - WebRtcIsac_DecorrelateLPGain(lpGains, U); - WebRtcIsac_QuantizeLpcGain(U, idx); - WebRtcIsac_EncHistMulti(streamdata, idx, WebRtcIsac_kLpcGainCdfMat, - UB_LPC_GAIN_DIM); -} - - - -int16_t WebRtcIsac_DecodeLpcGainUb(double* lpGains, Bitstr* streamdata) { - double U[UB_LPC_GAIN_DIM]; - int idx[UB_LPC_GAIN_DIM]; - int err; - err = WebRtcIsac_DecHistOneStepMulti(idx, streamdata, - WebRtcIsac_kLpcGainCdfMat, - WebRtcIsac_kLpcGainEntropySearch, - UB_LPC_GAIN_DIM); - if (err < 0) { - return -1; - } - WebRtcIsac_DequantizeLpcGain(idx, U); - WebRtcIsac_CorrelateLpcGain(U, lpGains); - WebRtcIsac_AddMeanToLinearDomain(lpGains); - return 0; -} - - - -/* decode & dequantize RC */ -int WebRtcIsac_DecodeRc(Bitstr* streamdata, int16_t* RCQ15) { - int k, err; - int index[AR_ORDER]; - - /* entropy decoding of quantization indices */ - err = WebRtcIsac_DecHistOneStepMulti(index, streamdata, - WebRtcIsac_kQArRcCdfPtr, - WebRtcIsac_kQArRcInitIndex, AR_ORDER); - if (err < 0) - return err; - - /* find quantization levels for reflection coefficients */ - for (k = 0; k < AR_ORDER; k++) { - RCQ15[k] = *(WebRtcIsac_kQArRcLevelsPtr[k] + index[k]); - } - return 0; -} - - -/* quantize & code RC */ -void WebRtcIsac_EncodeRc(int16_t* RCQ15, Bitstr* streamdata) { - int k; - int index[AR_ORDER]; - - /* quantize reflection coefficients (add noise feedback?) */ - for (k = 0; k < AR_ORDER; k++) { - index[k] = WebRtcIsac_kQArRcInitIndex[k]; - // The safe-guards in following while conditions are to suppress gcc 4.8.3 - // warnings, Issue 2888. Otherwise, first and last elements of - // `WebRtcIsac_kQArBoundaryLevels` are such that the following search - // *never* cause an out-of-boundary read. 
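    /* The search below starts from the table's initial index
     * (WebRtcIsac_kQArRcInitIndex) and walks the boundary table linearly:
     * upwards while RCQ15[k] lies above the next boundary, downwards while it
     * lies below the previous one, so index[k] ends up naming the quantization
     * cell containing RCQ15[k]. The coefficient is then replaced by that
     * cell's representative level, and the indices are entropy-coded with
     * WebRtcIsac_EncHistMulti further down. */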
- if (RCQ15[k] > WebRtcIsac_kQArBoundaryLevels[index[k]]) { - while (index[k] + 1 < NUM_AR_RC_QUANT_BAUNDARY && - RCQ15[k] > WebRtcIsac_kQArBoundaryLevels[index[k] + 1]) { - index[k]++; - } - } else { - while (index[k] > 0 && - RCQ15[k] < WebRtcIsac_kQArBoundaryLevels[--index[k]]) ; - } - RCQ15[k] = *(WebRtcIsac_kQArRcLevelsPtr[k] + index[k]); - } - - /* entropy coding of quantization indices */ - WebRtcIsac_EncHistMulti(streamdata, index, WebRtcIsac_kQArRcCdfPtr, AR_ORDER); -} - - -/* decode & dequantize squared Gain */ -int WebRtcIsac_DecodeGain2(Bitstr* streamdata, int32_t* gainQ10) { - int index, err; - - /* entropy decoding of quantization index */ - err = WebRtcIsac_DecHistOneStepMulti(&index, streamdata, - WebRtcIsac_kQGainCdf_ptr, - WebRtcIsac_kQGainInitIndex, 1); - if (err < 0) { - return err; - } - /* find quantization level */ - *gainQ10 = WebRtcIsac_kQGain2Levels[index]; - return 0; -} - - -/* quantize & code squared Gain */ -int WebRtcIsac_EncodeGain2(int32_t* gainQ10, Bitstr* streamdata) { - int index; - - /* find quantization index */ - index = WebRtcIsac_kQGainInitIndex[0]; - if (*gainQ10 > WebRtcIsac_kQGain2BoundaryLevels[index]) { - while (*gainQ10 > WebRtcIsac_kQGain2BoundaryLevels[index + 1]) { - index++; - } - } else { - while (*gainQ10 < WebRtcIsac_kQGain2BoundaryLevels[--index]) ; - } - /* De-quantize */ - *gainQ10 = WebRtcIsac_kQGain2Levels[index]; - - /* entropy coding of quantization index */ - WebRtcIsac_EncHistMulti(streamdata, &index, WebRtcIsac_kQGainCdf_ptr, 1); - return 0; -} - - -/* code and decode Pitch Gains and Lags functions */ - -/* decode & dequantize Pitch Gains */ -int WebRtcIsac_DecodePitchGain(Bitstr* streamdata, - int16_t* PitchGains_Q12) { - int index_comb, err; - const uint16_t* WebRtcIsac_kQPitchGainCdf_ptr[1]; - - /* Entropy decoding of quantization indices */ - *WebRtcIsac_kQPitchGainCdf_ptr = WebRtcIsac_kQPitchGainCdf; - err = WebRtcIsac_DecHistBisectMulti(&index_comb, streamdata, - WebRtcIsac_kQPitchGainCdf_ptr, - WebRtcIsac_kQCdfTableSizeGain, 1); - /* Error check, Q_mean_Gain.. tables are of size 144 */ - if ((err < 0) || (index_comb < 0) || (index_comb >= 144)) { - return -ISAC_RANGE_ERROR_DECODE_PITCH_GAIN; - } - /* De-quantize back to pitch gains by table look-up. */ - PitchGains_Q12[0] = WebRtcIsac_kQMeanGain1Q12[index_comb]; - PitchGains_Q12[1] = WebRtcIsac_kQMeanGain2Q12[index_comb]; - PitchGains_Q12[2] = WebRtcIsac_kQMeanGain3Q12[index_comb]; - PitchGains_Q12[3] = WebRtcIsac_kQMeanGain4Q12[index_comb]; - return 0; -} - - -/* Quantize & code Pitch Gains. */ -void WebRtcIsac_EncodePitchGain(int16_t* PitchGains_Q12, - Bitstr* streamdata, - IsacSaveEncoderData* encData) { - int k, j; - double C; - double S[PITCH_SUBFRAMES]; - int index[3]; - int index_comb; - const uint16_t* WebRtcIsac_kQPitchGainCdf_ptr[1]; - double PitchGains[PITCH_SUBFRAMES] = {0, 0, 0, 0}; - - /* Take the asin. */ - for (k = 0; k < PITCH_SUBFRAMES; k++) { - PitchGains[k] = ((float)PitchGains_Q12[k]) / 4096; - S[k] = asin(PitchGains[k]); - } - - /* Find quantization index; only for the first three - * transform coefficients. */ - for (k = 0; k < 3; k++) { - /* transform */ - C = 0.0; - for (j = 0; j < PITCH_SUBFRAMES; j++) { - C += WebRtcIsac_kTransform[k][j] * S[j]; - } - /* Quantize */ - index[k] = WebRtcIsac_lrint(C / PITCH_GAIN_STEPSIZE); - - /* Check that the index is not outside the boundaries of the table. 
*/ - if (index[k] < WebRtcIsac_kIndexLowerLimitGain[k]) { - index[k] = WebRtcIsac_kIndexLowerLimitGain[k]; - } else if (index[k] > WebRtcIsac_kIndexUpperLimitGain[k]) { - index[k] = WebRtcIsac_kIndexUpperLimitGain[k]; - } - index[k] -= WebRtcIsac_kIndexLowerLimitGain[k]; - } - - /* Calculate unique overall index. */ - index_comb = WebRtcIsac_kIndexMultsGain[0] * index[0] + - WebRtcIsac_kIndexMultsGain[1] * index[1] + index[2]; - - /* unquantize back to pitch gains by table look-up */ - PitchGains_Q12[0] = WebRtcIsac_kQMeanGain1Q12[index_comb]; - PitchGains_Q12[1] = WebRtcIsac_kQMeanGain2Q12[index_comb]; - PitchGains_Q12[2] = WebRtcIsac_kQMeanGain3Q12[index_comb]; - PitchGains_Q12[3] = WebRtcIsac_kQMeanGain4Q12[index_comb]; - - /* entropy coding of quantization pitch gains */ - *WebRtcIsac_kQPitchGainCdf_ptr = WebRtcIsac_kQPitchGainCdf; - WebRtcIsac_EncHistMulti(streamdata, &index_comb, - WebRtcIsac_kQPitchGainCdf_ptr, 1); - encData->pitchGain_index[encData->startIdx] = index_comb; -} - - - -/* Pitch LAG */ -/* Decode & de-quantize Pitch Lags. */ -int WebRtcIsac_DecodePitchLag(Bitstr* streamdata, int16_t* PitchGain_Q12, - double* PitchLags) { - int k, err; - double StepSize; - double C; - int index[PITCH_SUBFRAMES]; - double mean_gain; - const double* mean_val2, *mean_val3, *mean_val4; - const int16_t* lower_limit; - const uint16_t* init_index; - const uint16_t* cdf_size; - const uint16_t** cdf; - double PitchGain[4] = {0, 0, 0, 0}; - - /* compute mean pitch gain */ - mean_gain = 0.0; - for (k = 0; k < 4; k++) { - PitchGain[k] = ((float)PitchGain_Q12[k]) / 4096; - mean_gain += PitchGain[k]; - } - mean_gain /= 4.0; - - /* voicing classification. */ - if (mean_gain < 0.2) { - StepSize = WebRtcIsac_kQPitchLagStepsizeLo; - cdf = WebRtcIsac_kQPitchLagCdfPtrLo; - cdf_size = WebRtcIsac_kQPitchLagCdfSizeLo; - mean_val2 = WebRtcIsac_kQMeanLag2Lo; - mean_val3 = WebRtcIsac_kQMeanLag3Lo; - mean_val4 = WebRtcIsac_kQMeanLag4Lo; - lower_limit = WebRtcIsac_kQIndexLowerLimitLagLo; - init_index = WebRtcIsac_kQInitIndexLagLo; - } else if (mean_gain < 0.4) { - StepSize = WebRtcIsac_kQPitchLagStepsizeMid; - cdf = WebRtcIsac_kQPitchLagCdfPtrMid; - cdf_size = WebRtcIsac_kQPitchLagCdfSizeMid; - mean_val2 = WebRtcIsac_kQMeanLag2Mid; - mean_val3 = WebRtcIsac_kQMeanLag3Mid; - mean_val4 = WebRtcIsac_kQMeanLag4Mid; - lower_limit = WebRtcIsac_kQIndexLowerLimitLagMid; - init_index = WebRtcIsac_kQInitIndexLagMid; - } else { - StepSize = WebRtcIsac_kQPitchLagStepsizeHi; - cdf = WebRtcIsac_kQPitchLagCdfPtrHi; - cdf_size = WebRtcIsac_kQPitchLagCdfSizeHi; - mean_val2 = WebRtcIsac_kQMeanLag2Hi; - mean_val3 = WebRtcIsac_kQMeanLag3Hi; - mean_val4 = WebRtcIsac_kQMeanLag4Hi; - lower_limit = WebRtcIsac_kQindexLowerLimitLagHi; - init_index = WebRtcIsac_kQInitIndexLagHi; - } - - /* Entropy decoding of quantization indices. */ - err = WebRtcIsac_DecHistBisectMulti(index, streamdata, cdf, cdf_size, 1); - if ((err < 0) || (index[0] < 0)) { - return -ISAC_RANGE_ERROR_DECODE_PITCH_LAG; - } - err = WebRtcIsac_DecHistOneStepMulti(index + 1, streamdata, cdf + 1, - init_index, 3); - if (err < 0) { - return -ISAC_RANGE_ERROR_DECODE_PITCH_LAG; - } - - /* Unquantize back to transform coefficients and do the inverse transform: - * S = T'*C. 
*/ - C = (index[0] + lower_limit[0]) * StepSize; - for (k = 0; k < PITCH_SUBFRAMES; k++) { - PitchLags[k] = WebRtcIsac_kTransformTranspose[k][0] * C; - } - C = mean_val2[index[1]]; - for (k = 0; k < PITCH_SUBFRAMES; k++) { - PitchLags[k] += WebRtcIsac_kTransformTranspose[k][1] * C; - } - C = mean_val3[index[2]]; - for (k = 0; k < PITCH_SUBFRAMES; k++) { - PitchLags[k] += WebRtcIsac_kTransformTranspose[k][2] * C; - } - C = mean_val4[index[3]]; - for (k = 0; k < PITCH_SUBFRAMES; k++) { - PitchLags[k] += WebRtcIsac_kTransformTranspose[k][3] * C; - } - return 0; -} - - - -/* Quantize & code pitch lags. */ -void WebRtcIsac_EncodePitchLag(double* PitchLags, int16_t* PitchGain_Q12, - Bitstr* streamdata, - IsacSaveEncoderData* encData) { - int k, j; - double StepSize; - double C; - int index[PITCH_SUBFRAMES]; - double mean_gain; - const double* mean_val2, *mean_val3, *mean_val4; - const int16_t* lower_limit, *upper_limit; - const uint16_t** cdf; - double PitchGain[4] = {0, 0, 0, 0}; - - /* compute mean pitch gain */ - mean_gain = 0.0; - for (k = 0; k < 4; k++) { - PitchGain[k] = ((float)PitchGain_Q12[k]) / 4096; - mean_gain += PitchGain[k]; - } - mean_gain /= 4.0; - - /* Save data for creation of multiple bit streams */ - encData->meanGain[encData->startIdx] = mean_gain; - - /* Voicing classification. */ - if (mean_gain < 0.2) { - StepSize = WebRtcIsac_kQPitchLagStepsizeLo; - cdf = WebRtcIsac_kQPitchLagCdfPtrLo; - mean_val2 = WebRtcIsac_kQMeanLag2Lo; - mean_val3 = WebRtcIsac_kQMeanLag3Lo; - mean_val4 = WebRtcIsac_kQMeanLag4Lo; - lower_limit = WebRtcIsac_kQIndexLowerLimitLagLo; - upper_limit = WebRtcIsac_kQIndexUpperLimitLagLo; - } else if (mean_gain < 0.4) { - StepSize = WebRtcIsac_kQPitchLagStepsizeMid; - cdf = WebRtcIsac_kQPitchLagCdfPtrMid; - mean_val2 = WebRtcIsac_kQMeanLag2Mid; - mean_val3 = WebRtcIsac_kQMeanLag3Mid; - mean_val4 = WebRtcIsac_kQMeanLag4Mid; - lower_limit = WebRtcIsac_kQIndexLowerLimitLagMid; - upper_limit = WebRtcIsac_kQIndexUpperLimitLagMid; - } else { - StepSize = WebRtcIsac_kQPitchLagStepsizeHi; - cdf = WebRtcIsac_kQPitchLagCdfPtrHi; - mean_val2 = WebRtcIsac_kQMeanLag2Hi; - mean_val3 = WebRtcIsac_kQMeanLag3Hi; - mean_val4 = WebRtcIsac_kQMeanLag4Hi; - lower_limit = WebRtcIsac_kQindexLowerLimitLagHi; - upper_limit = WebRtcIsac_kQindexUpperLimitLagHi; - } - - /* find quantization index */ - for (k = 0; k < 4; k++) { - /* transform */ - C = 0.0; - for (j = 0; j < PITCH_SUBFRAMES; j++) { - C += WebRtcIsac_kTransform[k][j] * PitchLags[j]; - } - /* quantize */ - index[k] = WebRtcIsac_lrint(C / StepSize); - - /* check that the index is not outside the boundaries of the table */ - if (index[k] < lower_limit[k]) { - index[k] = lower_limit[k]; - } else if (index[k] > upper_limit[k]) index[k] = upper_limit[k]; { - index[k] -= lower_limit[k]; - } - /* Save data for creation of multiple bit streams */ - encData->pitchIndex[PITCH_SUBFRAMES * encData->startIdx + k] = index[k]; - } - - /* Un-quantize back to transform coefficients and do the inverse transform: - * S = T'*C */ - C = (index[0] + lower_limit[0]) * StepSize; - for (k = 0; k < PITCH_SUBFRAMES; k++) { - PitchLags[k] = WebRtcIsac_kTransformTranspose[k][0] * C; - } - C = mean_val2[index[1]]; - for (k = 0; k < PITCH_SUBFRAMES; k++) { - PitchLags[k] += WebRtcIsac_kTransformTranspose[k][1] * C; - } - C = mean_val3[index[2]]; - for (k = 0; k < PITCH_SUBFRAMES; k++) { - PitchLags[k] += WebRtcIsac_kTransformTranspose[k][2] * C; - } - C = mean_val4[index[3]]; - for (k = 0; k < PITCH_SUBFRAMES; k++) { - PitchLags[k] += 
WebRtcIsac_kTransformTranspose[k][3] * C; - } - /* entropy coding of quantization pitch lags */ - WebRtcIsac_EncHistMulti(streamdata, index, cdf, PITCH_SUBFRAMES); -} - - - -/* Routines for in-band signaling of bandwidth estimation */ -/* Histograms based on uniform distribution of indices */ -/* Move global variables later! */ - - -/* cdf array for frame length indicator */ -const uint16_t WebRtcIsac_kFrameLengthCdf[4] = { - 0, 21845, 43690, 65535 }; - -/* pointer to cdf array for frame length indicator */ -const uint16_t* WebRtcIsac_kFrameLengthCdf_ptr[1] = { - WebRtcIsac_kFrameLengthCdf }; - -/* initial cdf index for decoder of frame length indicator */ -const uint16_t WebRtcIsac_kFrameLengthInitIndex[1] = { 1 }; - - -int WebRtcIsac_DecodeFrameLen(Bitstr* streamdata, int16_t* framesamples) { - int frame_mode, err; - err = 0; - /* entropy decoding of frame length [1:30ms,2:60ms] */ - err = WebRtcIsac_DecHistOneStepMulti(&frame_mode, streamdata, - WebRtcIsac_kFrameLengthCdf_ptr, - WebRtcIsac_kFrameLengthInitIndex, 1); - if (err < 0) - return -ISAC_RANGE_ERROR_DECODE_FRAME_LENGTH; - - switch (frame_mode) { - case 1: - *framesamples = 480; /* 30ms */ - break; - case 2: - *framesamples = 960; /* 60ms */ - break; - default: - err = -ISAC_DISALLOWED_FRAME_MODE_DECODER; - } - return err; -} - -int WebRtcIsac_EncodeFrameLen(int16_t framesamples, Bitstr* streamdata) { - int frame_mode, status; - - status = 0; - frame_mode = 0; - /* entropy coding of frame length [1:480 samples,2:960 samples] */ - switch (framesamples) { - case 480: - frame_mode = 1; - break; - case 960: - frame_mode = 2; - break; - default: - status = - ISAC_DISALLOWED_FRAME_MODE_ENCODER; - } - - if (status < 0) - return status; - - WebRtcIsac_EncHistMulti(streamdata, &frame_mode, - WebRtcIsac_kFrameLengthCdf_ptr, 1); - return status; -} - -/* cdf array for estimated bandwidth */ -static const uint16_t kBwCdf[25] = { - 0, 2731, 5461, 8192, 10923, 13653, 16384, 19114, 21845, 24576, 27306, 30037, - 32768, 35498, 38229, 40959, 43690, 46421, 49151, 51882, 54613, 57343, 60074, - 62804, 65535 }; - -/* pointer to cdf array for estimated bandwidth */ -static const uint16_t* const kBwCdfPtr[1] = { kBwCdf }; - -/* initial cdf index for decoder of estimated bandwidth*/ -static const uint16_t kBwInitIndex[1] = { 7 }; - - -int WebRtcIsac_DecodeSendBW(Bitstr* streamdata, int16_t* BWno) { - int BWno32, err; - - /* entropy decoding of sender's BW estimation [0..23] */ - err = WebRtcIsac_DecHistOneStepMulti(&BWno32, streamdata, kBwCdfPtr, - kBwInitIndex, 1); - if (err < 0) { - return -ISAC_RANGE_ERROR_DECODE_BANDWIDTH; - } - *BWno = (int16_t)BWno32; - return err; -} - -void WebRtcIsac_EncodeReceiveBw(int* BWno, Bitstr* streamdata) { - /* entropy encoding of receiver's BW estimation [0..23] */ - WebRtcIsac_EncHistMulti(streamdata, BWno, kBwCdfPtr, 1); -} - - -/* estimate code length of LPC Coef */ -void WebRtcIsac_TranscodeLPCCoef(double* LPCCoef_lo, double* LPCCoef_hi, - int* index_g) { - int j, k, n, pos, pos2, posg, offsg, offs2; - int index_ovr_g[KLT_ORDER_GAIN]; - double tmpcoeffs_g[KLT_ORDER_GAIN]; - double tmpcoeffs2_g[KLT_ORDER_GAIN]; - double sum; - - /* log gains, mean removal and scaling */ - posg = 0; - for (k = 0; k < SUBFRAMES; k++) { - tmpcoeffs_g[posg] = log(LPCCoef_lo[(LPC_LOBAND_ORDER + 1) * k]); - tmpcoeffs_g[posg] -= WebRtcIsac_kLpcMeansGain[posg]; - tmpcoeffs_g[posg] *= LPC_GAIN_SCALE; - posg++; - tmpcoeffs_g[posg] = log(LPCCoef_hi[(LPC_HIBAND_ORDER + 1) * k]); - tmpcoeffs_g[posg] -= WebRtcIsac_kLpcMeansGain[posg]; - 
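    /* As in WebRtcIsac_EncodeLpcGainLb above, the two gains per sub-frame are
     * taken to the log domain, the stored means (WebRtcIsac_kLpcMeansGain) are
     * removed, and the result is scaled by LPC_GAIN_SCALE before the gain KLT
     * and quantization. This routine only derives the quantization indices
     * (index_g) for transcoding / code-length estimation; nothing is written
     * to a bit-stream here. */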
tmpcoeffs_g[posg] *= LPC_GAIN_SCALE; - posg++; - } - - /* KLT */ - - /* Left transform. */ - offsg = 0; - for (j = 0; j < SUBFRAMES; j++) { - posg = offsg; - for (k = 0; k < LPC_GAIN_ORDER; k++) { - sum = 0; - pos = offsg; - pos2 = k; - for (n = 0; n < LPC_GAIN_ORDER; n++) { - sum += tmpcoeffs_g[pos++] * WebRtcIsac_kKltT1Gain[pos2]; - pos2 += LPC_GAIN_ORDER; - } - tmpcoeffs2_g[posg++] = sum; - } - offsg += LPC_GAIN_ORDER; - } - - /* Right transform. */ - offsg = 0; - offs2 = 0; - for (j = 0; j < SUBFRAMES; j++) { - posg = offsg; - for (k = 0; k < LPC_GAIN_ORDER; k++) { - sum = 0; - pos = k; - pos2 = offs2; - for (n = 0; n < SUBFRAMES; n++) { - sum += tmpcoeffs2_g[pos] * WebRtcIsac_kKltT2Gain[pos2++]; - pos += LPC_GAIN_ORDER; - } - tmpcoeffs_g[posg++] = sum; - } - offs2 += SUBFRAMES; - offsg += LPC_GAIN_ORDER; - } - - - /* quantize coefficients */ - for (k = 0; k < KLT_ORDER_GAIN; k++) { - /* Get index. */ - pos2 = WebRtcIsac_lrint(tmpcoeffs_g[k] / KLT_STEPSIZE); - index_g[k] = (pos2) + WebRtcIsac_kQKltQuantMinGain[k]; - if (index_g[k] < 0) { - index_g[k] = 0; - } else if (index_g[k] > WebRtcIsac_kQKltMaxIndGain[k]) { - index_g[k] = WebRtcIsac_kQKltMaxIndGain[k]; - } - index_ovr_g[k] = WebRtcIsac_kQKltOffsetGain[k] + index_g[k]; - - /* find quantization levels for coefficients */ - tmpcoeffs_g[k] = WebRtcIsac_kQKltLevelsGain[index_ovr_g[k]]; - } -} - - -/* Decode & de-quantize LPC Coefficients. */ -int WebRtcIsac_DecodeLpcCoefUB(Bitstr* streamdata, double* lpcVecs, - double* percepFilterGains, - int16_t bandwidth) { - int index_s[KLT_ORDER_SHAPE]; - - double U[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME]; - int err; - - /* Entropy decoding of quantization indices. */ - switch (bandwidth) { - case isac12kHz: { - err = WebRtcIsac_DecHistOneStepMulti( - index_s, streamdata, WebRtcIsac_kLpcShapeCdfMatUb12, - WebRtcIsac_kLpcShapeEntropySearchUb12, UB_LPC_ORDER * - UB_LPC_VEC_PER_FRAME); - break; - } - case isac16kHz: { - err = WebRtcIsac_DecHistOneStepMulti( - index_s, streamdata, WebRtcIsac_kLpcShapeCdfMatUb16, - WebRtcIsac_kLpcShapeEntropySearchUb16, UB_LPC_ORDER * - UB16_LPC_VEC_PER_FRAME); - break; - } - default: - return -1; - } - - if (err < 0) { - return err; - } - - WebRtcIsac_DequantizeLpcParam(index_s, lpcVecs, bandwidth); - WebRtcIsac_CorrelateInterVec(lpcVecs, U, bandwidth); - WebRtcIsac_CorrelateIntraVec(U, lpcVecs, bandwidth); - WebRtcIsac_AddLarMean(lpcVecs, bandwidth); - WebRtcIsac_DecodeLpcGainUb(percepFilterGains, streamdata); - - if (bandwidth == isac16kHz) { - /* Decode another set of Gains. 
*/ - WebRtcIsac_DecodeLpcGainUb(&percepFilterGains[SUBFRAMES], streamdata); - } - return 0; -} - -int16_t WebRtcIsac_EncodeBandwidth(enum ISACBandwidth bandwidth, - Bitstr* streamData) { - int bandwidthMode; - switch (bandwidth) { - case isac12kHz: { - bandwidthMode = 0; - break; - } - case isac16kHz: { - bandwidthMode = 1; - break; - } - default: - return -ISAC_DISALLOWED_ENCODER_BANDWIDTH; - } - WebRtcIsac_EncHistMulti(streamData, &bandwidthMode, kOneBitEqualProbCdf_ptr, - 1); - return 0; -} - -int16_t WebRtcIsac_DecodeBandwidth(Bitstr* streamData, - enum ISACBandwidth* bandwidth) { - int bandwidthMode; - if (WebRtcIsac_DecHistOneStepMulti(&bandwidthMode, streamData, - kOneBitEqualProbCdf_ptr, - kOneBitEqualProbInitIndex, 1) < 0) { - return -ISAC_RANGE_ERROR_DECODE_BANDWITH; - } - switch (bandwidthMode) { - case 0: { - *bandwidth = isac12kHz; - break; - } - case 1: { - *bandwidth = isac16kHz; - break; - } - default: - return -ISAC_DISALLOWED_BANDWIDTH_MODE_DECODER; - } - return 0; -} - -int16_t WebRtcIsac_EncodeJitterInfo(int32_t jitterIndex, - Bitstr* streamData) { - /* This is to avoid LINUX warning until we change 'int' to 'Word32'. */ - int intVar; - - if ((jitterIndex < 0) || (jitterIndex > 1)) { - return -1; - } - intVar = (int)(jitterIndex); - /* Use the same CDF table as for bandwidth - * both take two values with equal probability.*/ - WebRtcIsac_EncHistMulti(streamData, &intVar, kOneBitEqualProbCdf_ptr, 1); - return 0; -} - -int16_t WebRtcIsac_DecodeJitterInfo(Bitstr* streamData, - int32_t* jitterInfo) { - int intVar; - /* Use the same CDF table as for bandwidth - * both take two values with equal probability. */ - if (WebRtcIsac_DecHistOneStepMulti(&intVar, streamData, - kOneBitEqualProbCdf_ptr, - kOneBitEqualProbInitIndex, 1) < 0) { - return -ISAC_RANGE_ERROR_DECODE_BANDWITH; - } - *jitterInfo = (int16_t)(intVar); - return 0; -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/entropy_coding.h b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/entropy_coding.h deleted file mode 100644 index 6c2b8d3cc1a7..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/entropy_coding.h +++ /dev/null @@ -1,347 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * entropy_coding.h - * - * This header file declares all of the functions used to arithmetically - * encode the iSAC bistream - * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_ENTROPY_CODING_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_ENTROPY_CODING_H_ - -#include "modules/audio_coding/codecs/isac/main/source/settings.h" -#include "modules/audio_coding/codecs/isac/main/source/structs.h" - -/****************************************************************************** - * WebRtcIsac_DecodeSpec() - * Decode real and imaginary part of the DFT coefficients, given a bit-stream. - * The decoded DFT coefficient can be transformed to time domain by - * WebRtcIsac_Time2Spec(). - * - * Input: - * - streamdata : pointer to a stucture containg the encoded - * data and theparameters needed for entropy - * coding. 
- * - AvgPitchGain_Q12 : average pitch-gain of the frame. This is only - * relevant for 0-4 kHz band, and the input value is - * not used in other bands. - * - band : specifies which band's DFT should be decoded. - * - * Output: - * - *fr : pointer to a buffer where the real part of DFT - * coefficients are written to. - * - *fi : pointer to a buffer where the imaginary part - * of DFT coefficients are written to. - * - * Return value : < 0 if an error occures - * 0 if succeeded. - */ -int WebRtcIsac_DecodeSpec(Bitstr* streamdata, - int16_t AvgPitchGain_Q12, - enum ISACBand band, - double* fr, - double* fi); - -/****************************************************************************** - * WebRtcIsac_EncodeSpec() - * Encode real and imaginary part of the DFT coefficients into the given - * bit-stream. - * - * Input: - * - *fr : pointer to a buffer where the real part of DFT - * coefficients are written to. - * - *fi : pointer to a buffer where the imaginary part - * of DFT coefficients are written to. - * - AvgPitchGain_Q12 : average pitch-gain of the frame. This is only - * relevant for 0-4 kHz band, and the input value is - * not used in other bands. - * - band : specifies which band's DFT should be decoded. - * - * Output: - * - streamdata : pointer to a stucture containg the encoded - * data and theparameters needed for entropy - * coding. - * - * Return value : < 0 if an error occures - * 0 if succeeded. - */ -int WebRtcIsac_EncodeSpec(const int16_t* fr, - const int16_t* fi, - int16_t AvgPitchGain_Q12, - enum ISACBand band, - Bitstr* streamdata); - -/* decode & dequantize LPC Coef */ -int WebRtcIsac_DecodeLpcCoef(Bitstr* streamdata, double* LPCCoef); -int WebRtcIsac_DecodeLpcCoefUB(Bitstr* streamdata, - double* lpcVecs, - double* percepFilterGains, - int16_t bandwidth); - -int WebRtcIsac_DecodeLpc(Bitstr* streamdata, - double* LPCCoef_lo, - double* LPCCoef_hi); - -/* quantize & code LPC Coef */ -void WebRtcIsac_EncodeLpcLb(double* LPCCoef_lo, - double* LPCCoef_hi, - Bitstr* streamdata, - IsacSaveEncoderData* encData); - -void WebRtcIsac_EncodeLpcGainLb(double* LPCCoef_lo, - double* LPCCoef_hi, - Bitstr* streamdata, - IsacSaveEncoderData* encData); - -/****************************************************************************** - * WebRtcIsac_EncodeLpcUB() - * Encode LPC parameters, given as A-polynomial, of upper-band. The encoding - * is performed in LAR domain. - * For the upper-band, we compute and encode LPC of some sub-frames, LPC of - * other sub-frames are computed by linear interpolation, in LAR domain. This - * function performs the interpolation and returns the LPC of all sub-frames. - * - * Inputs: - * - lpcCoef : a buffer containing A-polynomials of sub-frames - * (excluding first coefficient that is 1). - * - bandwidth : specifies if the codec is operating at 0-12 kHz - * or 0-16 kHz mode. - * - * Input/output: - * - streamdata : pointer to a structure containing the encoded - * data and the parameters needed for entropy - * coding. - * - * Output: - * - interpolLPCCoeff : Decoded and interpolated LPC (A-polynomial) - * of all sub-frames. - * If LP analysis is of order K, and there are N - * sub-frames then this is a buffer of size - * (k + 1) * N, each vector starts with the LPC gain - * of the corresponding sub-frame. The LPC gains - * are encoded and inserted after this function is - * called. The first A-coefficient which is 1 is not - * included. - * - * Return value : 0 if encoding is successful, - * <0 if failed to encode. 
- */ -int16_t WebRtcIsac_EncodeLpcUB(double* lpcCoeff, - Bitstr* streamdata, - double* interpolLPCCoeff, - int16_t bandwidth, - ISACUBSaveEncDataStruct* encData); - -/****************************************************************************** - * WebRtcIsac_DecodeInterpolLpcUb() - * Decode LPC coefficients and interpolate to get the coefficients fo all - * sub-frmaes. - * - * Inputs: - * - bandwidth : spepecifies if the codec is in 0-12 kHz or - * 0-16 kHz mode. - * - * Input/output: - * - streamdata : pointer to a stucture containg the encoded - * data and theparameters needed for entropy - * coding. - * - * Output: - * - percepFilterParam : Decoded and interpolated LPC (A-polynomial) of - * all sub-frames. - * If LP analysis is of order K, and there are N - * sub-frames then this is a buffer of size - * (k + 1) * N, each vector starts with the LPC gain - * of the corresponding sub-frame. The LPC gains - * are encoded and inserted after this function is - * called. The first A-coefficient which is 1 is not - * included. - * - * Return value : 0 if encoding is successful, - * <0 if failed to encode. - */ -int16_t WebRtcIsac_DecodeInterpolLpcUb(Bitstr* streamdata, - double* percepFilterParam, - int16_t bandwidth); - -/* Decode & dequantize RC */ -int WebRtcIsac_DecodeRc(Bitstr* streamdata, int16_t* RCQ15); - -/* Quantize & code RC */ -void WebRtcIsac_EncodeRc(int16_t* RCQ15, Bitstr* streamdata); - -/* Decode & dequantize squared Gain */ -int WebRtcIsac_DecodeGain2(Bitstr* streamdata, int32_t* Gain2); - -/* Quantize & code squared Gain (input is squared gain) */ -int WebRtcIsac_EncodeGain2(int32_t* gain2, Bitstr* streamdata); - -void WebRtcIsac_EncodePitchGain(int16_t* PitchGains_Q12, - Bitstr* streamdata, - IsacSaveEncoderData* encData); - -void WebRtcIsac_EncodePitchLag(double* PitchLags, - int16_t* PitchGain_Q12, - Bitstr* streamdata, - IsacSaveEncoderData* encData); - -int WebRtcIsac_DecodePitchGain(Bitstr* streamdata, int16_t* PitchGain_Q12); -int WebRtcIsac_DecodePitchLag(Bitstr* streamdata, - int16_t* PitchGain_Q12, - double* PitchLag); - -int WebRtcIsac_DecodeFrameLen(Bitstr* streamdata, int16_t* framelength); -int WebRtcIsac_EncodeFrameLen(int16_t framelength, Bitstr* streamdata); -int WebRtcIsac_DecodeSendBW(Bitstr* streamdata, int16_t* BWno); -void WebRtcIsac_EncodeReceiveBw(int* BWno, Bitstr* streamdata); - -/* Step-down */ -void WebRtcIsac_Poly2Rc(double* a, int N, double* RC); - -/* Step-up */ -void WebRtcIsac_Rc2Poly(double* RC, int N, double* a); - -void WebRtcIsac_TranscodeLPCCoef(double* LPCCoef_lo, - double* LPCCoef_hi, - int* index_g); - -/****************************************************************************** - * WebRtcIsac_EncodeLpcGainUb() - * Encode LPC gains of sub-Frames. - * - * Input/outputs: - * - lpGains : a buffer which contains 'SUBFRAME' number of - * LP gains to be encoded. The input values are - * overwritten by the quantized values. - * - streamdata : pointer to a stucture containg the encoded - * data and theparameters needed for entropy - * coding. - * - * Output: - * - lpcGainIndex : quantization indices for lpc gains, these will - * be stored to be used for FEC. - */ -void WebRtcIsac_EncodeLpcGainUb(double* lpGains, - Bitstr* streamdata, - int* lpcGainIndex); - -/****************************************************************************** - * WebRtcIsac_EncodeLpcGainUb() - * Store LPC gains of sub-Frames in 'streamdata'. - * - * Input: - * - lpGains : a buffer which contains 'SUBFRAME' number of - * LP gains to be encoded. 
- * Input/outputs: - * - streamdata : pointer to a stucture containg the encoded - * data and theparameters needed for entropy - * coding. - * - */ -void WebRtcIsac_StoreLpcGainUb(double* lpGains, Bitstr* streamdata); - -/****************************************************************************** - * WebRtcIsac_DecodeLpcGainUb() - * Decode the LPC gain of sub-frames. - * - * Input/output: - * - streamdata : pointer to a stucture containg the encoded - * data and theparameters needed for entropy - * coding. - * - * Output: - * - lpGains : a buffer where decoded LPC gians will be stored. - * - * Return value : 0 if succeeded. - * <0 if failed. - */ -int16_t WebRtcIsac_DecodeLpcGainUb(double* lpGains, Bitstr* streamdata); - -/****************************************************************************** - * WebRtcIsac_EncodeBandwidth() - * Encode if the bandwidth of encoded audio is 0-12 kHz or 0-16 kHz. - * - * Input: - * - bandwidth : an enumerator specifying if the codec in is - * 0-12 kHz or 0-16 kHz mode. - * - * Input/output: - * - streamdata : pointer to a stucture containg the encoded - * data and theparameters needed for entropy - * coding. - * - * Return value : 0 if succeeded. - * <0 if failed. - */ -int16_t WebRtcIsac_EncodeBandwidth(enum ISACBandwidth bandwidth, - Bitstr* streamData); - -/****************************************************************************** - * WebRtcIsac_DecodeBandwidth() - * Decode the bandwidth of the encoded audio, i.e. if the bandwidth is 0-12 kHz - * or 0-16 kHz. - * - * Input/output: - * - streamdata : pointer to a stucture containg the encoded - * data and theparameters needed for entropy - * coding. - * - * Output: - * - bandwidth : an enumerator specifying if the codec is in - * 0-12 kHz or 0-16 kHz mode. - * - * Return value : 0 if succeeded. - * <0 if failed. - */ -int16_t WebRtcIsac_DecodeBandwidth(Bitstr* streamData, - enum ISACBandwidth* bandwidth); - -/****************************************************************************** - * WebRtcIsac_EncodeJitterInfo() - * Decode the jitter information. - * - * Input/output: - * - streamdata : pointer to a stucture containg the encoded - * data and theparameters needed for entropy - * coding. - * - * Input: - * - jitterInfo : one bit of info specifying if the channel is - * in high/low jitter. Zero indicates low jitter - * and one indicates high jitter. - * - * Return value : 0 if succeeded. - * <0 if failed. - */ -int16_t WebRtcIsac_EncodeJitterInfo(int32_t jitterIndex, Bitstr* streamData); - -/****************************************************************************** - * WebRtcIsac_DecodeJitterInfo() - * Decode the jitter information. - * - * Input/output: - * - streamdata : pointer to a stucture containg the encoded - * data and theparameters needed for entropy - * coding. - * - * Output: - * - jitterInfo : one bit of info specifying if the channel is - * in high/low jitter. Zero indicates low jitter - * and one indicates high jitter. - * - * Return value : 0 if succeeded. - * <0 if failed. 
- */ -int16_t WebRtcIsac_DecodeJitterInfo(Bitstr* streamData, int32_t* jitterInfo); - -#endif /* MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_ENTROPY_CODING_H_ */ diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/filterbanks.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/filterbanks.c deleted file mode 100644 index d57b55022d38..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/filterbanks.c +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * filterbanks.c - * - * This file contains function WebRtcIsac_AllPassFilter2Float, - * WebRtcIsac_SplitAndFilter, and WebRtcIsac_FilterAndCombine - * which implement filterbanks that produce decimated lowpass and - * highpass versions of a signal, and performs reconstruction. - * - */ - -#include "modules/audio_coding/codecs/isac/main/source/settings.h" -#include "modules/audio_coding/codecs/isac/main/source/codec.h" -#include "modules/audio_coding/codecs/isac/main/source/isac_vad.h" - -/* Combining */ - -/* HPstcoeff_out_1 = {a1, a2, b1 - b0 * a1, b2 - b0 * a2}; */ -static const float kHpStCoefOut1Float[4] = -{-1.99701049409000f, 0.99714204490000f, 0.01701049409000f, -0.01704204490000f}; - -/* HPstcoeff_out_2 = {a1, a2, b1 - b0 * a1, b2 - b0 * a2}; */ -static const float kHpStCoefOut2Float[4] = -{-1.98645294509837f, 0.98672435560000f, 0.00645294509837f, -0.00662435560000f}; - - -/* Function WebRtcIsac_FilterAndCombine */ -/* This is a decoder function that takes the decimated - length FRAMESAMPLES_HALF input low-pass and - high-pass signals and creates a reconstructed fullband - output signal of length FRAMESAMPLES. WebRtcIsac_FilterAndCombine - is the sibling function of WebRtcIsac_SplitAndFilter */ -/* INPUTS: - inLP: a length FRAMESAMPLES_HALF array of input low-pass - samples. - inHP: a length FRAMESAMPLES_HALF array of input high-pass - samples. - postfiltdata: input data structure containing the filterbank - states from the previous decoding iteration. - OUTPUTS: - Out: a length FRAMESAMPLES array of output reconstructed - samples (fullband) based on the input low-pass and - high-pass signals. - postfiltdata: the input data structure containing the filterbank - states is updated for the next decoding iteration */ -void WebRtcIsac_FilterAndCombineFloat(float *InLP, - float *InHP, - float *Out, - PostFiltBankstr *postfiltdata) -{ - int k; - float tempin_ch1[FRAMESAMPLES+MAX_AR_MODEL_ORDER]; - float tempin_ch2[FRAMESAMPLES+MAX_AR_MODEL_ORDER]; - float ftmp, ftmp2; - - /* Form the polyphase signals*/ - for (k=0;kSTATE_0_UPPER_float); - - /* Now, all-pass filter the new lower channel signal. 
But since all-pass filter factors - at the decoder are swapped from the ones at the encoder, the 'upper' channel - all-pass filter factors (WebRtcIsac_kUpperApFactorsFloat) are used to filter this new - lower channel signal */ - WebRtcIsac_AllPassFilter2Float(tempin_ch2, WebRtcIsac_kUpperApFactorsFloat, - FRAMESAMPLES_HALF, NUMBEROFCHANNELAPSECTIONS,postfiltdata->STATE_0_LOWER_float); - - - /* Merge outputs to form the full length output signal.*/ - for (k=0;kHPstates1_float[0] + - kHpStCoefOut1Float[3] * postfiltdata->HPstates1_float[1]; - ftmp = Out[k] - kHpStCoefOut1Float[0] * postfiltdata->HPstates1_float[0] - - kHpStCoefOut1Float[1] * postfiltdata->HPstates1_float[1]; - postfiltdata->HPstates1_float[1] = postfiltdata->HPstates1_float[0]; - postfiltdata->HPstates1_float[0] = ftmp; - Out[k] = ftmp2; - } - - for (k=0;kHPstates2_float[0] + - kHpStCoefOut2Float[3] * postfiltdata->HPstates2_float[1]; - ftmp = Out[k] - kHpStCoefOut2Float[0] * postfiltdata->HPstates2_float[0] - - kHpStCoefOut2Float[1] * postfiltdata->HPstates2_float[1]; - postfiltdata->HPstates2_float[1] = postfiltdata->HPstates2_float[0]; - postfiltdata->HPstates2_float[0] = ftmp; - Out[k] = ftmp2; - } -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/intialize.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/intialize.c deleted file mode 100644 index 5c951f6e9d1b..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/intialize.c +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/* encode.c - Encoding function for the iSAC coder */ - -#include - -#include "modules/audio_coding/codecs/isac/main/source/structs.h" -#include "modules/audio_coding/codecs/isac/main/source/codec.h" -#include "modules/audio_coding/codecs/isac/main/source/pitch_estimator.h" - -void WebRtcIsac_InitMasking(MaskFiltstr *maskdata) { - - int k; - - for (k = 0; k < WINLEN; k++) { - maskdata->DataBufferLo[k] = 0.0; - maskdata->DataBufferHi[k] = 0.0; - } - for (k = 0; k < ORDERLO+1; k++) { - maskdata->CorrBufLo[k] = 0.0; - maskdata->PreStateLoF[k] = 0.0; - maskdata->PreStateLoG[k] = 0.0; - maskdata->PostStateLoF[k] = 0.0; - maskdata->PostStateLoG[k] = 0.0; - } - for (k = 0; k < ORDERHI+1; k++) { - maskdata->CorrBufHi[k] = 0.0; - maskdata->PreStateHiF[k] = 0.0; - maskdata->PreStateHiG[k] = 0.0; - maskdata->PostStateHiF[k] = 0.0; - maskdata->PostStateHiG[k] = 0.0; - } - - maskdata->OldEnergy = 10.0; - return; -} - -void WebRtcIsac_InitPostFilterbank(PostFiltBankstr *postfiltdata) -{ - int k; - - for (k = 0; k < 2*POSTQORDER; k++) { - postfiltdata->STATE_0_LOWER[k] = 0; - postfiltdata->STATE_0_UPPER[k] = 0; - - postfiltdata->STATE_0_LOWER_float[k] = 0; - postfiltdata->STATE_0_UPPER_float[k] = 0; - } - - /* High pass filter states */ - postfiltdata->HPstates1[0] = 0.0; - postfiltdata->HPstates1[1] = 0.0; - - postfiltdata->HPstates2[0] = 0.0; - postfiltdata->HPstates2[1] = 0.0; - - postfiltdata->HPstates1_float[0] = 0.0f; - postfiltdata->HPstates1_float[1] = 0.0f; - - postfiltdata->HPstates2_float[0] = 0.0f; - postfiltdata->HPstates2_float[1] = 0.0f; - - return; -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/isac.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/isac.c deleted file mode 100644 index 456f447d9a7d..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/isac.c +++ /dev/null @@ -1,2307 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * isac.c - * - * This C file contains the functions for the ISAC API - * - */ - -#include "modules/audio_coding/codecs/isac/main/include/isac.h" - -#include -#include -#include -#include - -#include "rtc_base/checks.h" -#include "common_audio/signal_processing/include/signal_processing_library.h" -#include "modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.h" -#include "modules/audio_coding/codecs/isac/main/source/codec.h" -#include "modules/audio_coding/codecs/isac/main/source/crc.h" -#include "modules/audio_coding/codecs/isac/main/source/entropy_coding.h" -#include "modules/audio_coding/codecs/isac/main/source/lpc_shape_swb16_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/os_specific_inline.h" -#include "modules/audio_coding/codecs/isac/main/source/structs.h" -#include "modules/audio_coding/codecs/isac/main/source/isac_vad.h" -#include "rtc_base/system/arch.h" - -#define BIT_MASK_DEC_INIT 0x0001 -#define BIT_MASK_ENC_INIT 0x0002 - -#define LEN_CHECK_SUM_WORD8 4 -#define MAX_NUM_LAYERS 10 - - -/**************************************************************************** - * UpdatePayloadSizeLimit(...) 
- * - * Call this function to update the limit on the payload size. The limit on - * payload size might change i) if a user ''directly changes the limit by - * calling xxx_setMaxPayloadSize() or xxx_setMaxRate(), or ii) indirectly - * when bandwidth is changing. The latter might be the result of bandwidth - * adaptation, or direct change of the bottleneck in instantaneous mode. - * - * This function takes the current overall limit on payload, and translates it - * to the limits on lower and upper-band. If the codec is in wideband mode, - * then the overall limit and the limit on the lower-band is the same. - * Otherwise, a fraction of the limit should be allocated to lower-band - * leaving some room for the upper-band bit-stream. That is why an update - * of limit is required every time that the bandwidth is changing. - * - */ -static void UpdatePayloadSizeLimit(ISACMainStruct* instISAC) { - int16_t lim30MsPayloadBytes = WEBRTC_SPL_MIN( - (instISAC->maxPayloadSizeBytes), - (instISAC->maxRateBytesPer30Ms)); - int16_t lim60MsPayloadBytes = WEBRTC_SPL_MIN( - (instISAC->maxPayloadSizeBytes), - (instISAC->maxRateBytesPer30Ms << 1)); - - /* The only time that iSAC will have 60 ms - * frame-size is when operating in wideband, so - * there is no upper-band bit-stream. */ - - if (instISAC->bandwidthKHz == isac8kHz) { - /* At 8 kHz there is no upper-band bit-stream, - * therefore, the lower-band limit is the overall limit. */ - instISAC->instLB.ISACencLB_obj.payloadLimitBytes60 = - lim60MsPayloadBytes; - instISAC->instLB.ISACencLB_obj.payloadLimitBytes30 = - lim30MsPayloadBytes; - } else { - /* When in super-wideband, we only have 30 ms frames. - * Do a rate allocation for the given limit. */ - if (lim30MsPayloadBytes > 250) { - /* 4/5 to lower-band the rest for upper-band. */ - instISAC->instLB.ISACencLB_obj.payloadLimitBytes30 = - (lim30MsPayloadBytes << 2) / 5; - } else if (lim30MsPayloadBytes > 200) { - /* For the interval of 200 to 250 the share of - * upper-band linearly grows from 20 to 50. */ - instISAC->instLB.ISACencLB_obj.payloadLimitBytes30 = - (lim30MsPayloadBytes << 1) / 5 + 100; - } else { - /* Allocate only 20 for upper-band. */ - instISAC->instLB.ISACencLB_obj.payloadLimitBytes30 = - lim30MsPayloadBytes - 20; - } - instISAC->instUB.ISACencUB_obj.maxPayloadSizeBytes = - lim30MsPayloadBytes; - } -} - - -/**************************************************************************** - * UpdateBottleneck(...) - * - * This function updates the bottleneck only if the codec is operating in - * channel-adaptive mode. Furthermore, as the update of bottleneck might - * result in an update of bandwidth, therefore, the bottlenech should be - * updated just right before the first 10ms of a frame is pushed into encoder. - * - */ -static void UpdateBottleneck(ISACMainStruct* instISAC) { - /* Read the bottleneck from bandwidth estimator for the - * first 10 ms audio. This way, if there is a change - * in bandwidth, upper and lower-band will be in sync. */ - if ((instISAC->codingMode == 0) && - (instISAC->instLB.ISACencLB_obj.buffer_index == 0) && - (instISAC->instLB.ISACencLB_obj.frame_nb == 0)) { - int32_t bottleneck = - WebRtcIsac_GetUplinkBandwidth(&instISAC->bwestimator_obj); - - /* Adding hysteresis when increasing signal bandwidth. */ - if ((instISAC->bandwidthKHz == isac8kHz) - && (bottleneck > 37000) - && (bottleneck < 41000)) { - bottleneck = 37000; - } - - /* Switching from 12 kHz to 16 kHz is not allowed at this revision. 
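A minimal standalone sketch of the lower-band share computed by UpdatePayloadSizeLimit() above for a 30 ms super-wideband packet; the helper name is hypothetical, the constants are the ones in the code.

static int LowerBandLimit30(int lim30) {
  if (lim30 > 250) {
    return (lim30 << 2) / 5;       /* 4/5 of the budget to the lower band. */
  } else if (lim30 > 200) {
    return (lim30 << 1) / 5 + 100; /* Upper-band share grows from 20 to 50 bytes. */
  } else {
    return lim30 - 20;             /* Reserve a fixed 20 bytes for the upper band. */
  }
}
/* Example: LowerBandLimit30(300) == 240, leaving 60 bytes of headroom for the
 * upper-band stream, its length byte and the checksum. */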
- * If we let this happen, we have to take care of buffer_index and - * the last LPC vector. */ - if ((instISAC->bandwidthKHz != isac16kHz) && - (bottleneck > 46000)) { - bottleneck = 46000; - } - - /* We might need a rate allocation. */ - if (instISAC->encoderSamplingRateKHz == kIsacWideband) { - /* Wideband is the only choice we have here. */ - instISAC->instLB.ISACencLB_obj.bottleneck = - (bottleneck > 32000) ? 32000 : bottleneck; - instISAC->bandwidthKHz = isac8kHz; - } else { - /* Do the rate-allocation and get the new bandwidth. */ - enum ISACBandwidth bandwidth; - WebRtcIsac_RateAllocation(bottleneck, - &(instISAC->instLB.ISACencLB_obj.bottleneck), - &(instISAC->instUB.ISACencUB_obj.bottleneck), - &bandwidth); - if (bandwidth != isac8kHz) { - instISAC->instLB.ISACencLB_obj.new_framelength = 480; - } - if (bandwidth != instISAC->bandwidthKHz) { - /* Bandwidth is changing. */ - instISAC->bandwidthKHz = bandwidth; - UpdatePayloadSizeLimit(instISAC); - if (bandwidth == isac12kHz) { - instISAC->instLB.ISACencLB_obj.buffer_index = 0; - } - /* Currently we don't let the bandwidth to switch to 16 kHz - * if in adaptive mode. If we let this happen, we have to take - * care of buffer_index and the last LPC vector. */ - } - } - } -} - - -/**************************************************************************** - * GetSendBandwidthInfo(...) - * - * This is called to get the bandwidth info. This info is the bandwidth and - * the jitter of 'there-to-here' channel, estimated 'here.' These info - * is signaled in an in-band fashion to the other side. - * - * The call to the bandwidth estimator triggers a recursive averaging which - * has to be synchronized between encoder & decoder, therefore, the call to - * BWE should be once per packet. As the BWE info is inserted into bit-stream - * We need a valid info right before the encodeLB function is going to - * generate a bit-stream. That is when lower-band buffer has already 20ms - * of audio, and the 3rd block of 10ms is going to be injected into encoder. - * - * Inputs: - * - instISAC : iSAC instance. - * - * Outputs: - * - bandwidthIndex : an index which has to be encoded in - * lower-band bit-stream, indicating the - * bandwidth of there-to-here channel. - * - jitterInfo : this indicates if the jitter is high - * or low and it is encoded in upper-band - * bit-stream. - * - */ -static void GetSendBandwidthInfo(ISACMainStruct* instISAC, - int16_t* bandwidthIndex, - int16_t* jitterInfo) { - if ((instISAC->instLB.ISACencLB_obj.buffer_index == - (FRAMESAMPLES_10ms << 1)) && - (instISAC->instLB.ISACencLB_obj.frame_nb == 0)) { - /* Bandwidth estimation and coding. */ - WebRtcIsac_GetDownlinkBwJitIndexImpl(&(instISAC->bwestimator_obj), - bandwidthIndex, jitterInfo, - instISAC->decoderSamplingRateKHz); - } -} - - -/**************************************************************************** - * WebRtcIsac_Create(...) - * - * This function creates an ISAC instance, which will contain the state - * information for one coding/decoding channel. - * - * Input: - * - ISAC_main_inst : address of the pointer to the coder instance. - * - * Return value : 0 - Ok - * -1 - Error - */ -int16_t WebRtcIsac_Create(ISACStruct** ISAC_main_inst) { - ISACMainStruct* instISAC; - - if (ISAC_main_inst != NULL) { - instISAC = (ISACMainStruct*)malloc(sizeof(ISACMainStruct)); - *ISAC_main_inst = (ISACStruct*)instISAC; - if (*ISAC_main_inst != NULL) { - instISAC->errorCode = 0; - instISAC->initFlag = 0; - /* Default is wideband. 
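A sketch of the hysteresis UpdateBottleneck() applies before rate allocation, with the bandwidth state reduced to two flags for illustration; the thresholds are the ones used above.

#include <stdint.h>

static int32_t ApplyBottleneckHysteresis(int currently_8khz,
                                         int currently_16khz,
                                         int32_t bottleneck) {
  /* Do not leave 8 kHz until the estimate is clearly above 41 kbps. */
  if (currently_8khz && bottleneck > 37000 && bottleneck < 41000) {
    bottleneck = 37000;
  }
  /* Switching up to 16 kHz is not allowed in adaptive mode, so cap at 46 kbps. */
  if (!currently_16khz && bottleneck > 46000) {
    bottleneck = 46000;
  }
  return bottleneck;
}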
*/ - instISAC->bandwidthKHz = isac8kHz; - instISAC->encoderSamplingRateKHz = kIsacWideband; - instISAC->decoderSamplingRateKHz = kIsacWideband; - instISAC->in_sample_rate_hz = 16000; - - WebRtcIsac_InitTransform(&instISAC->transform_tables); - return 0; - } else { - return -1; - } - } else { - return -1; - } -} - - -/**************************************************************************** - * WebRtcIsac_Free(...) - * - * This function frees the ISAC instance created at the beginning. - * - * Input: - * - ISAC_main_inst : a ISAC instance. - * - * Return value : 0 - Ok - * -1 - Error - */ -int16_t WebRtcIsac_Free(ISACStruct* ISAC_main_inst) { - ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst; - free(instISAC); - return 0; -} - - -/**************************************************************************** - * EncoderInitLb(...) - internal function for initialization of - * Lower Band - * EncoderInitUb(...) - internal function for initialization of - * Upper Band - * WebRtcIsac_EncoderInit(...) - API function - * - * This function initializes a ISAC instance prior to the encoder calls. - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - CodingMode : 0 -> Bit rate and frame length are automatically - * adjusted to available bandwidth on - * transmission channel, applicable just to - * wideband mode. - * 1 -> User sets a frame length and a target bit - * rate which is taken as the maximum - * short-term average bit rate. - * - * Return value : 0 - Ok - * -1 - Error - */ -static int16_t EncoderInitLb(ISACLBStruct* instLB, - int16_t codingMode, - enum IsacSamplingRate sampRate) { - int16_t statusInit = 0; - int k; - - /* Init stream vector to zero */ - for (k = 0; k < STREAM_SIZE_MAX_60; k++) { - instLB->ISACencLB_obj.bitstr_obj.stream[k] = 0; - } - - if ((codingMode == 1) || (sampRate == kIsacSuperWideband)) { - /* 30 ms frame-size if either in super-wideband or - * instantaneous mode (I-mode). */ - instLB->ISACencLB_obj.new_framelength = 480; - } else { - instLB->ISACencLB_obj.new_framelength = INITIAL_FRAMESAMPLES; - } - - WebRtcIsac_InitMasking(&instLB->ISACencLB_obj.maskfiltstr_obj); - WebRtcIsac_InitPreFilterbank(&instLB->ISACencLB_obj.prefiltbankstr_obj); - WebRtcIsac_InitPitchFilter(&instLB->ISACencLB_obj.pitchfiltstr_obj); - WebRtcIsac_InitPitchAnalysis( - &instLB->ISACencLB_obj.pitchanalysisstr_obj); - - instLB->ISACencLB_obj.buffer_index = 0; - instLB->ISACencLB_obj.frame_nb = 0; - /* Default for I-mode. */ - instLB->ISACencLB_obj.bottleneck = 32000; - instLB->ISACencLB_obj.current_framesamples = 0; - instLB->ISACencLB_obj.s2nr = 0; - instLB->ISACencLB_obj.payloadLimitBytes30 = STREAM_SIZE_MAX_30; - instLB->ISACencLB_obj.payloadLimitBytes60 = STREAM_SIZE_MAX_60; - instLB->ISACencLB_obj.maxPayloadBytes = STREAM_SIZE_MAX_60; - instLB->ISACencLB_obj.maxRateInBytes = STREAM_SIZE_MAX_30; - instLB->ISACencLB_obj.enforceFrameSize = 0; - /* Invalid value prevents getRedPayload to - run before encoder is called. */ - instLB->ISACencLB_obj.lastBWIdx = -1; - return statusInit; -} - -static int16_t EncoderInitUb(ISACUBStruct* instUB, - int16_t bandwidth) { - int16_t statusInit = 0; - int k; - - /* Init stream vector to zero. 
*/ - for (k = 0; k < STREAM_SIZE_MAX_60; k++) { - instUB->ISACencUB_obj.bitstr_obj.stream[k] = 0; - } - - WebRtcIsac_InitMasking(&instUB->ISACencUB_obj.maskfiltstr_obj); - WebRtcIsac_InitPreFilterbank(&instUB->ISACencUB_obj.prefiltbankstr_obj); - - if (bandwidth == isac16kHz) { - instUB->ISACencUB_obj.buffer_index = LB_TOTAL_DELAY_SAMPLES; - } else { - instUB->ISACencUB_obj.buffer_index = 0; - } - /* Default for I-mode. */ - instUB->ISACencUB_obj.bottleneck = 32000; - /* These store the limits for the wideband + super-wideband bit-stream. */ - instUB->ISACencUB_obj.maxPayloadSizeBytes = STREAM_SIZE_MAX_30 << 1; - /* This has to be updated after each lower-band encoding to guarantee - * a correct payload-limitation. */ - instUB->ISACencUB_obj.numBytesUsed = 0; - memset(instUB->ISACencUB_obj.data_buffer_float, 0, - (MAX_FRAMESAMPLES + LB_TOTAL_DELAY_SAMPLES) * sizeof(float)); - - memcpy(&(instUB->ISACencUB_obj.lastLPCVec), - WebRtcIsac_kMeanLarUb16, sizeof(double) * UB_LPC_ORDER); - - return statusInit; -} - - -int16_t WebRtcIsac_EncoderInit(ISACStruct* ISAC_main_inst, - int16_t codingMode) { - ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst; - int16_t status; - - if ((codingMode != 0) && (codingMode != 1)) { - instISAC->errorCode = ISAC_DISALLOWED_CODING_MODE; - return -1; - } - /* Default bottleneck. */ - instISAC->bottleneck = MAX_ISAC_BW; - - if (instISAC->encoderSamplingRateKHz == kIsacWideband) { - instISAC->bandwidthKHz = isac8kHz; - instISAC->maxPayloadSizeBytes = STREAM_SIZE_MAX_60; - instISAC->maxRateBytesPer30Ms = STREAM_SIZE_MAX_30; - } else { - instISAC->bandwidthKHz = isac16kHz; - instISAC->maxPayloadSizeBytes = STREAM_SIZE_MAX; - instISAC->maxRateBytesPer30Ms = STREAM_SIZE_MAX; - } - - /* Channel-adaptive = 0; Instantaneous (Channel-independent) = 1. */ - instISAC->codingMode = codingMode; - - WebRtcIsac_InitBandwidthEstimator(&instISAC->bwestimator_obj, - instISAC->encoderSamplingRateKHz, - instISAC->decoderSamplingRateKHz); - - WebRtcIsac_InitRateModel(&instISAC->rate_data_obj); - /* Default for I-mode. */ - instISAC->MaxDelay = 10.0; - - status = EncoderInitLb(&instISAC->instLB, codingMode, - instISAC->encoderSamplingRateKHz); - if (status < 0) { - instISAC->errorCode = -status; - return -1; - } - - if (instISAC->encoderSamplingRateKHz == kIsacSuperWideband) { - /* Initialize encoder filter-bank. */ - memset(instISAC->analysisFBState1, 0, - FB_STATE_SIZE_WORD32 * sizeof(int32_t)); - memset(instISAC->analysisFBState2, 0, - FB_STATE_SIZE_WORD32 * sizeof(int32_t)); - - status = EncoderInitUb(&(instISAC->instUB), - instISAC->bandwidthKHz); - if (status < 0) { - instISAC->errorCode = -status; - return -1; - } - } - /* Initialization is successful, set the flag. */ - instISAC->initFlag |= BIT_MASK_ENC_INIT; - return 0; -} - - -/**************************************************************************** - * WebRtcIsac_Encode(...) - * - * This function encodes 10ms frame(s) and inserts it into a package. - * Input speech length has to be 160 samples (10ms). The encoder buffers those - * 10ms frames until it reaches the chosen Framesize (480 or 960 samples - * corresponding to 30 or 60 ms frames), and then proceeds to the encoding. - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - speechIn : input speech vector. - * - * Output: - * - encoded : the encoded data vector - * - * Return value: - * : >0 - Length (in bytes) of coded data - * : 0 - The buffer didn't reach the chosen - * frameSize so it keeps buffering speech - * samples. 
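A usage sketch of the 10 ms buffering contract described for WebRtcIsac_Encode(): feed 160-sample blocks (16 kHz input) until a positive length is returned. ReadNext10ms and SendPacket are hypothetical helpers.

#include <stdint.h>
#include "modules/audio_coding/codecs/isac/main/include/isac.h"

extern int ReadNext10ms(int16_t* block);                 /* Hypothetical capture callback. */
extern void SendPacket(const uint8_t* payload, int len); /* Hypothetical packetizer. */

static void EncodeLoopSketch(ISACStruct* inst) {
  int16_t block[160];   /* One 10 ms block at 16 kHz input. */
  uint8_t payload[600]; /* Large enough for any iSAC payload. */
  for (;;) {
    if (!ReadNext10ms(block)) break;
    int len = WebRtcIsac_Encode(inst, block, payload);
    if (len < 0) break;                     /* Inspect WebRtcIsac_GetErrorCode(inst). */
    if (len > 0) SendPacket(payload, len);  /* A full 30 or 60 ms frame was produced. */
    /* len == 0: encoder is still buffering towards 480 or 960 samples. */
  }
}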
- * : -1 - Error - */ -int WebRtcIsac_Encode(ISACStruct* ISAC_main_inst, - const int16_t* speechIn, - uint8_t* encoded) { - float inFrame[FRAMESAMPLES_10ms]; - int16_t speechInLB[FRAMESAMPLES_10ms]; - int16_t speechInUB[FRAMESAMPLES_10ms]; - int streamLenLB = 0; - int streamLenUB = 0; - int streamLen = 0; - size_t k = 0; - uint8_t garbageLen = 0; - int32_t bottleneck = 0; - int16_t bottleneckIdx = 0; - int16_t jitterInfo = 0; - - ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst; - ISACLBStruct* instLB = &(instISAC->instLB); - ISACUBStruct* instUB = &(instISAC->instUB); - - /* Check if encoder initiated. */ - if ((instISAC->initFlag & BIT_MASK_ENC_INIT) != - BIT_MASK_ENC_INIT) { - instISAC->errorCode = ISAC_ENCODER_NOT_INITIATED; - return -1; - } - - if (instISAC->encoderSamplingRateKHz == kIsacSuperWideband) { - WebRtcSpl_AnalysisQMF(speechIn, SWBFRAMESAMPLES_10ms, speechInLB, - speechInUB, instISAC->analysisFBState1, - instISAC->analysisFBState2); - - /* Convert from fixed to floating point. */ - for (k = 0; k < FRAMESAMPLES_10ms; k++) { - inFrame[k] = (float)speechInLB[k]; - } - } else { - for (k = 0; k < FRAMESAMPLES_10ms; k++) { - inFrame[k] = (float) speechIn[k]; - } - } - - /* Add some noise to avoid denormal numbers. */ - inFrame[0] += (float)1.23455334e-3; - inFrame[1] -= (float)2.04324239e-3; - inFrame[2] += (float)1.90854954e-3; - inFrame[9] += (float)1.84854878e-3; - - /* This function will update the bottleneck if required. */ - UpdateBottleneck(instISAC); - - /* Get the bandwith information which has to be sent to the other side. */ - GetSendBandwidthInfo(instISAC, &bottleneckIdx, &jitterInfo); - - /* Encode lower-band. */ - streamLenLB = WebRtcIsac_EncodeLb(&instISAC->transform_tables, - inFrame, &instLB->ISACencLB_obj, - instISAC->codingMode, bottleneckIdx); - if (streamLenLB < 0) { - return -1; - } - - if (instISAC->encoderSamplingRateKHz == kIsacSuperWideband) { - instUB = &(instISAC->instUB); - - /* Convert to float. */ - for (k = 0; k < FRAMESAMPLES_10ms; k++) { - inFrame[k] = (float) speechInUB[k]; - } - - /* Add some noise to avoid denormal numbers. */ - inFrame[0] += (float)1.23455334e-3; - inFrame[1] -= (float)2.04324239e-3; - inFrame[2] += (float)1.90854954e-3; - inFrame[9] += (float)1.84854878e-3; - - /* Tell to upper-band the number of bytes used so far. - * This is for payload limitation. */ - instUB->ISACencUB_obj.numBytesUsed = - (int16_t)(streamLenLB + 1 + LEN_CHECK_SUM_WORD8); - /* Encode upper-band. */ - switch (instISAC->bandwidthKHz) { - case isac12kHz: { - streamLenUB = WebRtcIsac_EncodeUb12(&instISAC->transform_tables, - inFrame, &instUB->ISACencUB_obj, - jitterInfo); - break; - } - case isac16kHz: { - streamLenUB = WebRtcIsac_EncodeUb16(&instISAC->transform_tables, - inFrame, &instUB->ISACencUB_obj, - jitterInfo); - break; - } - case isac8kHz: { - streamLenUB = 0; - break; - } - } - - if ((streamLenUB < 0) && (streamLenUB != -ISAC_PAYLOAD_LARGER_THAN_LIMIT)) { - /* An error has happened but this is not the error due to a - * bit-stream larger than the limit. */ - return -1; - } - - if (streamLenLB == 0) { - return 0; - } - - /* One byte is allocated for the length. According to older decoders - so the length bit-stream plus one byte for size and - LEN_CHECK_SUM_WORD8 for the checksum should be less than or equal - to 255. */ - if ((streamLenUB > (255 - (LEN_CHECK_SUM_WORD8 + 1))) || - (streamLenUB == -ISAC_PAYLOAD_LARGER_THAN_LIMIT)) { - /* We have got a too long bit-stream we skip the upper-band - * bit-stream for this frame. 
*/ - streamLenUB = 0; - } - - memcpy(encoded, instLB->ISACencLB_obj.bitstr_obj.stream, streamLenLB); - streamLen = streamLenLB; - if (streamLenUB > 0) { - encoded[streamLenLB] = (uint8_t)(streamLenUB + 1 + LEN_CHECK_SUM_WORD8); - memcpy(&encoded[streamLenLB + 1], - instUB->ISACencUB_obj.bitstr_obj.stream, - streamLenUB); - streamLen += encoded[streamLenLB]; - } else { - encoded[streamLenLB] = 0; - } - } else { - if (streamLenLB == 0) { - return 0; - } - memcpy(encoded, instLB->ISACencLB_obj.bitstr_obj.stream, streamLenLB); - streamLenUB = 0; - streamLen = streamLenLB; - } - - /* Add Garbage if required. */ - bottleneck = WebRtcIsac_GetUplinkBandwidth(&instISAC->bwestimator_obj); - if (instISAC->codingMode == 0) { - int minBytes; - int limit; - uint8_t* ptrGarbage; - - instISAC->MaxDelay = (double)WebRtcIsac_GetUplinkMaxDelay( - &instISAC->bwestimator_obj); - - /* Update rate model and get minimum number of bytes in this packet. */ - minBytes = WebRtcIsac_GetMinBytes( - &(instISAC->rate_data_obj), streamLen, - instISAC->instLB.ISACencLB_obj.current_framesamples, bottleneck, - instISAC->MaxDelay, instISAC->bandwidthKHz); - - /* Make sure MinBytes does not exceed packet size limit. */ - if (instISAC->bandwidthKHz == isac8kHz) { - if (instLB->ISACencLB_obj.current_framesamples == FRAMESAMPLES) { - limit = instLB->ISACencLB_obj.payloadLimitBytes30; - } else { - limit = instLB->ISACencLB_obj.payloadLimitBytes60; - } - } else { - limit = instUB->ISACencUB_obj.maxPayloadSizeBytes; - } - minBytes = (minBytes > limit) ? limit : minBytes; - - /* Make sure we don't allow more than 255 bytes of garbage data. - * We store the length of the garbage data in 8 bits in the bitstream, - * 255 is the max garbage length we can signal using 8 bits. */ - if ((instISAC->bandwidthKHz == isac8kHz) || - (streamLenUB == 0)) { - ptrGarbage = &encoded[streamLenLB]; - limit = streamLen + 255; - } else { - ptrGarbage = &encoded[streamLenLB + 1 + streamLenUB]; - limit = streamLen + (255 - encoded[streamLenLB]); - } - minBytes = (minBytes > limit) ? limit : minBytes; - - garbageLen = (minBytes > streamLen) ? (uint8_t)(minBytes - streamLen) : 0; - - /* Save data for creation of multiple bit-streams. */ - /* If bit-stream too short then add garbage at the end. */ - if (garbageLen > 0) { - /* Overwrite the garbage area to avoid leaking possibly sensitive data - over the network. This also makes the output deterministic. */ - memset(ptrGarbage, 0, garbageLen); - - /* For a correct length of the upper-band bit-stream together - * with the garbage. Garbage is embeded in upper-band bit-stream. - * That is the only way to preserve backward compatibility. */ - if ((instISAC->bandwidthKHz == isac8kHz) || - (streamLenUB == 0)) { - encoded[streamLenLB] = garbageLen; - } else { - encoded[streamLenLB] += garbageLen; - /* Write the length of the garbage at the end of the upper-band - * bit-stream, if exists. This helps for sanity check. */ - encoded[streamLenLB + 1 + streamLenUB] = garbageLen; - - } - streamLen += garbageLen; - } - } else { - /* update rate model */ - WebRtcIsac_UpdateRateModel( - &instISAC->rate_data_obj, streamLen, - instISAC->instLB.ISACencLB_obj.current_framesamples, bottleneck); - garbageLen = 0; - } - - /* Generate CRC if required. 
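A sketch of the super-wideband packet layout assembled above and of the length byte that ties it together; LEN_CHECK_SUM_WORD8 is 4 and the helper name is hypothetical.

/* [ lower-band stream | length byte | upper-band stream | garbage | CRC ] */
static int UpperBandLengthByte(int stream_len_ub, int garbage_len) {
  return stream_len_ub + 1 + 4 + garbage_len; /* Must stay <= 255. */
}
/* Example: streamLenUB = 60 and no garbage give a length byte of 65, so the
 * whole packet is streamLenLB + 65 bytes long. */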
*/ - if ((instISAC->bandwidthKHz != isac8kHz) && (streamLenUB > 0)) { - uint32_t crc; - - WebRtcIsac_GetCrc((int16_t*)(&(encoded[streamLenLB + 1])), - streamLenUB + garbageLen, &crc); -#ifndef WEBRTC_ARCH_BIG_ENDIAN - for (k = 0; k < LEN_CHECK_SUM_WORD8; k++) { - encoded[streamLen - LEN_CHECK_SUM_WORD8 + k] = - (uint8_t)(crc >> (24 - k * 8)); - } -#else - memcpy(&encoded[streamLenLB + streamLenUB + 1], &crc, LEN_CHECK_SUM_WORD8); -#endif - } - return streamLen; -} - - -/****************************************************************************** - * WebRtcIsac_GetNewBitStream(...) - * - * This function returns encoded data, with the received bwe-index in the - * stream. If the rate is set to a value less than bottleneck of codec - * the new bistream will be re-encoded with the given target rate. - * It should always return a complete packet, i.e. only called once - * even for 60 msec frames. - * - * NOTE 1! This function does not write in the ISACStruct, it is not allowed. - * NOTE 2! Rates larger than the bottleneck of the codec will be limited - * to the current bottleneck. - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - bweIndex : Index of bandwidth estimate to put in new - * bitstream - * - rate : target rate of the transcoder is bits/sec. - * Valid values are the accepted rate in iSAC, - * i.e. 10000 to 56000. - * - * Output: - * - encoded : The encoded data vector - * - * Return value : >0 - Length (in bytes) of coded data - * -1 - Error or called in SWB mode - * NOTE! No error code is written to - * the struct since it is only allowed to read - * the struct. - */ -int16_t WebRtcIsac_GetNewBitStream(ISACStruct* ISAC_main_inst, - int16_t bweIndex, - int16_t jitterInfo, - int32_t rate, - uint8_t* encoded, - int16_t isRCU) { - Bitstr iSACBitStreamInst; /* Local struct for bitstream handling */ - int16_t streamLenLB; - int16_t streamLenUB; - int16_t totalStreamLen; - double gain2; - double gain1; - float scale; - enum ISACBandwidth bandwidthKHz; - double rateLB; - double rateUB; - int32_t currentBN; - uint32_t crc; -#ifndef WEBRTC_ARCH_BIG_ENDIAN - int16_t k; -#endif - ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst; - - if ((instISAC->initFlag & BIT_MASK_ENC_INIT) != - BIT_MASK_ENC_INIT) { - return -1; - } - - /* Get the bottleneck of this iSAC and limit the - * given rate to the current bottleneck. */ - WebRtcIsac_GetUplinkBw(ISAC_main_inst, ¤tBN); - if (rate > currentBN) { - rate = currentBN; - } - - if (WebRtcIsac_RateAllocation(rate, &rateLB, &rateUB, &bandwidthKHz) < 0) { - return -1; - } - - /* Cannot transcode from 16 kHz to 12 kHz. */ - if ((bandwidthKHz == isac12kHz) && - (instISAC->bandwidthKHz == isac16kHz)) { - return -1; - } - - /* A gain [dB] for the given rate. */ - gain1 = WebRtcIsac_GetSnr( - rateLB, instISAC->instLB.ISACencLB_obj.current_framesamples); - /* The gain [dB] of this iSAC. */ - gain2 = WebRtcIsac_GetSnr( - instISAC->instLB.ISACencLB_obj.bottleneck, - instISAC->instLB.ISACencLB_obj.current_framesamples); - - /* Scale is the ratio of two gains in normal domain. */ - scale = (float)pow(10, (gain1 - gain2) / 20.0); - /* Change the scale if this is a RCU bit-stream. */ - scale = (isRCU) ? (scale * RCU_TRANSCODING_SCALE) : scale; - - streamLenLB = WebRtcIsac_EncodeStoredDataLb( - &instISAC->instLB.ISACencLB_obj.SaveEnc_obj, - &iSACBitStreamInst, bweIndex, scale); - - if (streamLenLB < 0) { - return -1; - } - - /* Convert from bytes to int16_t. 
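A sketch of the dB-to-linear conversion used above when re-encoding stored data for a lower target rate; gain1/gain2 are SNR figures in dB for the target rate and the current bottleneck, and rcu_scale stands in for RCU_TRANSCODING_SCALE.

#include <math.h>

static float TranscodingScale(double gain1_db, double gain2_db,
                              int is_rcu, float rcu_scale) {
  float scale = (float)pow(10.0, (gain1_db - gain2_db) / 20.0);
  return is_rcu ? scale * rcu_scale : scale;
}
/* Example: a 6 dB drop in SNR gives a scale of roughly 0.5, i.e. the stored
 * spectral data is re-quantized about twice as coarsely for the cheaper stream. */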
*/ - memcpy(encoded, iSACBitStreamInst.stream, streamLenLB); - - if (bandwidthKHz == isac8kHz) { - return streamLenLB; - } - - totalStreamLen = streamLenLB; - /* super-wideband is always at 30ms. - * These gains are in dB. - * Gain for the given rate. */ - gain1 = WebRtcIsac_GetSnr(rateUB, FRAMESAMPLES); - /* Gain of this iSAC */ - gain2 = WebRtcIsac_GetSnr(instISAC->instUB.ISACencUB_obj.bottleneck, - FRAMESAMPLES); - - /* Scale is the ratio of two gains in normal domain. */ - scale = (float)pow(10, (gain1 - gain2) / 20.0); - - /* Change the scale if this is a RCU bit-stream. */ - scale = (isRCU)? (scale * RCU_TRANSCODING_SCALE_UB) : scale; - - streamLenUB = WebRtcIsac_EncodeStoredDataUb( - &(instISAC->instUB.ISACencUB_obj.SaveEnc_obj), - &iSACBitStreamInst, jitterInfo, scale, - instISAC->bandwidthKHz); - - if (streamLenUB < 0) { - return -1; - } - - if (streamLenUB + 1 + LEN_CHECK_SUM_WORD8 > 255) { - return streamLenLB; - } - - totalStreamLen = streamLenLB + streamLenUB + 1 + LEN_CHECK_SUM_WORD8; - encoded[streamLenLB] = streamLenUB + 1 + LEN_CHECK_SUM_WORD8; - - memcpy(&encoded[streamLenLB + 1], iSACBitStreamInst.stream, - streamLenUB); - - WebRtcIsac_GetCrc((int16_t*)(&(encoded[streamLenLB + 1])), - streamLenUB, &crc); -#ifndef WEBRTC_ARCH_BIG_ENDIAN - for (k = 0; k < LEN_CHECK_SUM_WORD8; k++) { - encoded[totalStreamLen - LEN_CHECK_SUM_WORD8 + k] = - (uint8_t)((crc >> (24 - k * 8)) & 0xFF); - } -#else - memcpy(&encoded[streamLenLB + streamLenUB + 1], &crc, - LEN_CHECK_SUM_WORD8); -#endif - return totalStreamLen; -} - - -/**************************************************************************** - * DecoderInitLb(...) - internal function for initialization of - * Lower Band - * DecoderInitUb(...) - internal function for initialization of - * Upper Band - * WebRtcIsac_DecoderInit(...) - API function - * - * This function initializes a ISAC instance prior to the decoder calls. - * - * Input: - * - ISAC_main_inst : ISAC instance. - */ -static void DecoderInitLb(ISACLBStruct* instISAC) { - int i; - /* Initialize stream vector to zero. 
*/ - for (i = 0; i < STREAM_SIZE_MAX_60; i++) { - instISAC->ISACdecLB_obj.bitstr_obj.stream[i] = 0; - } - - WebRtcIsac_InitMasking(&instISAC->ISACdecLB_obj.maskfiltstr_obj); - WebRtcIsac_InitPostFilterbank( - &instISAC->ISACdecLB_obj.postfiltbankstr_obj); - WebRtcIsac_InitPitchFilter(&instISAC->ISACdecLB_obj.pitchfiltstr_obj); -} - -static void DecoderInitUb(ISACUBStruct* instISAC) { - int i; - /* Init stream vector to zero */ - for (i = 0; i < STREAM_SIZE_MAX_60; i++) { - instISAC->ISACdecUB_obj.bitstr_obj.stream[i] = 0; - } - - WebRtcIsac_InitMasking(&instISAC->ISACdecUB_obj.maskfiltstr_obj); - WebRtcIsac_InitPostFilterbank( - &instISAC->ISACdecUB_obj.postfiltbankstr_obj); -} - -void WebRtcIsac_DecoderInit(ISACStruct* ISAC_main_inst) { - ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst; - - DecoderInitLb(&instISAC->instLB); - if (instISAC->decoderSamplingRateKHz == kIsacSuperWideband) { - memset(instISAC->synthesisFBState1, 0, - FB_STATE_SIZE_WORD32 * sizeof(int32_t)); - memset(instISAC->synthesisFBState2, 0, - FB_STATE_SIZE_WORD32 * sizeof(int32_t)); - DecoderInitUb(&(instISAC->instUB)); - } - if ((instISAC->initFlag & BIT_MASK_ENC_INIT) != BIT_MASK_ENC_INIT) { - WebRtcIsac_InitBandwidthEstimator(&instISAC->bwestimator_obj, - instISAC->encoderSamplingRateKHz, - instISAC->decoderSamplingRateKHz); - } - instISAC->initFlag |= BIT_MASK_DEC_INIT; - instISAC->resetFlag_8kHz = 0; -} - - -/**************************************************************************** - * WebRtcIsac_UpdateBwEstimate(...) - * - * This function updates the estimate of the bandwidth. - * - * NOTE: - * The estimates of bandwidth is not valid if the sample rate of the far-end - * encoder is set to 48 kHz and send timestamps are increamented according to - * 48 kHz sampling rate. - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - encoded : encoded ISAC frame(s). - * - packet_size : size of the packet. - * - rtp_seq_number : the RTP number of the packet. - * - arr_ts : the arrival time of the packet (from NetEq) - * in samples. - * - * Return value : 0 - Ok - * -1 - Error - */ -int16_t WebRtcIsac_UpdateBwEstimate(ISACStruct* ISAC_main_inst, - const uint8_t* encoded, - size_t packet_size, - uint16_t rtp_seq_number, - uint32_t send_ts, - uint32_t arr_ts) { - ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst; - Bitstr streamdata; -#ifndef WEBRTC_ARCH_BIG_ENDIAN - int k; -#endif - int16_t err; - - /* Check if decoder initiated. */ - if ((instISAC->initFlag & BIT_MASK_DEC_INIT) != BIT_MASK_DEC_INIT) { - instISAC->errorCode = ISAC_DECODER_NOT_INITIATED; - return -1; - } - - /* Check that the size of the packet is valid, and if not return without - * updating the bandwidth estimate. A valid size is at least 10 bytes. */ - if (packet_size < 10) { - /* Return error code if the packet length is null. */ - instISAC->errorCode = ISAC_EMPTY_PACKET; - return -1; - } - - WebRtcIsac_ResetBitstream(&(streamdata)); - -#ifndef WEBRTC_ARCH_BIG_ENDIAN - for (k = 0; k < 10; k++) { - uint16_t ek = ((const uint16_t*)encoded)[k >> 1]; - streamdata.stream[k] = (uint8_t)((ek >> ((k & 1) << 3)) & 0xff); - } -#else - memcpy(streamdata.stream, encoded, 10); -#endif - - err = WebRtcIsac_EstimateBandwidth(&instISAC->bwestimator_obj, &streamdata, - packet_size, rtp_seq_number, send_ts, - arr_ts, instISAC->encoderSamplingRateKHz, - instISAC->decoderSamplingRateKHz); - if (err < 0) { - /* Return error code if something went wrong. 
*/ - instISAC->errorCode = -err; - return -1; - } - return 0; -} - -static int Decode(ISACStruct* ISAC_main_inst, - const uint8_t* encoded, - size_t lenEncodedBytes, - int16_t* decoded, - int16_t* speechType, - int16_t isRCUPayload) { - /* Number of samples (480 or 960), output from decoder - that were actually used in the encoder/decoder - (determined on the fly). */ - int16_t numSamplesLB; - int16_t numSamplesUB; - int16_t speechIdx; - float outFrame[MAX_FRAMESAMPLES]; - int16_t outFrameLB[MAX_FRAMESAMPLES]; - int16_t outFrameUB[MAX_FRAMESAMPLES]; - int numDecodedBytesLBint; - size_t numDecodedBytesLB; - int numDecodedBytesUB; - size_t lenEncodedLBBytes; - int16_t validChecksum = 1; - int16_t k; - uint16_t numLayer; - size_t totSizeBytes; - int16_t err; - - ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst; - ISACUBDecStruct* decInstUB = &(instISAC->instUB.ISACdecUB_obj); - ISACLBDecStruct* decInstLB = &(instISAC->instLB.ISACdecLB_obj); - - /* Check if decoder initiated. */ - if ((instISAC->initFlag & BIT_MASK_DEC_INIT) != - BIT_MASK_DEC_INIT) { - instISAC->errorCode = ISAC_DECODER_NOT_INITIATED; - return -1; - } - - if (lenEncodedBytes == 0) { - /* return error code if the packet length is null. */ - instISAC->errorCode = ISAC_EMPTY_PACKET; - return -1; - } - - /* The size of the encoded lower-band is bounded by - * STREAM_SIZE_MAX. If a payload with the size larger than STREAM_SIZE_MAX - * is received, it is not considered erroneous. */ - lenEncodedLBBytes = (lenEncodedBytes > STREAM_SIZE_MAX) ? - STREAM_SIZE_MAX : lenEncodedBytes; - - /* Copy to lower-band bit-stream structure. */ - memcpy(instISAC->instLB.ISACdecLB_obj.bitstr_obj.stream, encoded, - lenEncodedLBBytes); - - /* We need to initialize numSamplesLB to something; otherwise, in the test - for whether we should return -1 below, the compiler might generate code - that fools Memcheck (Valgrind) into thinking that the control flow depends - on the uninitialized value in numSamplesLB (since WebRtcIsac_DecodeLb will - not fill it in if it fails and returns -1). */ - numSamplesLB = 0; - - /* Regardless of that the current codec is setup to work in - * wideband or super-wideband, the decoding of the lower-band - * has to be performed. */ - numDecodedBytesLBint = WebRtcIsac_DecodeLb(&instISAC->transform_tables, - outFrame, decInstLB, - &numSamplesLB, isRCUPayload); - numDecodedBytesLB = (size_t)numDecodedBytesLBint; - if ((numDecodedBytesLBint < 0) || - (numDecodedBytesLB > lenEncodedLBBytes) || - (numSamplesLB > MAX_FRAMESAMPLES)) { - instISAC->errorCode = ISAC_LENGTH_MISMATCH; - return -1; - } - - /* Error Check, we accept multi-layer bit-stream This will limit number - * of iterations of the while loop. Even without this the number - * of iterations is limited. */ - numLayer = 1; - totSizeBytes = numDecodedBytesLB; - while (totSizeBytes != lenEncodedBytes) { - if ((totSizeBytes > lenEncodedBytes) || - (encoded[totSizeBytes] == 0) || - (numLayer > MAX_NUM_LAYERS)) { - instISAC->errorCode = ISAC_LENGTH_MISMATCH; - return -1; - } - totSizeBytes += encoded[totSizeBytes]; - numLayer++; - } - - if (instISAC->decoderSamplingRateKHz == kIsacWideband) { - for (k = 0; k < numSamplesLB; k++) { - if (outFrame[k] > 32767) { - decoded[k] = 32767; - } else if (outFrame[k] < -32768) { - decoded[k] = -32768; - } else { - decoded[k] = (int16_t)WebRtcIsac_lrint(outFrame[k]); - } - } - numSamplesUB = 0; - } else { - uint32_t crc; - /* We don't accept larger than 30ms (480 samples at lower-band) - * frame-size. 
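A standalone sketch of the layer walk above: after the lower-band stream, every following layer starts with one byte giving its own total length, so the running sum of those bytes must land exactly on the packet end. max_layers mirrors MAX_NUM_LAYERS (10).

#include <stddef.h>
#include <stdint.h>

static int LayeredLengthIsConsistent(const uint8_t* packet, size_t packet_len,
                                     size_t lower_band_len, int max_layers) {
  size_t pos = lower_band_len;
  int layers = 1;
  while (pos != packet_len) {
    if (pos > packet_len || packet[pos] == 0 || layers > max_layers) {
      return 0; /* The code above reports ISAC_LENGTH_MISMATCH here. */
    }
    pos += packet[pos];
    layers++;
  }
  return 1;
}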
*/ - for (k = 0; k < numSamplesLB; k++) { - if (outFrame[k] > 32767) { - outFrameLB[k] = 32767; - } else if (outFrame[k] < -32768) { - outFrameLB[k] = -32768; - } else { - outFrameLB[k] = (int16_t)WebRtcIsac_lrint(outFrame[k]); - } - } - - /* Check for possible error, and if upper-band stream exists. */ - if (numDecodedBytesLB == lenEncodedBytes) { - /* Decoding was successful. No super-wideband bit-stream exists. */ - numSamplesUB = numSamplesLB; - memset(outFrameUB, 0, sizeof(int16_t) * numSamplesUB); - - /* Prepare for the potential increase of signal bandwidth. */ - instISAC->resetFlag_8kHz = 2; - } else { - /* This includes the checksum and the bytes that stores the length. */ - int16_t lenNextStream = encoded[numDecodedBytesLB]; - - /* Is this garbage or valid super-wideband bit-stream? - * Check if checksum is valid. */ - if (lenNextStream <= (LEN_CHECK_SUM_WORD8 + 1)) { - /* Such a small second layer cannot be super-wideband layer. - * It must be a short garbage. */ - validChecksum = 0; - } else { - /* Run CRC to see if the checksum match. */ - WebRtcIsac_GetCrc((int16_t*)(&encoded[numDecodedBytesLB + 1]), - lenNextStream - LEN_CHECK_SUM_WORD8 - 1, &crc); - - validChecksum = 1; - for (k = 0; k < LEN_CHECK_SUM_WORD8; k++) { - validChecksum &= (((crc >> (24 - k * 8)) & 0xFF) == - encoded[numDecodedBytesLB + lenNextStream - - LEN_CHECK_SUM_WORD8 + k]); - } - } - - if (!validChecksum) { - /* This is a garbage, we have received a wideband - * bit-stream with garbage. */ - numSamplesUB = numSamplesLB; - memset(outFrameUB, 0, sizeof(int16_t) * numSamplesUB); - } else { - /* A valid super-wideband biststream exists. */ - enum ISACBandwidth bandwidthKHz; - int32_t maxDelayBit; - - /* If we have super-wideband bit-stream, we cannot - * have 60 ms frame-size. */ - if (numSamplesLB > FRAMESAMPLES) { - instISAC->errorCode = ISAC_LENGTH_MISMATCH; - return -1; - } - - /* The rest of the bit-stream contains the upper-band - * bit-stream curently this is the only thing there, - * however, we might add more layers. */ - - /* Have to exclude one byte where the length is stored - * and last 'LEN_CHECK_SUM_WORD8' bytes where the - * checksum is stored. */ - lenNextStream -= (LEN_CHECK_SUM_WORD8 + 1); - - memcpy(decInstUB->bitstr_obj.stream, - &encoded[numDecodedBytesLB + 1], lenNextStream); - - /* Reset bit-stream object, this is the first decoding. */ - WebRtcIsac_ResetBitstream(&(decInstUB->bitstr_obj)); - - /* Decode jitter information. */ - err = WebRtcIsac_DecodeJitterInfo(&decInstUB->bitstr_obj, &maxDelayBit); - if (err < 0) { - instISAC->errorCode = -err; - return -1; - } - - /* Update jitter info which is in the upper-band bit-stream - * only if the encoder is in super-wideband. Otherwise, - * the jitter info is already embedded in bandwidth index - * and has been updated. */ - if (instISAC->encoderSamplingRateKHz == kIsacSuperWideband) { - err = WebRtcIsac_UpdateUplinkJitter( - &(instISAC->bwestimator_obj), maxDelayBit); - if (err < 0) { - instISAC->errorCode = -err; - return -1; - } - } - - /* Decode bandwidth information. */ - err = WebRtcIsac_DecodeBandwidth(&decInstUB->bitstr_obj, - &bandwidthKHz); - if (err < 0) { - instISAC->errorCode = -err; - return -1; - } - - switch (bandwidthKHz) { - case isac12kHz: { - numDecodedBytesUB = WebRtcIsac_DecodeUb12( - &instISAC->transform_tables, outFrame, decInstUB, isRCUPayload); - - /* Hang-over for transient alleviation - - * wait two frames to add the upper band going up from 8 kHz. 
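A sketch of the validity check performed above on the second layer: the layer carries its length byte, the upper-band bit-stream and a 4-byte CRC stored most-significant byte first; anything too short to hold that is treated as garbage padding. The crc argument is assumed to come from WebRtcIsac_GetCrc() over the stream part of the layer.

#include <stdint.h>

static int UpperBandChecksumMatches(const uint8_t* layer, int layer_len,
                                    uint32_t crc) {
  int k, ok = 1;
  if (layer_len <= 4 + 1) {
    return 0;               /* Too short to be an upper-band layer. */
  }
  for (k = 0; k < 4; k++) { /* LEN_CHECK_SUM_WORD8 == 4. */
    ok &= (((crc >> (24 - k * 8)) & 0xFF) == layer[layer_len - 4 + k]);
  }
  return ok;
}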
*/ - if (instISAC->resetFlag_8kHz > 0) { - if (instISAC->resetFlag_8kHz == 2) { - /* Silence first and a half frame. */ - memset(outFrame, 0, MAX_FRAMESAMPLES * - sizeof(float)); - } else { - const float rampStep = 2.0f / MAX_FRAMESAMPLES; - float rampVal = 0; - memset(outFrame, 0, (MAX_FRAMESAMPLES >> 1) * - sizeof(float)); - - /* Ramp up second half of second frame. */ - for (k = MAX_FRAMESAMPLES / 2; k < MAX_FRAMESAMPLES; k++) { - outFrame[k] *= rampVal; - rampVal += rampStep; - } - } - instISAC->resetFlag_8kHz -= 1; - } - - break; - } - case isac16kHz: { - numDecodedBytesUB = WebRtcIsac_DecodeUb16( - &instISAC->transform_tables, outFrame, decInstUB, isRCUPayload); - break; - } - default: - return -1; - } - - if (numDecodedBytesUB < 0) { - instISAC->errorCode = numDecodedBytesUB; - return -1; - } - if (numDecodedBytesLB + numDecodedBytesUB > lenEncodedBytes) { - // We have supposedly decoded more bytes than we were given. Likely - // caused by bad input data. - instISAC->errorCode = ISAC_LENGTH_MISMATCH; - return -1; - } - - /* It might be less due to garbage. */ - if ((numDecodedBytesUB != lenNextStream) && - (numDecodedBytesLB + 1 + numDecodedBytesUB >= lenEncodedBytes || - numDecodedBytesUB != - (lenNextStream - - encoded[numDecodedBytesLB + 1 + numDecodedBytesUB]))) { - instISAC->errorCode = ISAC_LENGTH_MISMATCH; - return -1; - } - - /* If there is no error Upper-band always decodes - * 30 ms (480 samples). */ - numSamplesUB = FRAMESAMPLES; - - /* Convert to W16. */ - for (k = 0; k < numSamplesUB; k++) { - if (outFrame[k] > 32767) { - outFrameUB[k] = 32767; - } else if (outFrame[k] < -32768) { - outFrameUB[k] = -32768; - } else { - outFrameUB[k] = (int16_t)WebRtcIsac_lrint( - outFrame[k]); - } - } - } - } - - speechIdx = 0; - while (speechIdx < numSamplesLB) { - WebRtcSpl_SynthesisQMF(&outFrameLB[speechIdx], &outFrameUB[speechIdx], - FRAMESAMPLES_10ms, &decoded[(speechIdx << 1)], - instISAC->synthesisFBState1, - instISAC->synthesisFBState2); - - speechIdx += FRAMESAMPLES_10ms; - } - } - *speechType = 0; - return (numSamplesLB + numSamplesUB); -} - - - - - - - -/**************************************************************************** - * WebRtcIsac_Decode(...) - * - * This function decodes a ISAC frame. Output speech length - * will be a multiple of 480 samples: 480 or 960 samples, - * depending on the frameSize (30 or 60 ms). - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - encoded : encoded ISAC frame(s) - * - len : bytes in encoded vector - * - * Output: - * - decoded : The decoded vector - * - * Return value : >0 - number of samples in decoded vector - * -1 - Error - */ - -int WebRtcIsac_Decode(ISACStruct* ISAC_main_inst, - const uint8_t* encoded, - size_t lenEncodedBytes, - int16_t* decoded, - int16_t* speechType) { - int16_t isRCUPayload = 0; - return Decode(ISAC_main_inst, encoded, lenEncodedBytes, decoded, - speechType, isRCUPayload); -} - -/**************************************************************************** - * WebRtcIsac_DecodeRcu(...) - * - * This function decodes a redundant (RCU) iSAC frame. Function is called in - * NetEq with a stored RCU payload in case of packet loss. Output speech length - * will be a multiple of 480 samples: 480 or 960 samples, - * depending on the framesize (30 or 60 ms). - * - * Input: - * - ISAC_main_inst : ISAC instance. 
- * - encoded : encoded ISAC RCU frame(s) - * - len : bytes in encoded vector - * - * Output: - * - decoded : The decoded vector - * - * Return value : >0 - number of samples in decoded vector - * -1 - Error - */ - - - -int WebRtcIsac_DecodeRcu(ISACStruct* ISAC_main_inst, - const uint8_t* encoded, - size_t lenEncodedBytes, - int16_t* decoded, - int16_t* speechType) { - int16_t isRCUPayload = 1; - return Decode(ISAC_main_inst, encoded, lenEncodedBytes, decoded, - speechType, isRCUPayload); -} - - -/**************************************************************************** - * WebRtcIsac_DecodePlc(...) - * - * This function conducts PLC for ISAC frame(s). Output speech length - * will be a multiple of 480 samples: 480 or 960 samples, - * depending on the frameSize (30 or 60 ms). - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - noOfLostFrames : Number of PLC frames to produce - * - * Output: - * - decoded : The decoded vector - * - * Return value : Number of samples in decoded PLC vector - */ -size_t WebRtcIsac_DecodePlc(ISACStruct* ISAC_main_inst, - int16_t* decoded, - size_t noOfLostFrames) { - size_t numSamples = 0; - ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst; - - /* Limit number of frames to two = 60 millisecond. - * Otherwise we exceed data vectors. */ - if (noOfLostFrames > 2) { - noOfLostFrames = 2; - } - - /* Get the number of samples per frame */ - switch (instISAC->decoderSamplingRateKHz) { - case kIsacWideband: { - numSamples = 480 * noOfLostFrames; - break; - } - case kIsacSuperWideband: { - numSamples = 960 * noOfLostFrames; - break; - } - } - - /* Set output samples to zero. */ - memset(decoded, 0, numSamples * sizeof(int16_t)); - return numSamples; -} - - -/**************************************************************************** - * ControlLb(...) - Internal function for controlling Lower Band - * ControlUb(...) - Internal function for controlling Upper Band - * WebRtcIsac_Control(...) - API function - * - * This function sets the limit on the short-term average bit rate and the - * frame length. Should be used only in Instantaneous mode. - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - rate : limit on the short-term average bit rate, - * in bits/second (between 10000 and 32000) - * - frameSize : number of milliseconds per frame (30 or 60) - * - * Return value : 0 - ok - * -1 - Error - */ -static int16_t ControlLb(ISACLBStruct* instISAC, double rate, - int16_t frameSize) { - if ((rate >= 10000) && (rate <= 32000)) { - instISAC->ISACencLB_obj.bottleneck = rate; - } else { - return -ISAC_DISALLOWED_BOTTLENECK; - } - - if ((frameSize == 30) || (frameSize == 60)) { - instISAC->ISACencLB_obj.new_framelength = (FS / 1000) * frameSize; - } else { - return -ISAC_DISALLOWED_FRAME_LENGTH; - } - - return 0; -} - -static int16_t ControlUb(ISACUBStruct* instISAC, double rate) { - if ((rate >= 10000) && (rate <= 32000)) { - instISAC->ISACencUB_obj.bottleneck = rate; - } else { - return -ISAC_DISALLOWED_BOTTLENECK; - } - return 0; -} - -int16_t WebRtcIsac_Control(ISACStruct* ISAC_main_inst, - int32_t bottleneckBPS, - int frameSize) { - ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst; - int16_t status; - double rateLB; - double rateUB; - enum ISACBandwidth bandwidthKHz; - - if (instISAC->codingMode == 0) { - /* In adaptive mode. 
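A sketch of the instantaneous-mode parameter checks in ControlLb()/ControlUb() above, assuming FS is the 16 kHz lower-band sampling rate so that 30 ms and 60 ms map to 480 and 960 samples.

static int FrameLengthSamples(int frame_size_ms) {
  return (frame_size_ms == 30 || frame_size_ms == 60)
             ? (16000 / 1000) * frame_size_ms /* 480 or 960 samples. */
             : -1;                            /* ISAC_DISALLOWED_FRAME_LENGTH. */
}

static int BottleneckIsValid(double rate_bps) {
  return rate_bps >= 10000 && rate_bps <= 32000; /* Per-band rate limits. */
}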
*/ - instISAC->errorCode = ISAC_MODE_MISMATCH; - return -1; - } - - /* Check if encoder initiated */ - if ((instISAC->initFlag & BIT_MASK_ENC_INIT) != - BIT_MASK_ENC_INIT) { - instISAC->errorCode = ISAC_ENCODER_NOT_INITIATED; - return -1; - } - - if (instISAC->encoderSamplingRateKHz == kIsacWideband) { - /* If the sampling rate is 16kHz then bandwith should be 8kHz, - * regardless of bottleneck. */ - bandwidthKHz = isac8kHz; - rateLB = (bottleneckBPS > 32000) ? 32000 : bottleneckBPS; - rateUB = 0; - } else { - if (WebRtcIsac_RateAllocation(bottleneckBPS, &rateLB, &rateUB, - &bandwidthKHz) < 0) { - return -1; - } - } - - if ((instISAC->encoderSamplingRateKHz == kIsacSuperWideband) && - (frameSize != 30) && - (bandwidthKHz != isac8kHz)) { - /* Cannot have 60 ms in super-wideband. */ - instISAC->errorCode = ISAC_DISALLOWED_FRAME_LENGTH; - return -1; - } - - status = ControlLb(&instISAC->instLB, rateLB, (int16_t)frameSize); - if (status < 0) { - instISAC->errorCode = -status; - return -1; - } - if (bandwidthKHz != isac8kHz) { - status = ControlUb(&(instISAC->instUB), rateUB); - if (status < 0) { - instISAC->errorCode = -status; - return -1; - } - } - - - /* Check if bandwidth is changing from wideband to super-wideband - * then we have to synch data buffer of lower & upper-band. Also - * clean up the upper-band data buffer. */ - - if ((instISAC->bandwidthKHz == isac8kHz) && (bandwidthKHz != isac8kHz)) { - memset(instISAC->instUB.ISACencUB_obj.data_buffer_float, 0, - sizeof(float) * (MAX_FRAMESAMPLES + LB_TOTAL_DELAY_SAMPLES)); - - if (bandwidthKHz == isac12kHz) { - instISAC->instUB.ISACencUB_obj.buffer_index = - instISAC->instLB.ISACencLB_obj.buffer_index; - } else { - instISAC->instUB.ISACencUB_obj.buffer_index = - LB_TOTAL_DELAY_SAMPLES + instISAC->instLB.ISACencLB_obj.buffer_index; - - memcpy(&(instISAC->instUB.ISACencUB_obj.lastLPCVec), - WebRtcIsac_kMeanLarUb16, sizeof(double) * UB_LPC_ORDER); - } - } - - /* Update the payload limit if the bandwidth is changing. */ - if (instISAC->bandwidthKHz != bandwidthKHz) { - instISAC->bandwidthKHz = bandwidthKHz; - UpdatePayloadSizeLimit(instISAC); - } - instISAC->bottleneck = bottleneckBPS; - return 0; -} - -void WebRtcIsac_SetInitialBweBottleneck(ISACStruct* ISAC_main_inst, - int bottleneck_bits_per_second) { - ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst; - RTC_DCHECK_GE(bottleneck_bits_per_second, 10000); - RTC_DCHECK_LE(bottleneck_bits_per_second, 32000); - instISAC->bwestimator_obj.send_bw_avg = (float)bottleneck_bits_per_second; -} - -/**************************************************************************** - * WebRtcIsac_ControlBwe(...) - * - * This function sets the initial values of bottleneck and frame-size if - * iSAC is used in channel-adaptive mode. Through this API, users can - * enforce a frame-size for all values of bottleneck. Then iSAC will not - * automatically change the frame-size. - * - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - rateBPS : initial value of bottleneck in bits/second - * 10000 <= rateBPS <= 32000 is accepted - * For default bottleneck set rateBPS = 0 - * - frameSizeMs : number of milliseconds per frame (30 or 60) - * - enforceFrameSize : 1 to enforce the given frame-size through out - * the adaptation process, 0 to let iSAC change - * the frame-size if required. 
- * - * Return value : 0 - ok - * -1 - Error - */ -int16_t WebRtcIsac_ControlBwe(ISACStruct* ISAC_main_inst, - int32_t bottleneckBPS, - int frameSizeMs, - int16_t enforceFrameSize) { - ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst; - enum ISACBandwidth bandwidth; - - /* Check if encoder initiated */ - if ((instISAC->initFlag & BIT_MASK_ENC_INIT) != - BIT_MASK_ENC_INIT) { - instISAC->errorCode = ISAC_ENCODER_NOT_INITIATED; - return -1; - } - - /* Check that we are in channel-adaptive mode, otherwise, return (-1) */ - if (instISAC->codingMode != 0) { - instISAC->errorCode = ISAC_MODE_MISMATCH; - return -1; - } - if ((frameSizeMs != 30) && - (instISAC->encoderSamplingRateKHz == kIsacSuperWideband)) { - return -1; - } - - /* Set structure variable if enforceFrameSize is set. ISAC will then - * keep the chosen frame size. */ - if (enforceFrameSize != 0) { - instISAC->instLB.ISACencLB_obj.enforceFrameSize = 1; - } else { - instISAC->instLB.ISACencLB_obj.enforceFrameSize = 0; - } - - /* Set the initial rate. If the input value is zero then the default intial - * rate is used. Otehrwise, values between 10 to 32 kbps are accepted. */ - if (bottleneckBPS != 0) { - double rateLB; - double rateUB; - if (WebRtcIsac_RateAllocation(bottleneckBPS, &rateLB, &rateUB, - &bandwidth) < 0) { - return -1; - } - instISAC->bwestimator_obj.send_bw_avg = (float)bottleneckBPS; - instISAC->bandwidthKHz = bandwidth; - } - - /* Set the initial frame-size. If 'enforceFrameSize' is set, the frame-size - * will not change */ - if (frameSizeMs != 0) { - if ((frameSizeMs == 30) || (frameSizeMs == 60)) { - instISAC->instLB.ISACencLB_obj.new_framelength = - (int16_t)((FS / 1000) * frameSizeMs); - } else { - instISAC->errorCode = ISAC_DISALLOWED_FRAME_LENGTH; - return -1; - } - } - return 0; -} - - -/**************************************************************************** - * WebRtcIsac_GetDownLinkBwIndex(...) - * - * This function returns index representing the Bandwidth estimate from - * the other side to this side. - * - * Input: - * - ISAC_main_inst : iSAC structure - * - * Output: - * - bweIndex : Bandwidth estimate to transmit to other side. - * - */ -int16_t WebRtcIsac_GetDownLinkBwIndex(ISACStruct* ISAC_main_inst, - int16_t* bweIndex, - int16_t* jitterInfo) { - ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst; - - /* Check if encoder initialized. */ - if ((instISAC->initFlag & BIT_MASK_DEC_INIT) != - BIT_MASK_DEC_INIT) { - instISAC->errorCode = ISAC_ENCODER_NOT_INITIATED; - return -1; - } - - /* Call function to get Bandwidth Estimate. */ - WebRtcIsac_GetDownlinkBwJitIndexImpl(&(instISAC->bwestimator_obj), bweIndex, - jitterInfo, - instISAC->decoderSamplingRateKHz); - return 0; -} - - -/**************************************************************************** - * WebRtcIsac_UpdateUplinkBw(...) - * - * This function takes an index representing the Bandwidth estimate from - * this side to other side and updates BWE. - * - * Input: - * - ISAC_main_inst : iSAC structure - * - rateIndex : Bandwidth estimate from other side. - * - * Return value : 0 - ok - * -1 - index out of range - */ -int16_t WebRtcIsac_UpdateUplinkBw(ISACStruct* ISAC_main_inst, - int16_t bweIndex) { - ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst; - int16_t returnVal; - - /* Check if encoder initiated. */ - if ((instISAC->initFlag & BIT_MASK_ENC_INIT) != - BIT_MASK_ENC_INIT) { - instISAC->errorCode = ISAC_ENCODER_NOT_INITIATED; - return -1; - } - - /* Call function to get Bandwidth Estimate. 
*/ - returnVal = WebRtcIsac_UpdateUplinkBwImpl( - &(instISAC->bwestimator_obj), bweIndex, - instISAC->encoderSamplingRateKHz); - - if (returnVal < 0) { - instISAC->errorCode = -returnVal; - return -1; - } else { - return 0; - } -} - - -/**************************************************************************** - * WebRtcIsac_ReadBwIndex(...) - * - * This function returns the index of the Bandwidth estimate from the - * bit-stream. - * - * Input: - * - encoded : Encoded bit-stream - * - * Output: - * - frameLength : Length of frame in packet (in samples) - * - bweIndex : Bandwidth estimate in bit-stream - * - */ -int16_t WebRtcIsac_ReadBwIndex(const uint8_t* encoded, - int16_t* bweIndex) { - Bitstr streamdata; -#ifndef WEBRTC_ARCH_BIG_ENDIAN - int k; -#endif - int16_t err; - - WebRtcIsac_ResetBitstream(&(streamdata)); - -#ifndef WEBRTC_ARCH_BIG_ENDIAN - for (k = 0; k < 10; k++) { - int16_t ek2 = ((const int16_t*)encoded)[k >> 1]; - streamdata.stream[k] = (uint8_t)((ek2 >> ((k & 1) << 3)) & 0xff); - } -#else - memcpy(streamdata.stream, encoded, 10); -#endif - - /* Decode frame length. */ - err = WebRtcIsac_DecodeFrameLen(&streamdata, bweIndex); - if (err < 0) { - return err; - } - - /* Decode BW estimation. */ - err = WebRtcIsac_DecodeSendBW(&streamdata, bweIndex); - if (err < 0) { - return err; - } - - return 0; -} - - -/**************************************************************************** - * WebRtcIsac_ReadFrameLen(...) - * - * This function returns the number of samples the decoder will generate if - * the given payload is decoded. - * - * Input: - * - encoded : Encoded bitstream - * - * Output: - * - frameLength : Length of frame in packet (in samples) - * - */ -int16_t WebRtcIsac_ReadFrameLen(const ISACStruct* ISAC_main_inst, - const uint8_t* encoded, - int16_t* frameLength) { - Bitstr streamdata; -#ifndef WEBRTC_ARCH_BIG_ENDIAN - int k; -#endif - int16_t err; - ISACMainStruct* instISAC; - - WebRtcIsac_ResetBitstream(&(streamdata)); - -#ifndef WEBRTC_ARCH_BIG_ENDIAN - for (k = 0; k < 10; k++) { - int16_t ek2 = ((const int16_t*)encoded)[k >> 1]; - streamdata.stream[k] = (uint8_t)((ek2 >> ((k & 1) << 3)) & 0xff); - } -#else - memcpy(streamdata.stream, encoded, 10); -#endif - - /* Decode frame length. */ - err = WebRtcIsac_DecodeFrameLen(&streamdata, frameLength); - if (err < 0) { - return -1; - } - instISAC = (ISACMainStruct*)ISAC_main_inst; - - if (instISAC->decoderSamplingRateKHz == kIsacSuperWideband) { - /* The decoded frame length indicates the number of samples in - * lower-band in this case, multiply by 2 to get the total number - * of samples. */ - *frameLength <<= 1; - } - return 0; -} - - -/******************************************************************************* - * WebRtcIsac_GetNewFrameLen(...) - * - * This function returns the frame length (in samples) of the next packet. - * In the case of channel-adaptive mode, iSAC decides on its frame length based - * on the estimated bottleneck, this AOI allows a user to prepare for the next - * packet (at the encoder). - * - * The primary usage is in CE to make the iSAC works in channel-adaptive mode - * - * Input: - * - ISAC_main_inst : iSAC struct - * - * Return Value : frame lenght in samples - * - */ -int16_t WebRtcIsac_GetNewFrameLen(ISACStruct* ISAC_main_inst) { - ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst; - - /* Return new frame length. 
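A sketch of the sample-count adjustment done in WebRtcIsac_ReadFrameLen() and WebRtcIsac_GetNewFrameLen(): the frame length carried in the bit-stream counts lower-band (16 kHz) samples, so a super-wideband decoder doubles it to get the number of output samples.

#include <stdint.h>

static int16_t OutputSamples(int16_t lower_band_samples, int decoder_is_swb) {
  return decoder_is_swb ? (int16_t)(lower_band_samples << 1)
                        : lower_band_samples;
}
/* Example: a 30 ms payload decodes 480 lower-band samples, i.e. 960 output
 * samples at 32 kHz. */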
*/ - if (instISAC->in_sample_rate_hz == 16000) - return (instISAC->instLB.ISACencLB_obj.new_framelength); - else /* 32000 Hz */ - return ((instISAC->instLB.ISACencLB_obj.new_framelength) * 2); -} - - -/**************************************************************************** - * WebRtcIsac_GetErrorCode(...) - * - * This function can be used to check the error code of an iSAC instance. - * When a function returns -1 an error code will be set for that instance. - * The function below extracts the code of the last error that occurred in - * the specified instance. - * - * Input: - * - ISAC_main_inst : ISAC instance - * - * Return value : Error code - */ -int16_t WebRtcIsac_GetErrorCode(ISACStruct* ISAC_main_inst) { - return ((ISACMainStruct*)ISAC_main_inst)->errorCode; -} - - -/**************************************************************************** - * WebRtcIsac_GetUplinkBw(...) - * - * This function outputs the target bottleneck of the codec. In - * channel-adaptive mode, the target bottleneck is specified through an in-band - * signalling retrieved by bandwidth estimator. - * In channel-independent, also called instantaneous mode, the target - * bottleneck is provided to the encoder by calling xxx_control(...) (if - * xxx_control is never called, the default values are used.). - * Note that the output is the iSAC internal operating bottleneck which might - * differ slightly from the one provided through xxx_control(). - * - * Input: - * - ISAC_main_inst : iSAC instance - * - * Output: - * - *bottleneck : bottleneck in bits/sec - * - * Return value : -1 if error happens - * 0 bit-rates computed correctly. - */ -int16_t WebRtcIsac_GetUplinkBw(ISACStruct* ISAC_main_inst, - int32_t* bottleneck) { - ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst; - - if (instISAC->codingMode == 0) { - /* We are in adaptive mode then get the bottleneck from BWE. */ - *bottleneck = (int32_t)instISAC->bwestimator_obj.send_bw_avg; - } else { - *bottleneck = instISAC->bottleneck; - } - - if ((*bottleneck > 32000) && (*bottleneck < 38000)) { - *bottleneck = 32000; - } else if ((*bottleneck > 45000) && (*bottleneck < 50000)) { - *bottleneck = 45000; - } else if (*bottleneck > 56000) { - *bottleneck = 56000; - } - return 0; -} - - -/****************************************************************************** - * WebRtcIsac_SetMaxPayloadSize(...) - * - * This function sets a limit for the maximum payload size of iSAC. The same - * value is used both for 30 and 60 ms packets. If the encoder sampling rate - * is 16 kHz the maximum payload size is between 120 and 400 bytes. If the - * encoder sampling rate is 32 kHz the maximum payload size is between 120 - * and 600 bytes. - * - * --------------- - * IMPORTANT NOTES - * --------------- - * The size of a packet is limited to the minimum of 'max-payload-size' and - * 'max-rate.' For instance, let's assume the max-payload-size is set to - * 170 bytes, and max-rate is set to 40 kbps. Note that a limit of 40 kbps - * translates to 150 bytes for 30ms frame-size & 300 bytes for 60ms - * frame-size. Then a packet with a frame-size of 30 ms is limited to 150, - * i.e. min(170, 150), and a packet with 60 ms frame-size is limited to - * 170 bytes, i.e. min(170, 300). - * - * Input: - * - ISAC_main_inst : iSAC instance - * - maxPayloadBytes : maximum size of the payload in bytes - * valid values are between 100 and 400 bytes - * if encoder sampling rate is 16 kHz. For - * 32 kHz encoder sampling rate valid values - * are between 100 and 600 bytes. 
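A sketch of the snapping WebRtcIsac_GetUplinkBw() applies above before reporting the target bottleneck to the caller; the band edges are the ones in the code.

#include <stdint.h>

static int32_t SnapReportedBottleneck(int32_t bn) {
  if (bn > 32000 && bn < 38000) return 32000;
  if (bn > 45000 && bn < 50000) return 45000;
  if (bn > 56000) return 56000;
  return bn;
}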
- * - * Return value : 0 if successful - * -1 if error happens - */ -int16_t WebRtcIsac_SetMaxPayloadSize(ISACStruct* ISAC_main_inst, - int16_t maxPayloadBytes) { - ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst; - int16_t status = 0; - - /* Check if encoder initiated */ - if ((instISAC->initFlag & BIT_MASK_ENC_INIT) != - BIT_MASK_ENC_INIT) { - instISAC->errorCode = ISAC_ENCODER_NOT_INITIATED; - return -1; - } - - if (instISAC->encoderSamplingRateKHz == kIsacSuperWideband) { - /* Sanity check. */ - if (maxPayloadBytes < 120) { - /* 'maxRate' is out of valid range - * set to the acceptable value and return -1. */ - maxPayloadBytes = 120; - status = -1; - } - - /* sanity check */ - if (maxPayloadBytes > STREAM_SIZE_MAX) { - /* maxRate is out of valid range, - * set to the acceptable value and return -1. */ - maxPayloadBytes = STREAM_SIZE_MAX; - status = -1; - } - } else { - if (maxPayloadBytes < 120) { - /* Max payload-size is out of valid range - * set to the acceptable value and return -1. */ - maxPayloadBytes = 120; - status = -1; - } - if (maxPayloadBytes > STREAM_SIZE_MAX_60) { - /* Max payload-size is out of valid range - * set to the acceptable value and return -1. */ - maxPayloadBytes = STREAM_SIZE_MAX_60; - status = -1; - } - } - instISAC->maxPayloadSizeBytes = maxPayloadBytes; - UpdatePayloadSizeLimit(instISAC); - return status; -} - - -/****************************************************************************** - * WebRtcIsac_SetMaxRate(...) - * - * This function sets the maximum rate which the codec may not exceed for - * any signal packet. The maximum rate is defined and payload-size per - * frame-size in bits per second. - * - * The codec has a maximum rate of 53400 bits per second (200 bytes per 30 - * ms) if the encoder sampling rate is 16kHz, and 160 kbps (600 bytes/30 ms) - * if the encoder sampling rate is 32 kHz. - * - * It is possible to set a maximum rate between 32000 and 53400 bits/sec - * in wideband mode, and 32000 to 160000 bits/sec in super-wideband mode. - * - * --------------- - * IMPORTANT NOTES - * --------------- - * The size of a packet is limited to the minimum of 'max-payload-size' and - * 'max-rate.' For instance, let's assume the max-payload-size is set to - * 170 bytes, and max-rate is set to 40 kbps. Note that a limit of 40 kbps - * translates to 150 bytes for 30ms frame-size & 300 bytes for 60ms - * frame-size. Then a packet with a frame-size of 30 ms is limited to 150, - * i.e. min(170, 150), and a packet with 60 ms frame-size is limited to - * 170 bytes, min(170, 300). - * - * Input: - * - ISAC_main_inst : iSAC instance - * - maxRate : maximum rate in bits per second, - * valid values are 32000 to 53400 bits/sec in - * wideband mode, and 32000 to 160000 bits/sec in - * super-wideband mode. - * - * Return value : 0 if successful - * -1 if error happens - */ -int16_t WebRtcIsac_SetMaxRate(ISACStruct* ISAC_main_inst, - int32_t maxRate) { - ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst; - int16_t maxRateInBytesPer30Ms; - int16_t status = 0; - - /* check if encoder initiated */ - if ((instISAC->initFlag & BIT_MASK_ENC_INIT) != BIT_MASK_ENC_INIT) { - instISAC->errorCode = ISAC_ENCODER_NOT_INITIATED; - return -1; - } - /* Calculate maximum number of bytes per 30 msec packets for the - given maximum rate. 
Multiply with 30/1000 to get number of - bits per 30 ms, divide by 8 to get number of bytes per 30 ms: - maxRateInBytes = floor((maxRate * 30/1000) / 8); */ - maxRateInBytesPer30Ms = (int16_t)(maxRate * 3 / 800); - - if (instISAC->encoderSamplingRateKHz == kIsacWideband) { - if (maxRate < 32000) { - /* 'maxRate' is out of valid range. - * Set to the acceptable value and return -1. */ - maxRateInBytesPer30Ms = 120; - status = -1; - } - - if (maxRate > 53400) { - /* 'maxRate' is out of valid range. - * Set to the acceptable value and return -1. */ - maxRateInBytesPer30Ms = 200; - status = -1; - } - } else { - if (maxRateInBytesPer30Ms < 120) { - /* 'maxRate' is out of valid range - * Set to the acceptable value and return -1. */ - maxRateInBytesPer30Ms = 120; - status = -1; - } - - if (maxRateInBytesPer30Ms > STREAM_SIZE_MAX) { - /* 'maxRate' is out of valid range. - * Set to the acceptable value and return -1. */ - maxRateInBytesPer30Ms = STREAM_SIZE_MAX; - status = -1; - } - } - instISAC->maxRateBytesPer30Ms = maxRateInBytesPer30Ms; - UpdatePayloadSizeLimit(instISAC); - return status; -} - - -/**************************************************************************** - * WebRtcIsac_GetRedPayload(...) - * - * This function populates "encoded" with the redundant payload of the recently - * encodedframe. This function has to be called once that WebRtcIsac_Encode(...) - * returns a positive value. Regardless of the frame-size this function will - * be called only once after encoding is completed. The bit-stream is - * targeted for 16000 bit/sec. - * - * Input: - * - ISAC_main_inst : iSAC struct - * - * Output: - * - encoded : the encoded data vector - * - * - * Return value : >0 - Length (in bytes) of coded data - * : -1 - Error - */ -int16_t WebRtcIsac_GetRedPayload(ISACStruct* ISAC_main_inst, - uint8_t* encoded) { - Bitstr iSACBitStreamInst; - int16_t streamLenLB; - int16_t streamLenUB; - int16_t streamLen; - int16_t totalLenUB; - ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst; -#ifndef WEBRTC_ARCH_BIG_ENDIAN - int k; -#endif - - if ((instISAC->initFlag & BIT_MASK_ENC_INIT) != - BIT_MASK_ENC_INIT) { - instISAC->errorCode = ISAC_ENCODER_NOT_INITIATED; - } - - WebRtcIsac_ResetBitstream(&(iSACBitStreamInst)); - - streamLenLB = WebRtcIsac_EncodeStoredDataLb( - &instISAC->instLB.ISACencLB_obj.SaveEnc_obj, - &iSACBitStreamInst, - instISAC->instLB.ISACencLB_obj.lastBWIdx, - RCU_TRANSCODING_SCALE); - if (streamLenLB < 0) { - return -1; - } - - /* convert from bytes to int16_t. */ - memcpy(encoded, iSACBitStreamInst.stream, streamLenLB); - streamLen = streamLenLB; - if (instISAC->bandwidthKHz == isac8kHz) { - return streamLenLB; - } - - streamLenUB = WebRtcIsac_GetRedPayloadUb( - &instISAC->instUB.ISACencUB_obj.SaveEnc_obj, - &iSACBitStreamInst, instISAC->bandwidthKHz); - if (streamLenUB < 0) { - /* An error has happened but this is not the error due to a - * bit-stream larger than the limit. */ - return -1; - } - - /* We have one byte to write the total length of the upper-band. - * The length includes the bit-stream length, check-sum and the - * single byte where the length is written to. This is according to - * iSAC wideband and how the "garbage" is dealt. */ - totalLenUB = streamLenUB + 1 + LEN_CHECK_SUM_WORD8; - if (totalLenUB > 255) { - streamLenUB = 0; - } - - /* Generate CRC if required. 
*/ - if ((instISAC->bandwidthKHz != isac8kHz) && - (streamLenUB > 0)) { - uint32_t crc; - streamLen += totalLenUB; - encoded[streamLenLB] = (uint8_t)totalLenUB; - memcpy(&encoded[streamLenLB + 1], iSACBitStreamInst.stream, - streamLenUB); - - WebRtcIsac_GetCrc((int16_t*)(&(encoded[streamLenLB + 1])), - streamLenUB, &crc); -#ifndef WEBRTC_ARCH_BIG_ENDIAN - for (k = 0; k < LEN_CHECK_SUM_WORD8; k++) { - encoded[streamLen - LEN_CHECK_SUM_WORD8 + k] = - (uint8_t)((crc >> (24 - k * 8)) & 0xFF); - } -#else - memcpy(&encoded[streamLenLB + streamLenUB + 1], &crc, - LEN_CHECK_SUM_WORD8); -#endif - } - return streamLen; -} - - -/**************************************************************************** - * WebRtcIsac_version(...) - * - * This function returns the version number. - * - * Output: - * - version : Pointer to character string - * - */ -void WebRtcIsac_version(char* version) { - strcpy(version, "4.3.0"); -} - - -/****************************************************************************** - * WebRtcIsac_SetEncSampRate() - * This function sets the sampling rate of the encoder. Initialization of the - * encoder WILL NOT overwrite the sampling rate of the encoder. The default - * value is 16 kHz which is set when the instance is created. The encoding-mode - * and the bottleneck remain unchanged by this call, however, the maximum rate - * and maximum payload-size will be reset to their default values. - * - * Input: - * - ISAC_main_inst : iSAC instance - * - sample_rate_hz : sampling rate in Hertz, valid values are 16000 - * and 32000. - * - * Return value : 0 if successful - * -1 if failed. - */ -int16_t WebRtcIsac_SetEncSampRate(ISACStruct* ISAC_main_inst, - uint16_t sample_rate_hz) { - ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst; - enum IsacSamplingRate encoder_operational_rate; - - if ((sample_rate_hz != 16000) && (sample_rate_hz != 32000)) { - /* Sampling Frequency is not supported. */ - instISAC->errorCode = ISAC_UNSUPPORTED_SAMPLING_FREQUENCY; - return -1; - } - if (sample_rate_hz == 16000) { - encoder_operational_rate = kIsacWideband; - } else { - encoder_operational_rate = kIsacSuperWideband; - } - - if ((instISAC->initFlag & BIT_MASK_ENC_INIT) != - BIT_MASK_ENC_INIT) { - if (encoder_operational_rate == kIsacWideband) { - instISAC->bandwidthKHz = isac8kHz; - } else { - instISAC->bandwidthKHz = isac16kHz; - } - } else { - ISACUBStruct* instUB = &(instISAC->instUB); - ISACLBStruct* instLB = &(instISAC->instLB); - int32_t bottleneck = instISAC->bottleneck; - int16_t codingMode = instISAC->codingMode; - int16_t frameSizeMs = instLB->ISACencLB_obj.new_framelength / - (FS / 1000); - - if ((encoder_operational_rate == kIsacWideband) && - (instISAC->encoderSamplingRateKHz == kIsacSuperWideband)) { - /* Changing from super-wideband to wideband. - * we don't need to re-initialize the encoder of the lower-band. */ - instISAC->bandwidthKHz = isac8kHz; - if (codingMode == 1) { - ControlLb(instLB, - (bottleneck > 32000) ? 
32000 : bottleneck, FRAMESIZE); - } - instISAC->maxPayloadSizeBytes = STREAM_SIZE_MAX_60; - instISAC->maxRateBytesPer30Ms = STREAM_SIZE_MAX_30; - } else if ((encoder_operational_rate == kIsacSuperWideband) && - (instISAC->encoderSamplingRateKHz == kIsacWideband)) { - double bottleneckLB = 0; - double bottleneckUB = 0; - if (codingMode == 1) { - WebRtcIsac_RateAllocation(bottleneck, &bottleneckLB, &bottleneckUB, - &(instISAC->bandwidthKHz)); - } - - instISAC->bandwidthKHz = isac16kHz; - instISAC->maxPayloadSizeBytes = STREAM_SIZE_MAX; - instISAC->maxRateBytesPer30Ms = STREAM_SIZE_MAX; - - EncoderInitLb(instLB, codingMode, encoder_operational_rate); - EncoderInitUb(instUB, instISAC->bandwidthKHz); - - memset(instISAC->analysisFBState1, 0, - FB_STATE_SIZE_WORD32 * sizeof(int32_t)); - memset(instISAC->analysisFBState2, 0, - FB_STATE_SIZE_WORD32 * sizeof(int32_t)); - - if (codingMode == 1) { - instISAC->bottleneck = bottleneck; - ControlLb(instLB, bottleneckLB, - (instISAC->bandwidthKHz == isac8kHz) ? frameSizeMs:FRAMESIZE); - if (instISAC->bandwidthKHz > isac8kHz) { - ControlUb(instUB, bottleneckUB); - } - } else { - instLB->ISACencLB_obj.enforceFrameSize = 0; - instLB->ISACencLB_obj.new_framelength = FRAMESAMPLES; - } - } - } - instISAC->encoderSamplingRateKHz = encoder_operational_rate; - instISAC->in_sample_rate_hz = sample_rate_hz; - return 0; -} - - -/****************************************************************************** - * WebRtcIsac_SetDecSampRate() - * This function sets the sampling rate of the decoder. Initialization of the - * decoder WILL NOT overwrite the sampling rate of the encoder. The default - * value is 16 kHz which is set when the instance is created. - * - * Input: - * - ISAC_main_inst : iSAC instance - * - sample_rate_hz : sampling rate in Hertz, valid values are 16000 - * and 32000. - * - * Return value : 0 if successful - * -1 if failed. - */ -int16_t WebRtcIsac_SetDecSampRate(ISACStruct* ISAC_main_inst, - uint16_t sample_rate_hz) { - ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst; - enum IsacSamplingRate decoder_operational_rate; - - if (sample_rate_hz == 16000) { - decoder_operational_rate = kIsacWideband; - } else if (sample_rate_hz == 32000) { - decoder_operational_rate = kIsacSuperWideband; - } else { - /* Sampling Frequency is not supported. */ - instISAC->errorCode = ISAC_UNSUPPORTED_SAMPLING_FREQUENCY; - return -1; - } - - if ((instISAC->decoderSamplingRateKHz == kIsacWideband) && - (decoder_operational_rate == kIsacSuperWideband)) { - /* Switching from wideband to super-wideband at the decoder - * we need to reset the filter-bank and initialize upper-band decoder. */ - memset(instISAC->synthesisFBState1, 0, - FB_STATE_SIZE_WORD32 * sizeof(int32_t)); - memset(instISAC->synthesisFBState2, 0, - FB_STATE_SIZE_WORD32 * sizeof(int32_t)); - - DecoderInitUb(&instISAC->instUB); - } - instISAC->decoderSamplingRateKHz = decoder_operational_rate; - return 0; -} - - -/****************************************************************************** - * WebRtcIsac_EncSampRate() - * - * Input: - * - ISAC_main_inst : iSAC instance - * - * Return value : sampling rate in Hertz. The input to encoder - * is expected to be sampled in this rate. 
- * - */ -uint16_t WebRtcIsac_EncSampRate(ISACStruct* ISAC_main_inst) { - ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst; - return instISAC->in_sample_rate_hz; -} - - -/****************************************************************************** - * WebRtcIsac_DecSampRate() - * Return the sampling rate of the decoded audio. - * - * Input: - * - ISAC_main_inst : iSAC instance - * - * Return value : sampling rate in Hertz. Decoder output is - * sampled at this rate. - * - */ -uint16_t WebRtcIsac_DecSampRate(ISACStruct* ISAC_main_inst) { - ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst; - return instISAC->decoderSamplingRateKHz == kIsacWideband ? 16000 : 32000; -} - -void WebRtcIsac_SetEncSampRateInDecoder(ISACStruct* inst, - int sample_rate_hz) { - ISACMainStruct* instISAC = (ISACMainStruct*)inst; - RTC_DCHECK_NE(0, instISAC->initFlag & BIT_MASK_DEC_INIT); - RTC_DCHECK(!(instISAC->initFlag & BIT_MASK_ENC_INIT)); - RTC_DCHECK(sample_rate_hz == 16000 || sample_rate_hz == 32000); - instISAC->encoderSamplingRateKHz = sample_rate_hz / 1000; -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/isac_float_type.h b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/isac_float_type.h deleted file mode 100644 index 511bc97ee6e3..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/isac_float_type.h +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_ISAC_FLOAT_TYPE_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_ISAC_FLOAT_TYPE_H_ - -#include "modules/audio_coding/codecs/isac/main/include/isac.h" - -namespace webrtc { - -struct IsacFloat { - using instance_type = ISACStruct; - static const bool has_swb = true; - static inline int16_t Control(instance_type* inst, - int32_t rate, - int framesize) { - return WebRtcIsac_Control(inst, rate, framesize); - } - static inline int16_t ControlBwe(instance_type* inst, - int32_t rate_bps, - int frame_size_ms, - int16_t enforce_frame_size) { - return WebRtcIsac_ControlBwe(inst, rate_bps, frame_size_ms, - enforce_frame_size); - } - static inline int16_t Create(instance_type** inst) { - return WebRtcIsac_Create(inst); - } - static inline int DecodeInternal(instance_type* inst, - const uint8_t* encoded, - size_t len, - int16_t* decoded, - int16_t* speech_type) { - return WebRtcIsac_Decode(inst, encoded, len, decoded, speech_type); - } - static inline size_t DecodePlc(instance_type* inst, - int16_t* decoded, - size_t num_lost_frames) { - return WebRtcIsac_DecodePlc(inst, decoded, num_lost_frames); - } - - static inline void DecoderInit(instance_type* inst) { - WebRtcIsac_DecoderInit(inst); - } - static inline int Encode(instance_type* inst, - const int16_t* speech_in, - uint8_t* encoded) { - return WebRtcIsac_Encode(inst, speech_in, encoded); - } - static inline int16_t EncoderInit(instance_type* inst, int16_t coding_mode) { - return WebRtcIsac_EncoderInit(inst, coding_mode); - } - static inline uint16_t EncSampRate(instance_type* inst) { - return WebRtcIsac_EncSampRate(inst); - } - - static inline int16_t Free(instance_type* inst) { - return WebRtcIsac_Free(inst); - } - static inline int16_t GetErrorCode(instance_type* inst) { - return WebRtcIsac_GetErrorCode(inst); - } - - static inline int16_t GetNewFrameLen(instance_type* inst) { - return WebRtcIsac_GetNewFrameLen(inst); - } - static inline int16_t SetDecSampRate(instance_type* inst, - uint16_t sample_rate_hz) { - return WebRtcIsac_SetDecSampRate(inst, sample_rate_hz); - } - static inline int16_t SetEncSampRate(instance_type* inst, - uint16_t sample_rate_hz) { - return WebRtcIsac_SetEncSampRate(inst, sample_rate_hz); - } - static inline void SetEncSampRateInDecoder(instance_type* inst, - uint16_t sample_rate_hz) { - WebRtcIsac_SetEncSampRateInDecoder(inst, sample_rate_hz); - } - static inline void SetInitialBweBottleneck(instance_type* inst, - int bottleneck_bits_per_second) { - WebRtcIsac_SetInitialBweBottleneck(inst, bottleneck_bits_per_second); - } - static inline int16_t SetMaxPayloadSize(instance_type* inst, - int16_t max_payload_size_bytes) { - return WebRtcIsac_SetMaxPayloadSize(inst, max_payload_size_bytes); - } - static inline int16_t SetMaxRate(instance_type* inst, int32_t max_bit_rate) { - return WebRtcIsac_SetMaxRate(inst, max_bit_rate); - } -}; - -} // namespace webrtc -#endif // MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_ISAC_FLOAT_TYPE_H_ diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/isac_unittest.cc b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/isac_unittest.cc deleted file mode 100644 index c98b21d86fd9..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/isac_unittest.cc +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -#include "modules/audio_coding/codecs/isac/main/include/isac.h" - -#include <string> - -#include "test/gtest.h" -#include "test/testsupport/file_utils.h" - -struct WebRtcISACStruct; - -namespace webrtc { - -// Number of samples in a 60 ms, sampled at 32 kHz. -const int kIsacNumberOfSamples = 320 * 6; -// Maximum number of bytes in output bitstream. -const size_t kMaxBytes = 1000; - -class IsacTest : public ::testing::Test { - protected: - IsacTest(); - virtual void SetUp(); - - WebRtcISACStruct* isac_codec_; - - int16_t speech_data_[kIsacNumberOfSamples]; - int16_t output_data_[kIsacNumberOfSamples]; - uint8_t bitstream_[kMaxBytes]; - uint8_t bitstream_small_[7]; // Simulate sync packets. -}; - -IsacTest::IsacTest() : isac_codec_(NULL) {} - -void IsacTest::SetUp() { - // Read some samples from a speech file, to be used in the encode test. - FILE* input_file; - const std::string file_name = - webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm"); - input_file = fopen(file_name.c_str(), "rb"); - ASSERT_TRUE(input_file != NULL); - ASSERT_EQ(kIsacNumberOfSamples, - static_cast<size_t>(fread(speech_data_, sizeof(int16_t), - kIsacNumberOfSamples, input_file))); - fclose(input_file); - input_file = NULL; -} - -// Test failing Create. -TEST_F(IsacTest, IsacCreateFail) { - // Test to see that an invalid pointer is caught. - EXPECT_EQ(-1, WebRtcIsac_Create(NULL)); -} - -// Test failing Free. -TEST_F(IsacTest, IsacFreeFail) { - // Test to see that free function doesn't crash. - EXPECT_EQ(0, WebRtcIsac_Free(NULL)); -} - -// Test normal Create and Free. -TEST_F(IsacTest, IsacCreateFree) { - EXPECT_EQ(0, WebRtcIsac_Create(&isac_codec_)); - EXPECT_TRUE(isac_codec_ != NULL); - EXPECT_EQ(0, WebRtcIsac_Free(isac_codec_)); -} - -TEST_F(IsacTest, IsacUpdateBWE) { - // Create encoder memory. - EXPECT_EQ(0, WebRtcIsac_Create(&isac_codec_)); - - // Init encoder (adaptive mode) and decoder. - WebRtcIsac_EncoderInit(isac_codec_, 0); - WebRtcIsac_DecoderInit(isac_codec_); - - int encoded_bytes; - - // Test with call with a small packet (sync packet). - EXPECT_EQ(-1, WebRtcIsac_UpdateBwEstimate(isac_codec_, bitstream_small_, 7, 1, - 12345, 56789)); - - // Encode 60 ms of data (needed to create a first packet). - encoded_bytes = WebRtcIsac_Encode(isac_codec_, speech_data_, bitstream_); - EXPECT_EQ(0, encoded_bytes); - encoded_bytes = WebRtcIsac_Encode(isac_codec_, speech_data_, bitstream_); - EXPECT_EQ(0, encoded_bytes); - encoded_bytes = WebRtcIsac_Encode(isac_codec_, speech_data_, bitstream_); - EXPECT_EQ(0, encoded_bytes); - encoded_bytes = WebRtcIsac_Encode(isac_codec_, speech_data_, bitstream_); - EXPECT_EQ(0, encoded_bytes); - encoded_bytes = WebRtcIsac_Encode(isac_codec_, speech_data_, bitstream_); - EXPECT_EQ(0, encoded_bytes); - encoded_bytes = WebRtcIsac_Encode(isac_codec_, speech_data_, bitstream_); - EXPECT_GT(encoded_bytes, 0); - - // Call to update bandwidth estimator with real data. - EXPECT_EQ(0, WebRtcIsac_UpdateBwEstimate(isac_codec_, bitstream_, - static_cast<size_t>(encoded_bytes), - 1, 12345, 56789)); - - // Free memory.
- EXPECT_EQ(0, WebRtcIsac_Free(isac_codec_)); -} - -} // namespace webrtc diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/lattice.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/lattice.c deleted file mode 100644 index d9d2d656659f..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/lattice.c +++ /dev/null @@ -1,219 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * lattice.c - * - * contains the normalized lattice filter routines (MA and AR) for iSAC codec - * - */ - -#include -#include -#include -#ifdef WEBRTC_ANDROID -#include -#endif - -#include "modules/audio_coding/codecs/isac/main/source/settings.h" -#include "modules/audio_coding/codecs/isac/main/source/codec.h" - -/* filter the signal using normalized lattice filter */ -/* MA filter */ -void WebRtcIsac_NormLatticeFilterMa(int orderCoef, - float *stateF, - float *stateG, - float *lat_in, - double *filtcoeflo, - double *lat_out) -{ - int n,k,i,u,temp1; - int ord_1 = orderCoef+1; - float sth[MAX_AR_MODEL_ORDER]; - float cth[MAX_AR_MODEL_ORDER]; - float inv_cth[MAX_AR_MODEL_ORDER]; - double a[MAX_AR_MODEL_ORDER+1]; - float f[MAX_AR_MODEL_ORDER+1][HALF_SUBFRAMELEN], g[MAX_AR_MODEL_ORDER+1][HALF_SUBFRAMELEN]; - float gain1; - - for (u=0;u=0;i--) //get the state of f&g for the first input, for all orders - { - ARf[i][0] = cth[i]*ARf[i+1][0] - sth[i]*stateG[i]; - ARg[i+1][0] = sth[i]*ARf[i+1][0] + cth[i]* stateG[i]; - } - ARg[0][0] = ARf[0][0]; - - for(n=0;n<(HALF_SUBFRAMELEN-1);n++) - { - for(k=orderCoef-1;k>=0;k--) - { - ARf[k][n+1] = cth[k]*ARf[k+1][n+1] - sth[k]*ARg[k][n]; - ARg[k+1][n+1] = sth[k]*ARf[k+1][n+1] + cth[k]* ARg[k][n]; - } - ARg[0][n+1] = ARf[0][n+1]; - } - - memcpy(lat_out+u * HALF_SUBFRAMELEN, &(ARf[0][0]), sizeof(float) * HALF_SUBFRAMELEN); - - /* cannot use memcpy in the following */ - for (i=0;i0; m--) - { - tmp_inv = 1.0f / cth2; - for (k=1; k<=m; k++) - { - tmp[k] = ((float)a[k] - sth[m] * (float)a[m-k+1]) * tmp_inv; - } - - for (k=1; k -#include - -#include "modules/audio_coding/codecs/isac/main/source/lpc_analysis.h" -#include "modules/audio_coding/codecs/isac/main/source/settings.h" -#include "modules/audio_coding/codecs/isac/main/source/codec.h" -#include "modules/audio_coding/codecs/isac/main/source/entropy_coding.h" -#include "modules/audio_coding/codecs/isac/main/source/filter_functions.h" -#include "modules/audio_coding/codecs/isac/main/source/isac_vad.h" - -/* window */ -/* Matlab generation code: - * t = (1:256)/257; r = 1-(1-t).^.45; w = sin(r*pi).^3; w = w/sum(w); plot((1:256)/8, w); grid; - * for k=1:16, fprintf(1, '%.8f, ', w(k*16 + (-15:0))); fprintf(1, '\n'); end - */ -static const double kLpcCorrWindow[WINLEN] = { - 0.00000000, 0.00000001, 0.00000004, 0.00000010, 0.00000020, - 0.00000035, 0.00000055, 0.00000083, 0.00000118, 0.00000163, - 0.00000218, 0.00000283, 0.00000361, 0.00000453, 0.00000558, 0.00000679, - 0.00000817, 0.00000973, 0.00001147, 0.00001342, 0.00001558, - 0.00001796, 0.00002058, 0.00002344, 0.00002657, 0.00002997, - 0.00003365, 0.00003762, 0.00004190, 0.00004651, 0.00005144, 0.00005673, - 0.00006236, 0.00006837, 
0.00007476, 0.00008155, 0.00008875, - 0.00009636, 0.00010441, 0.00011290, 0.00012186, 0.00013128, - 0.00014119, 0.00015160, 0.00016252, 0.00017396, 0.00018594, 0.00019846, - 0.00021155, 0.00022521, 0.00023946, 0.00025432, 0.00026978, - 0.00028587, 0.00030260, 0.00031998, 0.00033802, 0.00035674, - 0.00037615, 0.00039626, 0.00041708, 0.00043863, 0.00046092, 0.00048396, - 0.00050775, 0.00053233, 0.00055768, 0.00058384, 0.00061080, - 0.00063858, 0.00066720, 0.00069665, 0.00072696, 0.00075813, - 0.00079017, 0.00082310, 0.00085692, 0.00089164, 0.00092728, 0.00096384, - 0.00100133, 0.00103976, 0.00107914, 0.00111947, 0.00116077, - 0.00120304, 0.00124630, 0.00129053, 0.00133577, 0.00138200, - 0.00142924, 0.00147749, 0.00152676, 0.00157705, 0.00162836, 0.00168070, - 0.00173408, 0.00178850, 0.00184395, 0.00190045, 0.00195799, - 0.00201658, 0.00207621, 0.00213688, 0.00219860, 0.00226137, - 0.00232518, 0.00239003, 0.00245591, 0.00252284, 0.00259079, 0.00265977, - 0.00272977, 0.00280078, 0.00287280, 0.00294582, 0.00301984, - 0.00309484, 0.00317081, 0.00324774, 0.00332563, 0.00340446, - 0.00348421, 0.00356488, 0.00364644, 0.00372889, 0.00381220, 0.00389636, - 0.00398135, 0.00406715, 0.00415374, 0.00424109, 0.00432920, - 0.00441802, 0.00450754, 0.00459773, 0.00468857, 0.00478001, - 0.00487205, 0.00496464, 0.00505775, 0.00515136, 0.00524542, 0.00533990, - 0.00543476, 0.00552997, 0.00562548, 0.00572125, 0.00581725, - 0.00591342, 0.00600973, 0.00610612, 0.00620254, 0.00629895, - 0.00639530, 0.00649153, 0.00658758, 0.00668341, 0.00677894, 0.00687413, - 0.00696891, 0.00706322, 0.00715699, 0.00725016, 0.00734266, - 0.00743441, 0.00752535, 0.00761540, 0.00770449, 0.00779254, - 0.00787947, 0.00796519, 0.00804963, 0.00813270, 0.00821431, 0.00829437, - 0.00837280, 0.00844949, 0.00852436, 0.00859730, 0.00866822, - 0.00873701, 0.00880358, 0.00886781, 0.00892960, 0.00898884, - 0.00904542, 0.00909923, 0.00915014, 0.00919805, 0.00924283, 0.00928436, - 0.00932252, 0.00935718, 0.00938821, 0.00941550, 0.00943890, - 0.00945828, 0.00947351, 0.00948446, 0.00949098, 0.00949294, - 0.00949020, 0.00948262, 0.00947005, 0.00945235, 0.00942938, 0.00940099, - 0.00936704, 0.00932738, 0.00928186, 0.00923034, 0.00917268, - 0.00910872, 0.00903832, 0.00896134, 0.00887763, 0.00878706, - 0.00868949, 0.00858478, 0.00847280, 0.00835343, 0.00822653, 0.00809199, - 0.00794970, 0.00779956, 0.00764145, 0.00747530, 0.00730103, - 0.00711857, 0.00692787, 0.00672888, 0.00652158, 0.00630597, - 0.00608208, 0.00584994, 0.00560962, 0.00536124, 0.00510493, 0.00484089, - 0.00456935, 0.00429062, 0.00400505, 0.00371310, 0.00341532, - 0.00311238, 0.00280511, 0.00249452, 0.00218184, 0.00186864, - 0.00155690, 0.00124918, 0.00094895, 0.00066112, 0.00039320, 0.00015881 -}; - -static void WebRtcIsac_GetVars(const double* input, - const int16_t* pitchGains_Q12, - double* oldEnergy, - double* varscale) { - double nrg[4], chng, pg; - int k; - - double pitchGains[4]={0,0,0,0};; - - /* Calculate energies of first and second frame halfs */ - nrg[0] = 0.0001; - for (k = QLOOKAHEAD/2; k < (FRAMESAMPLES_QUARTER + QLOOKAHEAD) / 2; k++) { - nrg[0] += input[k]*input[k]; - } - nrg[1] = 0.0001; - for ( ; k < (FRAMESAMPLES_HALF + QLOOKAHEAD) / 2; k++) { - nrg[1] += input[k]*input[k]; - } - nrg[2] = 0.0001; - for ( ; k < (FRAMESAMPLES*3/4 + QLOOKAHEAD) / 2; k++) { - nrg[2] += input[k]*input[k]; - } - nrg[3] = 0.0001; - for ( ; k < (FRAMESAMPLES + QLOOKAHEAD) / 2; k++) { - nrg[3] += input[k]*input[k]; - } - - /* Calculate average level change */ - chng = 0.25 * (fabs(10.0 * 
log10(nrg[3] / nrg[2])) + - fabs(10.0 * log10(nrg[2] / nrg[1])) + - fabs(10.0 * log10(nrg[1] / nrg[0])) + - fabs(10.0 * log10(nrg[0] / *oldEnergy))); - - - /* Find average pitch gain */ - pg = 0.0; - for (k=0; k<4; k++) - { - pitchGains[k] = ((float)pitchGains_Q12[k])/4096; - pg += pitchGains[k]; - } - pg *= 0.25; - - /* If pitch gain is low and energy constant - increase noise level*/ - /* Matlab code: - pg = 0:.01:.45; plot(pg, 0.0 + 1.0 * exp( -1.0 * exp(-200.0 * pg.*pg.*pg) / (1.0 + 0.4 * 0) )) - */ - *varscale = 0.0 + 1.0 * exp( -1.4 * exp(-200.0 * pg*pg*pg) / (1.0 + 0.4 * chng) ); - - *oldEnergy = nrg[3]; -} - -static void WebRtcIsac_GetVarsUB(const double* input, - double* oldEnergy, - double* varscale) { - double nrg[4], chng; - int k; - - /* Calculate energies of first and second frame halfs */ - nrg[0] = 0.0001; - for (k = 0; k < (FRAMESAMPLES_QUARTER) / 2; k++) { - nrg[0] += input[k]*input[k]; - } - nrg[1] = 0.0001; - for ( ; k < (FRAMESAMPLES_HALF) / 2; k++) { - nrg[1] += input[k]*input[k]; - } - nrg[2] = 0.0001; - for ( ; k < (FRAMESAMPLES*3/4) / 2; k++) { - nrg[2] += input[k]*input[k]; - } - nrg[3] = 0.0001; - for ( ; k < (FRAMESAMPLES) / 2; k++) { - nrg[3] += input[k]*input[k]; - } - - /* Calculate average level change */ - chng = 0.25 * (fabs(10.0 * log10(nrg[3] / nrg[2])) + - fabs(10.0 * log10(nrg[2] / nrg[1])) + - fabs(10.0 * log10(nrg[1] / nrg[0])) + - fabs(10.0 * log10(nrg[0] / *oldEnergy))); - - - /* If pitch gain is low and energy constant - increase noise level*/ - /* Matlab code: - pg = 0:.01:.45; plot(pg, 0.0 + 1.0 * exp( -1.0 * exp(-200.0 * pg.*pg.*pg) / (1.0 + 0.4 * 0) )) - */ - *varscale = exp( -1.4 / (1.0 + 0.4 * chng) ); - - *oldEnergy = nrg[3]; -} - -void WebRtcIsac_GetLpcCoefLb(double *inLo, double *inHi, MaskFiltstr *maskdata, - double signal_noise_ratio, const int16_t *pitchGains_Q12, - double *lo_coeff, double *hi_coeff) -{ - int k, n, j, pos1, pos2; - double varscale; - - double DataLo[WINLEN], DataHi[WINLEN]; - double corrlo[ORDERLO+2], corrlo2[ORDERLO+1]; - double corrhi[ORDERHI+1]; - double k_veclo[ORDERLO], k_vechi[ORDERHI]; - - double a_LO[ORDERLO+1], a_HI[ORDERHI+1]; - double tmp, res_nrg; - - double FwdA, FwdB; - - /* hearing threshold level in dB; higher value gives more noise */ - const double HearThresOffset = -28.0; - - /* bandwdith expansion factors for low- and high band */ - const double gammaLo = 0.9; - const double gammaHi = 0.8; - - /* less-noise-at-low-frequencies factor */ - double aa; - - - /* convert from dB to signal level */ - const double H_T_H = pow(10.0, 0.05 * HearThresOffset); - double S_N_R = pow(10.0, 0.05 * signal_noise_ratio) / 3.46; /* divide by sqrt(12) */ - - /* change quallevel depending on pitch gains and level fluctuations */ - WebRtcIsac_GetVars(inLo, pitchGains_Q12, &(maskdata->OldEnergy), &varscale); - - /* less-noise-at-low-frequencies factor */ - aa = 0.35 * (0.5 + 0.5 * varscale); - - /* replace data in buffer by new look-ahead data */ - for (pos1 = 0; pos1 < QLOOKAHEAD; pos1++) - maskdata->DataBufferLo[pos1 + WINLEN - QLOOKAHEAD] = inLo[pos1]; - - for (k = 0; k < SUBFRAMES; k++) { - - /* Update input buffer and multiply signal with window */ - for (pos1 = 0; pos1 < WINLEN - UPDATE/2; pos1++) { - maskdata->DataBufferLo[pos1] = maskdata->DataBufferLo[pos1 + UPDATE/2]; - maskdata->DataBufferHi[pos1] = maskdata->DataBufferHi[pos1 + UPDATE/2]; - DataLo[pos1] = maskdata->DataBufferLo[pos1] * kLpcCorrWindow[pos1]; - DataHi[pos1] = maskdata->DataBufferHi[pos1] * kLpcCorrWindow[pos1]; - } - pos2 = k * UPDATE/2; - for 
(n = 0; n < UPDATE/2; n++, pos1++) { - maskdata->DataBufferLo[pos1] = inLo[QLOOKAHEAD + pos2]; - maskdata->DataBufferHi[pos1] = inHi[pos2++]; - DataLo[pos1] = maskdata->DataBufferLo[pos1] * kLpcCorrWindow[pos1]; - DataHi[pos1] = maskdata->DataBufferHi[pos1] * kLpcCorrWindow[pos1]; - } - - /* Get correlation coefficients */ - WebRtcIsac_AutoCorr(corrlo, DataLo, WINLEN, ORDERLO+1); /* computing autocorrelation */ - WebRtcIsac_AutoCorr(corrhi, DataHi, WINLEN, ORDERHI); - - - /* less noise for lower frequencies, by filtering/scaling autocorrelation sequences */ - corrlo2[0] = (1.0+aa*aa) * corrlo[0] - 2.0*aa * corrlo[1]; - tmp = (1.0 + aa*aa); - for (n = 1; n <= ORDERLO; n++) { - corrlo2[n] = tmp * corrlo[n] - aa * (corrlo[n-1] + corrlo[n+1]); - } - tmp = (1.0+aa) * (1.0+aa); - for (n = 0; n <= ORDERHI; n++) { - corrhi[n] = tmp * corrhi[n]; - } - - /* add white noise floor */ - corrlo2[0] += 1e-6; - corrhi[0] += 1e-6; - - - FwdA = 0.01; - FwdB = 0.01; - - /* recursive filtering of correlation over subframes */ - for (n = 0; n <= ORDERLO; n++) { - maskdata->CorrBufLo[n] = FwdA * maskdata->CorrBufLo[n] + corrlo2[n]; - corrlo2[n] = ((1.0-FwdA)*FwdB) * maskdata->CorrBufLo[n] + (1.0-FwdB) * corrlo2[n]; - } - for (n = 0; n <= ORDERHI; n++) { - maskdata->CorrBufHi[n] = FwdA * maskdata->CorrBufHi[n] + corrhi[n]; - corrhi[n] = ((1.0-FwdA)*FwdB) * maskdata->CorrBufHi[n] + (1.0-FwdB) * corrhi[n]; - } - - /* compute prediction coefficients */ - WebRtcIsac_LevDurb(a_LO, k_veclo, corrlo2, ORDERLO); - WebRtcIsac_LevDurb(a_HI, k_vechi, corrhi, ORDERHI); - - /* bandwidth expansion */ - tmp = gammaLo; - for (n = 1; n <= ORDERLO; n++) { - a_LO[n] *= tmp; - tmp *= gammaLo; - } - - /* residual energy */ - res_nrg = 0.0; - for (j = 0; j <= ORDERLO; j++) { - for (n = 0; n <= j; n++) { - res_nrg += a_LO[j] * corrlo2[j-n] * a_LO[n]; - } - for (n = j+1; n <= ORDERLO; n++) { - res_nrg += a_LO[j] * corrlo2[n-j] * a_LO[n]; - } - } - - /* add hearing threshold and compute the gain */ - *lo_coeff++ = S_N_R / (sqrt(res_nrg) / varscale + H_T_H); - - /* copy coefficients to output array */ - for (n = 1; n <= ORDERLO; n++) { - *lo_coeff++ = a_LO[n]; - } - - - /* bandwidth expansion */ - tmp = gammaHi; - for (n = 1; n <= ORDERHI; n++) { - a_HI[n] *= tmp; - tmp *= gammaHi; - } - - /* residual energy */ - res_nrg = 0.0; - for (j = 0; j <= ORDERHI; j++) { - for (n = 0; n <= j; n++) { - res_nrg += a_HI[j] * corrhi[j-n] * a_HI[n]; - } - for (n = j+1; n <= ORDERHI; n++) { - res_nrg += a_HI[j] * corrhi[n-j] * a_HI[n]; - } - } - - /* add hearing threshold and compute of the gain */ - *hi_coeff++ = S_N_R / (sqrt(res_nrg) / varscale + H_T_H); - - /* copy coefficients to output array */ - for (n = 1; n <= ORDERHI; n++) { - *hi_coeff++ = a_HI[n]; - } - } -} - - - -/****************************************************************************** - * WebRtcIsac_GetLpcCoefUb() - * - * Compute LP coefficients and correlation coefficients. At 12 kHz LP - * coefficients of the first and the last sub-frame is computed. At 16 kHz - * LP coefficients of 4th, 8th and 12th sub-frames are computed. We always - * compute correlation coefficients of all sub-frames. - * - * Inputs: - * -inSignal : Input signal - * -maskdata : a structure keeping signal from previous frame. - * -bandwidth : specifies if the codec is in 0-16 kHz mode or - * 0-12 kHz mode. 
- * - * Outputs: - * -lpCoeff : pointer to a buffer where A-polynomials are - * written to (first coeff is 1 and it is not - * written) - * -corrMat : a matrix where correlation coefficients of each - * sub-frame are written to one row. - * -varscale : a scale used to compute LPC gains. - */ -void -WebRtcIsac_GetLpcCoefUb( - double* inSignal, - MaskFiltstr* maskdata, - double* lpCoeff, - double corrMat[][UB_LPC_ORDER + 1], - double* varscale, - int16_t bandwidth) -{ - int frameCntr, activeFrameCntr, n, pos1, pos2; - int16_t criterion1; - int16_t criterion2; - int16_t numSubFrames = SUBFRAMES * (1 + (bandwidth == isac16kHz)); - double data[WINLEN]; - double corrSubFrame[UB_LPC_ORDER+2]; - double reflecCoeff[UB_LPC_ORDER]; - - double aPolynom[UB_LPC_ORDER+1]; - double tmp; - - /* bandwdith expansion factors */ - const double gamma = 0.9; - - /* change quallevel depending on pitch gains and level fluctuations */ - WebRtcIsac_GetVarsUB(inSignal, &(maskdata->OldEnergy), varscale); - - /* replace data in buffer by new look-ahead data */ - for(frameCntr = 0, activeFrameCntr = 0; frameCntr < numSubFrames; - frameCntr++) - { - if(frameCntr == SUBFRAMES) - { - // we are in 16 kHz - varscale++; - WebRtcIsac_GetVarsUB(&inSignal[FRAMESAMPLES_HALF], - &(maskdata->OldEnergy), varscale); - } - /* Update input buffer and multiply signal with window */ - for(pos1 = 0; pos1 < WINLEN - UPDATE/2; pos1++) - { - maskdata->DataBufferLo[pos1] = maskdata->DataBufferLo[pos1 + - UPDATE/2]; - data[pos1] = maskdata->DataBufferLo[pos1] * kLpcCorrWindow[pos1]; - } - pos2 = frameCntr * UPDATE/2; - for(n = 0; n < UPDATE/2; n++, pos1++, pos2++) - { - maskdata->DataBufferLo[pos1] = inSignal[pos2]; - data[pos1] = maskdata->DataBufferLo[pos1] * kLpcCorrWindow[pos1]; - } - - /* Get correlation coefficients */ - /* computing autocorrelation */ - WebRtcIsac_AutoCorr(corrSubFrame, data, WINLEN, UB_LPC_ORDER+1); - memcpy(corrMat[frameCntr], corrSubFrame, - (UB_LPC_ORDER+1)*sizeof(double)); - - criterion1 = ((frameCntr == 0) || (frameCntr == (SUBFRAMES - 1))) && - (bandwidth == isac12kHz); - criterion2 = (((frameCntr+1) % 4) == 0) && - (bandwidth == isac16kHz); - if(criterion1 || criterion2) - { - /* add noise */ - corrSubFrame[0] += 1e-6; - /* compute prediction coefficients */ - WebRtcIsac_LevDurb(aPolynom, reflecCoeff, corrSubFrame, - UB_LPC_ORDER); - - /* bandwidth expansion */ - tmp = gamma; - for (n = 1; n <= UB_LPC_ORDER; n++) - { - *lpCoeff++ = aPolynom[n] * tmp; - tmp *= gamma; - } - activeFrameCntr++; - } - } -} - - - -/****************************************************************************** - * WebRtcIsac_GetLpcGain() - * - * Compute the LPC gains for each sub-frame, given the LPC of each sub-frame - * and the corresponding correlation coefficients. - * - * Inputs: - * -signal_noise_ratio : the desired SNR in dB. - * -numVecs : number of sub-frames - * -corrMat : a matrix of correlation coefficients where - * each row is a set of correlation coefficients of - * one sub-frame. - * -varscale : a scale computed when WebRtcIsac_GetLpcCoefUb() - * is called. - * - * Outputs: - * -gain : pointer to a buffer where LP gains are written. 
- * - */ -void -WebRtcIsac_GetLpcGain( - double signal_noise_ratio, - const double* filtCoeffVecs, - int numVecs, - double* gain, - double corrMat[][UB_LPC_ORDER + 1], - const double* varscale) -{ - int16_t j, n; - int16_t subFrameCntr; - double aPolynom[ORDERLO + 1]; - double res_nrg; - - const double HearThresOffset = -28.0; - const double H_T_H = pow(10.0, 0.05 * HearThresOffset); - /* divide by sqrt(12) = 3.46 */ - const double S_N_R = pow(10.0, 0.05 * signal_noise_ratio) / 3.46; - - aPolynom[0] = 1; - for(subFrameCntr = 0; subFrameCntr < numVecs; subFrameCntr++) - { - if(subFrameCntr == SUBFRAMES) - { - // we are in second half of a SWB frame. use new varscale - varscale++; - } - memcpy(&aPolynom[1], &filtCoeffVecs[(subFrameCntr * (UB_LPC_ORDER + 1)) + - 1], sizeof(double) * UB_LPC_ORDER); - - /* residual energy */ - res_nrg = 0.0; - for(j = 0; j <= UB_LPC_ORDER; j++) - { - for(n = 0; n <= j; n++) - { - res_nrg += aPolynom[j] * corrMat[subFrameCntr][j-n] * - aPolynom[n]; - } - for(n = j+1; n <= UB_LPC_ORDER; n++) - { - res_nrg += aPolynom[j] * corrMat[subFrameCntr][n-j] * - aPolynom[n]; - } - } - - /* add hearing threshold and compute the gain */ - gain[subFrameCntr] = S_N_R / (sqrt(res_nrg) / *varscale + H_T_H); - } -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/lpc_analysis.h b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/lpc_analysis.h deleted file mode 100644 index 5503e2d49b78..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/lpc_analysis.h +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * lpc_analysis.h - * - * LPC functions - * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_LPC_ANALYSIS_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_LPC_ANALYSIS_H_ - -#include "modules/audio_coding/codecs/isac/main/source/settings.h" -#include "modules/audio_coding/codecs/isac/main/source/structs.h" - -void WebRtcIsac_GetLpcCoefLb(double* inLo, - double* inHi, - MaskFiltstr* maskdata, - double signal_noise_ratio, - const int16_t* pitchGains_Q12, - double* lo_coeff, - double* hi_coeff); - -void WebRtcIsac_GetLpcGain(double signal_noise_ratio, - const double* filtCoeffVecs, - int numVecs, - double* gain, - double corrLo[][UB_LPC_ORDER + 1], - const double* varscale); - -void WebRtcIsac_GetLpcCoefUb(double* inSignal, - MaskFiltstr* maskdata, - double* lpCoeff, - double corr[][UB_LPC_ORDER + 1], - double* varscale, - int16_t bandwidth); - -#endif /* MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_LPC_ANALYIS_H_ */ diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/lpc_gain_swb_tables.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/lpc_gain_swb_tables.c deleted file mode 100644 index 670754065f4a..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/lpc_gain_swb_tables.c +++ /dev/null @@ -1,136 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * SWB_KLT_Tables_LPCGain.c - * - * This file defines tables used for entropy coding of LPC Gain - * of upper-band. - * - */ - -#include "modules/audio_coding/codecs/isac/main/source/lpc_gain_swb_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/settings.h" - -const double WebRtcIsac_kQSizeLpcGain = 0.100000; - -const double WebRtcIsac_kMeanLpcGain = -3.3822; - -/* -* The smallest reconstruction points for quantiztion of -* LPC gains. -*/ -const double WebRtcIsac_kLeftRecPointLpcGain[SUBFRAMES] = -{ - -0.800000, -1.000000, -1.200000, -2.200000, -3.000000, -12.700000 -}; - -/* -* Number of reconstruction points of quantizers for LPC Gains. -*/ -const int16_t WebRtcIsac_kNumQCellLpcGain[SUBFRAMES] = -{ - 17, 20, 25, 45, 77, 170 -}; -/* -* Starting index for entropy decoder to search for the right interval, -* one entry per LAR coefficient -*/ -const uint16_t WebRtcIsac_kLpcGainEntropySearch[SUBFRAMES] = -{ - 8, 10, 12, 22, 38, 85 -}; - -/* -* The following 6 vectors define CDF of 6 decorrelated LPC -* gains. -*/ -const uint16_t WebRtcIsac_kLpcGainCdfVec0[18] = -{ - 0, 10, 27, 83, 234, 568, 1601, 4683, 16830, 57534, 63437, - 64767, 65229, 65408, 65483, 65514, 65527, 65535 -}; - -const uint16_t WebRtcIsac_kLpcGainCdfVec1[21] = -{ - 0, 15, 33, 84, 185, 385, 807, 1619, 3529, 7850, 19488, - 51365, 62437, 64548, 65088, 65304, 65409, 65484, 65507, 65522, 65535 -}; - -const uint16_t WebRtcIsac_kLpcGainCdfVec2[26] = -{ - 0, 15, 29, 54, 89, 145, 228, 380, 652, 1493, 4260, - 12359, 34133, 50749, 57224, 60814, 62927, 64078, 64742, 65103, 65311, 65418, - 65473, 65509, 65521, 65535 -}; - -const uint16_t WebRtcIsac_kLpcGainCdfVec3[46] = -{ - 0, 8, 12, 16, 26, 42, 56, 76, 111, 164, 247, - 366, 508, 693, 1000, 1442, 2155, 3188, 4854, 7387, 11249, 17617, - 30079, 46711, 56291, 60127, 62140, 63258, 63954, 64384, 64690, 64891, 65031, - 65139, 65227, 65293, 65351, 65399, 65438, 65467, 65492, 65504, 65510, 65518, - 65523, 65535 -}; - -const uint16_t WebRtcIsac_kLpcGainCdfVec4[78] = -{ - 0, 17, 29, 39, 51, 70, 104, 154, 234, 324, 443, - 590, 760, 971, 1202, 1494, 1845, 2274, 2797, 3366, 4088, 4905, - 5899, 7142, 8683, 10625, 12983, 16095, 20637, 28216, 38859, 47237, 51537, - 54150, 56066, 57583, 58756, 59685, 60458, 61103, 61659, 62144, 62550, 62886, - 63186, 63480, 63743, 63954, 64148, 64320, 64467, 64600, 64719, 64837, 64939, - 65014, 65098, 65160, 65211, 65250, 65290, 65325, 65344, 65366, 65391, 65410, - 65430, 65447, 65460, 65474, 65487, 65494, 65501, 65509, 65513, 65518, 65520, - 65535 -}; - -const uint16_t WebRtcIsac_kLpcGainCdfVec5[171] = -{ - 0, 10, 12, 14, 16, 18, 23, 29, 35, 42, 51, - 58, 65, 72, 78, 87, 96, 103, 111, 122, 134, 150, - 167, 184, 202, 223, 244, 265, 289, 315, 346, 379, 414, - 450, 491, 532, 572, 613, 656, 700, 751, 802, 853, 905, - 957, 1021, 1098, 1174, 1250, 1331, 1413, 1490, 1565, 1647, 1730, - 1821, 1913, 2004, 2100, 2207, 2314, 2420, 2532, 2652, 2783, 2921, - 3056, 3189, 3327, 3468, 3640, 3817, 3993, 4171, 4362, 4554, 4751, - 4948, 5142, 5346, 5566, 5799, 6044, 6301, 6565, 6852, 7150, 7470, - 7797, 8143, 8492, 8835, 9181, 9547, 9919, 10315, 10718, 11136, 11566, - 12015, 12482, 12967, 13458, 13953, 14432, 14903, 15416, 15936, 
16452, 16967, - 17492, 18024, 18600, 19173, 19736, 20311, 20911, 21490, 22041, 22597, 23157, - 23768, 24405, 25034, 25660, 26280, 26899, 27614, 28331, 29015, 29702, 30403, - 31107, 31817, 32566, 33381, 34224, 35099, 36112, 37222, 38375, 39549, 40801, - 42074, 43350, 44626, 45982, 47354, 48860, 50361, 51845, 53312, 54739, 56026, - 57116, 58104, 58996, 59842, 60658, 61488, 62324, 63057, 63769, 64285, 64779, - 65076, 65344, 65430, 65500, 65517, 65535 -}; - -/* -* An array of pointers to CDFs of decorrelated LPC Gains -*/ -const uint16_t* WebRtcIsac_kLpcGainCdfMat[SUBFRAMES] = -{ - WebRtcIsac_kLpcGainCdfVec0, WebRtcIsac_kLpcGainCdfVec1, - WebRtcIsac_kLpcGainCdfVec2, WebRtcIsac_kLpcGainCdfVec3, - WebRtcIsac_kLpcGainCdfVec4, WebRtcIsac_kLpcGainCdfVec5 -}; - -/* -* A matrix to decorrellate LPC gains of subframes. -*/ -const double WebRtcIsac_kLpcGainDecorrMat[SUBFRAMES][SUBFRAMES] = -{ - {-0.150860, 0.327872, 0.367220, 0.504613, 0.559270, 0.409234}, - { 0.457128, -0.613591, -0.289283, -0.029734, 0.393760, 0.418240}, - {-0.626043, 0.136489, -0.439118, -0.448323, 0.135987, 0.420869}, - { 0.526617, 0.480187, 0.242552, -0.488754, -0.158713, 0.411331}, - {-0.302587, -0.494953, 0.588112, -0.063035, -0.404290, 0.387510}, - { 0.086378, 0.147714, -0.428875, 0.548300, -0.570121, 0.401391} -}; diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/lpc_gain_swb_tables.h b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/lpc_gain_swb_tables.h deleted file mode 100644 index 39c4a24ef403..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/lpc_gain_swb_tables.h +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * SWB_KLT_Tables_LPCGain.h - * - * This file declares tables used for entropy coding of LPC Gain - * of upper-band. 
- * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_LPC_GAIN_SWB_TABLES_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_LPC_GAIN_SWB_TABLES_H_ - -#include - -#include "modules/audio_coding/codecs/isac/main/source/settings.h" - -extern const double WebRtcIsac_kQSizeLpcGain; - -extern const double WebRtcIsac_kLeftRecPointLpcGain[SUBFRAMES]; - -extern const int16_t WebRtcIsac_kNumQCellLpcGain[SUBFRAMES]; - -extern const uint16_t WebRtcIsac_kLpcGainEntropySearch[SUBFRAMES]; - -extern const uint16_t WebRtcIsac_kLpcGainCdfVec0[18]; - -extern const uint16_t WebRtcIsac_kLpcGainCdfVec1[21]; - -extern const uint16_t WebRtcIsac_kLpcGainCdfVec2[26]; - -extern const uint16_t WebRtcIsac_kLpcGainCdfVec3[46]; - -extern const uint16_t WebRtcIsac_kLpcGainCdfVec4[78]; - -extern const uint16_t WebRtcIsac_kLpcGainCdfVec5[171]; - -extern const uint16_t* WebRtcIsac_kLpcGainCdfMat[SUBFRAMES]; - -extern const double WebRtcIsac_kLpcGainDecorrMat[SUBFRAMES][SUBFRAMES]; - -#endif // MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_LPC_GAIN_SWB_TABLES_H_ diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb12_tables.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb12_tables.c deleted file mode 100644 index e3600a7fab4a..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb12_tables.c +++ /dev/null @@ -1,158 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * SWB_KLT_Tables.c - * - * This file defines tables used for entropy coding of LPC shape of - * upper-band signal if the bandwidth is 12 kHz. - * - */ - -#include "modules/audio_coding/codecs/isac/main/source/lpc_shape_swb12_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/settings.h" - -/* -* Mean value of LAR -*/ -const double WebRtcIsac_kMeanLarUb12[UB_LPC_ORDER] = -{ - 0.03748928306641, 0.09453441192543, -0.01112522344398, 0.03800237516842 -}; - -/* -* A rotation matrix to decorrelate intra-vector correlation, -* i.e. correlation among components of LAR vector. -*/ -const double WebRtcIsac_kIntraVecDecorrMatUb12[UB_LPC_ORDER][UB_LPC_ORDER] = -{ - {-0.00075365493856, -0.05809964887743, -0.23397966154116, 0.97050367376411}, - { 0.00625021257734, -0.17299965610679, 0.95977735920651, 0.22104179375008}, - { 0.20543384258374, -0.96202143495696, -0.15301870801552, -0.09432375099565}, - {-0.97865075648479, -0.20300322280841, -0.02581111653779, -0.01913568980258} -}; - -/* -* A rotation matrix to remove correlation among LAR coefficients -* of different LAR vectors. One might guess that decorrelation matrix -* for the first component should differ from the second component -* but we haven't observed a significant benefit of having different -* decorrelation matrices for different components. -*/ -const double WebRtcIsac_kInterVecDecorrMatUb12 -[UB_LPC_VEC_PER_FRAME][UB_LPC_VEC_PER_FRAME] = -{ - { 0.70650597970460, -0.70770707262373}, - {-0.70770707262373, -0.70650597970460} -}; - -/* -* LAR quantization step-size. 
-*/ -const double WebRtcIsac_kLpcShapeQStepSizeUb12 = 0.150000; - -/* -* The smallest reconstruction points for quantiztion of LAR coefficients. -*/ -const double WebRtcIsac_kLpcShapeLeftRecPointUb12 -[UB_LPC_ORDER*UB_LPC_VEC_PER_FRAME] = -{ - -0.900000, -1.050000, -1.350000, -1.800000, -1.350000, -1.650000, - -2.250000, -3.450000 -}; - -/* -* Number of reconstruction points of quantizers for LAR coefficients. -*/ -const int16_t WebRtcIsac_kLpcShapeNumRecPointUb12 -[UB_LPC_ORDER * UB_LPC_VEC_PER_FRAME] = -{ - 13, 15, 19, 27, 19, 24, 32, 48 -}; - -/* -* Starting index for entropy decoder to search for the right interval, -* one entry per LAR coefficient -*/ -const uint16_t WebRtcIsac_kLpcShapeEntropySearchUb12 -[UB_LPC_ORDER * UB_LPC_VEC_PER_FRAME] = -{ - 6, 7, 9, 13, 9, 12, 16, 24 -}; - -/* -* The following 8 vectors define CDF of 8 decorrelated LAR -* coefficients. -*/ -const uint16_t WebRtcIsac_kLpcShapeCdfVec0Ub12[14] = -{ - 0, 13, 95, 418, 1687, 6498, 21317, 44200, 59029, 63849, 65147, - 65449, 65525, 65535 -}; - -const uint16_t WebRtcIsac_kLpcShapeCdfVec1Ub12[16] = -{ - 0, 10, 59, 255, 858, 2667, 8200, 22609, 42988, 57202, 62947, - 64743, 65308, 65476, 65522, 65535 -}; - -const uint16_t WebRtcIsac_kLpcShapeCdfVec2Ub12[20] = -{ - 0, 18, 40, 118, 332, 857, 2017, 4822, 11321, 24330, 41279, - 54342, 60637, 63394, 64659, 65184, 65398, 65482, 65518, 65535 -}; - -const uint16_t WebRtcIsac_kLpcShapeCdfVec3Ub12[28] = -{ - 0, 21, 38, 90, 196, 398, 770, 1400, 2589, 4650, 8211, - 14933, 26044, 39592, 50814, 57452, 60971, 62884, 63995, 64621, 65019, 65273, - 65410, 65480, 65514, 65522, 65531, 65535 -}; - -const uint16_t WebRtcIsac_kLpcShapeCdfVec4Ub12[20] = -{ - 0, 7, 46, 141, 403, 969, 2132, 4649, 10633, 24902, 43254, - 54665, 59928, 62674, 64173, 64938, 65293, 65464, 65523, 65535 -}; - -const uint16_t WebRtcIsac_kLpcShapeCdfVec5Ub12[25] = -{ - 0, 7, 22, 72, 174, 411, 854, 1737, 3545, 6774, 13165, - 25221, 40980, 52821, 58714, 61706, 63472, 64437, 64989, 65287, 65430, 65503, - 65525, 65529, 65535 -}; - -const uint16_t WebRtcIsac_kLpcShapeCdfVec6Ub12[33] = -{ - 0, 11, 21, 36, 65, 128, 228, 401, 707, 1241, 2126, - 3589, 6060, 10517, 18853, 31114, 42477, 49770, 54271, 57467, 59838, 61569, - 62831, 63772, 64433, 64833, 65123, 65306, 65419, 65466, 65499, 65519, 65535 -}; - -const uint16_t WebRtcIsac_kLpcShapeCdfVec7Ub12[49] = -{ - 0, 14, 34, 67, 107, 167, 245, 326, 449, 645, 861, - 1155, 1508, 2003, 2669, 3544, 4592, 5961, 7583, 9887, 13256, 18765, - 26519, 34077, 40034, 44349, 47795, 50663, 53262, 55473, 57458, 59122, 60592, - 61742, 62690, 63391, 63997, 64463, 64794, 65045, 65207, 65309, 65394, 65443, - 65478, 65504, 65514, 65523, 65535 -}; - -/* -* An array of pointers to CDFs of decorrelated LARs -*/ -const uint16_t* WebRtcIsac_kLpcShapeCdfMatUb12 -[UB_LPC_ORDER * UB_LPC_VEC_PER_FRAME] = -{ - WebRtcIsac_kLpcShapeCdfVec0Ub12, WebRtcIsac_kLpcShapeCdfVec1Ub12, - WebRtcIsac_kLpcShapeCdfVec2Ub12, WebRtcIsac_kLpcShapeCdfVec3Ub12, - WebRtcIsac_kLpcShapeCdfVec4Ub12, WebRtcIsac_kLpcShapeCdfVec5Ub12, - WebRtcIsac_kLpcShapeCdfVec6Ub12, WebRtcIsac_kLpcShapeCdfVec7Ub12 -}; diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb12_tables.h b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb12_tables.h deleted file mode 100644 index 7448a1e76bea..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb12_tables.h +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. 
All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * lpc_shape_swb12_tables.h - * - * This file declares tables used for entropy coding of LPC shape of - * upper-band signal if the bandwidth is 12 kHz. - * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_LPC_SHAPE_SWB12_TABLES_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_LPC_SHAPE_SWB12_TABLES_H_ - -#include - -#include "modules/audio_coding/codecs/isac/main/source/settings.h" - -extern const double WebRtcIsac_kMeanLarUb12[UB_LPC_ORDER]; - -extern const double WebRtcIsac_kMeanLpcGain; - -extern const double WebRtcIsac_kIntraVecDecorrMatUb12[UB_LPC_ORDER] - [UB_LPC_ORDER]; - -extern const double WebRtcIsac_kInterVecDecorrMatUb12[UB_LPC_VEC_PER_FRAME] - [UB_LPC_VEC_PER_FRAME]; - -extern const double WebRtcIsac_kLpcShapeQStepSizeUb12; - -extern const double - WebRtcIsac_kLpcShapeLeftRecPointUb12[UB_LPC_ORDER * UB_LPC_VEC_PER_FRAME]; - -extern const int16_t - WebRtcIsac_kLpcShapeNumRecPointUb12[UB_LPC_ORDER * UB_LPC_VEC_PER_FRAME]; - -extern const uint16_t - WebRtcIsac_kLpcShapeEntropySearchUb12[UB_LPC_ORDER * UB_LPC_VEC_PER_FRAME]; - -extern const uint16_t WebRtcIsac_kLpcShapeCdfVec0Ub12[14]; - -extern const uint16_t WebRtcIsac_kLpcShapeCdfVec1Ub12[16]; - -extern const uint16_t WebRtcIsac_kLpcShapeCdfVec2Ub12[20]; - -extern const uint16_t WebRtcIsac_kLpcShapeCdfVec3Ub12[28]; - -extern const uint16_t WebRtcIsac_kLpcShapeCdfVec4Ub12[20]; - -extern const uint16_t WebRtcIsac_kLpcShapeCdfVec5Ub12[25]; - -extern const uint16_t WebRtcIsac_kLpcShapeCdfVec6Ub12[33]; - -extern const uint16_t WebRtcIsac_kLpcShapeCdfVec7Ub12[49]; - -extern const uint16_t* - WebRtcIsac_kLpcShapeCdfMatUb12[UB_LPC_ORDER * UB_LPC_VEC_PER_FRAME]; - -#endif // MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_LPC_SHAPE_SWB12_TABLES_H_ diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb16_tables.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb16_tables.c deleted file mode 100644 index 59617fd2746e..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb16_tables.c +++ /dev/null @@ -1,247 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * SWB16_KLT_Tables.c - * - * This file defines tables used for entropy coding of LPC shape of - * upper-band signal if the bandwidth is 16 kHz. - * - */ - -#include "modules/audio_coding/codecs/isac/main/source/lpc_shape_swb16_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/settings.h" - -/* -* Mean value of LAR -*/ -const double WebRtcIsac_kMeanLarUb16[UB_LPC_ORDER] = -{ -0.454978, 0.364747, 0.102999, 0.104523 -}; - -/* -* A rotation matrix to decorrelate intra-vector correlation, -* i.e. correlation among components of LAR vector. 
-*/ -const double WebRtcIsac_kIintraVecDecorrMatUb16[UB_LPC_ORDER][UB_LPC_ORDER] = -{ - {-0.020528, -0.085858, -0.002431, 0.996093}, - {-0.033155, 0.036102, 0.998786, 0.004866}, - { 0.202627, 0.974853, -0.028940, 0.088132}, - {-0.978479, 0.202454, -0.039785, -0.002811} -}; - -/* -* A rotation matrix to remove correlation among LAR coefficients -* of different LAR vectors. One might guess that decorrelation matrix -* for the first component should differ from the second component -* but we haven't observed a significant benefit of having different -* decorrelation matrices for different components. -*/ -const double WebRtcIsac_kInterVecDecorrMatUb16 -[UB16_LPC_VEC_PER_FRAME][UB16_LPC_VEC_PER_FRAME] = -{ - { 0.291675, -0.515786, 0.644927, 0.482658}, - {-0.647220, 0.479712, 0.289556, 0.516856}, - { 0.643084, 0.485489, -0.289307, 0.516763}, - {-0.287185, -0.517823, -0.645389, 0.482553} -}; - -/* -* The following 16 vectors define CDF of 16 decorrelated LAR -* coefficients. -*/ -const uint16_t WebRtcIsac_kLpcShapeCdfVec01Ub16[14] = -{ - 0, 2, 20, 159, 1034, 5688, 20892, 44653, - 59849, 64485, 65383, 65518, 65534, 65535 -}; - -const uint16_t WebRtcIsac_kLpcShapeCdfVec1Ub16[16] = -{ - 0, 1, 7, 43, 276, 1496, 6681, 21653, - 43891, 58859, 64022, 65248, 65489, 65529, 65534, 65535 -}; - -const uint16_t WebRtcIsac_kLpcShapeCdfVec2Ub16[18] = -{ - 0, 1, 9, 54, 238, 933, 3192, 9461, - 23226, 42146, 56138, 62413, 64623, 65300, 65473, 65521, - 65533, 65535 -}; - -const uint16_t WebRtcIsac_kLpcShapeCdfVec3Ub16[30] = -{ - 0, 2, 4, 8, 17, 36, 75, 155, - 329, 683, 1376, 2662, 5047, 9508, 17526, 29027, - 40363, 48997, 55096, 59180, 61789, 63407, 64400, 64967, - 65273, 65429, 65497, 65526, 65534, 65535 -}; - -const uint16_t WebRtcIsac_kLpcShapeCdfVec4Ub16[16] = -{ - 0, 1, 10, 63, 361, 1785, 7407, 22242, - 43337, 58125, 63729, 65181, 65472, 65527, 65534, 65535 -}; - -const uint16_t WebRtcIsac_kLpcShapeCdfVec5Ub16[17] = -{ - 0, 1, 7, 29, 134, 599, 2443, 8590, - 22962, 42635, 56911, 63060, 64940, 65408, 65513, 65531, - 65535 -}; - -const uint16_t WebRtcIsac_kLpcShapeCdfVec6Ub16[21] = -{ - 0, 1, 5, 16, 57, 191, 611, 1808, - 4847, 11755, 24612, 40910, 53789, 60698, 63729, 64924, - 65346, 65486, 65523, 65532, 65535 -}; - -const uint16_t WebRtcIsac_kLpcShapeCdfVec7Ub16[36] = -{ - 0, 1, 4, 12, 25, 55, 104, 184, - 314, 539, 926, 1550, 2479, 3861, 5892, 8845, - 13281, 20018, 29019, 38029, 45581, 51557, 56057, 59284, - 61517, 63047, 64030, 64648, 65031, 65261, 65402, 65480, - 65518, 65530, 65534, 65535 -}; - -const uint16_t WebRtcIsac_kLpcShapeCdfVec8Ub16[21] = -{ - 0, 1, 2, 7, 26, 103, 351, 1149, - 3583, 10204, 23846, 41711, 55361, 61917, 64382, 65186, - 65433, 65506, 65528, 65534, 65535 -}; - -const uint16_t WebRtcIsac_kLpcShapeCdfVec01Ub160[21] = -{ - 0, 6, 19, 63, 205, 638, 1799, 4784, - 11721, 24494, 40803, 53805, 60886, 63822, 64931, 65333, - 65472, 65517, 65530, 65533, 65535 -}; - -const uint16_t WebRtcIsac_kLpcShapeCdfVec01Ub161[28] = -{ - 0, 1, 3, 11, 31, 86, 221, 506, - 1101, 2296, 4486, 8477, 15356, 26079, 38941, 49952, - 57165, 61257, 63426, 64549, 65097, 65351, 65463, 65510, - 65526, 65532, 65534, 65535 -}; - -const uint16_t WebRtcIsac_kLpcShapeCdfVec01Ub162[55] = -{ - 0, 3, 12, 23, 42, 65, 89, 115, - 150, 195, 248, 327, 430, 580, 784, 1099, - 1586, 2358, 3651, 5899, 9568, 14312, 19158, 23776, - 28267, 32663, 36991, 41153, 45098, 48680, 51870, 54729, - 57141, 59158, 60772, 62029, 63000, 63761, 64322, 64728, - 65000, 65192, 65321, 65411, 65463, 65496, 65514, 65523, - 65527, 65529, 65531, 65532, 65533, 65534, 
65535 -}; - -const uint16_t WebRtcIsac_kLpcShapeCdfVec01Ub163[26] = -{ - 0, 2, 4, 10, 21, 48, 114, 280, - 701, 1765, 4555, 11270, 24267, 41213, 54285, 61003, - 63767, 64840, 65254, 65421, 65489, 65514, 65526, 65532, - 65534, 65535 -}; - -const uint16_t WebRtcIsac_kLpcShapeCdfVec01Ub164[28] = -{ - 0, 1, 3, 6, 15, 36, 82, 196, - 453, 1087, 2557, 5923, 13016, 25366, 40449, 52582, - 59539, 62896, 64389, 65033, 65316, 65442, 65494, 65519, - 65529, 65533, 65534, 65535 -}; - -const uint16_t WebRtcIsac_kLpcShapeCdfVec01Ub165[34] = -{ - 0, 2, 4, 8, 18, 35, 73, 146, - 279, 524, 980, 1789, 3235, 5784, 10040, 16998, - 27070, 38543, 48499, 55421, 59712, 62257, 63748, 64591, - 65041, 65278, 65410, 65474, 65508, 65522, 65530, 65533, - 65534, 65535 -}; - -const uint16_t WebRtcIsac_kLpcShapeCdfVec01Ub166[71] = -{ - 0, 1, 2, 6, 13, 26, 55, 92, - 141, 191, 242, 296, 355, 429, 522, 636, - 777, 947, 1162, 1428, 1753, 2137, 2605, 3140, - 3743, 4409, 5164, 6016, 6982, 8118, 9451, 10993, - 12754, 14810, 17130, 19780, 22864, 26424, 30547, 35222, - 40140, 44716, 48698, 52056, 54850, 57162, 59068, 60643, - 61877, 62827, 63561, 64113, 64519, 64807, 65019, 65167, - 65272, 65343, 65399, 65440, 65471, 65487, 65500, 65509, - 65518, 65524, 65527, 65531, 65533, 65534, 65535 -}; - -/* -* An array of pointers to CDFs of decorrelated LARs -*/ -const uint16_t* WebRtcIsac_kLpcShapeCdfMatUb16 -[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME] = { - WebRtcIsac_kLpcShapeCdfVec01Ub16, - WebRtcIsac_kLpcShapeCdfVec1Ub16, - WebRtcIsac_kLpcShapeCdfVec2Ub16, - WebRtcIsac_kLpcShapeCdfVec3Ub16, - WebRtcIsac_kLpcShapeCdfVec4Ub16, - WebRtcIsac_kLpcShapeCdfVec5Ub16, - WebRtcIsac_kLpcShapeCdfVec6Ub16, - WebRtcIsac_kLpcShapeCdfVec7Ub16, - WebRtcIsac_kLpcShapeCdfVec8Ub16, - WebRtcIsac_kLpcShapeCdfVec01Ub160, - WebRtcIsac_kLpcShapeCdfVec01Ub161, - WebRtcIsac_kLpcShapeCdfVec01Ub162, - WebRtcIsac_kLpcShapeCdfVec01Ub163, - WebRtcIsac_kLpcShapeCdfVec01Ub164, - WebRtcIsac_kLpcShapeCdfVec01Ub165, - WebRtcIsac_kLpcShapeCdfVec01Ub166 -}; - -/* -* The smallest reconstruction points for quantiztion of LAR coefficients. -*/ -const double WebRtcIsac_kLpcShapeLeftRecPointUb16 -[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME] = -{ - -0.8250, -0.9750, -1.1250, -2.1750, -0.9750, -1.1250, -1.4250, - -2.6250, -1.4250, -1.2750, -1.8750, -3.6750, -1.7250, -1.8750, - -2.3250, -5.4750 -}; - -/* -* Number of reconstruction points of quantizers for LAR coefficients. -*/ -const int16_t WebRtcIsac_kLpcShapeNumRecPointUb16 -[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME] = -{ - 13, 15, 17, 29, 15, 16, 20, 35, 20, - 20, 27, 54, 25, 27, 33, 70 -}; - -/* -* Starting index for entropy decoder to search for the right interval, -* one entry per LAR coefficient -*/ -const uint16_t WebRtcIsac_kLpcShapeEntropySearchUb16 -[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME] = -{ - 6, 7, 8, 14, 7, 8, 10, 17, 10, - 10, 13, 27, 12, 13, 16, 35 -}; - -/* -* LAR quantization step-size. -*/ -const double WebRtcIsac_kLpcShapeQStepSizeUb16 = 0.150000; diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb16_tables.h b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb16_tables.h deleted file mode 100644 index 51101db936bb..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb16_tables.h +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * lpc_shape_swb16_tables.h - * - * This file declares tables used for entropy coding of LPC shape of - * upper-band signal if the bandwidth is 16 kHz. - * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_LPC_SHAPE_SWB16_TABLES_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_LPC_SHAPE_SWB16_TABLES_H_ - -#include - -#include "modules/audio_coding/codecs/isac/main/source/settings.h" - -extern const double WebRtcIsac_kMeanLarUb16[UB_LPC_ORDER]; - -extern const double WebRtcIsac_kIintraVecDecorrMatUb16[UB_LPC_ORDER] - [UB_LPC_ORDER]; - -extern const double WebRtcIsac_kInterVecDecorrMatUb16[UB16_LPC_VEC_PER_FRAME] - [UB16_LPC_VEC_PER_FRAME]; - -extern const uint16_t WebRtcIsac_kLpcShapeCdfVec01Ub16[14]; - -extern const uint16_t WebRtcIsac_kLpcShapeCdfVec1Ub16[16]; - -extern const uint16_t WebRtcIsac_kLpcShapeCdfVec2Ub16[18]; - -extern const uint16_t WebRtcIsac_kLpcShapeCdfVec3Ub16[30]; - -extern const uint16_t WebRtcIsac_kLpcShapeCdfVec4Ub16[16]; - -extern const uint16_t WebRtcIsac_kLpcShapeCdfVec5Ub16[17]; - -extern const uint16_t WebRtcIsac_kLpcShapeCdfVec6Ub16[21]; - -extern const uint16_t WebRtcIsac_kLpcShapeCdfVec7Ub16[36]; - -extern const uint16_t WebRtcIsac_kLpcShapeCdfVec8Ub16[21]; - -extern const uint16_t WebRtcIsac_kLpcShapeCdfVec01Ub160[21]; - -extern const uint16_t WebRtcIsac_kLpcShapeCdfVec01Ub161[28]; - -extern const uint16_t WebRtcIsac_kLpcShapeCdfVec01Ub162[55]; - -extern const uint16_t WebRtcIsac_kLpcShapeCdfVec01Ub163[26]; - -extern const uint16_t WebRtcIsac_kLpcShapeCdfVec01Ub164[28]; - -extern const uint16_t WebRtcIsac_kLpcShapeCdfVec01Ub165[34]; - -extern const uint16_t WebRtcIsac_kLpcShapeCdfVec01Ub166[71]; - -extern const uint16_t* - WebRtcIsac_kLpcShapeCdfMatUb16[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME]; - -extern const double - WebRtcIsac_kLpcShapeLeftRecPointUb16[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME]; - -extern const int16_t - WebRtcIsac_kLpcShapeNumRecPointUb16[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME]; - -extern const uint16_t - WebRtcIsac_kLpcShapeEntropySearchUb16[UB_LPC_ORDER * - UB16_LPC_VEC_PER_FRAME]; - -extern const double WebRtcIsac_kLpcShapeQStepSizeUb16; - -#endif // MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_LPC_SHAPE_SWB16_TABLES_H_ diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/lpc_tables.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/lpc_tables.c deleted file mode 100644 index 461b92eb8a44..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/lpc_tables.c +++ /dev/null @@ -1,601 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/* coding tables for the KLT coefficients */ - -#include "modules/audio_coding/codecs/isac/main/source/lpc_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/settings.h" - -/* cdf array for model indicator */ -const uint16_t WebRtcIsac_kQKltModelCdf[4] = { - 0, 15434, 37548, 65535 }; - -/* pointer to cdf array for model indicator */ -const uint16_t *WebRtcIsac_kQKltModelCdfPtr[1] = { - WebRtcIsac_kQKltModelCdf }; - -/* initial cdf index for decoder of model indicator */ -const uint16_t WebRtcIsac_kQKltModelInitIndex[1] = { 1 }; - -/* offset to go from rounded value to quantization index */ -const short WebRtcIsac_kQKltQuantMinGain[12] = { - 3, 6, 4, 6, 6, 9, 5, 16, 11, 34, 32, 47 }; - - -const short WebRtcIsac_kQKltQuantMinShape[108] = { - 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, - 1, 1, 1, 1, 2, 2, 2, 3, 0, 0, - 0, 0, 1, 0, 0, 0, 0, 1, 1, 1, - 1, 1, 1, 2, 2, 3, 0, 0, 0, 0, - 1, 0, 1, 1, 1, 1, 1, 1, 1, 2, - 2, 4, 3, 5, 0, 0, 0, 0, 1, 1, - 1, 1, 1, 1, 2, 1, 2, 2, 3, 4, - 4, 7, 0, 0, 1, 1, 1, 1, 1, 1, - 1, 2, 3, 2, 3, 4, 4, 5, 7, 13, - 0, 1, 1, 2, 3, 2, 2, 2, 4, 4, - 5, 6, 7, 11, 9, 13, 12, 26 }; - -/* maximum quantization index */ -const uint16_t WebRtcIsac_kQKltMaxIndGain[12] = { - 6, 12, 8, 14, 10, 19, 12, 31, 22, 56, 52, 138 }; - -const uint16_t WebRtcIsac_kQKltMaxIndShape[108] = { - 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, - 2, 2, 2, 2, 4, 4, 5, 6, 0, 0, - 0, 0, 1, 0, 0, 0, 0, 1, 2, 2, - 2, 2, 3, 4, 5, 7, 0, 0, 0, 0, - 2, 0, 2, 2, 2, 2, 3, 2, 2, 4, - 4, 6, 6, 9, 0, 0, 0, 0, 2, 2, - 2, 2, 2, 2, 3, 2, 4, 4, 7, 7, - 9, 13, 0, 0, 2, 2, 2, 2, 2, 2, - 3, 4, 5, 4, 6, 8, 8, 10, 16, 25, - 0, 2, 2, 4, 5, 4, 4, 4, 7, 8, - 9, 10, 13, 19, 17, 23, 25, 49 }; - -/* index offset */ -const uint16_t WebRtcIsac_kQKltOffsetGain[12] = { - 0, 7, 20, 29, 44, 55, 75, 88, 120, 143, 200, 253 }; - -const uint16_t WebRtcIsac_kQKltOffsetShape[108] = { - 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, - 11, 14, 17, 20, 23, 28, 33, 39, 46, 47, - 48, 49, 50, 52, 53, 54, 55, 56, 58, 61, - 64, 67, 70, 74, 79, 85, 93, 94, 95, 96, - 97, 100, 101, 104, 107, 110, 113, 117, 120, 123, - 128, 133, 140, 147, 157, 158, 159, 160, 161, 164, - 167, 170, 173, 176, 179, 183, 186, 191, 196, 204, - 212, 222, 236, 237, 238, 241, 244, 247, 250, 253, - 256, 260, 265, 271, 276, 283, 292, 301, 312, 329, - 355, 356, 359, 362, 367, 373, 378, 383, 388, 396, - 405, 415, 426, 440, 460, 478, 502, 528 }; - -/* initial cdf index for KLT coefficients */ -const uint16_t WebRtcIsac_kQKltInitIndexGain[12] = { - 3, 6, 4, 7, 5, 10, 6, 16, 11, 28, 26, 69}; - -const uint16_t WebRtcIsac_kQKltInitIndexShape[108] = { - 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, - 1, 1, 1, 1, 2, 2, 3, 3, 0, 0, - 0, 0, 1, 0, 0, 0, 0, 1, 1, 1, - 1, 1, 2, 2, 3, 4, 0, 0, 0, 0, - 1, 0, 1, 1, 1, 1, 2, 1, 1, 2, - 2, 3, 3, 5, 0, 0, 0, 0, 1, 1, - 1, 1, 1, 1, 2, 1, 2, 2, 4, 4, - 5, 7, 0, 0, 1, 1, 1, 1, 1, 1, - 2, 2, 3, 2, 3, 4, 4, 5, 8, 13, - 0, 1, 1, 2, 3, 2, 2, 2, 4, 4, - 5, 5, 7, 10, 9, 12, 13, 25 }; - - -/* quantizer representation levels */ -const double WebRtcIsac_kQKltLevelsGain[392] = { - -2.78127126, -1.76745590, -0.77913790, -0.00437329, 0.79961206, - 1.81775776, 2.81389782, -5.78753143, -4.88384084, -3.89320940, - -2.88133610, -1.92859977, -0.86347396, 0.02003888, 0.86140400, - 1.89667156, 2.97134967, 3.98781964, 4.91727277, 5.82865898, - -4.11195874, -2.80898424, -1.87547977, -0.80943825, -0.00679084, - 0.79573851, 1.83953397, 2.67586037, 3.76274082, -6.10933968, - -4.93034581, -3.89281296, -2.91530625, -1.89684163, -0.85319130, - -0.02275767, 0.86862017, 1.91578276, 2.96107339, 3.96543056, - 4.91369908, 
5.91058154, 6.83848343, 8.07136925, -5.87470395, - -4.84703049, -3.84284597, -2.86168446, -1.89290192, -0.82798145, - -0.00080013, 0.82594974, 1.85754329, 2.88351798, 3.96172628, - -8.85684885, -7.87387461, -6.97811862, -5.93256270, -4.94301439, - -3.95513701, -2.96041544, -1.94031192, -0.87961478, -0.00456201, - 0.89911505, 1.91723376, 2.94011511, 3.93302540, 4.97990967, - 5.93133404, 7.02181199, 7.92407762, 8.80155440, 10.04665814, - -4.82396678, -3.85612158, -2.89482244, -1.89558408, -0.90036978, - -0.00677823, 0.90607989, 1.90937981, 2.91175777, 3.91637730, - 4.97565723, 5.84771228, 7.11145863, -16.07879840, -15.03776309, - -13.93905670, -12.95671800, -11.89171202, -10.95820934, -9.95923714, - -8.94357334, -7.99068299, -6.97481009, -5.94826231, -4.96673988, - -3.97490466, -2.97846970, -1.95130435, -0.94215262, -0.01444043, - 0.96770704, 1.95848598, 2.94107862, 3.95666119, 4.97253085, - 5.97191122, 6.93277360, 7.96608727, 8.87958779, 10.00264269, - 10.86560820, 12.07449071, 13.04491775, 13.97507061, 14.91845261, - -10.85696295, -9.83365357, -9.01245635, -7.95915145, -6.95625003, - -5.95362618, -4.93468444, -3.98760978, -2.95044407, -1.97041277, - -0.97701799, -0.00840234, 0.97834289, 1.98361415, 2.97802439, - 3.96415871, 4.95369042, 5.94101770, 6.92756798, 7.94063998, - 8.85951828, 9.97077022, 11.00068503, -33.92030406, -32.81426422, - -32.00000000, -31.13243639, -30.11886909, -29.06017570, -28.12598824, - -27.22045482, -25.81215858, -25.07849962, -23.93018013, -23.02097643, - -21.89529725, -20.99091085, -19.98889048, -18.94327044, -17.96562071, - -16.96126218, -15.95054062, -14.98516200, -13.97101012, -13.02106500, - -11.98438006, -11.03216748, -9.95930286, -8.97043946, -7.98085082, - -6.98360995, -5.98998802, -4.98668173, -4.00032906, -3.00420619, - -1.98701132, -0.99324682, -0.00609324, 0.98297834, 1.99483076, - 3.00305044, 3.97142097, 4.97525759, 5.98612258, 6.97448236, - 7.97575900, 9.01086211, 9.98665542, 11.00541438, 11.98078628, - 12.92352471, 14.06849675, 14.99949430, 15.94904834, 16.97440321, - 18.04040916, 18.88987609, 20.05312391, 21.00000000, 21.79443341, - -31.98578825, -31.00000000, -29.89060567, -28.98555686, -27.97114102, - -26.84935410, -26.02402230, -24.94195278, -23.92336849, -22.95552382, - -21.97932836, -20.96055470, -19.99649553, -19.03436122, -17.96706525, - -17.01139515, -16.01363516, -14.99154248, -14.00298333, -12.99630613, - -11.99955519, -10.99000421, -10.00819092, -8.99763648, -7.98431793, - -7.01769025, -5.99604690, -4.99980697, -3.99334671, -3.01748192, - -2.02051217, -1.00848371, -0.01942358, 1.00477757, 1.95477872, - 2.98593031, 3.98779079, 4.96862849, 6.02694771, 6.93983733, - 7.89874717, 8.99615862, 10.02367921, 10.96293452, 11.84351528, - 12.92207187, 13.85122329, 15.05146877, 15.99371264, 17.00000000, - 18.00000000, 19.00000000, 19.82763573, -47.00000000, -46.00000000, - -44.87138498, -44.00000000, -43.00000000, -42.00000000, -41.00000000, - -39.88966612, -38.98913239, -37.80306486, -37.23584325, -35.94200288, - -34.99881301, -34.11361858, -33.06507360, -32.13129135, -30.90891364, - -29.81511907, -28.99250380, -28.04535391, -26.99767800, -26.04418164, - -24.95687851, -24.04865595, -23.03392645, -21.89366707, -20.93517364, - -19.99388660, -18.91620943, -18.03749683, -16.99532379, -15.98683813, - -15.06421479, -13.99359211, -12.99714098, -11.97022520, -10.98500279, - -9.98834422, -8.95729330, -8.01232284, -7.00253661, -5.99681626, - -5.01207817, -3.95914904, -3.01232178, -1.96615919, -0.97687670, - 0.01228030, 0.98412288, 2.01753544, 3.00580570, 
3.97783510, - 4.98846894, 6.01321400, 7.00867732, 8.00416375, 9.01771966, - 9.98637729, 10.98255180, 11.99194163, 13.01807333, 14.00999545, - 15.00118556, 16.00089224, 17.00584148, 17.98251763, 18.99942091, - 19.96917690, 20.97839265, 21.98207297, 23.00171271, 23.99930737, - 24.99746061, 26.00936304, 26.98240132, 28.01126868, 29.01395915, - 29.98153507, 31.01376711, 31.99876818, 33.00475317, 33.99753994, - 34.99493913, 35.98933585, 36.95620160, 37.98428461, 38.99317544, - 40.01832073, 40.98048133, 41.95999283, 42.98232091, 43.96523612, - 44.99574268, 45.99524194, 47.05464025, 48.03821548, 48.99354366, - 49.96400411, 50.98017973, 51.95184408, 52.96291806, 54.00194392, - 54.96603783, 55.95623778, 57.03076595, 58.05889901, 58.99081551, - 59.97928121, 61.05071612, 62.03971580, 63.01286038, 64.01290338, - 65.02074503, 65.99454594, 67.00399425, 67.96571257, 68.95305727, - 69.92030664, 70.95594862, 71.98088567, 73.04764124, 74.00285480, - 75.02696330, 75.89837673, 76.93459997, 78.16266309, 78.83317543, - 80.00000000, 80.87251574, 82.09803524, 83.10671664, 84.00000000, - 84.77023523, 86.00000000, 87.00000000, 87.92946897, 88.69159118, - 90.00000000, 90.90535270 }; - -const double WebRtcIsac_kQKltLevelsShape[578] = { - 0.00032397, 0.00008053, -0.00061202, -0.00012620, 0.00030437, - 0.00054764, -0.00027902, 0.00069360, 0.00029449, -0.80219239, - 0.00091089, -0.74514927, -0.00094283, 0.64030631, -0.60509119, - 0.00035575, 0.61851665, -0.62129957, 0.00375219, 0.60054900, - -0.61554359, 0.00054977, 0.63362016, -1.73118727, -0.65422341, - 0.00524568, 0.66165298, 1.76785515, -1.83182018, -0.65997434, - -0.00011887, 0.67524299, 1.79933938, -1.76344480, -0.72547708, - -0.00133017, 0.73104704, 1.75305377, 2.85164534, -2.80423916, - -1.71959639, -0.75419722, -0.00329945, 0.77196760, 1.72211069, - 2.87339653, 0.00031089, -0.00015311, 0.00018201, -0.00035035, - -0.77357251, 0.00154647, -0.00047625, -0.00045299, 0.00086590, - 0.00044762, -0.83383829, 0.00024787, -0.68526258, -0.00122472, - 0.64643255, -0.60904942, -0.00448987, 0.62309184, -0.59626442, - -0.00574132, 0.62296546, -0.63222115, 0.00013441, 0.63609545, - -0.66911055, -0.00369971, 0.66346095, 2.07281301, -1.77184694, - -0.67640425, -0.00010145, 0.64818392, 1.74948973, -1.69420224, - -0.71943894, -0.00004680, 0.75303493, 1.81075983, 2.80610041, - -2.80005755, -1.79866753, -0.77409777, -0.00084220, 0.80141293, - 1.78291081, 2.73954236, 3.82994169, 0.00015140, -0.00012766, - -0.00034241, -0.00119125, -0.76113497, 0.00069246, 0.76722027, - 0.00132862, -0.69107530, 0.00010656, 0.77061578, -0.78012970, - 0.00095947, 0.77828502, -0.64787758, 0.00217168, 0.63050167, - -0.58601125, 0.00306596, 0.59466308, -0.58603410, 0.00059779, - 0.64257970, 1.76512766, -0.61193600, -0.00259517, 0.59767574, - -0.61026273, 0.00315811, 0.61725479, -1.69169719, -0.65816029, - 0.00067575, 0.65576890, 2.00000000, -1.72689193, -0.69780808, - -0.00040990, 0.70668487, 1.74198458, -3.79028154, -3.00000000, - -1.73194459, -0.70179341, -0.00106695, 0.71302629, 1.76849782, - -2.89332364, -1.78585007, -0.78731491, -0.00132610, 0.79692976, - 1.75247009, 2.97828682, -5.26238694, -3.69559829, -2.87286122, - -1.84908818, -0.84434577, -0.01167975, 0.84641753, 1.84087672, - 2.87628156, 3.83556679, -0.00190204, 0.00092642, 0.00354385, - -0.00012982, -0.67742785, 0.00229509, 0.64935672, -0.58444751, - 0.00470733, 0.57299534, -0.58456202, -0.00097715, 0.64593607, - -0.64060330, -0.00638534, 0.59680157, -0.59287537, 0.00490772, - 0.58919707, -0.60306173, -0.00417464, 0.60562100, -1.75218757, - 
-0.63018569, -0.00225922, 0.63863300, -0.63949939, -0.00126421, - 0.64268914, -1.75851182, -0.68318060, 0.00510418, 0.69049211, - 1.88178506, -1.71136148, -0.72710534, -0.00815559, 0.73412917, - 1.79996711, -2.77111145, -1.73940498, -0.78212945, 0.01074476, - 0.77688916, 1.76873972, 2.87281379, 3.77554698, -3.75832725, - -2.95463235, -1.80451491, -0.80017226, 0.00149902, 0.80729206, - 1.78265046, 2.89391793, -3.78236148, -2.83640598, -1.82532067, - -0.88844327, -0.00620952, 0.88208030, 1.85757631, 2.81712391, - 3.88430176, 5.16179367, -7.00000000, -5.93805408, -4.87172597, - -3.87524433, -2.89399744, -1.92359563, -0.92136341, -0.00172725, - 0.93087018, 1.90528280, 2.89809686, 3.88085708, 4.89147740, - 5.89078692, -0.00239502, 0.00312564, -1.00000000, 0.00178325, - 1.00000000, -0.62198029, 0.00143254, 0.65344051, -0.59851220, - -0.00676987, 0.61510140, -0.58894151, 0.00385055, 0.59794203, - -0.59808568, -0.00038214, 0.57625703, -0.63009713, -0.01107985, - 0.61278758, -0.64206758, -0.00154369, 0.65480598, 1.80604162, - -1.80909286, -0.67810514, 0.00205762, 0.68571097, 1.79453891, - -3.22682422, -1.73808453, -0.71870305, -0.00738594, 0.71486172, - 1.73005326, -1.66891897, -0.73689615, -0.00616203, 0.74262409, - 1.73807899, -2.92417482, -1.73866741, -0.78133871, 0.00764425, - 0.80027264, 1.78668732, 2.74992588, -4.00000000, -2.75578740, - -1.83697516, -0.83117035, -0.00355191, 0.83527172, 1.82814700, - 2.77377675, 3.80718693, -3.81667698, -2.83575471, -1.83372350, - -0.86579471, 0.00547578, 0.87582281, 1.82858793, 2.87265007, - 3.91405377, -4.87521600, -3.78999094, -2.86437014, -1.86964365, - -0.90618018, 0.00128243, 0.91497811, 1.87374952, 2.83199819, - 3.91519130, 4.76632822, -6.68713448, -6.01252467, -4.94587936, - -3.88795368, -2.91299088, -1.92592211, -0.95504570, -0.00089980, - 0.94565200, 1.93239633, 2.91832808, 3.91363475, 4.88920034, - 5.96471415, 6.83905252, 7.86195009, 8.81571018,-12.96141759, - -11.73039516,-10.96459719, -9.97382433, -9.04414433, -7.89460619, - -6.96628608, -5.93236595, -4.93337924, -3.95479990, -2.96451499, - -1.96635876, -0.97271229, -0.00402238, 0.98343930, 1.98348291, - 2.96641164, 3.95456471, 4.95517089, 5.98975714, 6.90322073, - 7.90468849, 8.85639467, 9.97255498, 10.79006309, 11.81988596, - 0.04950500, -1.00000000, -0.01226628, 1.00000000, -0.59479469, - -0.10438305, 0.59822144, -2.00000000, -0.67109149, -0.09256692, - 0.65171621, 2.00000000, -3.00000000, -1.68391999, -0.76681039, - -0.03354151, 0.71509146, 1.77615472, -2.00000000, -0.68661511, - -0.02497881, 0.66478398, 2.00000000, -2.00000000, -0.67032784, - -0.00920582, 0.64892756, 2.00000000, -2.00000000, -0.68561894, - 0.03641869, 0.73021611, 1.68293863, -4.00000000, -2.72024184, - -1.80096059, -0.81696185, 0.03604685, 0.79232033, 1.70070730, - 3.00000000, -4.00000000, -2.71795670, -1.80482986, -0.86001162, - 0.03764903, 0.87723968, 1.79970771, 2.72685932, 3.67589143, - -5.00000000, -4.00000000, -2.85492548, -1.78996365, -0.83250358, - -0.01376828, 0.84195506, 1.78161105, 2.76754458, 4.00000000, - -6.00000000, -5.00000000, -3.82268811, -2.77563624, -1.82608163, - -0.86486114, -0.02671886, 0.86693165, 1.88422879, 2.86248347, - 3.95632216, -7.00000000, -6.00000000, -5.00000000, -3.77533988, - -2.86391432, -1.87052039, -0.90513658, 0.06271236, 0.91083620, - 1.85734756, 2.86031688, 3.82019418, 4.94420394, 6.00000000, - -11.00000000,-10.00000000, -9.00000000, -8.00000000, -6.91952415, - -6.00000000, -4.92044374, -3.87845165, -2.87392362, -1.88413020, - -0.91915740, 0.00318517, 0.91602800, 1.89664838, 
2.88925058, - 3.84123856, 4.78988651, 5.94526812, 6.81953917, 8.00000000, - -9.00000000, -8.00000000, -7.03319143, -5.94530963, -4.86669720, - -3.92438007, -2.88620396, -1.92848070, -0.94365985, 0.01671855, - 0.97349410, 1.93419878, 2.89740109, 3.89662823, 4.83235583, - 5.88106535, 6.80328232, 8.00000000,-13.00000000,-12.00000000, - -11.00000000,-10.00000000, -9.00000000, -7.86033489, -6.83344055, - -5.89844215, -4.90811454, -3.94841298, -2.95820490, -1.98627966, - -0.99161468, -0.02286136, 0.96055651, 1.95052433, 2.93969396, - 3.94304346, 4.88522624, 5.87434241, 6.78309433, 7.87244101, - 9.00000000, 10.00000000,-12.09117356,-11.00000000,-10.00000000, - -8.84766108, -7.86934236, -6.98544896, -5.94233429, -4.95583292, - -3.95575986, -2.97085529, -1.98955811, -0.99359873, -0.00485413, - 0.98298870, 1.98093258, 2.96430203, 3.95540216, 4.96915010, - 5.96775124, 6.99236918, 7.96503302, 8.99864542, 9.85857723, - 10.96541926, 11.91647197, 12.71060069,-26.00000000,-25.00000000, - -24.00585596,-23.11642573,-22.14271284,-20.89800711,-19.87815799, - -19.05036354,-17.88555651,-16.86471209,-15.97711073,-14.94012359, - -14.02661226,-12.98243228,-11.97489256,-10.97402777, -9.96425624, - -9.01085220, -7.97372506, -6.98795002, -5.97271328, -5.00191694, - -3.98055849, -2.98458048, -1.99470442, -0.99656768, -0.00825666, - 1.00272004, 1.99922218, 2.99357669, 4.01407905, 5.01003897, - 5.98115528, 7.00018958, 8.00338125, 8.98981046, 9.98990318, - 10.96341479, 11.96866930, 12.99175139, 13.94580443, 14.95745083, - 15.98992869, 16.97484646, 17.99630043, 18.93396897, 19.88347741, - 20.96532482, 21.92191032, 23.22314702 }; - - -/* cdf tables for quantizer indices */ -const uint16_t WebRtcIsac_kQKltCdfGain[404] = { - 0, 13, 301, 3730, 61784, 65167, 65489, 65535, 0, 17, - 142, 314, 929, 2466, 7678, 56450, 63463, 64740, 65204, 65426, - 65527, 65535, 0, 8, 100, 724, 6301, 60105, 65125, 65510, - 65531, 65535, 0, 13, 117, 368, 1068, 3010, 11928, 53603, - 61177, 63404, 64505, 65108, 65422, 65502, 65531, 65535, 0, 4, - 17, 96, 410, 1859, 12125, 54361, 64103, 65305, 65497, 65535, - 0, 4, 88, 230, 469, 950, 1746, 3228, 6092, 16592, - 44756, 56848, 61256, 63308, 64325, 64920, 65309, 65460, 65502, - 65522, 65535, 0, 88, 352, 1675, 6339, 20749, 46686, 59284, 63525, - 64949, 65359, 65502, 65527, 65535, 0, 13, 38, 63, 117, - 234, 381, 641, 929, 1407, 2043, 2809, 4032, 5753, 8792, - 14407, 24308, 38941, 48947, 55403, 59293, 61411, 62688, 63630, - 64329, 64840, 65188, 65376, 65472, 65506, 65527, 65531, 65535, - 0, 8, 29, 75, 222, 615, 1327, 2801, 5623, 9931, 16094, 24966, - 34419, 43458, 50676, 56186, 60055, 62500, 63936, 64765, 65225, - 65435, 65514, 65535, 0, 8, 13, 15, 17, 21, 33, 59, - 71, 92, 151, 243, 360, 456, 674, 934, 1223, 1583, - 1989, 2504, 3031, 3617, 4354, 5154, 6163, 7411, 8780, 10747, - 12874, 15591, 18974, 23027, 27436, 32020, 36948, 41830, 46205, - 49797, 53042, 56094, 58418, 60360, 61763, 62818, 63559, 64103, - 64509, 64798, 65045, 65162, 65288, 65363, 65447, 65506, 65522, - 65531, 65533, 65535, 0, 4, 6, 25, 38, 71, 138, 264, 519, 808, - 1227, 1825, 2516, 3408, 4279, 5560, 7092, 9197, 11420, 14108, - 16947, 20300, 23926, 27459, 31164, 34827, 38575, 42178, 45540, - 48747, 51444, 54090, 56426, 58460, 60080, 61595, 62734, 63668, - 64275, 64673, 64936, 65112, 65217, 65334, 65426, 65464, 65477, - 65489, 65518, 65527, 65529, 65531, 65533, 65535, 0, 2, 4, 8, 10, - 12, 14, 16, 21, 33, 50, 71, 84, 92, 105, 138, 180, 255, 318, - 377, 435, 473, 511, 590, 682, 758, 913, 1097, 1256, 1449, 1671, - 1884, 2169, 2445, 2772, 3157, 
3563, 3944, 4375, 4848, 5334, 5820, - 6448, 7101, 7716, 8378, 9102, 9956, 10752, 11648, 12707, 13670, - 14758, 15910, 17187, 18472, 19627, 20649, 21951, 23169, 24283, - 25552, 26862, 28227, 29391, 30764, 31882, 33213, 34432, 35600, - 36910, 38116, 39464, 40729, 41872, 43144, 44371, 45514, 46762, - 47813, 48968, 50069, 51032, 51974, 52908, 53737, 54603, 55445, - 56282, 56990, 57572, 58191, 58840, 59410, 59887, 60264, 60607, - 60946, 61269, 61516, 61771, 61960, 62198, 62408, 62558, 62776, - 62985, 63207, 63408, 63546, 63739, 63906, 64070, 64237, 64371, - 64551, 64677, 64836, 64999, 65095, 65213, 65284, 65338, 65380, - 65426, 65447, 65472, 65485, 65487, 65489, 65502, 65510, 65512, - 65514, 65516, 65518, 65522, 65531, 65533, 65535 }; - - -const uint16_t WebRtcIsac_kQKltCdfShape[686] = { - 0, 65535, 0, 65535, 0, 65535, 0, 65535, 0, 65535, - 0, 65535, 0, 65535, 0, 65535, 0, 65535, 0, 4, - 65535, 0, 8, 65514, 65535, 0, 29, 65481, 65535, 0, - 121, 65439, 65535, 0, 239, 65284, 65535, 0, 8, 779, - 64999, 65527, 65535, 0, 8, 888, 64693, 65522, 65535, 0, - 29, 2604, 62843, 65497, 65531, 65535, 0, 25, 176, 4576, - 61164, 65275, 65527, 65535, 0, 65535, 0, 65535, 0, 65535, - 0, 65535, 0, 4, 65535, 0, 65535, 0, 65535, 0, - 65535, 0, 65535, 0, 4, 65535, 0, 33, 65502, 65535, - 0, 54, 65481, 65535, 0, 251, 65309, 65535, 0, 611, - 65074, 65535, 0, 1273, 64292, 65527, 65535, 0, 4, 1809, - 63940, 65518, 65535, 0, 88, 4392, 60603, 65426, 65531, 65535, - 0, 25, 419, 7046, 57756, 64961, 65514, 65531, 65535, 0, - 65535, 0, 65535, 0, 65535, 0, 65535, 0, 4, 65531, - 65535, 0, 65535, 0, 8, 65531, 65535, 0, 4, 65527, - 65535, 0, 17, 65510, 65535, 0, 42, 65481, 65535, 0, - 197, 65342, 65531, 65535, 0, 385, 65154, 65535, 0, 1005, - 64522, 65535, 0, 8, 1985, 63469, 65533, 65535, 0, 38, - 3119, 61884, 65514, 65535, 0, 4, 6, 67, 4961, 60804, - 65472, 65535, 0, 17, 565, 9182, 56538, 65087, 65514, 65535, - 0, 8, 63, 327, 2118, 14490, 52774, 63839, 65376, 65522, - 65535, 0, 65535, 0, 65535, 0, 65535, 0, 65535, 0, - 17, 65522, 65535, 0, 59, 65489, 65535, 0, 50, 65522, - 65535, 0, 54, 65489, 65535, 0, 310, 65179, 65535, 0, - 615, 64836, 65535, 0, 4, 1503, 63965, 65535, 0, 2780, - 63383, 65535, 0, 21, 3919, 61051, 65527, 65535, 0, 84, - 6674, 59929, 65435, 65535, 0, 4, 255, 7976, 55784, 65150, - 65518, 65531, 65535, 0, 4, 8, 582, 10726, 53465, 64949, - 65518, 65535, 0, 29, 339, 3006, 17555, 49517, 62956, 65200, - 65497, 65531, 65535, 0, 2, 33, 138, 565, 2324, 7670, - 22089, 45966, 58949, 63479, 64966, 65380, 65518, 65535, 0, 65535, - 0, 65535, 0, 2, 65533, 65535, 0, 46, 65514, 65535, - 0, 414, 65091, 65535, 0, 540, 64911, 65535, 0, 419, - 65162, 65535, 0, 976, 64790, 65535, 0, 2977, 62495, 65531, - 65535, 0, 4, 3852, 61034, 65527, 65535, 0, 4, 29, - 6021, 60243, 65468, 65535, 0, 84, 6711, 58066, 65418, 65535, - 0, 13, 281, 9550, 54917, 65125, 65506, 65535, 0, 2, - 63, 984, 12108, 52644, 64342, 65435, 65527, 65535, 0, 29, - 251, 2014, 14871, 47553, 62881, 65229, 65518, 65535, 0, 13, - 142, 749, 4220, 18497, 45200, 60913, 64823, 65426, 65527, 65535, - 0, 13, 71, 264, 1176, 3789, 10500, 24480, 43488, 56324, - 62315, 64493, 65242, 65464, 65514, 65522, 65531, 65535, 0, 4, - 13, 38, 109, 205, 448, 850, 1708, 3429, 6276, 11371, - 19221, 29734, 40955, 49391, 55411, 59460, 62102, 63793, 64656, - 65150, 65401, 65485, 65522, 65531, 65535, 0, 65535, 0, 2, 65533, - 65535, 0, 1160, 65476, 65535, 0, 2, 6640, 64763, 65533, - 65535, 0, 2, 38, 9923, 61009, 65527, 65535, 0, 2, - 4949, 63092, 65533, 65535, 0, 2, 3090, 63398, 65533, 65535, - 0, 2, 
2520, 58744, 65510, 65535, 0, 2, 13, 544, - 8784, 51403, 65148, 65533, 65535, 0, 2, 25, 1017, 10412, - 43550, 63651, 65489, 65527, 65535, 0, 2, 4, 29, 783, - 13377, 52462, 64524, 65495, 65533, 65535, 0, 2, 4, 6, - 100, 1817, 18451, 52590, 63559, 65376, 65531, 65535, 0, 2, - 4, 6, 46, 385, 2562, 11225, 37416, 60488, 65026, 65487, - 65529, 65533, 65535, 0, 2, 4, 6, 8, 10, 12, - 42, 222, 971, 5221, 19811, 45048, 60312, 64486, 65294, 65474, - 65525, 65529, 65533, 65535, 0, 2, 4, 8, 71, 167, - 666, 2533, 7875, 19622, 38082, 54359, 62108, 64633, 65290, 65495, - 65529, 65533, 65535, 0, 2, 4, 6, 8, 10, 13, - 109, 586, 1930, 4949, 11600, 22641, 36125, 48312, 56899, 61495, - 63927, 64932, 65389, 65489, 65518, 65531, 65533, 65535, 0, 4, - 6, 8, 67, 209, 712, 1838, 4195, 8432, 14432, 22834, - 31723, 40523, 48139, 53929, 57865, 60657, 62403, 63584, 64363, - 64907, 65167, 65372, 65472, 65514, 65535, 0, 2, 4, 13, 25, - 42, 46, 50, 75, 113, 147, 281, 448, 657, 909, - 1185, 1591, 1976, 2600, 3676, 5317, 7398, 9914, 12941, 16169, - 19477, 22885, 26464, 29851, 33360, 37228, 41139, 44802, 48654, - 52058, 55181, 57676, 59581, 61022, 62190, 63107, 63676, 64199, - 64547, 64924, 65158, 65313, 65430, 65481, 65518, 65535 }; - - -/* pointers to cdf tables for quantizer indices */ -const uint16_t *WebRtcIsac_kQKltCdfPtrGain[12] = { - WebRtcIsac_kQKltCdfGain +0 +0, WebRtcIsac_kQKltCdfGain +0 +8, - WebRtcIsac_kQKltCdfGain +0 +22, WebRtcIsac_kQKltCdfGain +0 +32, - WebRtcIsac_kQKltCdfGain +0 +48, WebRtcIsac_kQKltCdfGain +0 +60, - WebRtcIsac_kQKltCdfGain +0 +81, WebRtcIsac_kQKltCdfGain +0 +95, - WebRtcIsac_kQKltCdfGain +0 +128, WebRtcIsac_kQKltCdfGain +0 +152, - WebRtcIsac_kQKltCdfGain +0 +210, WebRtcIsac_kQKltCdfGain +0 +264 }; - -const uint16_t *WebRtcIsac_kQKltCdfPtrShape[108] = { - WebRtcIsac_kQKltCdfShape +0 +0, WebRtcIsac_kQKltCdfShape +0 +2, - WebRtcIsac_kQKltCdfShape +0 +4, WebRtcIsac_kQKltCdfShape +0 +6, - WebRtcIsac_kQKltCdfShape +0 +8, WebRtcIsac_kQKltCdfShape +0 +10, - WebRtcIsac_kQKltCdfShape +0 +12, WebRtcIsac_kQKltCdfShape +0 +14, - WebRtcIsac_kQKltCdfShape +0 +16, WebRtcIsac_kQKltCdfShape +0 +18, - WebRtcIsac_kQKltCdfShape +0 +21, WebRtcIsac_kQKltCdfShape +0 +25, - WebRtcIsac_kQKltCdfShape +0 +29, WebRtcIsac_kQKltCdfShape +0 +33, - WebRtcIsac_kQKltCdfShape +0 +37, WebRtcIsac_kQKltCdfShape +0 +43, - WebRtcIsac_kQKltCdfShape +0 +49, WebRtcIsac_kQKltCdfShape +0 +56, - WebRtcIsac_kQKltCdfShape +0 +64, WebRtcIsac_kQKltCdfShape +0 +66, - WebRtcIsac_kQKltCdfShape +0 +68, WebRtcIsac_kQKltCdfShape +0 +70, - WebRtcIsac_kQKltCdfShape +0 +72, WebRtcIsac_kQKltCdfShape +0 +75, - WebRtcIsac_kQKltCdfShape +0 +77, WebRtcIsac_kQKltCdfShape +0 +79, - WebRtcIsac_kQKltCdfShape +0 +81, WebRtcIsac_kQKltCdfShape +0 +83, - WebRtcIsac_kQKltCdfShape +0 +86, WebRtcIsac_kQKltCdfShape +0 +90, - WebRtcIsac_kQKltCdfShape +0 +94, WebRtcIsac_kQKltCdfShape +0 +98, - WebRtcIsac_kQKltCdfShape +0 +102, WebRtcIsac_kQKltCdfShape +0 +107, - WebRtcIsac_kQKltCdfShape +0 +113, WebRtcIsac_kQKltCdfShape +0 +120, - WebRtcIsac_kQKltCdfShape +0 +129, WebRtcIsac_kQKltCdfShape +0 +131, - WebRtcIsac_kQKltCdfShape +0 +133, WebRtcIsac_kQKltCdfShape +0 +135, - WebRtcIsac_kQKltCdfShape +0 +137, WebRtcIsac_kQKltCdfShape +0 +141, - WebRtcIsac_kQKltCdfShape +0 +143, WebRtcIsac_kQKltCdfShape +0 +147, - WebRtcIsac_kQKltCdfShape +0 +151, WebRtcIsac_kQKltCdfShape +0 +155, - WebRtcIsac_kQKltCdfShape +0 +159, WebRtcIsac_kQKltCdfShape +0 +164, - WebRtcIsac_kQKltCdfShape +0 +168, WebRtcIsac_kQKltCdfShape +0 +172, - WebRtcIsac_kQKltCdfShape +0 +178, 
WebRtcIsac_kQKltCdfShape +0 +184, - WebRtcIsac_kQKltCdfShape +0 +192, WebRtcIsac_kQKltCdfShape +0 +200, - WebRtcIsac_kQKltCdfShape +0 +211, WebRtcIsac_kQKltCdfShape +0 +213, - WebRtcIsac_kQKltCdfShape +0 +215, WebRtcIsac_kQKltCdfShape +0 +217, - WebRtcIsac_kQKltCdfShape +0 +219, WebRtcIsac_kQKltCdfShape +0 +223, - WebRtcIsac_kQKltCdfShape +0 +227, WebRtcIsac_kQKltCdfShape +0 +231, - WebRtcIsac_kQKltCdfShape +0 +235, WebRtcIsac_kQKltCdfShape +0 +239, - WebRtcIsac_kQKltCdfShape +0 +243, WebRtcIsac_kQKltCdfShape +0 +248, - WebRtcIsac_kQKltCdfShape +0 +252, WebRtcIsac_kQKltCdfShape +0 +258, - WebRtcIsac_kQKltCdfShape +0 +264, WebRtcIsac_kQKltCdfShape +0 +273, - WebRtcIsac_kQKltCdfShape +0 +282, WebRtcIsac_kQKltCdfShape +0 +293, - WebRtcIsac_kQKltCdfShape +0 +308, WebRtcIsac_kQKltCdfShape +0 +310, - WebRtcIsac_kQKltCdfShape +0 +312, WebRtcIsac_kQKltCdfShape +0 +316, - WebRtcIsac_kQKltCdfShape +0 +320, WebRtcIsac_kQKltCdfShape +0 +324, - WebRtcIsac_kQKltCdfShape +0 +328, WebRtcIsac_kQKltCdfShape +0 +332, - WebRtcIsac_kQKltCdfShape +0 +336, WebRtcIsac_kQKltCdfShape +0 +341, - WebRtcIsac_kQKltCdfShape +0 +347, WebRtcIsac_kQKltCdfShape +0 +354, - WebRtcIsac_kQKltCdfShape +0 +360, WebRtcIsac_kQKltCdfShape +0 +368, - WebRtcIsac_kQKltCdfShape +0 +378, WebRtcIsac_kQKltCdfShape +0 +388, - WebRtcIsac_kQKltCdfShape +0 +400, WebRtcIsac_kQKltCdfShape +0 +418, - WebRtcIsac_kQKltCdfShape +0 +445, WebRtcIsac_kQKltCdfShape +0 +447, - WebRtcIsac_kQKltCdfShape +0 +451, WebRtcIsac_kQKltCdfShape +0 +455, - WebRtcIsac_kQKltCdfShape +0 +461, WebRtcIsac_kQKltCdfShape +0 +468, - WebRtcIsac_kQKltCdfShape +0 +474, WebRtcIsac_kQKltCdfShape +0 +480, - WebRtcIsac_kQKltCdfShape +0 +486, WebRtcIsac_kQKltCdfShape +0 +495, - WebRtcIsac_kQKltCdfShape +0 +505, WebRtcIsac_kQKltCdfShape +0 +516, - WebRtcIsac_kQKltCdfShape +0 +528, WebRtcIsac_kQKltCdfShape +0 +543, - WebRtcIsac_kQKltCdfShape +0 +564, WebRtcIsac_kQKltCdfShape +0 +583, - WebRtcIsac_kQKltCdfShape +0 +608, WebRtcIsac_kQKltCdfShape +0 +635 }; - - -/* left KLT transforms */ -const double WebRtcIsac_kKltT1Gain[4] = { - -0.79742827, 0.60341375, 0.60341375, 0.79742827 }; - -const double WebRtcIsac_kKltT1Shape[324] = { - 0.00159597, 0.00049320, 0.00513821, 0.00021066, 0.01338581, - -0.00422367, -0.00272072, 0.00935107, 0.02047622, 0.02691189, - 0.00478236, 0.03969702, 0.00886698, 0.04877604, -0.10898362, - -0.05930891, -0.03415047, 0.98889721, 0.00293558, -0.00035282, - 0.01156321, -0.00195341, -0.00937631, 0.01052213, -0.02551163, - 0.01644059, 0.03189927, 0.07754773, -0.08742313, -0.03026338, - 0.05136248, -0.14395974, 0.17725040, 0.22664856, 0.93380230, - 0.07076411, 0.00557890, -0.00222834, 0.01377569, 0.01466808, - 0.02847361, -0.00603178, 0.02382480, -0.01210452, 0.03797267, - -0.02371480, 0.11260335, -0.07366682, 0.00453436, -0.04136941, - -0.07912843, -0.95031418, 0.25295337, -0.05302216, -0.00617554, - -0.00044040, -0.00653778, 0.01097838, 0.01529174, 0.01374431, - -0.00748512, -0.00020034, 0.02432713, 0.11101570, -0.08556891, - 0.09282249, -0.01029446, 0.67556443, -0.67454300, 0.06910063, - 0.20866865, -0.10318050, 0.00932175, 0.00524058, 0.00803610, - -0.00594676, -0.01082578, 0.01069906, 0.00546768, 0.01565291, - 0.06816200, 0.10201227, 0.16812734, 0.22984074, 0.58213170, - -0.54138651, -0.51379962, 0.06847390, -0.01920037, -0.04592324, - -0.00467394, 0.00328858, 0.00377424, -0.00987448, 0.08222096, - -0.00377301, 0.04551941, -0.02592517, 0.16317082, 0.13077530, - 0.22702921, -0.31215289, -0.69645962, -0.38047101, -0.39339411, - 0.11124777, 0.02508035, 
-0.00708074, 0.00400344, 0.00040331, - 0.01142402, 0.01725406, 0.01635170, 0.14285366, 0.03949233, - -0.05905676, 0.05877154, -0.17497577, -0.32479440, 0.80754464, - -0.38085603, -0.17055430, -0.03168622, -0.07531451, 0.02942002, - -0.02148095, -0.00754114, -0.00322372, 0.00567812, -0.01701521, - -0.12358320, 0.11473564, 0.09070136, 0.06533068, -0.22560802, - 0.19209022, 0.81605094, 0.36592275, -0.09919829, 0.16667122, - 0.16300725, 0.04803807, 0.06739263, -0.00156752, -0.01685302, - -0.00905240, -0.02297836, -0.00469939, 0.06310613, -0.16391930, - 0.10919511, 0.12529293, 0.85581322, -0.32145522, 0.24539076, - 0.07181839, 0.07289591, 0.14066759, 0.10406711, 0.05815518, - 0.01072680, -0.00759339, 0.00053486, -0.00044865, 0.03407361, - 0.01645348, 0.08758579, 0.27722240, 0.53665485, -0.74853376, - -0.01118192, -0.19805430, 0.06130619, -0.09675299, 0.08978480, - 0.03405255, -0.00706867, 0.05102045, 0.03250746, 0.01849966, - -0.01216314, -0.01184187, -0.01579288, 0.00114807, 0.11376166, - 0.88342114, -0.36425379, 0.13863190, 0.12524180, -0.13553892, - 0.04715856, -0.12341103, 0.04531568, 0.01899360, -0.00206897, - 0.00567768, -0.01444163, 0.00411946, -0.00855896, 0.00381663, - -0.01664861, -0.05534280, 0.21328278, 0.20161162, 0.72360394, - 0.59130708, -0.08043791, 0.08757349, -0.13893918, -0.05147377, - 0.02680690, -0.01144070, 0.00625162, -0.00634215, -0.01248947, - -0.00329455, -0.00609625, -0.00136305, -0.05097048, -0.01029851, - 0.25065384, -0.16856837, -0.07123372, 0.15992623, -0.39487617, - -0.79972301, 0.18118185, -0.04826639, -0.01805578, -0.02927253, - -0.16400618, 0.07472763, 0.10376449, 0.01705406, 0.01065801, - -0.01500498, 0.02039914, 0.37776349, -0.84484186, 0.10434286, - 0.15616990, 0.13474456, -0.00906238, -0.25238368, -0.03820885, - -0.10650905, -0.03880833, -0.03660028, -0.09640894, 0.00583314, - 0.01922097, 0.01489911, -0.02431117, -0.09372217, 0.39404721, - -0.84786223, -0.31277121, 0.03193850, 0.01974060, 0.01887901, - 0.00337911, -0.11359599, -0.02792521, -0.03220184, -0.01533311, - 0.00015962, -0.04225043, -0.00933965, 0.00675311, 0.00206060, - 0.15926771, 0.40199829, -0.80792558, -0.35591604, -0.17169764, - 0.02830436, 0.02459982, -0.03438589, 0.00718705, -0.01798329, - -0.01594508, -0.00702430, -0.00952419, -0.00962701, -0.01307212, - -0.01749740, 0.01299602, 0.00587270, -0.36103108, -0.82039266, - -0.43092844, -0.08500097, -0.04361674, -0.00333482, 0.01250434, - -0.02538295, -0.00921797, 0.01645071, -0.01400872, 0.00317607, - 0.00003277, -0.01617646, -0.00616863, -0.00882661, 0.00466157, - 0.00353237, 0.91803104, -0.39503305, -0.02048964, 0.00060125, - 0.01980634, 0.00300109, 0.00313880, 0.00657337, 0.00715163, - 0.00000261, 0.00854276, -0.00154825, -0.00516128, 0.00909527, - 0.00095609, 0.00701196, -0.00221867, -0.00156741 }; - -/* right KLT transforms */ -const double WebRtcIsac_kKltT2Gain[36] = { - 0.14572837, -0.45446306, 0.61990621, -0.52197033, 0.32145074, - -0.11026900, -0.20698282, 0.48962182, -0.27127933, -0.33627476, - 0.65094037, -0.32715751, 0.40262573, -0.47844405, -0.33876075, - 0.44130653, 0.37383966, -0.39964662, -0.51730480, 0.06611973, - 0.49030187, 0.47512886, -0.02141226, -0.51129451, -0.58578569, - -0.39132064, -0.13187771, 0.15649421, 0.40735596, 0.54396897, - 0.40381276, 0.40904942, 0.41179766, 0.41167576, 0.40840251, - 0.40468132 }; - -const double WebRtcIsac_kKltT2Shape[36] = { - 0.13427386, -0.35132558, 0.52506528, -0.59419077, 0.45075085, - -0.16312057, 0.29857439, -0.58660147, 0.34265431, 0.20879510, - -0.56063262, 0.30238345, 0.43308283, 
-0.41186999, -0.35288681, - 0.42768996, 0.36094634, -0.45284910, -0.47116680, 0.02893449, - 0.54326135, 0.45249040, -0.06264420, -0.52283830, 0.57137758, - 0.44298139, 0.12617554, -0.20819946, -0.42324603, -0.48876443, - 0.39597050, 0.40713935, 0.41389880, 0.41512486, 0.41130400, - 0.40575001 }; - -/* means of log gains and LAR coefficients*/ -const double WebRtcIsac_kLpcMeansGain[12] = { - -6.86881911, -5.35075273, -6.86792680, -5.36200897, -6.86401538, - -5.36921533, -6.86802969, -5.36893966, -6.86538097, -5.36315063, - -6.85535304, -5.35155315 }; - -const double WebRtcIsac_kLpcMeansShape[108] = { - -0.91232981, 0.26258634, -0.33716701, 0.08477430, -0.03378426, - 0.14423909, 0.07036185, 0.06155019, 0.01490385, 0.04138740, - 0.01427317, 0.01288970, 0.83872106, 0.25750199, 0.07988929, - -0.01957923, 0.00831390, 0.01770300, -0.90957164, 0.25732216, - -0.33385344, 0.08735740, -0.03715332, 0.14584917, 0.06998990, - 0.06131968, 0.01504379, 0.04067339, 0.01428039, 0.01406460, - 0.83846243, 0.26169862, 0.08109025, -0.01767055, 0.00970539, - 0.01954310, -0.90490803, 0.24656405, -0.33578607, 0.08843286, - -0.03749139, 0.14443959, 0.07214669, 0.06170993, 0.01449947, - 0.04134309, 0.01314762, 0.01413471, 0.83895203, 0.26748062, - 0.08197507, -0.01781298, 0.00885967, 0.01922394, -0.90922472, - 0.24495889, -0.33921540, 0.08877169, -0.03581332, 0.14199172, - 0.07444032, 0.06185940, 0.01502054, 0.04185113, 0.01276579, - 0.01355457, 0.83645358, 0.26631720, 0.08119697, -0.01835449, - 0.00788512, 0.01846446, -0.90482253, 0.24658310, -0.34019734, - 0.08281090, -0.03486038, 0.14359248, 0.07401336, 0.06001471, - 0.01528421, 0.04254560, 0.01321472, 0.01240799, 0.83857127, - 0.26281654, 0.08174380, -0.02099842, 0.00755176, 0.01699448, - -0.90132307, 0.25174308, -0.33838268, 0.07883863, -0.02877906, - 0.14105407, 0.07220290, 0.06000352, 0.01684879, 0.04226844, - 0.01331331, 0.01269244, 0.83832138, 0.25467485, 0.08118028, - -0.02120528, 0.00747832, 0.01567212 }; diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/lpc_tables.h b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/lpc_tables.h deleted file mode 100644 index 56ff22c06ca3..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/lpc_tables.h +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/* - * lpc_tables.h - * - * header file for coding tables for the LPC coefficients - * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_LPC_TABLES_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_LPC_TABLES_H_ - -#include "modules/audio_coding/codecs/isac/main/source/settings.h" -#include "modules/audio_coding/codecs/isac/main/source/structs.h" - -#define KLT_STEPSIZE 1.00000000 -#define KLT_NUM_AVG_GAIN 0 -#define KLT_NUM_AVG_SHAPE 0 -#define KLT_NUM_MODELS 3 -#define LPC_GAIN_SCALE 4.000f -#define LPC_LOBAND_SCALE 2.100f -#define LPC_LOBAND_ORDER ORDERLO -#define LPC_HIBAND_SCALE 0.450f -#define LPC_HIBAND_ORDER ORDERHI -#define LPC_GAIN_ORDER 2 - -#define LPC_SHAPE_ORDER (LPC_LOBAND_ORDER + LPC_HIBAND_ORDER) - -#define KLT_ORDER_GAIN (LPC_GAIN_ORDER * SUBFRAMES) -#define KLT_ORDER_SHAPE (LPC_SHAPE_ORDER * SUBFRAMES) - -/* cdf array for model indicator */ -extern const uint16_t WebRtcIsac_kQKltModelCdf[KLT_NUM_MODELS + 1]; - -/* pointer to cdf array for model indicator */ -extern const uint16_t* WebRtcIsac_kQKltModelCdfPtr[1]; - -/* initial cdf index for decoder of model indicator */ -extern const uint16_t WebRtcIsac_kQKltModelInitIndex[1]; - -/* offset to go from rounded value to quantization index */ -extern const short WebRtcIsac_kQKltQuantMinGain[12]; - -extern const short WebRtcIsac_kQKltQuantMinShape[108]; - -/* maximum quantization index */ -extern const uint16_t WebRtcIsac_kQKltMaxIndGain[12]; - -extern const uint16_t WebRtcIsac_kQKltMaxIndShape[108]; - -/* index offset */ -extern const uint16_t WebRtcIsac_kQKltOffsetGain[12]; - -extern const uint16_t WebRtcIsac_kQKltOffsetShape[108]; - -/* initial cdf index for KLT coefficients */ -extern const uint16_t WebRtcIsac_kQKltInitIndexGain[12]; - -extern const uint16_t WebRtcIsac_kQKltInitIndexShape[108]; - -/* quantizer representation levels */ -extern const double WebRtcIsac_kQKltLevelsGain[392]; - -extern const double WebRtcIsac_kQKltLevelsShape[578]; - -/* cdf tables for quantizer indices */ -extern const uint16_t WebRtcIsac_kQKltCdfGain[404]; - -extern const uint16_t WebRtcIsac_kQKltCdfShape[686]; - -/* pointers to cdf tables for quantizer indices */ -extern const uint16_t* WebRtcIsac_kQKltCdfPtrGain[12]; - -extern const uint16_t* WebRtcIsac_kQKltCdfPtrShape[108]; - -/* left KLT transforms */ -extern const double WebRtcIsac_kKltT1Gain[4]; - -extern const double WebRtcIsac_kKltT1Shape[324]; - -/* right KLT transforms */ -extern const double WebRtcIsac_kKltT2Gain[36]; - -extern const double WebRtcIsac_kKltT2Shape[36]; - -/* means of log gains and LAR coefficients */ -extern const double WebRtcIsac_kLpcMeansGain[12]; - -extern const double WebRtcIsac_kLpcMeansShape[108]; - -#endif /* MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_LPC_TABLES_H_ */ diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/pitch_gain_tables.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/pitch_gain_tables.c deleted file mode 100644 index 080432c3a529..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/pitch_gain_tables.c +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_coding/codecs/isac/main/source/pitch_gain_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/settings.h" - -/* header file for coding tables for the pitch filter side-info in the entropy coder */ -/********************* Pitch Filter Gain Coefficient Tables ************************/ -/* cdf for quantized pitch filter gains */ -const uint16_t WebRtcIsac_kQPitchGainCdf[255] = { - 0, 2, 4, 6, 64, 901, 903, 905, 16954, 16956, - 16961, 17360, 17362, 17364, 17366, 17368, 17370, 17372, 17374, 17411, - 17514, 17516, 17583, 18790, 18796, 18802, 20760, 20777, 20782, 21722, - 21724, 21728, 21738, 21740, 21742, 21744, 21746, 21748, 22224, 22227, - 22230, 23214, 23229, 23239, 25086, 25108, 25120, 26088, 26094, 26098, - 26175, 26177, 26179, 26181, 26183, 26185, 26484, 26507, 26522, 27705, - 27731, 27750, 29767, 29799, 29817, 30866, 30883, 30885, 31025, 31029, - 31031, 31033, 31035, 31037, 31114, 31126, 31134, 32687, 32722, 32767, - 35718, 35742, 35757, 36943, 36952, 36954, 37115, 37128, 37130, 37132, - 37134, 37136, 37143, 37145, 37152, 38843, 38863, 38897, 47458, 47467, - 47474, 49040, 49061, 49063, 49145, 49157, 49159, 49161, 49163, 49165, - 49167, 49169, 49171, 49757, 49770, 49782, 61333, 61344, 61346, 62860, - 62883, 62885, 62887, 62889, 62891, 62893, 62895, 62897, 62899, 62901, - 62903, 62905, 62907, 62909, 65496, 65498, 65500, 65521, 65523, 65525, - 65527, 65529, 65531, 65533, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535}; - -/* index limits and ranges */ -const int16_t WebRtcIsac_kIndexLowerLimitGain[3] = { - -7, -2, -1}; - -const int16_t WebRtcIsac_kIndexUpperLimitGain[3] = { - 0, 3, 1}; - -const uint16_t WebRtcIsac_kIndexMultsGain[2] = { - 18, 3}; - -/* size of cdf table */ -const uint16_t WebRtcIsac_kQCdfTableSizeGain[1] = { - 256}; - -///////////////////////////FIXED POINT -/* mean values of pitch filter gains in FIXED point */ -const int16_t WebRtcIsac_kQMeanGain1Q12[144] = { - 843, 1092, 1336, 1222, 1405, 1656, 1500, 1815, 1843, 1838, 1839, 1843, 1843, 1843, 1843, 1843, - 1843, 1843, 814, 846, 1092, 1013, 1174, 1383, 1391, 1511, 1584, 1734, 1753, 1843, 1843, 1843, - 1843, 1843, 1843, 1843, 524, 689, 777, 845, 947, 1069, 1090, 1263, 1380, 1447, 1559, 1676, - 1645, 1749, 1843, 1843, 1843, 1843, 81, 477, 563, 611, 706, 806, 849, 1012, 1192, 1128, - 1330, 1489, 1425, 1576, 1826, 1741, 1843, 1843, 0, 290, 305, 356, 488, 575, 602, 741, - 890, 835, 1079, 1196, 1182, 1376, 1519, 1506, 1680, 1843, 0, 47, 97, 69, 289, 381, - 385, 474, 617, 664, 803, 1079, 935, 1160, 1269, 1265, 1506, 1741, 0, 0, 0, 0, - 112, 120, 190, 283, 442, 343, 526, 809, 684, 935, 1134, 1020, 1265, 1506, 0, 0, - 0, 0, 0, 0, 0, 111, 256, 87, 373, 
597, 430, 684, 935, 770, 1020, 1265}; - -const int16_t WebRtcIsac_kQMeanGain2Q12[144] = { - 1760, 1525, 1285, 1747, 1671, 1393, 1843, 1826, 1555, 1843, 1784, 1606, 1843, 1843, 1711, 1843, - 1843, 1814, 1389, 1275, 1040, 1564, 1414, 1252, 1610, 1495, 1343, 1753, 1592, 1405, 1804, 1720, - 1475, 1843, 1814, 1581, 1208, 1061, 856, 1349, 1148, 994, 1390, 1253, 1111, 1495, 1343, 1178, - 1770, 1465, 1234, 1814, 1581, 1342, 1040, 793, 713, 1053, 895, 737, 1128, 1003, 861, 1277, - 1094, 981, 1475, 1192, 1019, 1581, 1342, 1098, 855, 570, 483, 833, 648, 540, 948, 744, - 572, 1009, 844, 636, 1234, 934, 685, 1342, 1217, 984, 537, 318, 124, 603, 423, 350, - 687, 479, 322, 791, 581, 430, 987, 671, 488, 1098, 849, 597, 283, 27, 0, 397, - 222, 38, 513, 271, 124, 624, 325, 157, 737, 484, 233, 849, 597, 343, 27, 0, - 0, 141, 0, 0, 256, 69, 0, 370, 87, 0, 484, 229, 0, 597, 343, 87}; - -const int16_t WebRtcIsac_kQMeanGain3Q12[144] = { - 1843, 1843, 1711, 1843, 1818, 1606, 1843, 1827, 1511, 1814, 1639, 1393, 1760, 1525, 1285, 1656, - 1419, 1176, 1835, 1718, 1475, 1841, 1650, 1387, 1648, 1498, 1287, 1600, 1411, 1176, 1522, 1299, - 1040, 1419, 1176, 928, 1773, 1461, 1128, 1532, 1355, 1202, 1429, 1260, 1115, 1398, 1151, 1025, - 1172, 1080, 790, 1176, 928, 677, 1475, 1147, 1019, 1276, 1096, 922, 1214, 1010, 901, 1057, - 893, 800, 1040, 796, 734, 928, 677, 424, 1137, 897, 753, 1120, 830, 710, 875, 751, - 601, 795, 642, 583, 790, 544, 475, 677, 474, 140, 987, 750, 482, 697, 573, 450, - 691, 487, 303, 661, 394, 332, 537, 303, 220, 424, 168, 0, 737, 484, 229, 624, - 348, 153, 441, 261, 136, 397, 166, 51, 283, 27, 0, 168, 0, 0, 484, 229, - 0, 370, 57, 0, 256, 43, 0, 141, 0, 0, 27, 0, 0, 0, 0, 0}; - - -const int16_t WebRtcIsac_kQMeanGain4Q12[144] = { - 1843, 1843, 1843, 1843, 1841, 1843, 1500, 1821, 1843, 1222, 1434, 1656, 843, 1092, 1336, 504, - 757, 1007, 1843, 1843, 1843, 1838, 1791, 1843, 1265, 1505, 1599, 965, 1219, 1425, 730, 821, - 1092, 249, 504, 757, 1783, 1819, 1843, 1351, 1567, 1727, 1096, 1268, 1409, 805, 961, 1131, - 444, 670, 843, 0, 249, 504, 1425, 1655, 1743, 1096, 1324, 1448, 822, 1019, 1199, 490, - 704, 867, 81, 450, 555, 0, 0, 249, 1247, 1428, 1530, 881, 1073, 1283, 610, 759, - 939, 278, 464, 645, 0, 200, 270, 0, 0, 0, 935, 1163, 1410, 528, 790, 1068, - 377, 499, 717, 173, 240, 274, 0, 43, 62, 0, 0, 0, 684, 935, 1182, 343, - 551, 735, 161, 262, 423, 0, 55, 27, 0, 0, 0, 0, 0, 0, 430, 684, - 935, 87, 377, 597, 0, 46, 256, 0, 0, 0, 0, 0, 0, 0, 0, 0}; diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/pitch_gain_tables.h b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/pitch_gain_tables.h deleted file mode 100644 index 145fd4e6aaec..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/pitch_gain_tables.h +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * pitch_gain_tables.h - * - * This file contains tables for the pitch filter side-info in the entropy - * coder. 
- * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_PITCH_GAIN_TABLES_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_PITCH_GAIN_TABLES_H_ - -#include - -/* header file for coding tables for the pitch filter side-info in the entropy - * coder */ -/********************* Pitch Filter Gain Coefficient Tables - * ************************/ -/* cdf for quantized pitch filter gains */ -extern const uint16_t WebRtcIsac_kQPitchGainCdf[255]; - -/* index limits and ranges */ -extern const int16_t WebRtcIsac_kIndexLowerLimitGain[3]; - -extern const int16_t WebRtcIsac_kIndexUpperLimitGain[3]; -extern const uint16_t WebRtcIsac_kIndexMultsGain[2]; - -/* mean values of pitch filter gains */ -//(Y) -extern const int16_t WebRtcIsac_kQMeanGain1Q12[144]; -extern const int16_t WebRtcIsac_kQMeanGain2Q12[144]; -extern const int16_t WebRtcIsac_kQMeanGain3Q12[144]; -extern const int16_t WebRtcIsac_kQMeanGain4Q12[144]; -//(Y) - -/* size of cdf table */ -extern const uint16_t WebRtcIsac_kQCdfTableSizeGain[1]; - -#endif /* MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_PITCH_GAIN_TABLES_H_ */ diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/pitch_lag_tables.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/pitch_lag_tables.c deleted file mode 100644 index 57d12021acb4..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/pitch_lag_tables.c +++ /dev/null @@ -1,277 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/audio_coding/codecs/isac/main/source/pitch_lag_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/settings.h" - -/* header file for coding tables for the pitch filter side-info in the entropy coder */ -/********************* Pitch Filter Gain Coefficient Tables ************************/ - -/* tables for use with small pitch gain */ - -/* cdf for quantized pitch filter lags */ -const uint16_t WebRtcIsac_kQPitchLagCdf1Lo[127] = { - 0, 134, 336, 549, 778, 998, 1264, 1512, 1777, 2070, - 2423, 2794, 3051, 3361, 3708, 3979, 4315, 4610, 4933, 5269, - 5575, 5896, 6155, 6480, 6816, 7129, 7477, 7764, 8061, 8358, - 8718, 9020, 9390, 9783, 10177, 10543, 10885, 11342, 11795, 12213, - 12680, 13096, 13524, 13919, 14436, 14903, 15349, 15795, 16267, 16734, - 17266, 17697, 18130, 18632, 19080, 19447, 19884, 20315, 20735, 21288, - 21764, 22264, 22723, 23193, 23680, 24111, 24557, 25022, 25537, 26082, - 26543, 27090, 27620, 28139, 28652, 29149, 29634, 30175, 30692, 31273, - 31866, 32506, 33059, 33650, 34296, 34955, 35629, 36295, 36967, 37726, - 38559, 39458, 40364, 41293, 42256, 43215, 44231, 45253, 46274, 47359, - 48482, 49678, 50810, 51853, 53016, 54148, 55235, 56263, 57282, 58363, - 59288, 60179, 61076, 61806, 62474, 63129, 63656, 64160, 64533, 64856, - 65152, 65535, 65535, 65535, 65535, 65535, 65535}; - -const uint16_t WebRtcIsac_kQPitchLagCdf2Lo[20] = { - 0, 429, 3558, 5861, 8558, 11639, 15210, 19502, 24773, 31983, - 42602, 48567, 52601, 55676, 58160, 60172, 61889, 63235, 65383, 65535}; - -const uint16_t WebRtcIsac_kQPitchLagCdf3Lo[2] = { - 0, 65535}; - -const uint16_t WebRtcIsac_kQPitchLagCdf4Lo[10] = { - 0, 2966, 6368, 11182, 19431, 37793, 48532, 55353, 60626, 65535}; - -const uint16_t *WebRtcIsac_kQPitchLagCdfPtrLo[4] = {WebRtcIsac_kQPitchLagCdf1Lo, WebRtcIsac_kQPitchLagCdf2Lo, WebRtcIsac_kQPitchLagCdf3Lo, WebRtcIsac_kQPitchLagCdf4Lo}; - -/* size of first cdf table */ -const uint16_t WebRtcIsac_kQPitchLagCdfSizeLo[1] = {128}; - -/* index limits and ranges */ -const int16_t WebRtcIsac_kQIndexLowerLimitLagLo[4] = { --140, -9, 0, -4}; - -const int16_t WebRtcIsac_kQIndexUpperLimitLagLo[4] = { --20, 9, 0, 4}; - -/* initial index for arithmetic decoder */ -const uint16_t WebRtcIsac_kQInitIndexLagLo[3] = { - 10, 1, 5}; - -/* mean values of pitch filter lags */ -const double WebRtcIsac_kQMeanLag2Lo[19] = { --17.21385070, -15.82678944, -14.07123081, -12.03003877, -10.01311864, -8.00794627, -5.91162987, -3.89231876, -1.90220980, -0.01879275, - 1.89144232, 3.88123171, 5.92146992, 7.96435361, 9.98923648, 11.98266347, 13.96101002, 15.74855713, 17.10976611}; - -const double WebRtcIsac_kQMeanLag3Lo[1] = { - 0.00000000}; - -const double WebRtcIsac_kQMeanLag4Lo[9] = { --7.76246496, -5.92083980, -3.94095226, -1.89502305, 0.03724681, 1.93054221, 3.96443467, 5.91726366, 7.78434291}; - -const double WebRtcIsac_kQPitchLagStepsizeLo = 2.000000; - - -/* tables for use with medium pitch gain */ - -/* cdf for quantized pitch filter lags */ -const uint16_t WebRtcIsac_kQPitchLagCdf1Mid[255] = { - 0, 28, 61, 88, 121, 149, 233, 331, 475, 559, - 624, 661, 689, 712, 745, 791, 815, 843, 866, 922, - 959, 1024, 1061, 1117, 1178, 1238, 1280, 1350, 1453, 1513, - 1564, 1625, 1671, 1741, 1788, 1904, 2072, 2421, 2626, 2770, - 2840, 2900, 2942, 3012, 3068, 3115, 3147, 3194, 3254, 3319, - 3366, 3520, 3678, 3780, 3850, 3911, 3957, 4032, 4106, 4185, - 4292, 4474, 4683, 4842, 5019, 5191, 5321, 5428, 5540, 5675, - 5763, 5847, 5959, 6127, 6304, 6564, 6839, 7090, 7263, 7421, - 7556, 7728, 7872, 7984, 8142, 
8361, 8580, 8743, 8938, 9227, - 9409, 9539, 9674, 9795, 9930, 10060, 10177, 10382, 10614, 10861, - 11038, 11271, 11415, 11629, 11792, 12044, 12193, 12416, 12574, 12821, - 13007, 13235, 13445, 13654, 13901, 14134, 14488, 15000, 15703, 16285, - 16504, 16797, 17086, 17328, 17579, 17807, 17998, 18268, 18538, 18836, - 19087, 19274, 19474, 19716, 19935, 20270, 20833, 21303, 21532, 21741, - 21978, 22207, 22523, 22770, 23054, 23613, 23943, 24204, 24399, 24651, - 24832, 25074, 25270, 25549, 25759, 26015, 26150, 26424, 26713, 27048, - 27342, 27504, 27681, 27854, 28021, 28207, 28412, 28664, 28859, 29064, - 29278, 29548, 29748, 30107, 30377, 30656, 30856, 31164, 31452, 31755, - 32011, 32328, 32626, 32919, 33319, 33789, 34329, 34925, 35396, 35973, - 36443, 36964, 37551, 38156, 38724, 39357, 40023, 40908, 41587, 42602, - 43924, 45037, 45810, 46597, 47421, 48291, 49092, 50051, 51448, 52719, - 53440, 54241, 54944, 55977, 56676, 57299, 57872, 58389, 59059, 59688, - 60237, 60782, 61094, 61573, 61890, 62290, 62658, 63030, 63217, 63454, - 63622, 63882, 64003, 64273, 64427, 64529, 64581, 64697, 64758, 64902, - 65414, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535}; - -const uint16_t WebRtcIsac_kQPitchLagCdf2Mid[36] = { - 0, 71, 335, 581, 836, 1039, 1323, 1795, 2258, 2608, - 3005, 3591, 4243, 5344, 7163, 10583, 16848, 28078, 49448, 57007, - 60357, 61850, 62837, 63437, 63872, 64188, 64377, 64614, 64774, 64949, - 65039, 65115, 65223, 65360, 65474, 65535}; - -const uint16_t WebRtcIsac_kQPitchLagCdf3Mid[2] = { - 0, 65535}; - -const uint16_t WebRtcIsac_kQPitchLagCdf4Mid[20] = { - 0, 28, 246, 459, 667, 1045, 1523, 2337, 4337, 11347, - 44231, 56709, 60781, 62243, 63161, 63969, 64608, 65062, 65502, 65535}; - -const uint16_t *WebRtcIsac_kQPitchLagCdfPtrMid[4] = {WebRtcIsac_kQPitchLagCdf1Mid, WebRtcIsac_kQPitchLagCdf2Mid, WebRtcIsac_kQPitchLagCdf3Mid, WebRtcIsac_kQPitchLagCdf4Mid}; - -/* size of first cdf table */ -const uint16_t WebRtcIsac_kQPitchLagCdfSizeMid[1] = {256}; - -/* index limits and ranges */ -const int16_t WebRtcIsac_kQIndexLowerLimitLagMid[4] = { --280, -17, 0, -9}; - -const int16_t WebRtcIsac_kQIndexUpperLimitLagMid[4] = { --40, 17, 0, 9}; - -/* initial index for arithmetic decoder */ -const uint16_t WebRtcIsac_kQInitIndexLagMid[3] = { - 18, 1, 10}; - -/* mean values of pitch filter lags */ -const double WebRtcIsac_kQMeanLag2Mid[35] = { --16.89183900, -15.86949778, -15.05476653, -14.00664348, -13.02793036, -12.07324237, -11.00542532, -10.11250602, -8.90792971, -8.02474753, --7.00426767, -5.94055287, -4.98251338, -3.91053158, -2.98820425, -1.93524245, -0.92978085, -0.01722509, 0.91317387, 1.92973955, - 2.96908851, 3.93728974, 4.96308471, 5.92244151, 7.08673497, 8.00993708, 9.04656316, 9.98538742, 10.97851694, 11.94772884, - 13.02426166, 14.00039951, 15.01347042, 15.80758023, 16.94086895}; - -const double WebRtcIsac_kQMeanLag3Mid[1] = { - 0.00000000}; - -const double WebRtcIsac_kQMeanLag4Mid[19] = { --8.60409403, -7.89198395, -7.03450280, -5.86260421, -4.93822322, -3.93078706, -2.91302322, -1.91824007, -0.87003282, 0.02822649, - 0.89951758, 1.87495484, 2.91802604, 3.96874074, 5.06571703, 5.93618227, 7.00520185, 7.88497726, 8.64160364}; - -const double WebRtcIsac_kQPitchLagStepsizeMid = 1.000000; - - -/* tables for use with large pitch gain */ - -/* cdf for quantized pitch filter lags */ -const uint16_t WebRtcIsac_kQPitchLagCdf1Hi[511] = { - 0, 7, 18, 33, 69, 105, 156, 228, 315, 612, - 680, 691, 709, 724, 735, 738, 742, 746, 749, 753, - 756, 760, 764, 774, 
782, 785, 789, 796, 800, 803, - 807, 814, 818, 822, 829, 832, 847, 854, 858, 869, - 876, 883, 898, 908, 934, 977, 1010, 1050, 1060, 1064, - 1075, 1078, 1086, 1089, 1093, 1104, 1111, 1122, 1133, 1136, - 1151, 1162, 1183, 1209, 1252, 1281, 1339, 1364, 1386, 1401, - 1411, 1415, 1426, 1430, 1433, 1440, 1448, 1455, 1462, 1477, - 1487, 1495, 1502, 1506, 1509, 1516, 1524, 1531, 1535, 1542, - 1553, 1556, 1578, 1589, 1611, 1625, 1639, 1643, 1654, 1665, - 1672, 1687, 1694, 1705, 1708, 1719, 1730, 1744, 1752, 1759, - 1791, 1795, 1820, 1867, 1886, 1915, 1936, 1943, 1965, 1987, - 2041, 2099, 2161, 2175, 2200, 2211, 2226, 2233, 2244, 2251, - 2266, 2280, 2287, 2298, 2309, 2316, 2331, 2342, 2356, 2378, - 2403, 2418, 2447, 2497, 2544, 2602, 2863, 2895, 2903, 2935, - 2950, 2971, 3004, 3011, 3018, 3029, 3040, 3062, 3087, 3127, - 3152, 3170, 3199, 3243, 3293, 3322, 3340, 3377, 3402, 3427, - 3474, 3518, 3543, 3579, 3601, 3637, 3659, 3706, 3731, 3760, - 3818, 3847, 3869, 3901, 3920, 3952, 4068, 4169, 4220, 4271, - 4524, 4571, 4604, 4632, 4672, 4730, 4777, 4806, 4857, 4904, - 4951, 5002, 5031, 5060, 5107, 5150, 5212, 5266, 5331, 5382, - 5432, 5490, 5544, 5610, 5700, 5762, 5812, 5874, 5972, 6022, - 6091, 6163, 6232, 6305, 6402, 6540, 6685, 6880, 7090, 7271, - 7379, 7452, 7542, 7625, 7687, 7770, 7843, 7911, 7966, 8024, - 8096, 8190, 8252, 8320, 8411, 8501, 8585, 8639, 8751, 8842, - 8918, 8986, 9066, 9127, 9203, 9269, 9345, 9406, 9464, 9536, - 9612, 9667, 9735, 9844, 9931, 10036, 10119, 10199, 10260, 10358, - 10441, 10514, 10666, 10734, 10872, 10951, 11053, 11125, 11223, 11324, - 11516, 11664, 11737, 11816, 11892, 12008, 12120, 12200, 12280, 12392, - 12490, 12576, 12685, 12812, 12917, 13003, 13108, 13210, 13300, 13384, - 13470, 13579, 13673, 13771, 13879, 13999, 14136, 14201, 14368, 14614, - 14759, 14867, 14958, 15030, 15121, 15189, 15280, 15385, 15461, 15555, - 15653, 15768, 15884, 15971, 16069, 16145, 16210, 16279, 16380, 16463, - 16539, 16615, 16688, 16818, 16919, 17017, 18041, 18338, 18523, 18649, - 18790, 18917, 19047, 19167, 19315, 19460, 19601, 19731, 19858, 20068, - 20173, 20318, 20466, 20625, 20741, 20911, 21045, 21201, 21396, 21588, - 21816, 22022, 22305, 22547, 22786, 23072, 23322, 23600, 23879, 24168, - 24433, 24769, 25120, 25511, 25895, 26289, 26792, 27219, 27683, 28077, - 28566, 29094, 29546, 29977, 30491, 30991, 31573, 32105, 32594, 33173, - 33788, 34497, 35181, 35833, 36488, 37255, 37921, 38645, 39275, 39894, - 40505, 41167, 41790, 42431, 43096, 43723, 44385, 45134, 45858, 46607, - 47349, 48091, 48768, 49405, 49955, 50555, 51167, 51985, 52611, 53078, - 53494, 53965, 54435, 54996, 55601, 56125, 56563, 56838, 57244, 57566, - 57967, 58297, 58771, 59093, 59419, 59647, 59886, 60143, 60461, 60693, - 60917, 61170, 61416, 61634, 61891, 62122, 62310, 62455, 62632, 62839, - 63103, 63436, 63639, 63805, 63906, 64015, 64192, 64355, 64475, 64558, - 64663, 64742, 64811, 64865, 64916, 64956, 64981, 65025, 65068, 65115, - 65195, 65314, 65419, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535}; - -const uint16_t WebRtcIsac_kQPitchLagCdf2Hi[68] = { - 0, 7, 11, 22, 37, 52, 56, 59, 81, 85, - 89, 96, 115, 130, 137, 152, 170, 181, 193, 200, - 207, 233, 237, 259, 289, 318, 363, 433, 592, 992, - 1607, 3062, 6149, 12206, 25522, 48368, 58223, 61918, 63640, 64584, - 64943, 65098, 65206, 65268, 65294, 65335, 
65350, 65372, 65387, 65402, - 65413, 65420, 65428, 65435, 65439, 65450, 65454, 65468, 65472, 65476, - 65483, 65491, 65498, 65505, 65516, 65520, 65528, 65535}; - -const uint16_t WebRtcIsac_kQPitchLagCdf3Hi[2] = { - 0, 65535}; - -const uint16_t WebRtcIsac_kQPitchLagCdf4Hi[35] = { - 0, 7, 19, 30, 41, 48, 63, 74, 82, 96, - 122, 152, 215, 330, 701, 2611, 10931, 48106, 61177, 64341, - 65112, 65238, 65309, 65338, 65364, 65379, 65401, 65427, 65453, 65465, - 65476, 65490, 65509, 65528, 65535}; - -const uint16_t *WebRtcIsac_kQPitchLagCdfPtrHi[4] = {WebRtcIsac_kQPitchLagCdf1Hi, WebRtcIsac_kQPitchLagCdf2Hi, WebRtcIsac_kQPitchLagCdf3Hi, WebRtcIsac_kQPitchLagCdf4Hi}; - -/* size of first cdf table */ -const uint16_t WebRtcIsac_kQPitchLagCdfSizeHi[1] = {512}; - -/* index limits and ranges */ -const int16_t WebRtcIsac_kQindexLowerLimitLagHi[4] = { --552, -34, 0, -16}; - -const int16_t WebRtcIsac_kQindexUpperLimitLagHi[4] = { --80, 32, 0, 17}; - -/* initial index for arithmetic decoder */ -const uint16_t WebRtcIsac_kQInitIndexLagHi[3] = { - 34, 1, 18}; - -/* mean values of pitch filter lags */ -const double WebRtcIsac_kQMeanLag2Hi[67] = { --17.07263295, -16.50000000, -15.83966081, -15.55613708, -14.96948007, -14.50000000, -14.00000000, -13.48377986, -13.00000000, -12.50000000, --11.93199636, -11.44530414, -11.04197641, -10.39910301, -10.15202337, -9.51322461, -8.93357741, -8.46456632, -8.10270672, -7.53751847, --6.98686404, -6.50000000, -6.08463150, -5.46872991, -5.00864717, -4.50163760, -4.01382410, -3.43856708, -2.96898001, -2.46554810, --1.96861004, -1.47106701, -0.97197237, -0.46561654, -0.00531409, 0.45767857, 0.96777907, 1.47507903, 1.97740425, 2.46695420, - 3.00695774, 3.47167185, 4.02712538, 4.49280007, 5.01087640, 5.48191963, 6.04916550, 6.51511058, 6.97297819, 7.46565499, - 8.01489405, 8.39912001, 8.91819757, 9.50000000, 10.11654065, 10.50000000, 11.03712583, 11.50000000, 12.00000000, 12.38964346, - 12.89466127, 13.43657881, 13.96013840, 14.46279912, 15.00000000, 15.39412269, 15.96662441}; - -const double WebRtcIsac_kQMeanLag3Hi[1] = { - 0.00000000}; - -const double WebRtcIsac_kQMeanLag4Hi[34] = { --7.98331221, -7.47988769, -7.03626557, -6.52708003, -6.06982173, -5.51856292, -5.05827033, -4.45909878, -3.99125864, -3.45308135, --3.02328139, -2.47297273, -1.94341995, -1.44699056, -0.93612243, -0.43012406, 0.01120357, 0.44054812, 0.93199883, 1.45669587, - 1.97218322, 2.50187419, 2.98748690, 3.49343202, 4.01660147, 4.50984306, 5.01402683, 5.58936797, 5.91787793, 6.59998900, - 6.85034315, 7.53503316, 7.87711194, 8.53631648}; - -const double WebRtcIsac_kQPitchLagStepsizeHi = 0.500000; - -/* transform matrix */ -const double WebRtcIsac_kTransform[4][4] = { -{-0.50000000, -0.50000000, -0.50000000, -0.50000000}, -{ 0.67082039, 0.22360680, -0.22360680, -0.67082039}, -{ 0.50000000, -0.50000000, -0.50000000, 0.50000000}, -{ 0.22360680, -0.67082039, 0.67082039, -0.22360680}}; - -/* transpose transform matrix */ -const double WebRtcIsac_kTransformTranspose[4][4] = { -{-0.50000000, 0.67082039, 0.50000000, 0.22360680}, -{-0.50000000, 0.22360680, -0.50000000, -0.67082039}, -{-0.50000000, -0.22360680, -0.50000000, 0.67082039}, -{-0.50000000, -0.67082039, 0.50000000, -0.22360680}}; - diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/pitch_lag_tables.h b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/pitch_lag_tables.h deleted file mode 100644 index b48e358a5a82..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/pitch_lag_tables.h 
+++ /dev/null @@ -1,116 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * pitch_lag_tables.h - * - * This file contains tables for the pitch filter side-info in the entropy - * coder. - * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_PITCH_LAG_TABLES_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_PITCH_LAG_TABLES_H_ - -#include <stdint.h> - -/* header file for coding tables for the pitch filter side-info in the entropy - * coder */ -/********************* Pitch Filter Lag Coefficient Tables - * ************************/ - -/* tables for use with small pitch gain */ - -/* cdfs for quantized pitch lags */ -extern const uint16_t WebRtcIsac_kQPitchLagCdf1Lo[127]; -extern const uint16_t WebRtcIsac_kQPitchLagCdf2Lo[20]; -extern const uint16_t WebRtcIsac_kQPitchLagCdf3Lo[2]; -extern const uint16_t WebRtcIsac_kQPitchLagCdf4Lo[10]; - -extern const uint16_t* WebRtcIsac_kQPitchLagCdfPtrLo[4]; - -/* size of first cdf table */ -extern const uint16_t WebRtcIsac_kQPitchLagCdfSizeLo[1]; - -/* index limits and ranges */ -extern const int16_t WebRtcIsac_kQIndexLowerLimitLagLo[4]; -extern const int16_t WebRtcIsac_kQIndexUpperLimitLagLo[4]; - -/* initial index for arithmetic decoder */ -extern const uint16_t WebRtcIsac_kQInitIndexLagLo[3]; - -/* mean values of pitch filter lags */ -extern const double WebRtcIsac_kQMeanLag2Lo[19]; -extern const double WebRtcIsac_kQMeanLag3Lo[1]; -extern const double WebRtcIsac_kQMeanLag4Lo[9]; - -extern const double WebRtcIsac_kQPitchLagStepsizeLo; - -/* tables for use with medium pitch gain */ - -/* cdfs for quantized pitch lags */ -extern const uint16_t WebRtcIsac_kQPitchLagCdf1Mid[255]; -extern const uint16_t WebRtcIsac_kQPitchLagCdf2Mid[36]; -extern const uint16_t WebRtcIsac_kQPitchLagCdf3Mid[2]; -extern const uint16_t WebRtcIsac_kQPitchLagCdf4Mid[20]; - -extern const uint16_t* WebRtcIsac_kQPitchLagCdfPtrMid[4]; - -/* size of first cdf table */ -extern const uint16_t WebRtcIsac_kQPitchLagCdfSizeMid[1]; - -/* index limits and ranges */ -extern const int16_t WebRtcIsac_kQIndexLowerLimitLagMid[4]; -extern const int16_t WebRtcIsac_kQIndexUpperLimitLagMid[4]; - -/* initial index for arithmetic decoder */ -extern const uint16_t WebRtcIsac_kQInitIndexLagMid[3]; - -/* mean values of pitch filter lags */ -extern const double WebRtcIsac_kQMeanLag2Mid[35]; -extern const double WebRtcIsac_kQMeanLag3Mid[1]; -extern const double WebRtcIsac_kQMeanLag4Mid[19]; - -extern const double WebRtcIsac_kQPitchLagStepsizeMid; - -/* tables for use with large pitch gain */ - -/* cdfs for quantized pitch lags */ -extern const uint16_t WebRtcIsac_kQPitchLagCdf1Hi[511]; -extern const uint16_t WebRtcIsac_kQPitchLagCdf2Hi[68]; -extern const uint16_t WebRtcIsac_kQPitchLagCdf3Hi[2]; -extern const uint16_t WebRtcIsac_kQPitchLagCdf4Hi[35]; - -extern const uint16_t* WebRtcIsac_kQPitchLagCdfPtrHi[4]; - -/* size of first cdf table */ -extern const uint16_t WebRtcIsac_kQPitchLagCdfSizeHi[1]; - -/* index limits and ranges */ -extern const int16_t WebRtcIsac_kQindexLowerLimitLagHi[4]; -extern const int16_t WebRtcIsac_kQindexUpperLimitLagHi[4]; - -/* initial index for arithmetic decoder */ -extern const uint16_t 
WebRtcIsac_kQInitIndexLagHi[3]; - -/* mean values of pitch filter lags */ -extern const double WebRtcIsac_kQMeanLag2Hi[67]; -extern const double WebRtcIsac_kQMeanLag3Hi[1]; -extern const double WebRtcIsac_kQMeanLag4Hi[34]; - -extern const double WebRtcIsac_kQPitchLagStepsizeHi; - -/* transform matrix */ -extern const double WebRtcIsac_kTransform[4][4]; - -/* transpose transform matrix */ -extern const double WebRtcIsac_kTransformTranspose[4][4]; - -#endif /* MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_PITCH_LAG_TABLES_H_ */ diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.c deleted file mode 100644 index 839d5d458684..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.c +++ /dev/null @@ -1,139 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/settings.h" - -/********************* AR Coefficient Tables ************************/ -/* cdf for quantized reflection coefficient 1 */ -const uint16_t WebRtcIsac_kQArRc1Cdf[NUM_AR_RC_QUANT_BAUNDARY] = { - 0, 2, 4, 129, 7707, 57485, 65495, 65527, 65529, 65531, - 65533, 65535}; - -/* cdf for quantized reflection coefficient 2 */ -const uint16_t WebRtcIsac_kQArRc2Cdf[NUM_AR_RC_QUANT_BAUNDARY] = { - 0, 2, 4, 7, 531, 25298, 64525, 65526, 65529, 65531, - 65533, 65535}; - -/* cdf for quantized reflection coefficient 3 */ -const uint16_t WebRtcIsac_kQArRc3Cdf[NUM_AR_RC_QUANT_BAUNDARY] = { - 0, 2, 4, 6, 620, 22898, 64843, 65527, 65529, 65531, - 65533, 65535}; - -/* cdf for quantized reflection coefficient 4 */ -const uint16_t WebRtcIsac_kQArRc4Cdf[NUM_AR_RC_QUANT_BAUNDARY] = { - 0, 2, 4, 6, 35, 10034, 60733, 65506, 65529, 65531, - 65533, 65535}; - -/* cdf for quantized reflection coefficient 5 */ -const uint16_t WebRtcIsac_kQArRc5Cdf[NUM_AR_RC_QUANT_BAUNDARY] = { - 0, 2, 4, 6, 36, 7567, 56727, 65385, 65529, 65531, - 65533, 65535}; - -/* cdf for quantized reflection coefficient 6 */ -const uint16_t WebRtcIsac_kQArRc6Cdf[NUM_AR_RC_QUANT_BAUNDARY] = { - 0, 2, 4, 6, 14, 6579, 57360, 65409, 65529, 65531, - 65533, 65535}; - -/* representation levels for quantized reflection coefficient 1 */ -const int16_t WebRtcIsac_kQArRc1Levels[NUM_AR_RC_QUANT_BAUNDARY - 1] = { - -32104, -29007, -23202, -15496, -9279, -2577, 5934, 17535, 24512, 29503, 32104 -}; - -/* representation levels for quantized reflection coefficient 2 */ -const int16_t WebRtcIsac_kQArRc2Levels[NUM_AR_RC_QUANT_BAUNDARY - 1] = { - -32104, -29503, -23494, -15261, -7309, -1399, 6158, 16381, 24512, 29503, 32104 -}; - -/* representation levels for quantized reflection coefficient 3 */ -const int16_t WebRtcIsac_kQArRc3Levels[NUM_AR_RC_QUANT_BAUNDARY - 1] = { --32104, -29503, -23157, -15186, -7347, -1359, 5829, 17535, 24512, 29503, 32104 -}; - -/* representation levels for quantized reflection coefficient 4 */ -const int16_t WebRtcIsac_kQArRc4Levels[NUM_AR_RC_QUANT_BAUNDARY - 1] = { --32104, 
-29503, -24512, -15362, -6665, -342, 6596, 14585, 24512, 29503, 32104 -}; - -/* representation levels for quantized reflection coefficient 5 */ -const int16_t WebRtcIsac_kQArRc5Levels[NUM_AR_RC_QUANT_BAUNDARY - 1] = { --32104, -29503, -24512, -15005, -6564, -106, 7123, 14920, 24512, 29503, 32104 -}; - -/* representation levels for quantized reflection coefficient 6 */ -const int16_t WebRtcIsac_kQArRc6Levels[NUM_AR_RC_QUANT_BAUNDARY - 1] = { --32104, -29503, -24512, -15096, -6656, -37, 7036, 14847, 24512, 29503, 32104 -}; - -/* quantization boundary levels for reflection coefficients */ -const int16_t WebRtcIsac_kQArBoundaryLevels[NUM_AR_RC_QUANT_BAUNDARY] = { --32768, -31441, -27566, -21458, -13612, -4663, 4663, 13612, 21458, 27566, 31441, -32767 -}; - -/* initial index for AR reflection coefficient quantizer and cdf table search */ -const uint16_t WebRtcIsac_kQArRcInitIndex[6] = { - 5, 5, 5, 5, 5, 5}; - -/* pointers to AR cdf tables */ -const uint16_t *WebRtcIsac_kQArRcCdfPtr[AR_ORDER] = { - WebRtcIsac_kQArRc1Cdf, WebRtcIsac_kQArRc2Cdf, WebRtcIsac_kQArRc3Cdf, - WebRtcIsac_kQArRc4Cdf, WebRtcIsac_kQArRc5Cdf, WebRtcIsac_kQArRc6Cdf -}; - -/* pointers to AR representation levels tables */ -const int16_t *WebRtcIsac_kQArRcLevelsPtr[AR_ORDER] = { - WebRtcIsac_kQArRc1Levels, WebRtcIsac_kQArRc2Levels, WebRtcIsac_kQArRc3Levels, - WebRtcIsac_kQArRc4Levels, WebRtcIsac_kQArRc5Levels, WebRtcIsac_kQArRc6Levels -}; - - -/******************** GAIN Coefficient Tables ***********************/ -/* cdf for Gain coefficient */ -const uint16_t WebRtcIsac_kQGainCdf[19] = { - 0, 2, 4, 6, 8, 10, 12, 14, 16, 1172, - 11119, 29411, 51699, 64445, 65527, 65529, 65531, 65533, 65535}; - -/* representation levels for quantized squared Gain coefficient */ -const int32_t WebRtcIsac_kQGain2Levels[18] = { -// 17, 28, 46, 76, 128, 215, 364, 709, 1268, 1960, 3405, 6078, 11286, 17827, 51918, 134498, 487432, 2048000}; - 128, 128, 128, 128, 128, 215, 364, 709, 1268, 1960, 3405, 6078, 11286, 17827, 51918, 134498, 487432, 2048000}; -/* quantization boundary levels for squared Gain coefficient */ -const int32_t WebRtcIsac_kQGain2BoundaryLevels[19] = { -0, 21, 35, 59, 99, 166, 280, 475, 815, 1414, 2495, 4505, 8397, 16405, 34431, 81359, 240497, 921600, 0x7FFFFFFF}; - -/* pointers to Gain cdf table */ -const uint16_t *WebRtcIsac_kQGainCdf_ptr[1] = {WebRtcIsac_kQGainCdf}; - -/* Gain initial index for gain quantizer and cdf table search */ -const uint16_t WebRtcIsac_kQGainInitIndex[1] = {11}; - -/************************* Cosine Tables ****************************/ -/* Cosine table */ -const int16_t WebRtcIsac_kCos[6][60] = { -{512, 512, 511, 510, 508, 507, 505, 502, 499, 496, 493, 489, 485, 480, 476, 470, 465, 459, 453, 447, -440, 433, 426, 418, 410, 402, 394, 385, 376, 367, 357, 348, 338, 327, 317, 306, 295, 284, 273, 262, -250, 238, 226, 214, 202, 190, 177, 165, 152, 139, 126, 113, 100, 87, 73, 60, 47, 33, 20, 7}, -{512, 510, 508, 503, 498, 491, 483, 473, 462, 450, 437, 422, 406, 389, 371, 352, 333, 312, 290, 268, -244, 220, 196, 171, 145, 120, 93, 67, 40, 13, -13, -40, -67, -93, -120, -145, -171, -196, -220, -244, --268, -290, -312, -333, -352, -371, -389, -406, -422, -437, -450, -462, -473, -483, -491, -498, -503, -508, -510, -512}, -{512, 508, 502, 493, 480, 465, 447, 426, 402, 376, 348, 317, 284, 250, 214, 177, 139, 100, 60, 20, --20, -60, -100, -139, -177, -214, -250, -284, -317, -348, -376, -402, -426, -447, -465, -480, -493, -502, -508, -512, --512, -508, -502, -493, -480, -465, -447, -426, -402, -376, -348, -317, -284, -250, 
-214, -177, -139, -100, -60, -20}, -{511, 506, 495, 478, 456, 429, 398, 362, 322, 279, 232, 183, 133, 80, 27, -27, -80, -133, -183, -232, --279, -322, -362, -398, -429, -456, -478, -495, -506, -511, -511, -506, -495, -478, -456, -429, -398, -362, -322, -279, --232, -183, -133, -80, -27, 27, 80, 133, 183, 232, 279, 322, 362, 398, 429, 456, 478, 495, 506, 511}, -{511, 502, 485, 459, 426, 385, 338, 284, 226, 165, 100, 33, -33, -100, -165, -226, -284, -338, -385, -426, --459, -485, -502, -511, -511, -502, -485, -459, -426, -385, -338, -284, -226, -165, -100, -33, 33, 100, 165, 226, -284, 338, 385, 426, 459, 485, 502, 511, 511, 502, 485, 459, 426, 385, 338, 284, 226, 165, 100, 33}, -{510, 498, 473, 437, 389, 333, 268, 196, 120, 40, -40, -120, -196, -268, -333, -389, -437, -473, -498, -510, --510, -498, -473, -437, -389, -333, -268, -196, -120, -40, 40, 120, 196, 268, 333, 389, 437, 473, 498, 510, -510, 498, 473, 437, 389, 333, 268, 196, 120, 40, -40, -120, -196, -268, -333, -389, -437, -473, -498, -510} -}; diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.h b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.h deleted file mode 100644 index d272be0dc385..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.h +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * spectrum_ar_model_tables.h - * - * This file contains definitions of tables with AR coefficients, - * Gain coefficients and cosine tables. 
- * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_SPECTRUM_AR_MODEL_TABLES_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_SPECTRUM_AR_MODEL_TABLES_H_ - -#include "modules/audio_coding/codecs/isac/main/source/structs.h" - -#define NUM_AR_RC_QUANT_BAUNDARY 12 - -/********************* AR Coefficient Tables ************************/ -/* cdf for quantized reflection coefficient 1 */ -extern const uint16_t WebRtcIsac_kQArRc1Cdf[NUM_AR_RC_QUANT_BAUNDARY]; - -/* cdf for quantized reflection coefficient 2 */ -extern const uint16_t WebRtcIsac_kQArRc2Cdf[NUM_AR_RC_QUANT_BAUNDARY]; - -/* cdf for quantized reflection coefficient 3 */ -extern const uint16_t WebRtcIsac_kQArRc3Cdf[NUM_AR_RC_QUANT_BAUNDARY]; - -/* cdf for quantized reflection coefficient 4 */ -extern const uint16_t WebRtcIsac_kQArRc4Cdf[NUM_AR_RC_QUANT_BAUNDARY]; - -/* cdf for quantized reflection coefficient 5 */ -extern const uint16_t WebRtcIsac_kQArRc5Cdf[NUM_AR_RC_QUANT_BAUNDARY]; - -/* cdf for quantized reflection coefficient 6 */ -extern const uint16_t WebRtcIsac_kQArRc6Cdf[NUM_AR_RC_QUANT_BAUNDARY]; - -/* quantization boundary levels for reflection coefficients */ -extern const int16_t WebRtcIsac_kQArBoundaryLevels[NUM_AR_RC_QUANT_BAUNDARY]; - -/* initial indices for AR reflection coefficient quantizer and cdf table search - */ -extern const uint16_t WebRtcIsac_kQArRcInitIndex[AR_ORDER]; - -/* pointers to AR cdf tables */ -extern const uint16_t* WebRtcIsac_kQArRcCdfPtr[AR_ORDER]; - -/* pointers to AR representation levels tables */ -extern const int16_t* WebRtcIsac_kQArRcLevelsPtr[AR_ORDER]; - -/******************** GAIN Coefficient Tables ***********************/ -/* cdf for Gain coefficient */ -extern const uint16_t WebRtcIsac_kQGainCdf[19]; - -/* representation levels for quantized Gain coefficient */ -extern const int32_t WebRtcIsac_kQGain2Levels[18]; - -/* squared quantization boundary levels for Gain coefficient */ -extern const int32_t WebRtcIsac_kQGain2BoundaryLevels[19]; - -/* pointer to Gain cdf table */ -extern const uint16_t* WebRtcIsac_kQGainCdf_ptr[1]; - -/* Gain initial index for gain quantizer and cdf table search */ -extern const uint16_t WebRtcIsac_kQGainInitIndex[1]; - -/************************* Cosine Tables ****************************/ -/* Cosine table */ -extern const int16_t WebRtcIsac_kCos[6][60]; - -#endif /* MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_SPECTRUM_AR_MODEL_TABLES_H_ \ - */ diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/transform.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/transform.c deleted file mode 100644 index 082ad941c4de..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/transform.c +++ /dev/null @@ -1,126 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include - -#include "modules/audio_coding/codecs/isac/main/source/settings.h" -#include "modules/audio_coding/codecs/isac/main/source/codec.h" -#include "modules/audio_coding/codecs/isac/main/source/os_specific_inline.h" -#include "modules/third_party/fft/fft.h" - -void WebRtcIsac_InitTransform(TransformTables* tables) { - int k; - double fact, phase; - - fact = PI / (FRAMESAMPLES_HALF); - phase = 0.0; - for (k = 0; k < FRAMESAMPLES_HALF; k++) { - tables->costab1[k] = cos(phase); - tables->sintab1[k] = sin(phase); - phase += fact; - } - - fact = PI * ((double) (FRAMESAMPLES_HALF - 1)) / ((double) FRAMESAMPLES_HALF); - phase = 0.5 * fact; - for (k = 0; k < FRAMESAMPLES_QUARTER; k++) { - tables->costab2[k] = cos(phase); - tables->sintab2[k] = sin(phase); - phase += fact; - } -} - -void WebRtcIsac_Time2Spec(const TransformTables* tables, - double* inre1, - double* inre2, - int16_t* outreQ7, - int16_t* outimQ7, - FFTstr* fftstr_obj) { - int k; - int dims[1]; - double tmp1r, tmp1i, xr, xi, yr, yi, fact; - double tmpre[FRAMESAMPLES_HALF], tmpim[FRAMESAMPLES_HALF]; - - - dims[0] = FRAMESAMPLES_HALF; - - - /* Multiply with complex exponentials and combine into one complex vector */ - fact = 0.5 / sqrt(FRAMESAMPLES_HALF); - for (k = 0; k < FRAMESAMPLES_HALF; k++) { - tmp1r = tables->costab1[k]; - tmp1i = tables->sintab1[k]; - tmpre[k] = (inre1[k] * tmp1r + inre2[k] * tmp1i) * fact; - tmpim[k] = (inre2[k] * tmp1r - inre1[k] * tmp1i) * fact; - } - - - /* Get DFT */ - WebRtcIsac_Fftns(1, dims, tmpre, tmpim, -1, 1.0, fftstr_obj); - - /* Use symmetry to separate into two complex vectors and center frames in time around zero */ - for (k = 0; k < FRAMESAMPLES_QUARTER; k++) { - xr = tmpre[k] + tmpre[FRAMESAMPLES_HALF - 1 - k]; - yi = -tmpre[k] + tmpre[FRAMESAMPLES_HALF - 1 - k]; - xi = tmpim[k] - tmpim[FRAMESAMPLES_HALF - 1 - k]; - yr = tmpim[k] + tmpim[FRAMESAMPLES_HALF - 1 - k]; - - tmp1r = tables->costab2[k]; - tmp1i = tables->sintab2[k]; - outreQ7[k] = (int16_t)WebRtcIsac_lrint((xr * tmp1r - xi * tmp1i) * 128.0); - outimQ7[k] = (int16_t)WebRtcIsac_lrint((xr * tmp1i + xi * tmp1r) * 128.0); - outreQ7[FRAMESAMPLES_HALF - 1 - k] = (int16_t)WebRtcIsac_lrint((-yr * tmp1i - yi * tmp1r) * 128.0); - outimQ7[FRAMESAMPLES_HALF - 1 - k] = (int16_t)WebRtcIsac_lrint((-yr * tmp1r + yi * tmp1i) * 128.0); - } -} - -void WebRtcIsac_Spec2time(const TransformTables* tables, - double* inre, - double* inim, - double* outre1, - double* outre2, - FFTstr* fftstr_obj) { - int k; - double tmp1r, tmp1i, xr, xi, yr, yi, fact; - - int dims; - - dims = FRAMESAMPLES_HALF; - - for (k = 0; k < FRAMESAMPLES_QUARTER; k++) { - /* Move zero in time to beginning of frames */ - tmp1r = tables->costab2[k]; - tmp1i = tables->sintab2[k]; - xr = inre[k] * tmp1r + inim[k] * tmp1i; - xi = inim[k] * tmp1r - inre[k] * tmp1i; - yr = -inim[FRAMESAMPLES_HALF - 1 - k] * tmp1r - inre[FRAMESAMPLES_HALF - 1 - k] * tmp1i; - yi = -inre[FRAMESAMPLES_HALF - 1 - k] * tmp1r + inim[FRAMESAMPLES_HALF - 1 - k] * tmp1i; - - /* Combine into one vector, z = x + j * y */ - outre1[k] = xr - yi; - outre1[FRAMESAMPLES_HALF - 1 - k] = xr + yi; - outre2[k] = xi + yr; - outre2[FRAMESAMPLES_HALF - 1 - k] = -xi + yr; - } - - - /* Get IDFT */ - WebRtcIsac_Fftns(1, &dims, outre1, outre2, 1, FRAMESAMPLES_HALF, fftstr_obj); - - - /* Demodulate and separate */ - fact = sqrt(FRAMESAMPLES_HALF); - for (k = 0; k < FRAMESAMPLES_HALF; k++) { - tmp1r = tables->costab1[k]; - tmp1i = tables->sintab1[k]; - xr = (outre1[k] * tmp1r - outre2[k] * tmp1i) * fact; - outre2[k] = 
(outre2[k] * tmp1r + outre1[k] * tmp1i) * fact; - outre1[k] = xr; - } -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/test/ReleaseTest-API/ReleaseTest-API.cc b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/test/ReleaseTest-API/ReleaseTest-API.cc deleted file mode 100644 index ee72b07dc314..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/test/ReleaseTest-API/ReleaseTest-API.cc +++ /dev/null @@ -1,942 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -// ReleaseTest-API.cpp : Defines the entry point for the console application. -// - -#include -#include -#include -#include -#include - -#include - -/* include API */ -#include "modules/audio_coding/codecs/isac/main/include/isac.h" -#include "modules/audio_coding/codecs/isac/main/util/utility.h" - -/* Defines */ -#define SEED_FILE \ - "randseed.txt" /* Used when running decoder on garbage data \ - */ -#define MAX_FRAMESAMPLES \ - 960 /* max number of samples per frame \ - (= 60 ms frame & 16 kHz) or \ - (= 30 ms frame & 32 kHz) */ -#define FRAMESAMPLES_10ms 160 /* number of samples per 10ms frame */ -#define SWBFRAMESAMPLES_10ms 320 -//#define FS 16000 /* sampling frequency (Hz) */ - -#ifdef WIN32 -#ifndef CLOCKS_PER_SEC -#define CLOCKS_PER_SEC 1000 /* Runtime statistics */ -#endif -#endif - -int main(int argc, char* argv[]) { - char inname[100], outname[100], bottleneck_file[100], vadfile[100]; - FILE *inp, *outp, *f_bn = NULL, *vadp = NULL, *bandwidthp; - int framecnt, endfile; - - size_t i; - int errtype, VADusage = 0, packetLossPercent = 0; - int16_t CodingMode; - int32_t bottleneck = 0; - int framesize = 30; /* ms */ - int cur_framesmpls, err; - - /* Runtime statistics */ - double starttime, runtime, length_file; - - size_t stream_len = 0; - int declen = 0, declenTC = 0; - bool lostFrame = false; - - int16_t shortdata[SWBFRAMESAMPLES_10ms]; - int16_t vaddata[SWBFRAMESAMPLES_10ms * 3]; - int16_t decoded[MAX_FRAMESAMPLES << 1]; - int16_t decodedTC[MAX_FRAMESAMPLES << 1]; - uint16_t streamdata[500]; - int16_t speechType[1]; - int16_t rateBPS = 0; - int16_t fixedFL = 0; - int16_t payloadSize = 0; - int32_t payloadRate = 0; - int setControlBWE = 0; - short FL, testNum; - char version_number[20]; - FILE* plFile; - int32_t sendBN; - -#if !defined(NDEBUG) - FILE* fy; - double kbps; -#endif - size_t totalbits = 0; - int totalsmpls = 0; - - /* If use GNS file */ - FILE* fp_gns = NULL; - char gns_file[100]; - size_t maxStreamLen30 = 0; - size_t maxStreamLen60 = 0; - short sampFreqKHz = 32; - short samplesIn10Ms; - // FILE logFile; - bool doTransCoding = false; - int32_t rateTransCoding = 0; - uint8_t streamDataTransCoding[1200]; - size_t streamLenTransCoding = 0; - FILE* transCodingFile = NULL; - FILE* transcodingBitstream = NULL; - size_t numTransCodingBytes = 0; - - /* only one structure used for ISAC encoder */ - ISACStruct* ISAC_main_inst = NULL; - ISACStruct* decoderTransCoding = NULL; - - BottleNeckModel BN_data; - -#if !defined(NDEBUG) - fy = fopen("bit_rate.dat", "w"); - fclose(fy); - fy = fopen("bytes_frames.dat", "w"); - fclose(fy); -#endif - - /* Handling wrong input arguments in the command line */ 
- if ((argc < 3) || (argc > 17)) { - printf("\n\nWrong number of arguments or flag values.\n\n"); - - printf("\n"); - WebRtcIsac_version(version_number); - printf("iSAC-swb version %s \n\n", version_number); - - printf("Usage:\n\n"); - printf("%s [-I] bottleneck_value infile outfile \n\n", argv[0]); - printf("with:\n"); - printf("[-FS num] : sampling frequency in kHz, valid values are\n"); - printf(" 16 & 32, with 16 as default.\n"); - printf("[-I] : if -I option is specified, the coder will use\n"); - printf(" an instantaneous Bottleneck value. If not, it\n"); - printf(" will be an adaptive Bottleneck value.\n"); - printf("[-assign] : Use Assign API.\n"); - printf("[-B num] : the value of the bottleneck provided either\n"); - printf(" as a fixed value in bits/sec (e.g. 25000) or\n"); - printf(" read from a file (e.g. bottleneck.txt)\n"); - printf("[-INITRATE num] : Set a new value for initial rate. Note! Only\n"); - printf(" used in adaptive mode.\n"); - printf("[-FL num] : Set (initial) frame length in msec. Valid\n"); - printf(" lengths are 30 and 60 msec.\n"); - printf("[-FIXED_FL] : Frame length will be fixed to initial value.\n"); - printf("[-MAX num] : Set the limit for the payload size of iSAC\n"); - printf(" in bytes. Minimum 100 maximum 400.\n"); - printf("[-MAXRATE num] : Set the maxrate for iSAC in bits per second.\n"); - printf(" Minimum 32000, maximum 53400.\n"); - printf("[-F num] : if -F option is specified, the test function\n"); - printf(" will run the iSAC API fault scenario\n"); - printf(" specified by the supplied number.\n"); - printf(" F 1 - Call encoder prior to init encoder call\n"); - printf(" F 2 - Call decoder prior to init decoder call\n"); - printf(" F 3 - Call decoder prior to encoder call\n"); - printf(" F 4 - Call decoder with a too short coded\n"); - printf(" sequence\n"); - printf(" F 5 - Call decoder with a too long coded\n"); - printf(" sequence\n"); - printf(" F 6 - Call decoder with random bit stream\n"); - printf(" F 7 - Call init encoder/decoder at random\n"); - printf(" during a call\n"); - printf(" F 8 - Call encoder/decoder without having\n"); - printf(" allocated memory for encoder/decoder\n"); - printf(" instance\n"); - printf(" F 9 - Call decodeB without calling decodeA\n"); - printf(" F 10 - Call decodeB with garbage data\n"); - printf("[-PL num] : if -PL option is specified \n"); - printf("[-T rate file] : test trans-coding with target bottleneck\n"); - printf(" 'rate' bits/sec\n"); - printf(" the output file is written to 'file'\n"); - printf("[-LOOP num] : number of times to repeat coding the input\n"); - printf(" file for stress testing\n"); - // printf("[-CE num] : Test of APIs used by Conference Engine.\n"); - // printf(" CE 1 - getNewBitstream, getBWE \n"); - // printf(" (CE 2 - RESERVED for transcoding)\n"); - // printf(" CE 3 - getSendBWE, setSendBWE. 
\n"); - // printf("-L filename : write the logging info into file - // (appending)\n"); - printf("infile : Normal speech input file\n"); - printf("outfile : Speech output file\n"); - exit(0); - } - - /* Print version number */ - printf("-------------------------------------------------\n"); - WebRtcIsac_version(version_number); - printf("iSAC version %s \n\n", version_number); - - /* Loop over all command line arguments */ - CodingMode = 0; - testNum = 0; - // logFile = NULL; - char transCodingFileName[500]; - int16_t totFileLoop = 0; - int16_t numFileLoop = 0; - for (i = 1; i + 2 < static_cast(argc); i++) { - if (!strcmp("-LOOP", argv[i])) { - i++; - totFileLoop = (int16_t)atol(argv[i]); - if (totFileLoop <= 0) { - fprintf(stderr, "Invalid number of runs for the given input file, %d.", - totFileLoop); - exit(0); - } - } - - if (!strcmp("-T", argv[i])) { - doTransCoding = true; - i++; - rateTransCoding = atoi(argv[i]); - i++; - strcpy(transCodingFileName, argv[i]); - } - - /* Set Sampling Rate */ - if (!strcmp("-FS", argv[i])) { - i++; - sampFreqKHz = atoi(argv[i]); - } - - /* Instantaneous mode */ - if (!strcmp("-I", argv[i])) { - printf("Instantaneous BottleNeck\n"); - CodingMode = 1; - } - - /* Set (initial) bottleneck value */ - if (!strcmp("-INITRATE", argv[i])) { - rateBPS = atoi(argv[i + 1]); - setControlBWE = 1; - if ((rateBPS < 10000) || (rateBPS > 32000)) { - printf( - "\n%d is not a initial rate. Valid values are in the range " - "10000 to 32000.\n", - rateBPS); - exit(0); - } - printf("New initial rate: %d\n", rateBPS); - i++; - } - - /* Set (initial) framelength */ - if (!strcmp("-FL", argv[i])) { - framesize = atoi(argv[i + 1]); - if ((framesize != 30) && (framesize != 60)) { - printf( - "\n%d is not a valid frame length. Valid length are 30 and 60 " - "msec.\n", - framesize); - exit(0); - } - setControlBWE = 1; - printf("Frame Length: %d\n", framesize); - i++; - } - - /* Fixed frame length */ - if (!strcmp("-FIXED_FL", argv[i])) { - fixedFL = 1; - setControlBWE = 1; - printf("Fixed Frame Length\n"); - } - - /* Set maximum allowed payload size in bytes */ - if (!strcmp("-MAX", argv[i])) { - payloadSize = atoi(argv[i + 1]); - printf("Maximum Payload Size: %d\n", payloadSize); - i++; - } - - /* Set maximum rate in bytes */ - if (!strcmp("-MAXRATE", argv[i])) { - payloadRate = atoi(argv[i + 1]); - printf("Maximum Rate in kbps: %d\n", payloadRate); - i++; - } - - /* Test of fault scenarious */ - if (!strcmp("-F", argv[i])) { - testNum = atoi(argv[i + 1]); - printf("Fault test: %d\n", testNum); - if (testNum < 1 || testNum > 10) { - printf( - "\n%d is not a valid Fault Scenario number. 
Valid Fault " - "Scenarios are numbered 1-10.\n", - testNum); - exit(0); - } - i++; - } - - /* Packet loss test */ - if (!strcmp("-PL", argv[i])) { - if (isdigit(static_cast(*argv[i + 1]))) { - packetLossPercent = atoi(argv[i + 1]); - if ((packetLossPercent < 0) | (packetLossPercent > 100)) { - printf("\nInvalid packet loss perentage \n"); - exit(0); - } - if (packetLossPercent > 0) { - printf("Simulating %d %% of independent packet loss\n", - packetLossPercent); - } else { - printf("\nNo Packet Loss Is Simulated \n"); - } - } else { - plFile = fopen(argv[i + 1], "rb"); - if (plFile == NULL) { - printf("\n couldn't open the frameloss file: %s\n", argv[i + 1]); - exit(0); - } - printf("Simulating packet loss through the given channel file: %s\n", - argv[i + 1]); - } - i++; - } - - /* Random packetlosses */ - if (!strcmp("-rnd", argv[i])) { - srand((unsigned int)time(NULL)); - printf("Random pattern in lossed packets \n"); - } - - /* Use gns file */ - if (!strcmp("-G", argv[i])) { - sscanf(argv[i + 1], "%s", gns_file); - fp_gns = fopen(gns_file, "rb"); - if (fp_gns == NULL) { - printf("Cannot read file %s.\n", gns_file); - exit(0); - } - i++; - } - - // make it with '-B' - /* Get Bottleneck value */ - if (!strcmp("-B", argv[i])) { - i++; - bottleneck = atoi(argv[i]); - if (bottleneck == 0) { - sscanf(argv[i], "%s", bottleneck_file); - f_bn = fopen(bottleneck_file, "rb"); - if (f_bn == NULL) { - printf( - "Error No value provided for BottleNeck and cannot read file " - "%s.\n", - bottleneck_file); - exit(0); - } else { - printf("reading bottleneck rates from file %s\n\n", bottleneck_file); - if (fscanf(f_bn, "%d", &bottleneck) == EOF) { - /* Set pointer to beginning of file */ - fseek(f_bn, 0L, SEEK_SET); - if (fscanf(f_bn, "%d", &bottleneck) == EOF) { - exit(0); - } - } - - /* Bottleneck is a cosine function - * Matlab code for writing the bottleneck file: - * BottleNeck_10ms = 20e3 + 10e3 * cos((0:5999)/5999*2*pi); - * fid = fopen('bottleneck.txt', 'wb'); - * fprintf(fid, '%d\n', BottleNeck_10ms); fclose(fid); - */ - } - } else { - printf("\nfixed bottleneck rate of %d bits/s\n\n", bottleneck); - } - } - /* Run Conference Engine APIs */ - // Do not test it in the first release - // - // if(!strcmp ("-CE", argv[i])) - // { - // testCE = atoi(argv[i + 1]); - // if(testCE==1) - // { - // i++; - // scale = (float)atof( argv[i+1] ); - // } - // else if(testCE == 2) - // { - // printf("\nCE-test 2 (transcoding) not implemented.\n"); - // exit(0); - // } - // else if(testCE < 1 || testCE > 3) - // { - // printf("\n%d is not a valid CE-test number. 
Valid CE tests - // are 1-3.\n", testCE); - // exit(0); - // } - // printf("CE-test number: %d\n", testCE); - // i++; - // } - } - - if (CodingMode == 0) { - printf("\nAdaptive BottleNeck\n"); - } - - switch (sampFreqKHz) { - case 16: { - printf("iSAC Wideband.\n"); - samplesIn10Ms = FRAMESAMPLES_10ms; - break; - } - case 32: { - printf("iSAC Supper-Wideband.\n"); - samplesIn10Ms = SWBFRAMESAMPLES_10ms; - break; - } - default: - printf("Unsupported sampling frequency %d kHz", sampFreqKHz); - exit(0); - } - - /* Get Input and Output files */ - sscanf(argv[argc - 2], "%s", inname); - sscanf(argv[argc - 1], "%s", outname); - printf("\nInput file: %s\n", inname); - printf("Output file: %s\n\n", outname); - if ((inp = fopen(inname, "rb")) == NULL) { - printf(" Error iSAC Cannot read file %s.\n", inname); - std::cout << std::flush; - exit(1); - } - - if ((outp = fopen(outname, "wb")) == NULL) { - printf(" Error iSAC Cannot write file %s.\n", outname); - std::cout << std::flush; - getc(stdin); - exit(1); - } - if (VADusage) { - if ((vadp = fopen(vadfile, "rb")) == NULL) { - printf(" Error iSAC Cannot read file %s.\n", vadfile); - std::cout << std::flush; - exit(1); - } - } - - if ((bandwidthp = fopen("bwe.pcm", "wb")) == NULL) { - printf(" Error iSAC Cannot read file %s.\n", "bwe.pcm"); - std::cout << std::flush; - exit(1); - } - - starttime = clock() / (double)CLOCKS_PER_SEC; /* Runtime statistics */ - - /* Initialize the ISAC and BN structs */ - if (testNum != 8) { - err = WebRtcIsac_Create(&ISAC_main_inst); - WebRtcIsac_SetEncSampRate(ISAC_main_inst, sampFreqKHz * 1000); - WebRtcIsac_SetDecSampRate(ISAC_main_inst, - sampFreqKHz >= 32 ? 32000 : 16000); - /* Error check */ - if (err < 0) { - printf("\n\n Error in create.\n\n"); - std::cout << std::flush; - exit(EXIT_FAILURE); - } - } - BN_data.arrival_time = 0; - BN_data.sample_count = 0; - BN_data.rtp_number = 0; - - /* Initialize encoder and decoder */ - framecnt = 0; - endfile = 0; - - if (doTransCoding) { - WebRtcIsac_Create(&decoderTransCoding); - WebRtcIsac_SetEncSampRate(decoderTransCoding, sampFreqKHz * 1000); - WebRtcIsac_SetDecSampRate(decoderTransCoding, - sampFreqKHz >= 32 ? 
32000 : 16000); - WebRtcIsac_DecoderInit(decoderTransCoding); - transCodingFile = fopen(transCodingFileName, "wb"); - if (transCodingFile == NULL) { - printf("Could not open %s to output trans-coding.\n", - transCodingFileName); - exit(0); - } - strcat(transCodingFileName, ".bit"); - transcodingBitstream = fopen(transCodingFileName, "wb"); - if (transcodingBitstream == NULL) { - printf("Could not open %s to write the bit-stream of transcoder.\n", - transCodingFileName); - exit(0); - } - } - - if (testNum != 1) { - if (WebRtcIsac_EncoderInit(ISAC_main_inst, CodingMode) < 0) { - printf("Error could not initialize the encoder \n"); - std::cout << std::flush; - return 0; - } - } - if (testNum != 2) - WebRtcIsac_DecoderInit(ISAC_main_inst); - if (CodingMode == 1) { - err = WebRtcIsac_Control(ISAC_main_inst, bottleneck, framesize); - if (err < 0) { - /* exit if returned with error */ - errtype = WebRtcIsac_GetErrorCode(ISAC_main_inst); - printf("\n\n Error in initialization (control): %d.\n\n", errtype); - std::cout << std::flush; - if (testNum == 0) { - exit(EXIT_FAILURE); - } - } - } - - if ((setControlBWE) && (CodingMode == 0)) { - err = WebRtcIsac_ControlBwe(ISAC_main_inst, rateBPS, framesize, fixedFL); - if (err < 0) { - /* exit if returned with error */ - errtype = WebRtcIsac_GetErrorCode(ISAC_main_inst); - - printf("\n\n Error in Control BWE: %d.\n\n", errtype); - std::cout << std::flush; - exit(EXIT_FAILURE); - } - } - - if (payloadSize != 0) { - err = WebRtcIsac_SetMaxPayloadSize(ISAC_main_inst, payloadSize); - if (err < 0) { - /* exit if returned with error */ - errtype = WebRtcIsac_GetErrorCode(ISAC_main_inst); - printf("\n\n Error in SetMaxPayloadSize: %d.\n\n", errtype); - std::cout << std::flush; - exit(EXIT_FAILURE); - } - } - if (payloadRate != 0) { - err = WebRtcIsac_SetMaxRate(ISAC_main_inst, payloadRate); - if (err < 0) { - /* exit if returned with error */ - errtype = WebRtcIsac_GetErrorCode(ISAC_main_inst); - printf("\n\n Error in SetMaxRateInBytes: %d.\n\n", errtype); - std::cout << std::flush; - exit(EXIT_FAILURE); - } - } - - *speechType = 1; - - std::cout << "\n" << std::flush; - - length_file = 0; - int16_t bnIdxTC = 0; - int16_t jitterInfoTC = 0; - while (endfile == 0) { - /* Call init functions at random, fault test number 7 */ - if (testNum == 7 && (rand() % 2 == 0)) { - err = WebRtcIsac_EncoderInit(ISAC_main_inst, CodingMode); - /* Error check */ - if (err < 0) { - errtype = WebRtcIsac_GetErrorCode(ISAC_main_inst); - printf("\n\n Error in encoderinit: %d.\n\n", errtype); - std::cout << std::flush; - } - - WebRtcIsac_DecoderInit(ISAC_main_inst); - } - - cur_framesmpls = 0; - while (1) { - int stream_len_int = 0; - - /* Read 10 ms speech block */ - endfile = readframe(shortdata, inp, samplesIn10Ms); - - if (endfile) { - numFileLoop++; - if (numFileLoop < totFileLoop) { - rewind(inp); - framecnt = 0; - fprintf(stderr, "\n"); - endfile = readframe(shortdata, inp, samplesIn10Ms); - } - } - - if (testNum == 7) { - srand((unsigned int)time(NULL)); - } - - /* iSAC encoding */ - if (!(testNum == 3 && framecnt == 0)) { - stream_len_int = - WebRtcIsac_Encode(ISAC_main_inst, shortdata, (uint8_t*)streamdata); - if ((payloadSize != 0) && (stream_len_int > payloadSize)) { - if (testNum == 0) { - printf("\n\n"); - } - - printf("\nError: Streamsize out of range %d\n", - stream_len_int - payloadSize); - std::cout << std::flush; - } - - WebRtcIsac_GetUplinkBw(ISAC_main_inst, &sendBN); - - if (stream_len_int > 0) { - if (doTransCoding) { - int16_t indexStream; - uint8_t auxUW8; - - 
/******************** Main Transcoding stream ********************/ - WebRtcIsac_GetDownLinkBwIndex(ISAC_main_inst, &bnIdxTC, - &jitterInfoTC); - int streamLenTransCoding_int = WebRtcIsac_GetNewBitStream( - ISAC_main_inst, bnIdxTC, jitterInfoTC, rateTransCoding, - streamDataTransCoding, false); - if (streamLenTransCoding_int < 0) { - fprintf(stderr, "Error in trans-coding\n"); - exit(0); - } - streamLenTransCoding = - static_cast(streamLenTransCoding_int); - auxUW8 = (uint8_t)(((streamLenTransCoding & 0xFF00) >> 8) & 0x00FF); - if (fwrite(&auxUW8, sizeof(uint8_t), 1, transcodingBitstream) != - 1) { - return -1; - } - - auxUW8 = (uint8_t)(streamLenTransCoding & 0x00FF); - if (fwrite(&auxUW8, sizeof(uint8_t), 1, transcodingBitstream) != - 1) { - return -1; - } - - if (fwrite(streamDataTransCoding, sizeof(uint8_t), - streamLenTransCoding, - transcodingBitstream) != streamLenTransCoding) { - return -1; - } - - WebRtcIsac_ReadBwIndex(streamDataTransCoding, &indexStream); - if (indexStream != bnIdxTC) { - fprintf(stderr, - "Error in inserting Bandwidth index into transcoding " - "stream.\n"); - exit(0); - } - numTransCodingBytes += streamLenTransCoding; - } - } - } else { - break; - } - - if (stream_len_int < 0) { - /* exit if returned with error */ - errtype = WebRtcIsac_GetErrorCode(ISAC_main_inst); - fprintf(stderr, "Error in encoder: %d.\n", errtype); - std::cout << std::flush; - exit(0); - } - stream_len = static_cast(stream_len_int); - - cur_framesmpls += samplesIn10Ms; - /* exit encoder loop if the encoder returned a bitstream */ - if (stream_len != 0) - break; - } - - /* read next bottleneck rate */ - if (f_bn != NULL) { - if (fscanf(f_bn, "%d", &bottleneck) == EOF) { - /* Set pointer to beginning of file */ - fseek(f_bn, 0L, SEEK_SET); - if (fscanf(f_bn, "%d", &bottleneck) == EOF) { - exit(0); - } - } - if (CodingMode == 1) { - WebRtcIsac_Control(ISAC_main_inst, bottleneck, framesize); - } - } - - length_file += cur_framesmpls; - if (cur_framesmpls == (3 * samplesIn10Ms)) { - maxStreamLen30 = - (stream_len > maxStreamLen30) ? stream_len : maxStreamLen30; - } else { - maxStreamLen60 = - (stream_len > maxStreamLen60) ? stream_len : maxStreamLen60; - } - - if (!lostFrame) { - lostFrame = ((rand() % 100) < packetLossPercent); - } else { - lostFrame = false; - } - - // RED. 
- if (lostFrame) { - int stream_len_int = WebRtcIsac_GetRedPayload( - ISAC_main_inst, reinterpret_cast(streamdata)); - if (stream_len_int < 0) { - fprintf(stderr, "Error getting RED payload\n"); - exit(0); - } - stream_len = static_cast(stream_len_int); - - if (doTransCoding) { - int streamLenTransCoding_int = WebRtcIsac_GetNewBitStream( - ISAC_main_inst, bnIdxTC, jitterInfoTC, rateTransCoding, - streamDataTransCoding, true); - if (streamLenTransCoding_int < 0) { - fprintf(stderr, "Error in RED trans-coding\n"); - exit(0); - } - streamLenTransCoding = static_cast(streamLenTransCoding_int); - } - } - - /* make coded sequence to short be inreasing */ - /* the length the decoder expects */ - if (testNum == 4) { - stream_len += 10; - } - - /* make coded sequence to long be decreasing */ - /* the length the decoder expects */ - if (testNum == 5) { - stream_len -= 10; - } - - if (testNum == 6) { - srand((unsigned int)time(NULL)); - for (i = 0; i < stream_len; i++) { - streamdata[i] = rand(); - } - } - - if (VADusage) { - readframe(vaddata, vadp, samplesIn10Ms * 3); - } - - /* simulate packet handling through NetEq and the modem */ - if (!(testNum == 3 && framecnt == 0)) { - get_arrival_time(cur_framesmpls, stream_len, bottleneck, &BN_data, - sampFreqKHz * 1000, sampFreqKHz * 1000); - } - - if (VADusage && (framecnt > 10 && vaddata[0] == 0)) { - BN_data.rtp_number--; - } else { - /* Error test number 10, garbage data */ - if (testNum == 10) { - /* Test to run decoder with garbage data */ - for (i = 0; i < stream_len; i++) { - streamdata[i] = (short)(streamdata[i]) + (short)rand(); - } - } - - if (testNum != 9) { - err = WebRtcIsac_UpdateBwEstimate( - ISAC_main_inst, reinterpret_cast(streamdata), - stream_len, BN_data.rtp_number, BN_data.sample_count, - BN_data.arrival_time); - - if (err < 0) { - /* exit if returned with error */ - errtype = WebRtcIsac_GetErrorCode(ISAC_main_inst); - if (testNum == 0) { - printf("\n\n"); - } - - printf("Error: in decoder: %d.", errtype); - std::cout << std::flush; - if (testNum == 0) { - printf("\n\n"); - } - } - } - - /* Call getFramelen, only used here for function test */ - err = WebRtcIsac_ReadFrameLen( - ISAC_main_inst, reinterpret_cast(streamdata), &FL); - if (err < 0) { - /* exit if returned with error */ - errtype = WebRtcIsac_GetErrorCode(ISAC_main_inst); - if (testNum == 0) { - printf("\n\n"); - } - printf(" Error: in getFrameLen %d.", errtype); - std::cout << std::flush; - if (testNum == 0) { - printf("\n\n"); - } - } - - // iSAC decoding - - if (lostFrame) { - declen = WebRtcIsac_DecodeRcu( - ISAC_main_inst, reinterpret_cast(streamdata), - stream_len, decoded, speechType); - - if (doTransCoding) { - declenTC = - WebRtcIsac_DecodeRcu(decoderTransCoding, streamDataTransCoding, - streamLenTransCoding, decodedTC, speechType); - } - } else { - declen = WebRtcIsac_Decode(ISAC_main_inst, - reinterpret_cast(streamdata), - stream_len, decoded, speechType); - if (doTransCoding) { - declenTC = - WebRtcIsac_Decode(decoderTransCoding, streamDataTransCoding, - streamLenTransCoding, decodedTC, speechType); - } - } - - if (declen < 0) { - /* exit if returned with error */ - errtype = WebRtcIsac_GetErrorCode(ISAC_main_inst); - if (testNum == 0) { - printf("\n\n"); - } - printf(" Error: in decoder %d.", errtype); - std::cout << std::flush; - if (testNum == 0) { - printf("\n\n"); - } - } - - if (declenTC < 0) { - if (testNum == 0) { - printf("\n\n"); - } - printf(" Error: in decoding the transcoded stream"); - std::cout << std::flush; - if (testNum == 0) { - 
printf("\n\n"); - } - } - } - /* Write decoded speech frame to file */ - if ((declen > 0) && (numFileLoop == 0)) { - if (fwrite(decoded, sizeof(int16_t), declen, outp) != - static_cast(declen)) { - return -1; - } - } - - if ((declenTC > 0) && (numFileLoop == 0)) { - if (fwrite(decodedTC, sizeof(int16_t), declen, transCodingFile) != - static_cast(declen)) { - return -1; - } - } - - fprintf(stderr, "\rframe = %5d ", framecnt); - fflush(stderr); - framecnt++; - - /* Error test number 10, garbage data */ - // if (testNum == 10) - // { - // /* Test to run decoder with garbage data */ - // if ((seedfile = fopen(SEED_FILE, "a+t")) == NULL) { - // fprintf(stderr, "Error: Could not open file %s\n", SEED_FILE); - // } else { - // fprintf(seedfile, "ok\n\n"); - // fclose(seedfile); - // } - // } - /* Error test number 10, garbage data */ - // if (testNum == 10) { - // /* Test to run decoder with garbage data */ - // for (i = 0; i < stream_len; i++) { - // streamdata[i] = (short) (streamdata[i] + (short) rand()); - // } - // } - - totalsmpls += declen; - totalbits += 8 * stream_len; -#if !defined(NDEBUG) - kbps = ((double)sampFreqKHz * 1000.) / ((double)cur_framesmpls) * 8.0 * - stream_len / 1000.0; // kbits/s - fy = fopen("bit_rate.dat", "a"); - fprintf(fy, "Frame %i = %0.14f\n", framecnt, kbps); - fclose(fy); - -#endif - } - printf("\n"); - printf("total bits = %zu bits\n", totalbits); - printf("measured average bitrate = %0.3f kbits/s\n", - (double)totalbits * (sampFreqKHz) / totalsmpls); - if (doTransCoding) { - printf("Transcoding average bit-rate = %0.3f kbps\n", - (double)numTransCodingBytes * 8.0 * (sampFreqKHz) / totalsmpls); - fclose(transCodingFile); - } - printf("\n"); - - /* Runtime statistics */ - runtime = (double)(clock() / (double)CLOCKS_PER_SEC - starttime); - length_file = length_file / (sampFreqKHz * 1000.); - - printf("\n\nLength of speech file: %.1f s\n", length_file); - printf("Time to run iSAC: %.2f s (%.2f %% of realtime)\n\n", runtime, - (100 * runtime / length_file)); - - if (maxStreamLen30 != 0) { - printf( - "Maximum payload size 30ms Frames %zu" - " bytes (%0.3f kbps)\n", - maxStreamLen30, maxStreamLen30 * 8 / 30.); - } - if (maxStreamLen60 != 0) { - printf( - "Maximum payload size 60ms Frames %zu" - " bytes (%0.3f kbps)\n", - maxStreamLen60, maxStreamLen60 * 8 / 60.); - } - // fprintf(stderr, "\n"); - - fprintf(stderr, " %.1f s", length_file); - fprintf(stderr, " %0.1f kbps", - (double)totalbits * (sampFreqKHz) / totalsmpls); - if (maxStreamLen30 != 0) { - fprintf(stderr, " plmax-30ms %zu bytes (%0.0f kbps)", maxStreamLen30, - maxStreamLen30 * 8 / 30.); - } - if (maxStreamLen60 != 0) { - fprintf(stderr, " plmax-60ms %zu bytes (%0.0f kbps)", maxStreamLen60, - maxStreamLen60 * 8 / 60.); - } - if (doTransCoding) { - fprintf(stderr, " transcoding rate %.0f kbps", - (double)numTransCodingBytes * 8.0 * (sampFreqKHz) / totalsmpls); - } - - fclose(inp); - fclose(outp); - WebRtcIsac_Free(ISAC_main_inst); - - exit(0); -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/test/SwitchingSampRate/SwitchingSampRate.cc b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/test/SwitchingSampRate/SwitchingSampRate.cc deleted file mode 100644 index 549163fc44d4..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/test/SwitchingSampRate/SwitchingSampRate.cc +++ /dev/null @@ -1,425 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -// SwitchingSampRate.cpp : Defines the entry point for the console -// application. -// - -#include - -#include "common_audio/signal_processing/include/signal_processing_library.h" -#include "modules/audio_coding/codecs/isac/main/include/isac.h" -#include "modules/audio_coding/codecs/isac/main/util/utility.h" - -#define MAX_FILE_NAME 500 -#define MAX_NUM_CLIENTS 2 - -#define NUM_CLIENTS 2 - -int main(int argc, char* argv[]) { - char fileNameWB[MAX_FILE_NAME]; - char fileNameSWB[MAX_FILE_NAME]; - - char outFileName[MAX_NUM_CLIENTS][MAX_FILE_NAME]; - - FILE* inFile[MAX_NUM_CLIENTS]; - FILE* outFile[MAX_NUM_CLIENTS]; - - ISACStruct* codecInstance[MAX_NUM_CLIENTS]; - int32_t resamplerState[MAX_NUM_CLIENTS][8]; - - int encoderSampRate[MAX_NUM_CLIENTS]; - - int minBn = 16000; - int maxBn = 56000; - - int bnWB = 32000; - int bnSWB = 56000; - - strcpy(outFileName[0], "switchSampRate_out1.pcm"); - strcpy(outFileName[1], "switchSampRate_out2.pcm"); - - short clientCntr; - - size_t lenEncodedInBytes[MAX_NUM_CLIENTS]; - unsigned int lenAudioIn10ms[MAX_NUM_CLIENTS]; - size_t lenEncodedInBytesTmp[MAX_NUM_CLIENTS]; - unsigned int lenAudioIn10msTmp[MAX_NUM_CLIENTS]; - BottleNeckModel* packetData[MAX_NUM_CLIENTS]; - - char versionNumber[100]; - short samplesIn10ms[MAX_NUM_CLIENTS]; - int bottleneck[MAX_NUM_CLIENTS]; - - printf("\n\n"); - printf("____________________________________________\n\n"); - WebRtcIsac_version(versionNumber); - printf(" iSAC-swb version %s\n", versionNumber); - printf("____________________________________________\n"); - - fileNameWB[0] = '\0'; - fileNameSWB[0] = '\0'; - - char myFlag[20]; - strcpy(myFlag, "-wb"); - // READ THE WIDEBAND AND SUPER-WIDEBAND FILE NAMES - if (readParamString(argc, argv, myFlag, fileNameWB, MAX_FILE_NAME) <= 0) { - printf("No wideband file is specified"); - } - - strcpy(myFlag, "-swb"); - if (readParamString(argc, argv, myFlag, fileNameSWB, MAX_FILE_NAME) <= 0) { - printf("No super-wideband file is specified"); - } - - // THE FIRST CLIENT STARTS IN WIDEBAND - encoderSampRate[0] = 16000; - OPEN_FILE_RB(inFile[0], fileNameWB); - - // THE SECOND CLIENT STARTS IN SUPER-WIDEBAND - encoderSampRate[1] = 32000; - OPEN_FILE_RB(inFile[1], fileNameSWB); - - strcpy(myFlag, "-I"); - short codingMode = readSwitch(argc, argv, myFlag); - - for (clientCntr = 0; clientCntr < NUM_CLIENTS; clientCntr++) { - codecInstance[clientCntr] = NULL; - - printf("\n"); - printf("Client %d\n", clientCntr + 1); - printf("---------\n"); - printf("Starting %s", (encoderSampRate[clientCntr] == 16000) - ? "wideband" - : "super-wideband"); - - // Open output File Name - OPEN_FILE_WB(outFile[clientCntr], outFileName[clientCntr]); - printf("Output File...................... %s\n", outFileName[clientCntr]); - - samplesIn10ms[clientCntr] = encoderSampRate[clientCntr] * 10; - - if (codingMode == 1) { - bottleneck[clientCntr] = (clientCntr) ? bnSWB : bnWB; - } else { - bottleneck[clientCntr] = (clientCntr) ? minBn : maxBn; - } - - printf("Bottleneck....................... %0.3f kbits/sec \n", - bottleneck[clientCntr] / 1000.0); - - // coding-mode - printf( - "Encoding Mode.................... %s\n", - (codingMode == 1) ? 
"Channel-Independent (Instantaneous)" : "Adaptive"); - - lenEncodedInBytes[clientCntr] = 0; - lenAudioIn10ms[clientCntr] = 0; - lenEncodedInBytesTmp[clientCntr] = 0; - lenAudioIn10msTmp[clientCntr] = 0; - - packetData[clientCntr] = (BottleNeckModel*)new (BottleNeckModel); - if (packetData[clientCntr] == NULL) { - printf("Could not allocate memory for packetData \n"); - return -1; - } - memset(packetData[clientCntr], 0, sizeof(BottleNeckModel)); - memset(resamplerState[clientCntr], 0, sizeof(int32_t) * 8); - } - - for (clientCntr = 0; clientCntr < NUM_CLIENTS; clientCntr++) { - // Create - if (WebRtcIsac_Create(&codecInstance[clientCntr])) { - printf("Could not creat client %d\n", clientCntr + 1); - return -1; - } - - WebRtcIsac_SetEncSampRate(codecInstance[clientCntr], - encoderSampRate[clientCntr]); - - WebRtcIsac_SetDecSampRate( - codecInstance[clientCntr], - encoderSampRate[clientCntr + (1 - ((clientCntr & 1) << 1))]); - - // Initialize Encoder - if (WebRtcIsac_EncoderInit(codecInstance[clientCntr], codingMode) < 0) { - printf("Could not initialize client, %d\n", clientCntr + 1); - return -1; - } - - WebRtcIsac_DecoderInit(codecInstance[clientCntr]); - - // setup Rate if in Instantaneous mode - if (codingMode != 0) { - // ONLY Clients who are not in Adaptive mode - if (WebRtcIsac_Control(codecInstance[clientCntr], bottleneck[clientCntr], - 30) < 0) { - printf("Could not setup bottleneck and frame-size for client %d\n", - clientCntr + 1); - return -1; - } - } - } - - size_t streamLen; - short numSamplesRead; - size_t lenDecodedAudio; - short senderIdx; - short receiverIdx; - - printf("\n"); - short num10ms[MAX_NUM_CLIENTS]; - memset(num10ms, 0, sizeof(short) * MAX_NUM_CLIENTS); - FILE* arrivalTimeFile1 = fopen("arrivalTime1.dat", "wb"); - FILE* arrivalTimeFile2 = fopen("arrivalTime2.dat", "wb"); - short numPrint[MAX_NUM_CLIENTS]; - memset(numPrint, 0, sizeof(short) * MAX_NUM_CLIENTS); - - // Audio Buffers - short silence10ms[10 * 32]; - memset(silence10ms, 0, 320 * sizeof(short)); - short audioBuff10ms[10 * 32]; - short audioBuff60ms[60 * 32]; - short resampledAudio60ms[60 * 32]; - - unsigned short bitStream[600 + 600]; - short speechType[1]; - - short numSampFreqChanged = 0; - while (numSampFreqChanged < 10) { - for (clientCntr = 0; clientCntr < NUM_CLIENTS; clientCntr++) { - // Encoding/decoding for this pair of clients, if there is - // audio for any of them - // if(audioLeft[clientCntr] || audioLeft[clientCntr + 1]) - //{ - // for(pairCntr = 0; pairCntr < 2; pairCntr++) - //{ - senderIdx = clientCntr; // + pairCntr; - receiverIdx = 1 - clientCntr; // + (1 - pairCntr); - - // if(num10ms[senderIdx] > 6) - //{ - // printf("Too many frames read for client %d", - // senderIdx + 1); - // return -1; - //} - - numSamplesRead = - (short)fread(audioBuff10ms, sizeof(short), samplesIn10ms[senderIdx], - inFile[senderIdx]); - if (numSamplesRead != samplesIn10ms[senderIdx]) { - // file finished switch encoder sampling frequency. 
- printf("Changing Encoder Sampling frequency in client %d to ", - senderIdx + 1); - fclose(inFile[senderIdx]); - numSampFreqChanged++; - if (encoderSampRate[senderIdx] == 16000) { - printf("super-wideband.\n"); - OPEN_FILE_RB(inFile[senderIdx], fileNameSWB); - encoderSampRate[senderIdx] = 32000; - } else { - printf("wideband.\n"); - OPEN_FILE_RB(inFile[senderIdx], fileNameWB); - encoderSampRate[senderIdx] = 16000; - } - WebRtcIsac_SetEncSampRate(codecInstance[senderIdx], - encoderSampRate[senderIdx]); - WebRtcIsac_SetDecSampRate(codecInstance[receiverIdx], - encoderSampRate[senderIdx]); - - samplesIn10ms[clientCntr] = encoderSampRate[clientCntr] * 10; - - numSamplesRead = - (short)fread(audioBuff10ms, sizeof(short), samplesIn10ms[senderIdx], - inFile[senderIdx]); - if (numSamplesRead != samplesIn10ms[senderIdx]) { - printf(" File %s for client %d has not enough audio\n", - (encoderSampRate[senderIdx] == 16000) ? "wideband" - : "super-wideband", - senderIdx + 1); - return -1; - } - } - num10ms[senderIdx]++; - - // sanity check - // if(num10ms[senderIdx] > 6) - //{ - // printf("Client %d has got more than 60 ms audio and encoded no - // packet.\n", - // senderIdx); - // return -1; - //} - - // Encode - - int streamLen_int = WebRtcIsac_Encode(codecInstance[senderIdx], - audioBuff10ms, (uint8_t*)bitStream); - int16_t ggg; - if (streamLen_int > 0) { - if ((WebRtcIsac_ReadFrameLen( - codecInstance[receiverIdx], - reinterpret_cast(bitStream), &ggg)) < 0) - printf("ERROR\n"); - } - - // Sanity check - if (streamLen_int < 0) { - printf(" Encoder error in client %d \n", senderIdx + 1); - return -1; - } - streamLen = static_cast(streamLen_int); - - if (streamLen > 0) { - // Packet generated; model sending through a channel, do bandwidth - // estimation at the receiver and decode. - lenEncodedInBytes[senderIdx] += streamLen; - lenAudioIn10ms[senderIdx] += (unsigned int)num10ms[senderIdx]; - lenEncodedInBytesTmp[senderIdx] += streamLen; - lenAudioIn10msTmp[senderIdx] += (unsigned int)num10ms[senderIdx]; - - // Print after ~5 sec. - if (lenAudioIn10msTmp[senderIdx] >= 100) { - numPrint[senderIdx]++; - printf(" %d, %6.3f => %6.3f ", senderIdx + 1, - bottleneck[senderIdx] / 1000.0, - lenEncodedInBytesTmp[senderIdx] * 0.8 / - lenAudioIn10msTmp[senderIdx]); - - if (codingMode == 0) { - int32_t bn; - WebRtcIsac_GetUplinkBw(codecInstance[senderIdx], &bn); - printf("[%d] ", bn); - } - // int16_t rateIndexLB; - // int16_t rateIndexUB; - // WebRtcIsac_GetDownLinkBwIndex(codecInstance[receiverIdx], - // &rateIndexLB, &rateIndexUB); - // printf(" (%2d, %2d) ", rateIndexLB, rateIndexUB); - - std::cout << std::flush; - lenEncodedInBytesTmp[senderIdx] = 0; - lenAudioIn10msTmp[senderIdx] = 0; - // if(senderIdx == (NUM_CLIENTS - 1)) - //{ - printf(" %0.1f \n", lenAudioIn10ms[senderIdx] * 10. / 1000); - //} - - // After ~20 sec change the bottleneck. 
- // if((numPrint[senderIdx] == 4) && (codingMode == 0)) - // { - // numPrint[senderIdx] = 0; - // if(codingMode == 0) - // { - // int newBottleneck = bottleneck[senderIdx] + - // (bottleneckChange[senderIdx] * 1000); - - // if(bottleneckChange[senderIdx] > 0) - // { - // if(newBottleneck >maxBn) - // { - // bottleneckChange[senderIdx] = -1; - // newBottleneck = bottleneck[senderIdx] + - // (bottleneckChange[senderIdx] * 1000); - // if(newBottleneck > minBn) - // { - // bottleneck[senderIdx] = newBottleneck; - // } - // } - // else - // { - // bottleneck[senderIdx] = newBottleneck; - // } - // } - // else - // { - // if(newBottleneck < minBn) - // { - // bottleneckChange[senderIdx] = 1; - // newBottleneck = bottleneck[senderIdx] + - // (bottleneckChange[senderIdx] * 1000); - // if(newBottleneck < maxBn) - // { - // bottleneck[senderIdx] = newBottleneck; - // } - // } - // else - // { - // bottleneck[senderIdx] = newBottleneck; - // } - // } - // } - // } - } - - // model a channel of given bottleneck, to get the receive timestamp - get_arrival_time(num10ms[senderIdx] * samplesIn10ms[senderIdx], - streamLen, bottleneck[senderIdx], - packetData[senderIdx], - encoderSampRate[senderIdx] * 1000, - encoderSampRate[senderIdx] * 1000); - - // Write the arrival time. - if (senderIdx == 0) { - if (fwrite(&(packetData[senderIdx]->arrival_time), - sizeof(unsigned int), 1, arrivalTimeFile1) != 1) { - return -1; - } - } else { - if (fwrite(&(packetData[senderIdx]->arrival_time), - sizeof(unsigned int), 1, arrivalTimeFile2) != 1) { - return -1; - } - } - - // BWE - if (WebRtcIsac_UpdateBwEstimate( - codecInstance[receiverIdx], - reinterpret_cast(bitStream), streamLen, - packetData[senderIdx]->rtp_number, - packetData[senderIdx]->sample_count, - packetData[senderIdx]->arrival_time) < 0) { - printf(" BWE Error at client %d \n", receiverIdx + 1); - return -1; - } - /**/ - // Decode - int lenDecodedAudio_int = - WebRtcIsac_Decode(codecInstance[receiverIdx], - reinterpret_cast(bitStream), - streamLen, audioBuff60ms, speechType); - if (lenDecodedAudio_int < 0) { - printf(" Decoder error in client %d \n", receiverIdx + 1); - return -1; - } - lenDecodedAudio = static_cast(lenDecodedAudio_int); - - if (encoderSampRate[senderIdx] == 16000) { - WebRtcSpl_UpsampleBy2(audioBuff60ms, lenDecodedAudio, - resampledAudio60ms, - resamplerState[receiverIdx]); - if (fwrite(resampledAudio60ms, sizeof(short), lenDecodedAudio << 1, - outFile[receiverIdx]) != lenDecodedAudio << 1) { - return -1; - } - } else { - if (fwrite(audioBuff60ms, sizeof(short), lenDecodedAudio, - outFile[receiverIdx]) != lenDecodedAudio) { - return -1; - } - } - num10ms[senderIdx] = 0; - } - //} - //} - } - } - return 0; -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/test/simpleKenny.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/test/simpleKenny.c deleted file mode 100644 index 4446ff7806f0..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/test/simpleKenny.c +++ /dev/null @@ -1,461 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/* kenny.c - Main function for the iSAC coder */ - -#include -#include -#include -#include - -#ifdef WIN32 -#include "windows.h" -#ifndef CLOCKS_PER_SEC -#define CLOCKS_PER_SEC 1000 -#endif -#endif - -#include - -/* include API */ -#include "modules/audio_coding/codecs/isac/main/include/isac.h" -#include "modules/audio_coding/codecs/isac/main/util/utility.h" - -/* max number of samples per frame (= 60 ms frame) */ -#define MAX_FRAMESAMPLES_SWB 1920 -/* number of samples per 10ms frame */ -#define FRAMESAMPLES_SWB_10ms 320 -#define FRAMESAMPLES_WB_10ms 160 - -/* sampling frequency (Hz) */ -#define FS_SWB 32000 -#define FS_WB 16000 - -unsigned long framecnt = 0; - -int main(int argc, char* argv[]) { - //--- File IO ---- - FILE* inp; - FILE* outp; - char inname[500]; - char outname[500]; - - /* Runtime statistics */ - double rate; - double rateRCU; - size_t totalbits = 0; - unsigned long totalBitsRCU = 0; - unsigned long totalsmpls = 0; - - int32_t bottleneck = 39; - int frameSize = 30; /* ms */ - int16_t codingMode = 1; - int16_t shortdata[FRAMESAMPLES_SWB_10ms]; - int16_t decoded[MAX_FRAMESAMPLES_SWB]; - int16_t speechType[1]; - int16_t payloadLimit; - int32_t rateLimit; - ISACStruct* ISAC_main_inst; - - size_t stream_len = 0; - int declen = 0; - int16_t err; - int cur_framesmpls; - int endfile; -#ifdef WIN32 - double length_file; - double runtime; - char outDrive[10]; - char outPath[500]; - char outPrefix[500]; - char outSuffix[500]; - char bitrateFileName[500]; - FILE* bitrateFile; - double starttime; - double rateLB = 0; - double rateUB = 0; -#endif - FILE* histFile; - FILE* averageFile; - int sampFreqKHz; - int samplesIn10Ms; - size_t maxStreamLen = 0; - char histFileName[500]; - char averageFileName[500]; - unsigned int hist[600]; - double tmpSumStreamLen = 0; - unsigned int packetCntr = 0; - unsigned int lostPacketCntr = 0; - uint8_t payload[1200]; - uint8_t payloadRCU[1200]; - uint16_t packetLossPercent = 0; - int16_t rcuStreamLen = 0; - int onlyEncode; - int onlyDecode; - - BottleNeckModel packetData; - packetData.arrival_time = 0; - packetData.sample_count = 0; - packetData.rtp_number = 0; - memset(hist, 0, sizeof(hist)); - - /* handling wrong input arguments in the command line */ - if (argc < 5) { - printf("\n\nWrong number of arguments or flag values.\n\n"); - - printf("Usage:\n\n"); - printf("%s infile outfile -bn bottleneck [options]\n\n", argv[0]); - printf("with:\n"); - printf("-I.............. indicates encoding in instantaneous mode.\n"); - printf("-bn bottleneck.. the value of the bottleneck in bit/sec, e.g.\n"); - printf(" 39742, in instantaneous (channel-independent)\n"); - printf(" mode.\n\n"); - printf("infile.......... Normal speech input file\n\n"); - printf("outfile......... Speech output file\n\n"); - printf("OPTIONS\n"); - printf("-------\n"); - printf("-fs sampFreq.... sampling frequency of codec 16 or 32 (default)\n"); - printf(" kHz.\n"); - printf("-plim payloadLim payload limit in bytes, default is the maximum\n"); - printf(" possible.\n"); - printf("-rlim rateLim... rate limit in bits/sec, default is the maximum\n"); - printf(" possible.\n"); - printf("-h file......... record histogram and *append* to 'file'.\n"); - printf("-ave file....... record average rate of 3 sec intervales and\n"); - printf(" *append* to 'file'.\n"); - printf("-ploss.......... packet-loss percentage.\n"); - printf("-enc............ do only encoding and store the bit-stream\n"); - printf("-dec............ 
the input file is a bit-stream, decode it.\n\n"); - printf("Example usage:\n\n"); - printf("%s speechIn.pcm speechOut.pcm -B 40000 -fs 32\n\n", argv[0]); - - exit(0); - } - - /* Get Bottleneck value */ - bottleneck = readParamInt(argc, argv, "-bn", 50000); - fprintf(stderr, "\nfixed bottleneck rate of %d bits/s\n\n", bottleneck); - - /* Get Input and Output files */ - sscanf(argv[1], "%s", inname); - sscanf(argv[2], "%s", outname); - codingMode = readSwitch(argc, argv, "-I"); - sampFreqKHz = (int16_t)readParamInt(argc, argv, "-fs", 32); - if (readParamString(argc, argv, "-h", histFileName, 500) > 0) { - histFile = fopen(histFileName, "a"); - if (histFile == NULL) { - printf("cannot open hist file %s", histFileName); - exit(0); - } - } else { - // NO recording of hitstogram - histFile = NULL; - } - - packetLossPercent = readParamInt(argc, argv, "-ploss", 0); - - if (readParamString(argc, argv, "-ave", averageFileName, 500) > 0) { - averageFile = fopen(averageFileName, "a"); - if (averageFile == NULL) { - printf("cannot open file to write rate %s", averageFileName); - exit(0); - } - } else { - averageFile = NULL; - } - - onlyEncode = readSwitch(argc, argv, "-enc"); - onlyDecode = readSwitch(argc, argv, "-dec"); - - switch (sampFreqKHz) { - case 16: { - samplesIn10Ms = 160; - break; - } - case 32: { - samplesIn10Ms = 320; - break; - } - default: - printf("A sampling frequency of %d kHz is not supported, valid values are" - " 8 and 16.\n", sampFreqKHz); - exit(-1); - } - payloadLimit = (int16_t)readParamInt(argc, argv, "-plim", 400); - rateLimit = readParamInt(argc, argv, "-rlim", 106800); - - if ((inp = fopen(inname, "rb")) == NULL) { - printf(" iSAC: Cannot read file %s.\n", inname); - exit(1); - } - if ((outp = fopen(outname, "wb")) == NULL) { - printf(" iSAC: Cannot write file %s.\n", outname); - exit(1); - } - -#ifdef WIN32 - _splitpath(outname, outDrive, outPath, outPrefix, outSuffix); - _makepath(bitrateFileName, outDrive, outPath, "bitrate", ".txt"); - - bitrateFile = fopen(bitrateFileName, "a"); - fprintf(bitrateFile, "%% %s \n", inname); -#endif - - printf("\n"); - printf("Input.................... %s\n", inname); - printf("Output................... %s\n", outname); - printf("Encoding Mode............ %s\n", - (codingMode == 1) ? "Channel-Independent" : "Channel-Adaptive"); - printf("Bottleneck............... %d bits/sec\n", bottleneck); - printf("Packet-loss Percentage... %d\n", packetLossPercent); - printf("\n"); - -#ifdef WIN32 - starttime = clock() / (double)CLOCKS_PER_SEC; /* Runtime statistics */ -#endif - - /* Initialize the ISAC and BN structs */ - err = WebRtcIsac_Create(&ISAC_main_inst); - - WebRtcIsac_SetEncSampRate(ISAC_main_inst, sampFreqKHz * 1000); - WebRtcIsac_SetDecSampRate(ISAC_main_inst, sampFreqKHz >= 32 ? 
32000 : 16000); - /* Error check */ - if (err < 0) { - fprintf(stderr, "\n\n Error in create.\n\n"); - exit(EXIT_FAILURE); - } - - framecnt = 0; - endfile = 0; - - /* Initialize encoder and decoder */ - if (WebRtcIsac_EncoderInit(ISAC_main_inst, codingMode) < 0) { - printf("cannot initialize encoder\n"); - return -1; - } - WebRtcIsac_DecoderInit(ISAC_main_inst); - - if (codingMode == 1) { - if (WebRtcIsac_Control(ISAC_main_inst, bottleneck, frameSize) < 0) { - printf("cannot set bottleneck\n"); - return -1; - } - } else { - if (WebRtcIsac_ControlBwe(ISAC_main_inst, 15000, 30, 1) < 0) { - printf("cannot configure BWE\n"); - return -1; - } - } - - if (WebRtcIsac_SetMaxPayloadSize(ISAC_main_inst, payloadLimit) < 0) { - printf("cannot set maximum payload size %d.\n", payloadLimit); - return -1; - } - - if (rateLimit < 106800) { - if (WebRtcIsac_SetMaxRate(ISAC_main_inst, rateLimit) < 0) { - printf("cannot set the maximum rate %d.\n", rateLimit); - return -1; - } - } - - while (endfile == 0) { - fprintf(stderr, " \rframe = %7li", framecnt); - - //============== Readind from the file and encoding ================= - cur_framesmpls = 0; - stream_len = 0; - - if (onlyDecode) { - uint8_t auxUW8; - if (fread(&auxUW8, sizeof(uint8_t), 1, inp) < 1) { - break; - } - stream_len = auxUW8 << 8; - if (fread(&auxUW8, sizeof(uint8_t), 1, inp) < 1) { - break; - } - stream_len |= auxUW8; - if (fread(payload, 1, stream_len, inp) < stream_len) { - printf("last payload is corrupted\n"); - break; - } - } else { - while (stream_len == 0) { - int stream_len_int; - - // Read 10 ms speech block - endfile = readframe(shortdata, inp, samplesIn10Ms); - if (endfile) { - break; - } - cur_framesmpls += samplesIn10Ms; - - //-------- iSAC encoding --------- - stream_len_int = WebRtcIsac_Encode(ISAC_main_inst, shortdata, payload); - - if (stream_len_int < 0) { - // exit if returned with error - // errType=WebRtcIsac_GetErrorCode(ISAC_main_inst); - fprintf(stderr, "\nError in encoder\n"); - getc(stdin); - exit(EXIT_FAILURE); - } - stream_len = (size_t)stream_len_int; - } - //=================================================================== - if (endfile) { - break; - } - - rcuStreamLen = WebRtcIsac_GetRedPayload(ISAC_main_inst, payloadRCU); - if (rcuStreamLen < 0) { - fprintf(stderr, "\nError getting RED payload\n"); - getc(stdin); - exit(EXIT_FAILURE); - } - - get_arrival_time(cur_framesmpls, stream_len, bottleneck, &packetData, - sampFreqKHz * 1000, sampFreqKHz * 1000); - if (WebRtcIsac_UpdateBwEstimate( - ISAC_main_inst, payload, stream_len, packetData.rtp_number, - packetData.sample_count, packetData.arrival_time) < 0) { - printf(" BWE Error at client\n"); - return -1; - } - } - - if (endfile) { - break; - } - - maxStreamLen = (stream_len > maxStreamLen) ? 
stream_len : maxStreamLen; - packetCntr++; - - hist[stream_len]++; - if (averageFile != NULL) { - tmpSumStreamLen += stream_len; - if (packetCntr == 100) { - // kbps - fprintf(averageFile, "%8.3f ", - tmpSumStreamLen * 8.0 / (30.0 * packetCntr)); - packetCntr = 0; - tmpSumStreamLen = 0; - } - } - - if (onlyEncode) { - uint8_t auxUW8; - auxUW8 = (uint8_t)(((stream_len & 0x7F00) >> 8) & 0xFF); - if (fwrite(&auxUW8, sizeof(uint8_t), 1, outp) != 1) { - return -1; - } - - auxUW8 = (uint8_t)(stream_len & 0xFF); - if (fwrite(&auxUW8, sizeof(uint8_t), 1, outp) != 1) { - return -1; - } - if (fwrite(payload, 1, stream_len, outp) != stream_len) { - return -1; - } - } else { - //======================= iSAC decoding =========================== - - if ((rand() % 100) < packetLossPercent) { - declen = WebRtcIsac_DecodeRcu(ISAC_main_inst, payloadRCU, - (size_t)rcuStreamLen, decoded, - speechType); - lostPacketCntr++; - } else { - declen = WebRtcIsac_Decode(ISAC_main_inst, payload, stream_len, decoded, - speechType); - } - if (declen <= 0) { - // errType=WebRtcIsac_GetErrorCode(ISAC_main_inst); - fprintf(stderr, "\nError in decoder.\n"); - getc(stdin); - exit(1); - } - - // Write decoded speech frame to file - if (fwrite(decoded, sizeof(int16_t), declen, outp) != (size_t)declen) { - return -1; - } - cur_framesmpls = declen; - } - // Update Statistics - framecnt++; - totalsmpls += cur_framesmpls; - if (stream_len > 0) { - totalbits += 8 * stream_len; - } - if (rcuStreamLen > 0) { - totalBitsRCU += 8 * rcuStreamLen; - } - } - - rate = ((double)totalbits * (sampFreqKHz)) / (double)totalsmpls; - rateRCU = ((double)totalBitsRCU * (sampFreqKHz)) / (double)totalsmpls; - - printf("\n\n"); - printf("Sampling Rate............... %d kHz\n", sampFreqKHz); - printf("Payload Limit............... %d bytes \n", payloadLimit); - printf("Rate Limit.................. %d bits/sec \n", rateLimit); - -#ifdef WIN32 - fprintf(bitrateFile, "%d %10lu %d %6.3f %6.3f %6.3f\n", - sampFreqKHz, framecnt, bottleneck, rateLB, rateUB, rate); - fclose(bitrateFile); -#endif // WIN32 - - printf("\n"); - printf("Measured bit-rate........... %0.3f kbps\n", rate); - printf("Measured RCU bit-ratre...... %0.3f kbps\n", rateRCU); - printf("Maximum bit-rate/payloadsize %0.3f / %zu\n", - maxStreamLen * 8 / 0.03, maxStreamLen); - printf("Measured packet-loss........ %0.1f%% \n", - 100.0f * (float)lostPacketCntr / (float)packetCntr); - - printf("\n"); - -/* Runtime statistics */ -#ifdef WIN32 - runtime = (double)(clock() / (double)CLOCKS_PER_SEC - starttime); - length_file = ((double)framecnt * (double)declen / (sampFreqKHz * 1000)); - printf("Length of speech file....... %.1f s\n", length_file); - printf("Time to run iSAC............ 
%.2f s (%.2f %% of realtime)\n\n", - runtime, (100 * runtime / length_file)); -#endif - printf("\n\n_______________________________________________\n"); - - if (histFile != NULL) { - int n; - for (n = 0; n < 600; n++) { - fprintf(histFile, "%6d ", hist[n]); - } - fprintf(histFile, "\n"); - fclose(histFile); - } - if (averageFile != NULL) { - if (packetCntr > 0) { - fprintf(averageFile, "%8.3f ", - tmpSumStreamLen * 8.0 / (30.0 * packetCntr)); - } - fprintf(averageFile, "\n"); - fclose(averageFile); - } - - fclose(inp); - fclose(outp); - - WebRtcIsac_Free(ISAC_main_inst); - - exit(0); -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/util/utility.c b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/util/utility.c deleted file mode 100644 index 56547b11c2d3..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/util/utility.c +++ /dev/null @@ -1,179 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include -#include -#include -#include - -#include "modules/audio_coding/codecs/isac/main/util/utility.h" - -/* function for reading audio data from PCM file */ -int -readframe( - short* data, - FILE* inp, - int length) -{ - short k, rlen, status = 0; - unsigned char* ptrUChar; - ptrUChar = (unsigned char*)data; - - rlen = (short)fread(data, sizeof(short), length, inp); - if (rlen < length) { - for (k = rlen; k < length; k++) - data[k] = 0; - status = 1; - } - - // Assuming that our PCM files are written in Intel machines - for(k = 0; k < length; k++) - { - data[k] = (short)ptrUChar[k<<1] | ((((short)ptrUChar[(k<<1) + 1]) << 8) & 0xFF00); - } - - return status; -} - -short -readSwitch( - int argc, - char* argv[], - char* strID) -{ - short n; - for(n = 0; n < argc; n++) - { - if(strcmp(argv[n], strID) == 0) - { - return 1; - } - } - return 0; -} - -double -readParamDouble( - int argc, - char* argv[], - char* strID, - double defaultVal) -{ - double returnVal = defaultVal; - short n; - for(n = 0; n < argc; n++) - { - if(strcmp(argv[n], strID) == 0) - { - n++; - if(n < argc) - { - returnVal = atof(argv[n]); - } - break; - } - } - return returnVal; -} - -int -readParamInt( - int argc, - char* argv[], - char* strID, - int defaultVal) -{ - int returnVal = defaultVal; - short n; - for(n = 0; n < argc; n++) - { - if(strcmp(argv[n], strID) == 0) - { - n++; - if(n < argc) - { - returnVal = atoi(argv[n]); - } - break; - } - } - return returnVal; -} - -int -readParamString( - int argc, - char* argv[], - char* strID, - char* stringParam, - int maxSize) -{ - int paramLenght = 0; - short n; - for(n = 0; n < argc; n++) - { - if(strcmp(argv[n], strID) == 0) - { - n++; - if(n < argc) - { - strncpy(stringParam, argv[n], maxSize); - paramLenght = (int)strlen(argv[n]); - } - break; - } - } - return paramLenght; -} - -void -get_arrival_time( - int current_framesamples, /* samples */ - size_t packet_size, /* bytes */ - int bottleneck, /* excluding headers; bits/s */ - BottleNeckModel* BN_data, - short senderSampFreqHz, - short receiverSampFreqHz) -{ - unsigned int travelTimeMs; - const int headerSizeByte = 35; - - int headerRate; - - BN_data->whenPackGeneratedMs += (current_framesamples / 
(senderSampFreqHz / 1000)); - - headerRate = headerSizeByte * 8 * senderSampFreqHz / current_framesamples; /* bits/s */ - - /* everything in samples */ - BN_data->sample_count = BN_data->sample_count + current_framesamples; - - //travelTimeMs = ((packet_size + HeaderSize) * 8 * sampFreqHz) / - // (bottleneck + HeaderRate) - travelTimeMs = (unsigned int)floor((double)((packet_size + headerSizeByte) * 8 * 1000) - / (double)(bottleneck + headerRate) + 0.5); - - if(BN_data->whenPrevPackLeftMs > BN_data->whenPackGeneratedMs) - { - BN_data->whenPrevPackLeftMs += travelTimeMs; - } - else - { - BN_data->whenPrevPackLeftMs = BN_data->whenPackGeneratedMs + - travelTimeMs; - } - - BN_data->arrival_time = (BN_data->whenPrevPackLeftMs * - (receiverSampFreqHz / 1000)); - -// if (BN_data->arrival_time < BN_data->sample_count) -// BN_data->arrival_time = BN_data->sample_count; - - BN_data->rtp_number++; -} diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/util/utility.h b/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/util/utility.h deleted file mode 100644 index 1acc54251bd7..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/util/utility.h +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_UTIL_UTILITY_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_UTIL_UTILITY_H_ - -#include -#include - -#if defined(__cplusplus) -extern "C" { -#endif - -#define OPEN_FILE_WB(filePtr, fullPath) \ - do { \ - if (fullPath != NULL) { \ - filePtr = fopen(fullPath, "wb"); \ - if (filePtr == NULL) { \ - printf("could not open %s to write to.", fullPath); \ - return -1; \ - } \ - } else { \ - filePtr = NULL; \ - } \ - } while (0) - -#define OPEN_FILE_AB(filePtr, fullPath) \ - do { \ - if (fullPath != NULL) { \ - filePtr = fopen(fullPath, "ab"); \ - if (filePtr == NULL) { \ - printf("could not open %s to write to.", fullPath); \ - return -1; \ - } \ - } else { \ - filePtr = NULL; \ - } \ - } while (0) - -#define OPEN_FILE_RB(filePtr, fullPath) \ - do { \ - if (fullPath != NULL) { \ - filePtr = fopen(fullPath, "rb"); \ - if (filePtr == NULL) { \ - printf("could not open %s to read from.", fullPath); \ - return -1; \ - } \ - } else { \ - filePtr = NULL; \ - } \ - } while (0) - -#define WRITE_FILE_D(bufferPtr, len, filePtr) \ - do { \ - if (filePtr != NULL) { \ - double dummy[1000]; \ - int cntr; \ - for (cntr = 0; cntr < (len); cntr++) { \ - dummy[cntr] = (double)bufferPtr[cntr]; \ - } \ - fwrite(dummy, sizeof(double), len, filePtr); \ - fflush(filePtr); \ - } \ - } while (0) - -typedef struct { - unsigned int whenPackGeneratedMs; - unsigned int whenPrevPackLeftMs; - unsigned int sendTimeMs; /* milisecond */ - unsigned int arrival_time; /* samples */ - unsigned int sample_count; /* samples, also used as "send time stamp" */ - unsigned int rtp_number; -} BottleNeckModel; - -void get_arrival_time(int current_framesamples, /* samples */ - size_t packet_size, /* bytes */ - int bottleneck, /* excluding headers; bits/s */ - BottleNeckModel* BN_data, - short senderSampFreqHz, - short receiverSampFreqHz); - -/* function for reading 
audio data from PCM file */ -int readframe(short* data, FILE* inp, int length); - -short readSwitch(int argc, char* argv[], char* strID); - -double readParamDouble(int argc, char* argv[], char* strID, double defaultVal); - -int readParamInt(int argc, char* argv[], char* strID, int defaultVal); - -int readParamString(int argc, - char* argv[], - char* strID, - char* stringParam, - int maxSize); - -#if defined(__cplusplus) -} -#endif - -#endif diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.cc b/third_party/libwebrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.cc index c9b1aefe69da..17e0e33b1d8d 100644 --- a/third_party/libwebrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.cc +++ b/third_party/libwebrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.cc @@ -362,8 +362,6 @@ AudioEncoderOpusImpl::AudioEncoderOpusImpl( const AudioNetworkAdaptorCreator& audio_network_adaptor_creator, std::unique_ptr bitrate_smoother) : payload_type_(payload_type), - send_side_bwe_with_overhead_( - !webrtc::field_trial::IsDisabled("WebRTC-SendSideBwe-WithOverhead")), use_stable_target_for_adaptation_(!webrtc::field_trial::IsDisabled( "WebRTC-Audio-StableTargetAdaptation")), adjust_bandwidth_( @@ -521,7 +519,7 @@ void AudioEncoderOpusImpl::OnReceivedUplinkBandwidth( } ApplyAudioNetworkAdaptor(); - } else if (send_side_bwe_with_overhead_) { + } else { if (!overhead_bytes_per_packet_) { RTC_LOG(LS_INFO) << "AudioEncoderOpusImpl: Overhead unknown, target audio bitrate " @@ -534,8 +532,6 @@ void AudioEncoderOpusImpl::OnReceivedUplinkBandwidth( std::min(AudioEncoderOpusConfig::kMaxBitrateBps, std::max(AudioEncoderOpusConfig::kMinBitrateBps, target_audio_bitrate_bps - overhead_bps))); - } else { - SetTargetBitrate(target_audio_bitrate_bps); } } void AudioEncoderOpusImpl::OnReceivedUplinkBandwidth( @@ -813,9 +809,10 @@ ANAStats AudioEncoderOpusImpl::GetANAStats() const { absl::optional > AudioEncoderOpusImpl::GetFrameLengthRange() const { - if (config_.supported_frame_lengths_ms.empty()) { - return absl::nullopt; - } else if (audio_network_adaptor_) { + if (audio_network_adaptor_) { + if (config_.supported_frame_lengths_ms.empty()) { + return absl::nullopt; + } return {{TimeDelta::Millis(config_.supported_frame_lengths_ms.front()), TimeDelta::Millis(config_.supported_frame_lengths_ms.back())}}; } else { diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.h b/third_party/libwebrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.h index a0c42af121b9..8c5c2350162c 100644 --- a/third_party/libwebrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.h +++ b/third_party/libwebrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.h @@ -154,7 +154,6 @@ class AudioEncoderOpusImpl final : public AudioEncoder { AudioEncoderOpusConfig config_; const int payload_type_; - const bool send_side_bwe_with_overhead_; const bool use_stable_target_for_adaptation_; const bool adjust_bandwidth_; bool bitrate_changed_; diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/opus/audio_encoder_opus_unittest.cc b/third_party/libwebrtc/modules/audio_coding/codecs/opus/audio_encoder_opus_unittest.cc index 43e8a7a80f3e..a2ebe43bbe72 100644 --- a/third_party/libwebrtc/modules/audio_coding/codecs/opus/audio_encoder_opus_unittest.cc +++ b/third_party/libwebrtc/modules/audio_coding/codecs/opus/audio_encoder_opus_unittest.cc @@ -373,9 +373,6 @@ TEST_P(AudioEncoderOpusTest, PacketLossRateUpperBounded) { } TEST_P(AudioEncoderOpusTest, 
DoNotInvokeSetTargetBitrateIfOverheadUnknown) { - test::ScopedFieldTrials override_field_trials( - "WebRTC-SendSideBwe-WithOverhead/Enabled/"); - auto states = CreateCodec(sample_rate_hz_, 2); states->encoder->OnReceivedUplinkBandwidth(kDefaultOpusRate * 2, @@ -670,6 +667,17 @@ TEST(AudioEncoderOpusTest, TestConfigFromInvalidParams) { config.supported_frame_lengths_ms); } +TEST(AudioEncoderOpusTest, GetFrameLenghtRange) { + AudioEncoderOpusConfig config = + CreateConfigWithParameters({{"maxptime", "10"}, {"ptime", "10"}}); + std::unique_ptr encoder = + AudioEncoderOpus::MakeAudioEncoder(config, kDefaultOpusPayloadType); + auto ptime = webrtc::TimeDelta::Millis(10); + absl::optional> range = { + {ptime, ptime}}; + EXPECT_EQ(encoder->GetFrameLengthRange(), range); +} + // Test that bitrate will be overridden by the "maxaveragebitrate" parameter. // Also test that the "maxaveragebitrate" can't be set to values outside the // range of 6000 and 510000 diff --git a/third_party/libwebrtc/modules/audio_coding/isac_c_gn/moz.build b/third_party/libwebrtc/modules/audio_coding/isac_c_gn/moz.build deleted file mode 100644 index a62775c1f2d7..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/isac_c_gn/moz.build +++ /dev/null @@ -1,223 +0,0 @@ -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - - - ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ### - ### DO NOT edit it by hand. ### - -COMPILE_FLAGS["OS_INCLUDES"] = [] -AllowCompilerWarnings() - -DEFINES["ABSL_ALLOCATOR_NOTHROW"] = "1" -DEFINES["RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY"] = True -DEFINES["RTC_ENABLE_VP9"] = True -DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0" -DEFINES["WEBRTC_LIBRARY_IMPL"] = True -DEFINES["WEBRTC_MOZILLA_BUILD"] = True -DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0" -DEFINES["WEBRTC_STRICT_FIELD_TRIALS"] = "0" - -FINAL_LIBRARY = "webrtc" - - -LOCAL_INCLUDES += [ - "!/ipc/ipdl/_ipdlheaders", - "!/third_party/libwebrtc/gen", - "/ipc/chromium/src", - "/third_party/libwebrtc/", - "/third_party/libwebrtc/third_party/abseil-cpp/", - "/tools/profiler/public" -] - -UNIFIED_SOURCES += [ - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/arith_routines.c", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/arith_routines_hist.c", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/arith_routines_logist.c", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.c", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/crc.c", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/decode.c", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/decode_bwe.c", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/encode.c", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/encode_lpc_swb.c", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/entropy_coding.c", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/filterbanks.c", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/intialize.c", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/isac.c", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/lattice.c", - 
"/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/lpc_analysis.c", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/lpc_gain_swb_tables.c", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb12_tables.c", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb16_tables.c", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/lpc_tables.c", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/pitch_gain_tables.c", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/pitch_lag_tables.c", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.c", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/transform.c" -] - -if not CONFIG["MOZ_DEBUG"]: - - DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0" - DEFINES["NDEBUG"] = True - DEFINES["NVALGRIND"] = True - -if CONFIG["MOZ_DEBUG"] == "1": - - DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1" - -if CONFIG["OS_TARGET"] == "Android": - - DEFINES["ANDROID"] = True - DEFINES["ANDROID_NDK_VERSION_ROLL"] = "r22_1" - DEFINES["HAVE_SYS_UIO_H"] = True - DEFINES["WEBRTC_ANDROID"] = True - DEFINES["WEBRTC_ANDROID_OPENSLES"] = True - DEFINES["WEBRTC_ENABLE_AVX2"] = True - DEFINES["WEBRTC_LINUX"] = True - DEFINES["WEBRTC_POSIX"] = True - DEFINES["_GNU_SOURCE"] = True - DEFINES["__STDC_CONSTANT_MACROS"] = True - DEFINES["__STDC_FORMAT_MACROS"] = True - - OS_LIBS += [ - "log" - ] - -if CONFIG["OS_TARGET"] == "Darwin": - - DEFINES["WEBRTC_ENABLE_AVX2"] = True - DEFINES["WEBRTC_MAC"] = True - DEFINES["WEBRTC_POSIX"] = True - DEFINES["_LIBCPP_HAS_NO_ALIGNED_ALLOCATION"] = True - DEFINES["__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES"] = "0" - DEFINES["__STDC_CONSTANT_MACROS"] = True - DEFINES["__STDC_FORMAT_MACROS"] = True - -if CONFIG["OS_TARGET"] == "Linux": - - DEFINES["USE_AURA"] = "1" - DEFINES["USE_GLIB"] = "1" - DEFINES["USE_NSS_CERTS"] = "1" - DEFINES["USE_OZONE"] = "1" - DEFINES["USE_UDEV"] = True - DEFINES["WEBRTC_LINUX"] = True - DEFINES["WEBRTC_POSIX"] = True - DEFINES["_FILE_OFFSET_BITS"] = "64" - DEFINES["_LARGEFILE64_SOURCE"] = True - DEFINES["_LARGEFILE_SOURCE"] = True - DEFINES["__STDC_CONSTANT_MACROS"] = True - DEFINES["__STDC_FORMAT_MACROS"] = True - - OS_LIBS += [ - "m", - "rt" - ] - -if CONFIG["OS_TARGET"] == "OpenBSD": - - DEFINES["USE_GLIB"] = "1" - DEFINES["USE_OZONE"] = "1" - DEFINES["USE_X11"] = "1" - DEFINES["WEBRTC_BSD"] = True - DEFINES["WEBRTC_ENABLE_AVX2"] = True - DEFINES["WEBRTC_POSIX"] = True - DEFINES["_FILE_OFFSET_BITS"] = "64" - DEFINES["_LARGEFILE64_SOURCE"] = True - DEFINES["_LARGEFILE_SOURCE"] = True - DEFINES["__STDC_CONSTANT_MACROS"] = True - DEFINES["__STDC_FORMAT_MACROS"] = True - -if CONFIG["OS_TARGET"] == "WINNT": - - DEFINES["CERT_CHAIN_PARA_HAS_EXTRA_FIELDS"] = True - DEFINES["NOMINMAX"] = True - DEFINES["NTDDI_VERSION"] = "0x0A000000" - DEFINES["PSAPI_VERSION"] = "2" - DEFINES["UNICODE"] = True - DEFINES["USE_AURA"] = "1" - DEFINES["WEBRTC_ENABLE_AVX2"] = True - DEFINES["WEBRTC_WIN"] = True - DEFINES["WIN32"] = True - DEFINES["WIN32_LEAN_AND_MEAN"] = True - DEFINES["WINAPI_FAMILY"] = "WINAPI_FAMILY_DESKTOP_APP" - DEFINES["WINVER"] = "0x0A00" - DEFINES["_ATL_NO_OPENGL"] = True - DEFINES["_CRT_RAND_S"] = True - DEFINES["_CRT_SECURE_NO_DEPRECATE"] = True - DEFINES["_ENABLE_EXTENDED_ALIGNED_STORAGE"] = True - DEFINES["_HAS_EXCEPTIONS"] = "0" - DEFINES["_HAS_NODISCARD"] = True - 
DEFINES["_SCL_SECURE_NO_DEPRECATE"] = True - DEFINES["_SECURE_ATL"] = True - DEFINES["_UNICODE"] = True - DEFINES["_WIN32_WINNT"] = "0x0A00" - DEFINES["_WINDOWS"] = True - DEFINES["__STD_C"] = True - - OS_LIBS += [ - "crypt32", - "iphlpapi", - "secur32", - "winmm" - ] - -if CONFIG["CPU_ARCH"] == "aarch64": - - DEFINES["WEBRTC_ARCH_ARM64"] = True - DEFINES["WEBRTC_HAS_NEON"] = True - -if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Android": - - DEFINES["_DEBUG"] = True - -if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Darwin": - - DEFINES["_DEBUG"] = True - -if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Linux": - - DEFINES["_DEBUG"] = True - -if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "OpenBSD": - - DEFINES["_DEBUG"] = True - -if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "WINNT": - - DEFINES["_HAS_ITERATOR_DEBUGGING"] = "0" - -if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux": - - DEFINES["USE_X11"] = "1" - -if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Android": - - CFLAGS += [ - "-msse2" - ] - - OS_LIBS += [ - "android_support" - ] - -if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Linux": - - DEFINES["WEBRTC_ENABLE_AVX2"] = True - DEFINES["_GNU_SOURCE"] = True - -if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux": - - CFLAGS += [ - "-msse2" - ] - - DEFINES["WEBRTC_ENABLE_AVX2"] = True - DEFINES["_GNU_SOURCE"] = True - -if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Linux": - - DEFINES["WEBRTC_ENABLE_AVX2"] = True - DEFINES["_GNU_SOURCE"] = True - -Library("isac_c_gn") diff --git a/third_party/libwebrtc/modules/audio_coding/isac_fix_c_arm_asm_gn/moz.build b/third_party/libwebrtc/modules/audio_coding/isac_fix_c_arm_asm_gn/moz.build deleted file mode 100644 index 13b2b1854d4d..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/isac_fix_c_arm_asm_gn/moz.build +++ /dev/null @@ -1,91 +0,0 @@ -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - - - ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ### - ### DO NOT edit it by hand. 
### - -COMPILE_FLAGS["OS_INCLUDES"] = [] -AllowCompilerWarnings() - -DEFINES["ABSL_ALLOCATOR_NOTHROW"] = "1" -DEFINES["RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY"] = True -DEFINES["RTC_ENABLE_VP9"] = True -DEFINES["WEBRTC_ARCH_ARM"] = True -DEFINES["WEBRTC_ARCH_ARM_V7"] = True -DEFINES["WEBRTC_ENABLE_AVX2"] = True -DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0" -DEFINES["WEBRTC_HAS_NEON"] = True -DEFINES["WEBRTC_LIBRARY_IMPL"] = True -DEFINES["WEBRTC_LINUX"] = True -DEFINES["WEBRTC_MOZILLA_BUILD"] = True -DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0" -DEFINES["WEBRTC_POSIX"] = True -DEFINES["WEBRTC_STRICT_FIELD_TRIALS"] = "0" -DEFINES["_GNU_SOURCE"] = True -DEFINES["__STDC_CONSTANT_MACROS"] = True -DEFINES["__STDC_FORMAT_MACROS"] = True - -FINAL_LIBRARY = "webrtc" - - -LOCAL_INCLUDES += [ - "!/ipc/ipdl/_ipdlheaders", - "!/third_party/libwebrtc/gen", - "/ipc/chromium/src", - "/third_party/libwebrtc/", - "/third_party/libwebrtc/third_party/abseil-cpp/", - "/tools/profiler/public" -] - -SOURCES += [ - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/lattice_armv7.S", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/pitch_filter_armv6.S" -] - -if not CONFIG["MOZ_DEBUG"]: - - DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0" - DEFINES["NDEBUG"] = True - DEFINES["NVALGRIND"] = True - -if CONFIG["MOZ_DEBUG"] == "1": - - DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1" - DEFINES["_DEBUG"] = True - -if CONFIG["OS_TARGET"] == "Android": - - DEFINES["ANDROID"] = True - DEFINES["ANDROID_NDK_VERSION_ROLL"] = "r22_1" - DEFINES["HAVE_SYS_UIO_H"] = True - DEFINES["WEBRTC_ANDROID"] = True - DEFINES["WEBRTC_ANDROID_OPENSLES"] = True - - OS_LIBS += [ - "android_support", - "log", - "unwind" - ] - -if CONFIG["OS_TARGET"] == "Linux": - - DEFINES["USE_AURA"] = "1" - DEFINES["USE_GLIB"] = "1" - DEFINES["USE_NSS_CERTS"] = "1" - DEFINES["USE_OZONE"] = "1" - DEFINES["USE_UDEV"] = True - DEFINES["_FILE_OFFSET_BITS"] = "64" - DEFINES["_LARGEFILE64_SOURCE"] = True - DEFINES["_LARGEFILE_SOURCE"] = True - - OS_LIBS += [ - "rt" - ] - -if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux": - - DEFINES["USE_X11"] = "1" - -Library("isac_fix_c_arm_asm_gn") diff --git a/third_party/libwebrtc/modules/audio_coding/isac_fix_c_gn/moz.build b/third_party/libwebrtc/modules/audio_coding/isac_fix_c_gn/moz.build deleted file mode 100644 index 39291b734c4b..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/isac_fix_c_gn/moz.build +++ /dev/null @@ -1,120 +0,0 @@ -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - - - ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ### - ### DO NOT edit it by hand. 
### - -CFLAGS += [ - "-mfpu=neon" -] - -COMPILE_FLAGS["OS_INCLUDES"] = [] -AllowCompilerWarnings() - -DEFINES["ABSL_ALLOCATOR_NOTHROW"] = "1" -DEFINES["RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY"] = True -DEFINES["RTC_ENABLE_VP9"] = True -DEFINES["WEBRTC_ARCH_ARM"] = True -DEFINES["WEBRTC_ARCH_ARM_V7"] = True -DEFINES["WEBRTC_ENABLE_AVX2"] = True -DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0" -DEFINES["WEBRTC_HAS_NEON"] = True -DEFINES["WEBRTC_LIBRARY_IMPL"] = True -DEFINES["WEBRTC_LINUX"] = True -DEFINES["WEBRTC_MOZILLA_BUILD"] = True -DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0" -DEFINES["WEBRTC_POSIX"] = True -DEFINES["WEBRTC_STRICT_FIELD_TRIALS"] = "0" -DEFINES["_GNU_SOURCE"] = True -DEFINES["__STDC_CONSTANT_MACROS"] = True -DEFINES["__STDC_FORMAT_MACROS"] = True - -FINAL_LIBRARY = "webrtc" - - -LOCAL_INCLUDES += [ - "!/ipc/ipdl/_ipdlheaders", - "!/third_party/libwebrtc/gen", - "/ipc/chromium/src", - "/third_party/libwebrtc/", - "/third_party/libwebrtc/third_party/abseil-cpp/", - "/tools/profiler/public" -] - -SOURCES += [ - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/decode_plc.c", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/lpc_masking_model.c", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/pitch_filter.c" -] - -UNIFIED_SOURCES += [ - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/arith_routines.c", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/arith_routines_hist.c", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/arith_routines_logist.c", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/bandwidth_estimator.c", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/decode.c", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/decode_bwe.c", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/encode.c", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/entropy_coding.c", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/filterbank_tables.c", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/filterbanks.c", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/filters.c", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/initialize.c", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.c", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/lattice.c", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/lpc_tables.c", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/pitch_estimator.c", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/pitch_estimator_c.c", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/pitch_gain_tables.c", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/pitch_lag_tables.c", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/spectrum_ar_model_tables.c", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/transform.c" -] - -if not CONFIG["MOZ_DEBUG"]: - - DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0" - DEFINES["NDEBUG"] = True - DEFINES["NVALGRIND"] = True - -if CONFIG["MOZ_DEBUG"] == "1": - - DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1" - DEFINES["_DEBUG"] = True - -if CONFIG["OS_TARGET"] == "Android": - - DEFINES["ANDROID"] = True - DEFINES["ANDROID_NDK_VERSION_ROLL"] = "r22_1" - 
DEFINES["HAVE_SYS_UIO_H"] = True - DEFINES["WEBRTC_ANDROID"] = True - DEFINES["WEBRTC_ANDROID_OPENSLES"] = True - - OS_LIBS += [ - "android_support", - "log", - "unwind" - ] - -if CONFIG["OS_TARGET"] == "Linux": - - DEFINES["USE_AURA"] = "1" - DEFINES["USE_GLIB"] = "1" - DEFINES["USE_NSS_CERTS"] = "1" - DEFINES["USE_OZONE"] = "1" - DEFINES["USE_UDEV"] = True - DEFINES["_FILE_OFFSET_BITS"] = "64" - DEFINES["_LARGEFILE64_SOURCE"] = True - DEFINES["_LARGEFILE_SOURCE"] = True - - OS_LIBS += [ - "rt" - ] - -if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux": - - DEFINES["USE_X11"] = "1" - -Library("isac_fix_c_gn") diff --git a/third_party/libwebrtc/modules/audio_coding/isac_fix_common_gn/moz.build b/third_party/libwebrtc/modules/audio_coding/isac_fix_common_gn/moz.build deleted file mode 100644 index 5cd43a047b48..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/isac_fix_common_gn/moz.build +++ /dev/null @@ -1,95 +0,0 @@ -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - - - ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ### - ### DO NOT edit it by hand. ### - -CFLAGS += [ - "-mfpu=neon" -] - -COMPILE_FLAGS["OS_INCLUDES"] = [] -AllowCompilerWarnings() - -DEFINES["ABSL_ALLOCATOR_NOTHROW"] = "1" -DEFINES["RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY"] = True -DEFINES["RTC_ENABLE_VP9"] = True -DEFINES["WEBRTC_ARCH_ARM"] = True -DEFINES["WEBRTC_ARCH_ARM_V7"] = True -DEFINES["WEBRTC_ENABLE_AVX2"] = True -DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0" -DEFINES["WEBRTC_HAS_NEON"] = True -DEFINES["WEBRTC_LIBRARY_IMPL"] = True -DEFINES["WEBRTC_LINUX"] = True -DEFINES["WEBRTC_MOZILLA_BUILD"] = True -DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0" -DEFINES["WEBRTC_POSIX"] = True -DEFINES["WEBRTC_STRICT_FIELD_TRIALS"] = "0" -DEFINES["_GNU_SOURCE"] = True -DEFINES["__STDC_CONSTANT_MACROS"] = True -DEFINES["__STDC_FORMAT_MACROS"] = True - -FINAL_LIBRARY = "webrtc" - - -LOCAL_INCLUDES += [ - "!/ipc/ipdl/_ipdlheaders", - "!/third_party/libwebrtc/gen", - "/ipc/chromium/src", - "/third_party/libwebrtc/", - "/third_party/libwebrtc/third_party/abseil-cpp/", - "/tools/profiler/public" -] - -UNIFIED_SOURCES += [ - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/fft.c", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/transform_tables.c" -] - -if not CONFIG["MOZ_DEBUG"]: - - DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0" - DEFINES["NDEBUG"] = True - DEFINES["NVALGRIND"] = True - -if CONFIG["MOZ_DEBUG"] == "1": - - DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1" - DEFINES["_DEBUG"] = True - -if CONFIG["OS_TARGET"] == "Android": - - DEFINES["ANDROID"] = True - DEFINES["ANDROID_NDK_VERSION_ROLL"] = "r22_1" - DEFINES["HAVE_SYS_UIO_H"] = True - DEFINES["WEBRTC_ANDROID"] = True - DEFINES["WEBRTC_ANDROID_OPENSLES"] = True - - OS_LIBS += [ - "android_support", - "log", - "unwind" - ] - -if CONFIG["OS_TARGET"] == "Linux": - - DEFINES["USE_AURA"] = "1" - DEFINES["USE_GLIB"] = "1" - DEFINES["USE_NSS_CERTS"] = "1" - DEFINES["USE_OZONE"] = "1" - DEFINES["USE_UDEV"] = True - DEFINES["_FILE_OFFSET_BITS"] = "64" - DEFINES["_LARGEFILE64_SOURCE"] = True - DEFINES["_LARGEFILE_SOURCE"] = True - - OS_LIBS += [ - "rt" - ] - -if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux": - - DEFINES["USE_X11"] = "1" - -Library("isac_fix_common_gn") diff --git 
a/third_party/libwebrtc/modules/audio_coding/isac_gn/moz.build b/third_party/libwebrtc/modules/audio_coding/isac_gn/moz.build deleted file mode 100644 index d84fcbfe0513..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/isac_gn/moz.build +++ /dev/null @@ -1,202 +0,0 @@ -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - - - ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ### - ### DO NOT edit it by hand. ### - -COMPILE_FLAGS["OS_INCLUDES"] = [] -AllowCompilerWarnings() - -DEFINES["ABSL_ALLOCATOR_NOTHROW"] = "1" -DEFINES["RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY"] = True -DEFINES["RTC_ENABLE_VP9"] = True -DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0" -DEFINES["WEBRTC_LIBRARY_IMPL"] = True -DEFINES["WEBRTC_MOZILLA_BUILD"] = True -DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0" -DEFINES["WEBRTC_STRICT_FIELD_TRIALS"] = "0" - -FINAL_LIBRARY = "webrtc" - - -LOCAL_INCLUDES += [ - "!/ipc/ipdl/_ipdlheaders", - "!/third_party/libwebrtc/gen", - "/ipc/chromium/src", - "/third_party/libwebrtc/", - "/third_party/libwebrtc/third_party/abseil-cpp/", - "/tools/profiler/public" -] - -UNIFIED_SOURCES += [ - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/audio_decoder_isac.cc", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/main/source/audio_encoder_isac.cc" -] - -if not CONFIG["MOZ_DEBUG"]: - - DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0" - DEFINES["NDEBUG"] = True - DEFINES["NVALGRIND"] = True - -if CONFIG["MOZ_DEBUG"] == "1": - - DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1" - -if CONFIG["OS_TARGET"] == "Android": - - DEFINES["ANDROID"] = True - DEFINES["ANDROID_NDK_VERSION_ROLL"] = "r22_1" - DEFINES["HAVE_SYS_UIO_H"] = True - DEFINES["WEBRTC_ANDROID"] = True - DEFINES["WEBRTC_ANDROID_OPENSLES"] = True - DEFINES["WEBRTC_ENABLE_AVX2"] = True - DEFINES["WEBRTC_LINUX"] = True - DEFINES["WEBRTC_POSIX"] = True - DEFINES["_GNU_SOURCE"] = True - DEFINES["__STDC_CONSTANT_MACROS"] = True - DEFINES["__STDC_FORMAT_MACROS"] = True - - OS_LIBS += [ - "log" - ] - -if CONFIG["OS_TARGET"] == "Darwin": - - DEFINES["WEBRTC_ENABLE_AVX2"] = True - DEFINES["WEBRTC_MAC"] = True - DEFINES["WEBRTC_POSIX"] = True - DEFINES["_LIBCPP_HAS_NO_ALIGNED_ALLOCATION"] = True - DEFINES["__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES"] = "0" - DEFINES["__STDC_CONSTANT_MACROS"] = True - DEFINES["__STDC_FORMAT_MACROS"] = True - -if CONFIG["OS_TARGET"] == "Linux": - - DEFINES["USE_AURA"] = "1" - DEFINES["USE_GLIB"] = "1" - DEFINES["USE_NSS_CERTS"] = "1" - DEFINES["USE_OZONE"] = "1" - DEFINES["USE_UDEV"] = True - DEFINES["WEBRTC_LINUX"] = True - DEFINES["WEBRTC_POSIX"] = True - DEFINES["_FILE_OFFSET_BITS"] = "64" - DEFINES["_LARGEFILE64_SOURCE"] = True - DEFINES["_LARGEFILE_SOURCE"] = True - DEFINES["__STDC_CONSTANT_MACROS"] = True - DEFINES["__STDC_FORMAT_MACROS"] = True - - OS_LIBS += [ - "m", - "rt" - ] - -if CONFIG["OS_TARGET"] == "OpenBSD": - - DEFINES["USE_GLIB"] = "1" - DEFINES["USE_OZONE"] = "1" - DEFINES["USE_X11"] = "1" - DEFINES["WEBRTC_BSD"] = True - DEFINES["WEBRTC_ENABLE_AVX2"] = True - DEFINES["WEBRTC_POSIX"] = True - DEFINES["_FILE_OFFSET_BITS"] = "64" - DEFINES["_LARGEFILE64_SOURCE"] = True - DEFINES["_LARGEFILE_SOURCE"] = True - DEFINES["__STDC_CONSTANT_MACROS"] = True - DEFINES["__STDC_FORMAT_MACROS"] = True - -if CONFIG["OS_TARGET"] == "WINNT": - - DEFINES["CERT_CHAIN_PARA_HAS_EXTRA_FIELDS"] = True - 
DEFINES["NOMINMAX"] = True - DEFINES["NTDDI_VERSION"] = "0x0A000000" - DEFINES["PSAPI_VERSION"] = "2" - DEFINES["UNICODE"] = True - DEFINES["USE_AURA"] = "1" - DEFINES["WEBRTC_ENABLE_AVX2"] = True - DEFINES["WEBRTC_WIN"] = True - DEFINES["WIN32"] = True - DEFINES["WIN32_LEAN_AND_MEAN"] = True - DEFINES["WINAPI_FAMILY"] = "WINAPI_FAMILY_DESKTOP_APP" - DEFINES["WINVER"] = "0x0A00" - DEFINES["_ATL_NO_OPENGL"] = True - DEFINES["_CRT_RAND_S"] = True - DEFINES["_CRT_SECURE_NO_DEPRECATE"] = True - DEFINES["_ENABLE_EXTENDED_ALIGNED_STORAGE"] = True - DEFINES["_HAS_EXCEPTIONS"] = "0" - DEFINES["_HAS_NODISCARD"] = True - DEFINES["_SCL_SECURE_NO_DEPRECATE"] = True - DEFINES["_SECURE_ATL"] = True - DEFINES["_UNICODE"] = True - DEFINES["_WIN32_WINNT"] = "0x0A00" - DEFINES["_WINDOWS"] = True - DEFINES["__STD_C"] = True - - OS_LIBS += [ - "crypt32", - "iphlpapi", - "secur32", - "winmm" - ] - -if CONFIG["CPU_ARCH"] == "aarch64": - - DEFINES["WEBRTC_ARCH_ARM64"] = True - DEFINES["WEBRTC_HAS_NEON"] = True - -if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Android": - - DEFINES["_DEBUG"] = True - -if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Darwin": - - DEFINES["_DEBUG"] = True - -if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Linux": - - DEFINES["_DEBUG"] = True - -if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "OpenBSD": - - DEFINES["_DEBUG"] = True - -if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "WINNT": - - DEFINES["_HAS_ITERATOR_DEBUGGING"] = "0" - -if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux": - - DEFINES["USE_X11"] = "1" - -if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Android": - - CXXFLAGS += [ - "-msse2" - ] - - OS_LIBS += [ - "android_support" - ] - -if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Linux": - - DEFINES["WEBRTC_ENABLE_AVX2"] = True - DEFINES["_GNU_SOURCE"] = True - -if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux": - - CXXFLAGS += [ - "-msse2" - ] - - DEFINES["WEBRTC_ENABLE_AVX2"] = True - DEFINES["_GNU_SOURCE"] = True - -if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Linux": - - DEFINES["WEBRTC_ENABLE_AVX2"] = True - DEFINES["_GNU_SOURCE"] = True - -Library("isac_gn") diff --git a/third_party/libwebrtc/modules/audio_coding/isac_neon_gn/moz.build b/third_party/libwebrtc/modules/audio_coding/isac_neon_gn/moz.build deleted file mode 100644 index 72f8d79eb6db..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/isac_neon_gn/moz.build +++ /dev/null @@ -1,98 +0,0 @@ -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - - - ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ### - ### DO NOT edit it by hand. 
### - -CFLAGS += [ - "-mfpu=neon" -] - -COMPILE_FLAGS["OS_INCLUDES"] = [] -AllowCompilerWarnings() - -DEFINES["ABSL_ALLOCATOR_NOTHROW"] = "1" -DEFINES["RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY"] = True -DEFINES["RTC_ENABLE_VP9"] = True -DEFINES["WEBRTC_ARCH_ARM"] = True -DEFINES["WEBRTC_ARCH_ARM_V7"] = True -DEFINES["WEBRTC_ENABLE_AVX2"] = True -DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0" -DEFINES["WEBRTC_HAS_NEON"] = True -DEFINES["WEBRTC_LIBRARY_IMPL"] = True -DEFINES["WEBRTC_LINUX"] = True -DEFINES["WEBRTC_MOZILLA_BUILD"] = True -DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0" -DEFINES["WEBRTC_POSIX"] = True -DEFINES["WEBRTC_STRICT_FIELD_TRIALS"] = "0" -DEFINES["_GNU_SOURCE"] = True -DEFINES["__STDC_CONSTANT_MACROS"] = True -DEFINES["__STDC_FORMAT_MACROS"] = True - -FINAL_LIBRARY = "webrtc" - - -LOCAL_INCLUDES += [ - "!/ipc/ipdl/_ipdlheaders", - "!/third_party/libwebrtc/gen", - "/ipc/chromium/src", - "/third_party/libwebrtc/", - "/third_party/libwebrtc/third_party/abseil-cpp/", - "/tools/profiler/public" -] - -UNIFIED_SOURCES += [ - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/entropy_coding_neon.c", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/filterbanks_neon.c", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/filters_neon.c", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/lattice_neon.c", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/transform_neon.c" -] - -if not CONFIG["MOZ_DEBUG"]: - - DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0" - DEFINES["NDEBUG"] = True - DEFINES["NVALGRIND"] = True - -if CONFIG["MOZ_DEBUG"] == "1": - - DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1" - DEFINES["_DEBUG"] = True - -if CONFIG["OS_TARGET"] == "Android": - - DEFINES["ANDROID"] = True - DEFINES["ANDROID_NDK_VERSION_ROLL"] = "r22_1" - DEFINES["HAVE_SYS_UIO_H"] = True - DEFINES["WEBRTC_ANDROID"] = True - DEFINES["WEBRTC_ANDROID_OPENSLES"] = True - - OS_LIBS += [ - "android_support", - "log", - "unwind" - ] - -if CONFIG["OS_TARGET"] == "Linux": - - DEFINES["USE_AURA"] = "1" - DEFINES["USE_GLIB"] = "1" - DEFINES["USE_NSS_CERTS"] = "1" - DEFINES["USE_OZONE"] = "1" - DEFINES["USE_UDEV"] = True - DEFINES["_FILE_OFFSET_BITS"] = "64" - DEFINES["_LARGEFILE64_SOURCE"] = True - DEFINES["_LARGEFILE_SOURCE"] = True - - OS_LIBS += [ - "rt" - ] - -if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux": - - DEFINES["USE_X11"] = "1" - -Library("isac_neon_gn") diff --git a/third_party/libwebrtc/modules/audio_coding/neteq/audio_decoder_unittest.cc b/third_party/libwebrtc/modules/audio_coding/neteq/audio_decoder_unittest.cc index bb5c6d167be3..fef3c3c1e414 100644 --- a/third_party/libwebrtc/modules/audio_coding/neteq/audio_decoder_unittest.cc +++ b/third_party/libwebrtc/modules/audio_coding/neteq/audio_decoder_unittest.cc @@ -22,10 +22,6 @@ #include "modules/audio_coding/codecs/g722/audio_encoder_g722.h" #include "modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.h" #include "modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.h" -#include "modules/audio_coding/codecs/isac/fix/include/audio_decoder_isacfix.h" -#include "modules/audio_coding/codecs/isac/fix/include/audio_encoder_isacfix.h" -#include "modules/audio_coding/codecs/isac/main/include/audio_decoder_isac.h" -#include "modules/audio_coding/codecs/isac/main/include/audio_encoder_isac.h" #include "modules/audio_coding/codecs/opus/audio_decoder_opus.h" #include "modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.h" #include 
"modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.h" @@ -195,8 +191,8 @@ class AudioDecoderTest : public ::testing::Test { processed_samples += frame_size_; } // For some codecs it doesn't make sense to check expected number of bytes, - // since the number can vary for different platforms. Opus and iSAC are - // such codecs. In this case expected_bytes is set to 0. + // since the number can vary for different platforms. Opus is such a codec. + // In this case expected_bytes is set to 0. if (expected_bytes) { EXPECT_EQ(expected_bytes, encoded_bytes); } @@ -347,66 +343,6 @@ class AudioDecoderIlbcTest : public AudioDecoderTest { } }; -class AudioDecoderIsacFloatTest : public AudioDecoderTest { - protected: - AudioDecoderIsacFloatTest() : AudioDecoderTest() { - codec_input_rate_hz_ = 16000; - frame_size_ = 480; - data_length_ = 10 * frame_size_; - AudioEncoderIsacFloatImpl::Config config; - config.payload_type = payload_type_; - config.sample_rate_hz = codec_input_rate_hz_; - config.frame_size_ms = - 1000 * static_cast(frame_size_) / codec_input_rate_hz_; - audio_encoder_.reset(new AudioEncoderIsacFloatImpl(config)); - audio_encoder_->OnReceivedOverhead(kOverheadBytesPerPacket); - - AudioDecoderIsacFloatImpl::Config decoder_config; - decoder_config.sample_rate_hz = codec_input_rate_hz_; - decoder_ = new AudioDecoderIsacFloatImpl(decoder_config); - } -}; - -class AudioDecoderIsacSwbTest : public AudioDecoderTest { - protected: - AudioDecoderIsacSwbTest() : AudioDecoderTest() { - codec_input_rate_hz_ = 32000; - frame_size_ = 960; - data_length_ = 10 * frame_size_; - AudioEncoderIsacFloatImpl::Config config; - config.payload_type = payload_type_; - config.sample_rate_hz = codec_input_rate_hz_; - config.frame_size_ms = - 1000 * static_cast(frame_size_) / codec_input_rate_hz_; - audio_encoder_.reset(new AudioEncoderIsacFloatImpl(config)); - audio_encoder_->OnReceivedOverhead(kOverheadBytesPerPacket); - - AudioDecoderIsacFloatImpl::Config decoder_config; - decoder_config.sample_rate_hz = codec_input_rate_hz_; - decoder_ = new AudioDecoderIsacFloatImpl(decoder_config); - } -}; - -class AudioDecoderIsacFixTest : public AudioDecoderTest { - protected: - AudioDecoderIsacFixTest() : AudioDecoderTest() { - codec_input_rate_hz_ = 16000; - frame_size_ = 480; - data_length_ = 10 * frame_size_; - AudioEncoderIsacFixImpl::Config config; - config.payload_type = payload_type_; - config.sample_rate_hz = codec_input_rate_hz_; - config.frame_size_ms = - 1000 * static_cast(frame_size_) / codec_input_rate_hz_; - audio_encoder_.reset(new AudioEncoderIsacFixImpl(config)); - audio_encoder_->OnReceivedOverhead(kOverheadBytesPerPacket); - - AudioDecoderIsacFixImpl::Config decoder_config; - decoder_config.sample_rate_hz = codec_input_rate_hz_; - decoder_ = new AudioDecoderIsacFixImpl(decoder_config); - } -}; - class AudioDecoderG722Test : public AudioDecoderTest { protected: AudioDecoderG722Test() : AudioDecoderTest() { @@ -533,94 +469,6 @@ TEST_F(AudioDecoderIlbcTest, SetTargetBitrate) { TestSetAndGetTargetBitratesWithFixedCodec(audio_encoder_.get(), 13333); } -TEST_F(AudioDecoderIsacFloatTest, EncodeDecode) { - int tolerance = 3399; - double mse = 434951.0; - int delay = 48; // Delay from input to output. 
- EncodeDecodeTest(0, tolerance, mse, delay); - ReInitTest(); - EXPECT_FALSE(decoder_->HasDecodePlc()); -} - -TEST_F(AudioDecoderIsacFloatTest, SetTargetBitrate) { - const int overhead_rate = - 8 * kOverheadBytesPerPacket * codec_input_rate_hz_ / frame_size_; - EXPECT_EQ(10000, - SetAndGetTargetBitrate(audio_encoder_.get(), 9999 + overhead_rate)); - EXPECT_EQ(10000, SetAndGetTargetBitrate(audio_encoder_.get(), - 10000 + overhead_rate)); - EXPECT_EQ(23456, SetAndGetTargetBitrate(audio_encoder_.get(), - 23456 + overhead_rate)); - EXPECT_EQ(32000, SetAndGetTargetBitrate(audio_encoder_.get(), - 32000 + overhead_rate)); - EXPECT_EQ(32000, SetAndGetTargetBitrate(audio_encoder_.get(), - 32001 + overhead_rate)); -} - -TEST_F(AudioDecoderIsacSwbTest, EncodeDecode) { - int tolerance = 19757; - double mse = 8.18e6; - int delay = 160; // Delay from input to output. - EncodeDecodeTest(0, tolerance, mse, delay); - ReInitTest(); - EXPECT_FALSE(decoder_->HasDecodePlc()); -} - -TEST_F(AudioDecoderIsacSwbTest, SetTargetBitrate) { - const int overhead_rate = - 8 * kOverheadBytesPerPacket * codec_input_rate_hz_ / frame_size_; - EXPECT_EQ(10000, - SetAndGetTargetBitrate(audio_encoder_.get(), 9999 + overhead_rate)); - EXPECT_EQ(10000, SetAndGetTargetBitrate(audio_encoder_.get(), - 10000 + overhead_rate)); - EXPECT_EQ(23456, SetAndGetTargetBitrate(audio_encoder_.get(), - 23456 + overhead_rate)); - EXPECT_EQ(56000, SetAndGetTargetBitrate(audio_encoder_.get(), - 56000 + overhead_rate)); - EXPECT_EQ(56000, SetAndGetTargetBitrate(audio_encoder_.get(), - 56001 + overhead_rate)); -} - -// Run bit exactness test only for release builds. -#if defined(NDEBUG) -TEST_F(AudioDecoderIsacFixTest, EncodeDecode) { - int tolerance = 11034; - double mse = 3.46e6; - int delay = 54; // Delay from input to output. 
-#if defined(WEBRTC_ANDROID) && defined(WEBRTC_ARCH_ARM) - static const int kEncodedBytes = 685; -#elif defined(WEBRTC_MAC) && defined(WEBRTC_ARCH_ARM64) // M1 Mac - static const int kEncodedBytes = 673; -#elif defined(WEBRTC_ARCH_ARM64) - static const int kEncodedBytes = 673; -#elif defined(WEBRTC_WIN) && defined(_MSC_VER) && !defined(__clang__) - static const int kEncodedBytes = 671; -#elif defined(WEBRTC_IOS) && defined(WEBRTC_ARCH_X86_64) - static const int kEncodedBytes = 671; -#else - static const int kEncodedBytes = 671; -#endif - EncodeDecodeTest(kEncodedBytes, tolerance, mse, delay); - ReInitTest(); - EXPECT_FALSE(decoder_->HasDecodePlc()); -} -#endif - -TEST_F(AudioDecoderIsacFixTest, SetTargetBitrate) { - const int overhead_rate = - 8 * kOverheadBytesPerPacket * codec_input_rate_hz_ / frame_size_; - EXPECT_EQ(10000, - SetAndGetTargetBitrate(audio_encoder_.get(), 9999 + overhead_rate)); - EXPECT_EQ(10000, SetAndGetTargetBitrate(audio_encoder_.get(), - 10000 + overhead_rate)); - EXPECT_EQ(23456, SetAndGetTargetBitrate(audio_encoder_.get(), - 23456 + overhead_rate)); - EXPECT_EQ(32000, SetAndGetTargetBitrate(audio_encoder_.get(), - 32000 + overhead_rate)); - EXPECT_EQ(32000, SetAndGetTargetBitrate(audio_encoder_.get(), - 32001 + overhead_rate)); -} - TEST_F(AudioDecoderG722Test, EncodeDecode) { int tolerance = 6176; double mse = 238630.0; diff --git a/third_party/libwebrtc/modules/audio_coding/neteq/test/neteq_isac_quality_test.cc b/third_party/libwebrtc/modules/audio_coding/neteq/test/neteq_isac_quality_test.cc deleted file mode 100644 index 6a096c307c9b..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/neteq/test/neteq_isac_quality_test.cc +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "absl/flags/flag.h" -#include "modules/audio_coding/codecs/isac/fix/include/isacfix.h" -#include "modules/audio_coding/neteq/tools/neteq_quality_test.h" - -ABSL_FLAG(int, bit_rate_kbps, 32, "Target bit rate (kbps)."); - -using ::testing::InitGoogleTest; - -namespace webrtc { -namespace test { -namespace { -static const int kIsacBlockDurationMs = 30; -static const int kIsacInputSamplingKhz = 16; -static const int kIsacOutputSamplingKhz = 16; -} // namespace - -class NetEqIsacQualityTest : public NetEqQualityTest { - protected: - NetEqIsacQualityTest(); - void SetUp() override; - void TearDown() override; - int EncodeBlock(int16_t* in_data, - size_t block_size_samples, - rtc::Buffer* payload, - size_t max_bytes) override; - - private: - ISACFIX_MainStruct* isac_encoder_; - int bit_rate_kbps_; -}; - -NetEqIsacQualityTest::NetEqIsacQualityTest() - : NetEqQualityTest(kIsacBlockDurationMs, - kIsacInputSamplingKhz, - kIsacOutputSamplingKhz, - SdpAudioFormat("isac", 16000, 1)), - isac_encoder_(NULL), - bit_rate_kbps_(absl::GetFlag(FLAGS_bit_rate_kbps)) { - // Flag validation - RTC_CHECK(absl::GetFlag(FLAGS_bit_rate_kbps) >= 10 && - absl::GetFlag(FLAGS_bit_rate_kbps) <= 32) - << "Invalid bit rate, should be between 10 and 32 kbps."; -} - -void NetEqIsacQualityTest::SetUp() { - ASSERT_EQ(1u, channels_) << "iSAC supports only mono audio."; - // Create encoder memory. 
- WebRtcIsacfix_Create(&isac_encoder_); - ASSERT_TRUE(isac_encoder_ != NULL); - EXPECT_EQ(0, WebRtcIsacfix_EncoderInit(isac_encoder_, 1)); - // Set bitrate and block length. - EXPECT_EQ(0, WebRtcIsacfix_Control(isac_encoder_, bit_rate_kbps_ * 1000, - kIsacBlockDurationMs)); - NetEqQualityTest::SetUp(); -} - -void NetEqIsacQualityTest::TearDown() { - // Free memory. - EXPECT_EQ(0, WebRtcIsacfix_Free(isac_encoder_)); - NetEqQualityTest::TearDown(); -} - -int NetEqIsacQualityTest::EncodeBlock(int16_t* in_data, - size_t block_size_samples, - rtc::Buffer* payload, - size_t max_bytes) { - // ISAC takes 10 ms for every call. - const int subblocks = kIsacBlockDurationMs / 10; - const int subblock_length = 10 * kIsacInputSamplingKhz; - int value = 0; - - int pointer = 0; - for (int idx = 0; idx < subblocks; idx++, pointer += subblock_length) { - // The Isac encoder does not perform encoding (and returns 0) until it - // receives a sequence of sub-blocks that amount to the frame duration. - EXPECT_EQ(0, value); - payload->AppendData(max_bytes, [&](rtc::ArrayView payload) { - value = WebRtcIsacfix_Encode(isac_encoder_, &in_data[pointer], - payload.data()); - return (value >= 0) ? static_cast(value) : 0; - }); - } - EXPECT_GT(value, 0); - return value; -} - -TEST_F(NetEqIsacQualityTest, Test) { - Simulate(); -} - -} // namespace test -} // namespace webrtc diff --git a/third_party/libwebrtc/modules/audio_coding/neteq/tools/neteq_delay_analyzer.cc b/third_party/libwebrtc/modules/audio_coding/neteq/tools/neteq_delay_analyzer.cc index 91c3a1d96b1d..020199e9ac2d 100644 --- a/third_party/libwebrtc/modules/audio_coding/neteq/tools/neteq_delay_analyzer.cc +++ b/third_party/libwebrtc/modules/audio_coding/neteq/tools/neteq_delay_analyzer.cc @@ -284,6 +284,9 @@ void NetEqDelayAnalyzer::CreatePythonScript( output << " plt.ylabel('relative delay [ms]')" << std::endl; if (!ssrcs_.empty()) { auto ssrc_it = ssrcs_.cbegin(); + output << " plt.legend((\"arrival delay\", \"target delay\", \"playout " + "delay\"))" + << std::endl; output << " plt.title('SSRC: 0x" << std::hex << static_cast(*ssrc_it++); while (ssrc_it != ssrcs_.end()) { diff --git a/third_party/libwebrtc/modules/audio_coding/neteq/tools/neteq_test.cc b/third_party/libwebrtc/modules/audio_coding/neteq/tools/neteq_test.cc index 19b1df11a186..a567efe2defe 100644 --- a/third_party/libwebrtc/modules/audio_coding/neteq/tools/neteq_test.cc +++ b/third_party/libwebrtc/modules/audio_coding/neteq/tools/neteq_test.cc @@ -67,11 +67,11 @@ NetEqTest::NetEqTest(const NetEq::Config& config, std::unique_ptr input, std::unique_ptr output, Callbacks callbacks) - : clock_(0), + : input_(std::move(input)), + clock_(Timestamp::Millis(input_->NextEventTime().value_or(0))), neteq_(neteq_factory ? 
neteq_factory->CreateNetEq(config, decoder_factory, &clock_) : CreateNetEq(config, &clock_, decoder_factory)), - input_(std::move(input)), output_(std::move(output)), callbacks_(callbacks), sample_rate_hz_(config.sample_rate_hz), @@ -99,7 +99,7 @@ int64_t NetEqTest::Run() { NetEqTest::SimulationStepResult NetEqTest::RunToNextGetAudio() { SimulationStepResult result; const int64_t start_time_ms = *input_->NextEventTime(); - int64_t time_now_ms = start_time_ms; + int64_t time_now_ms = clock_.CurrentTime().ms(); current_state_.packet_iat_ms.clear(); while (!input_->ended()) { @@ -311,10 +311,6 @@ NetEqTest::DecoderMap NetEqTest::StandardDecoderMap() { {8, SdpAudioFormat("pcma", 8000, 1)}, #ifdef WEBRTC_CODEC_ILBC {102, SdpAudioFormat("ilbc", 8000, 1)}, -#endif - {103, SdpAudioFormat("isac", 16000, 1)}, -#if !defined(WEBRTC_ANDROID) - {104, SdpAudioFormat("isac", 32000, 1)}, #endif #ifdef WEBRTC_CODEC_OPUS {111, SdpAudioFormat("opus", 48000, 2)}, diff --git a/third_party/libwebrtc/modules/audio_coding/neteq/tools/neteq_test.h b/third_party/libwebrtc/modules/audio_coding/neteq/tools/neteq_test.h index 0a6c24f3d660..1d3eeda4534c 100644 --- a/third_party/libwebrtc/modules/audio_coding/neteq/tools/neteq_test.h +++ b/third_party/libwebrtc/modules/audio_coding/neteq/tools/neteq_test.h @@ -109,11 +109,11 @@ class NetEqTest : public NetEqSimulator { private: void RegisterDecoders(const DecoderMap& codecs); + std::unique_ptr input_; SimulatedClock clock_; absl::optional next_action_; absl::optional last_packet_time_ms_; std::unique_ptr neteq_; - std::unique_ptr input_; std::unique_ptr output_; Callbacks callbacks_; int sample_rate_hz_; diff --git a/third_party/libwebrtc/modules/audio_coding/neteq/tools/rtp_encode.cc b/third_party/libwebrtc/modules/audio_coding/neteq/tools/rtp_encode.cc index 6aeeb6d129fb..8adca927f055 100644 --- a/third_party/libwebrtc/modules/audio_coding/neteq/tools/rtp_encode.cc +++ b/third_party/libwebrtc/modules/audio_coding/neteq/tools/rtp_encode.cc @@ -30,7 +30,6 @@ #include "api/audio_codecs/g711/audio_encoder_g711.h" #include "api/audio_codecs/g722/audio_encoder_g722.h" #include "api/audio_codecs/ilbc/audio_encoder_ilbc.h" -#include "api/audio_codecs/isac/audio_encoder_isac.h" #include "api/audio_codecs/opus/audio_encoder_opus.h" #include "modules/audio_coding/codecs/cng/audio_encoder_cng.h" #include "modules/audio_coding/include/audio_coding_module.h" @@ -71,7 +70,6 @@ enum class CodecType { kPcm16b32, kPcm16b48, kIlbc, - kIsac }; struct CodecTypeAndInfo { @@ -94,8 +92,7 @@ const std::map& CodecList() { {"pcm16b_16", {CodecType::kPcm16b16, 94, false}}, {"pcm16b_32", {CodecType::kPcm16b32, 95, false}}, {"pcm16b_48", {CodecType::kPcm16b48, 96, false}}, - {"ilbc", {CodecType::kIlbc, 102, false}}, - {"isac", {CodecType::kIsac, 103, false}}}; + {"ilbc", {CodecType::kIlbc, 102, false}}}; return *codec_list; } @@ -236,11 +233,6 @@ std::unique_ptr CreateEncoder(CodecType codec_type, return AudioEncoderIlbc::MakeAudioEncoder( GetCodecConfig(), payload_type); } - - case CodecType::kIsac: { - return AudioEncoderIsac::MakeAudioEncoder( - GetCodecConfig(), payload_type); - } } RTC_DCHECK_NOTREACHED(); return nullptr; diff --git a/third_party/libwebrtc/modules/audio_coding/test/EncodeDecodeTest.cc b/third_party/libwebrtc/modules/audio_coding/test/EncodeDecodeTest.cc index 8d4bcce8df63..9f9c4aa74ce9 100644 --- a/third_party/libwebrtc/modules/audio_coding/test/EncodeDecodeTest.cc +++ b/third_party/libwebrtc/modules/audio_coding/test/EncodeDecodeTest.cc @@ -110,9 +110,7 @@ void 
Receiver::Setup(AudioCodingModule* acm, EXPECT_EQ(0, acm->InitializeReceiver()); if (channels == 1) { - acm->SetReceiveCodecs({{103, {"ISAC", 16000, 1}}, - {104, {"ISAC", 32000, 1}}, - {107, {"L16", 8000, 1}}, + acm->SetReceiveCodecs({{107, {"L16", 8000, 1}}, {108, {"L16", 16000, 1}}, {109, {"L16", 32000, 1}}, {0, {"PCMU", 8000, 1}}, @@ -232,7 +230,6 @@ EncodeDecodeTest::EncodeDecodeTest() = default; void EncodeDecodeTest::Perform() { const std::map send_codecs = { - {103, {"ISAC", 16000, 1}}, {104, {"ISAC", 32000, 1}}, {107, {"L16", 8000, 1}}, {108, {"L16", 16000, 1}}, {109, {"L16", 32000, 1}}, {0, {"PCMU", 8000, 1}}, {8, {"PCMA", 8000, 1}}, diff --git a/third_party/libwebrtc/modules/audio_coding/test/TestAllCodecs.cc b/third_party/libwebrtc/modules/audio_coding/test/TestAllCodecs.cc index e93df346f191..b44037d73253 100644 --- a/third_party/libwebrtc/modules/audio_coding/test/TestAllCodecs.cc +++ b/third_party/libwebrtc/modules/audio_coding/test/TestAllCodecs.cc @@ -130,9 +130,7 @@ void TestAllCodecs::Perform() { acm_a_->InitializeReceiver(); acm_b_->InitializeReceiver(); - acm_b_->SetReceiveCodecs({{103, {"ISAC", 16000, 1}}, - {104, {"ISAC", 32000, 1}}, - {107, {"L16", 8000, 1}}, + acm_b_->SetReceiveCodecs({{107, {"L16", 8000, 1}}, {108, {"L16", 16000, 1}}, {109, {"L16", 32000, 1}}, {111, {"L16", 8000, 2}}, @@ -186,33 +184,6 @@ void TestAllCodecs::Perform() { RegisterSendCodec('A', codec_ilbc, 8000, 15200, 320, 0); Run(channel_a_to_b_); outfile_b_.Close(); -#endif -#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX)) - test_count_++; - OpenOutFile(test_count_); - char codec_isac[] = "ISAC"; - RegisterSendCodec('A', codec_isac, 16000, -1, 480, kVariableSize); - Run(channel_a_to_b_); - RegisterSendCodec('A', codec_isac, 16000, -1, 960, kVariableSize); - Run(channel_a_to_b_); - RegisterSendCodec('A', codec_isac, 16000, 15000, 480, kVariableSize); - Run(channel_a_to_b_); - RegisterSendCodec('A', codec_isac, 16000, 32000, 960, kVariableSize); - Run(channel_a_to_b_); - outfile_b_.Close(); -#endif -#ifdef WEBRTC_CODEC_ISAC - test_count_++; - OpenOutFile(test_count_); - RegisterSendCodec('A', codec_isac, 32000, -1, 960, kVariableSize); - Run(channel_a_to_b_); - RegisterSendCodec('A', codec_isac, 32000, 56000, 960, kVariableSize); - Run(channel_a_to_b_); - RegisterSendCodec('A', codec_isac, 32000, 37000, 960, kVariableSize); - Run(channel_a_to_b_); - RegisterSendCodec('A', codec_isac, 32000, 32000, 960, kVariableSize); - Run(channel_a_to_b_); - outfile_b_.Close(); #endif test_count_++; OpenOutFile(test_count_); @@ -319,15 +290,11 @@ void TestAllCodecs::RegisterSendCodec(char side, // Store packet-size in samples, used to validate the received packet. // If G.722, store half the size to compensate for the timestamp bug in the // RFC for G.722. - // If iSAC runs in adaptive mode, packet size in samples can change on the - // fly, so we exclude this test by setting `packet_size_samples_` to -1. 
int clockrate_hz = sampling_freq_hz; size_t num_channels = 1; if (absl::EqualsIgnoreCase(codec_name, "G722")) { packet_size_samples_ = packet_size / 2; clockrate_hz = sampling_freq_hz / 2; - } else if (absl::EqualsIgnoreCase(codec_name, "ISAC") && (rate == -1)) { - packet_size_samples_ = -1; } else if (absl::EqualsIgnoreCase(codec_name, "OPUS")) { packet_size_samples_ = packet_size; num_channels = 2; diff --git a/third_party/libwebrtc/modules/audio_coding/test/TestRedFec.cc b/third_party/libwebrtc/modules/audio_coding/test/TestRedFec.cc index 892fbc83d688..fff48b27bc9c 100644 --- a/third_party/libwebrtc/modules/audio_coding/test/TestRedFec.cc +++ b/third_party/libwebrtc/modules/audio_coding/test/TestRedFec.cc @@ -22,8 +22,6 @@ #include "api/audio_codecs/g711/audio_encoder_g711.h" #include "api/audio_codecs/g722/audio_decoder_g722.h" #include "api/audio_codecs/g722/audio_encoder_g722.h" -#include "api/audio_codecs/isac/audio_decoder_isac_float.h" -#include "api/audio_codecs/isac/audio_encoder_isac_float.h" #include "api/audio_codecs/opus/audio_decoder_opus.h" #include "api/audio_codecs/opus/audio_encoder_opus.h" #include "modules/audio_coding/codecs/cng/audio_encoder_cng.h" @@ -38,12 +36,10 @@ namespace webrtc { TestRedFec::TestRedFec() : encoder_factory_(CreateAudioEncoderFactory()), decoder_factory_(CreateAudioDecoderFactory()), _acmA(AudioCodingModule::Create( @@ -95,19 +91,6 @@ void TestRedFec::Perform() { Run(); _outFileB.Close(); - RegisterSendCodec(_acmA, {"ISAC", 16000, 1}, Vad::kVadVeryAggressive, false); - OpenOutFile(_testCntr); - Run(); - _outFileB.Close(); - - // Switch to a 32 kHz codec; RED should be switched off. - RegisterSendCodec(_acmA, {"ISAC", 32000, 1}, Vad::kVadVeryAggressive, false); - OpenOutFile(_testCntr); - Run(); - _outFileB.Close(); - - RegisterSendCodec(_acmA, {"ISAC", 32000, 1}, absl::nullopt, false); - _channelA2B->SetFECTestWithPacketLoss(true); // Following tests are under packet losses. @@ -118,22 +101,6 @@ void TestRedFec::Perform() { Run(); _outFileB.Close(); - // Switch to a 16 kHz codec, RED should have been switched off. - RegisterSendCodec(_acmA, {"ISAC", 16000, 1}, Vad::kVadVeryAggressive, false); - - OpenOutFile(_testCntr); - Run(); - _outFileB.Close(); - - // Switch to a 32 kHz codec, RED should have been switched off. - RegisterSendCodec(_acmA, {"ISAC", 32000, 1}, Vad::kVadVeryAggressive, false); - - OpenOutFile(_testCntr); - Run(); - _outFileB.Close(); - - RegisterSendCodec(_acmA, {"ISAC", 32000, 1}, absl::nullopt, false); - RegisterSendCodec(_acmA, {"opus", 48000, 2}, absl::nullopt, false); // _channelA2B imposes 25% packet loss rate. 
diff --git a/third_party/libwebrtc/modules/audio_coding/test/TestVADDTX.cc b/third_party/libwebrtc/modules/audio_coding/test/TestVADDTX.cc index cb05deb92a06..19367d9bde26 100644 --- a/third_party/libwebrtc/modules/audio_coding/test/TestVADDTX.cc +++ b/third_party/libwebrtc/modules/audio_coding/test/TestVADDTX.cc @@ -18,8 +18,6 @@ #include "api/audio_codecs/audio_encoder_factory_template.h" #include "api/audio_codecs/ilbc/audio_decoder_ilbc.h" #include "api/audio_codecs/ilbc/audio_encoder_ilbc.h" -#include "api/audio_codecs/isac/audio_decoder_isac_float.h" -#include "api/audio_codecs/isac/audio_encoder_isac_float.h" #include "api/audio_codecs/opus/audio_decoder_opus.h" #include "api/audio_codecs/opus/audio_encoder_opus.h" #include "modules/audio_coding/codecs/cng/audio_encoder_cng.h" @@ -68,12 +66,10 @@ void MonitoringAudioPacketizationCallback::GetStatistics(uint32_t* counter) { } TestVadDtx::TestVadDtx() - : encoder_factory_(CreateAudioEncoderFactory()), - decoder_factory_(CreateAudioDecoderFactory()), + : encoder_factory_( + CreateAudioEncoderFactory()), + decoder_factory_( + CreateAudioDecoderFactory()), acm_send_(AudioCodingModule::Create( AudioCodingModule::Config(decoder_factory_))), acm_receive_(AudioCodingModule::Create( @@ -182,8 +178,6 @@ void TestVadDtx::Run(absl::string_view in_filename, TestWebRtcVadDtx::TestWebRtcVadDtx() : output_file_num_(0) {} void TestWebRtcVadDtx::Perform() { - RunTestCases({"ISAC", 16000, 1}); - RunTestCases({"ISAC", 32000, 1}); RunTestCases({"ILBC", 8000, 1}); RunTestCases({"opus", 48000, 2}); } diff --git a/third_party/libwebrtc/modules/audio_coding/test/Tester.cc b/third_party/libwebrtc/modules/audio_coding/test/Tester.cc index 113dbe059e2d..7612aa43a34c 100644 --- a/third_party/libwebrtc/modules/audio_coding/test/Tester.cc +++ b/third_party/libwebrtc/modules/audio_coding/test/Tester.cc @@ -21,7 +21,6 @@ #include "modules/audio_coding/test/TestStereo.h" #include "modules/audio_coding/test/TestVADDTX.h" #include "modules/audio_coding/test/TwoWayCommunication.h" -#include "modules/audio_coding/test/iSACTest.h" #include "modules/audio_coding/test/opus_test.h" #include "test/gtest.h" #include "test/testsupport/file_utils.h" @@ -42,25 +41,6 @@ TEST(AudioCodingModuleTest, TestRedFec) { webrtc::TestRedFec().Perform(); } -#if defined(WEBRTC_ANDROID) -TEST(AudioCodingModuleTest, DISABLED_TestIsac) { -#else -TEST(AudioCodingModuleTest, TestIsac) { -#endif - webrtc::ISACTest().Perform(); -} - -#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX)) && \ - defined(WEBRTC_CODEC_ILBC) -#if defined(WEBRTC_ANDROID) -TEST(AudioCodingModuleTest, DISABLED_TwoWayCommunication) { -#else -TEST(AudioCodingModuleTest, TwoWayCommunication) { -#endif - webrtc::TwoWayCommunication().Perform(); -} -#endif - // Disabled on ios as flaky, see https://crbug.com/webrtc/7057 #if defined(WEBRTC_ANDROID) || defined(WEBRTC_IOS) TEST(AudioCodingModuleTest, DISABLED_TestStereo) { diff --git a/third_party/libwebrtc/modules/audio_coding/test/iSACTest.cc b/third_party/libwebrtc/modules/audio_coding/test/iSACTest.cc deleted file mode 100644 index 246c485afe62..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/test/iSACTest.cc +++ /dev/null @@ -1,273 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_coding/test/iSACTest.h" - -#include -#include - -#include "absl/strings/match.h" -#include "api/audio_codecs/builtin_audio_decoder_factory.h" -#include "api/audio_codecs/isac/audio_encoder_isac_float.h" -#include "rtc_base/strings/string_builder.h" -#include "rtc_base/time_utils.h" -#include "test/gmock.h" -#include "test/gtest.h" -#include "test/testsupport/file_utils.h" - -namespace webrtc { - -using ::testing::AnyOf; -using ::testing::Eq; -using ::testing::StrCaseEq; - -namespace { - -constexpr int kISAC16kPayloadType = 103; -constexpr int kISAC32kPayloadType = 104; -const SdpAudioFormat kISAC16kFormat = {"ISAC", 16000, 1}; -const SdpAudioFormat kISAC32kFormat = {"ISAC", 32000, 1}; - -AudioEncoderIsacFloat::Config TweakConfig( - AudioEncoderIsacFloat::Config config, - const ACMTestISACConfig& test_config) { - if (test_config.currentRateBitPerSec > 0) { - config.bit_rate = test_config.currentRateBitPerSec; - } - if (test_config.currentFrameSizeMsec != 0) { - config.frame_size_ms = test_config.currentFrameSizeMsec; - } - EXPECT_THAT(config.IsOk(), Eq(true)); - return config; -} - -void SetISACConfigDefault(ACMTestISACConfig& isacConfig) { - isacConfig.currentRateBitPerSec = 0; - isacConfig.currentFrameSizeMsec = 0; - isacConfig.encodingMode = -1; - isacConfig.initRateBitPerSec = 0; - isacConfig.initFrameSizeInMsec = 0; - isacConfig.enforceFrameSize = false; -} - -} // namespace - -ISACTest::ISACTest() - : _acmA(AudioCodingModule::Create( - AudioCodingModule::Config(CreateBuiltinAudioDecoderFactory()))), - _acmB(AudioCodingModule::Create( - AudioCodingModule::Config(CreateBuiltinAudioDecoderFactory()))) {} - -ISACTest::~ISACTest() {} - -void ISACTest::Setup() { - // Register both iSAC-wb & iSAC-swb in both sides as receiver codecs. - std::map receive_codecs = { - {kISAC16kPayloadType, kISAC16kFormat}, - {kISAC32kPayloadType, kISAC32kFormat}}; - _acmA->SetReceiveCodecs(receive_codecs); - _acmB->SetReceiveCodecs(receive_codecs); - - //--- Set A-to-B channel - _channel_A2B.reset(new Channel); - EXPECT_EQ(0, _acmA->RegisterTransportCallback(_channel_A2B.get())); - _channel_A2B->RegisterReceiverACM(_acmB.get()); - - //--- Set B-to-A channel - _channel_B2A.reset(new Channel); - EXPECT_EQ(0, _acmB->RegisterTransportCallback(_channel_B2A.get())); - _channel_B2A->RegisterReceiverACM(_acmA.get()); - - file_name_swb_ = - webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm"); - - _acmB->SetEncoder(AudioEncoderIsacFloat::MakeAudioEncoder( - *AudioEncoderIsacFloat::SdpToConfig(kISAC16kFormat), - kISAC16kPayloadType)); - _acmA->SetEncoder(AudioEncoderIsacFloat::MakeAudioEncoder( - *AudioEncoderIsacFloat::SdpToConfig(kISAC32kFormat), - kISAC32kPayloadType)); - - _inFileA.Open(file_name_swb_, 32000, "rb"); - // Set test length to 500 ms (50 blocks of 10 ms each). - _inFileA.SetNum10MsBlocksToRead(50); - // Fast-forward 1 second (100 blocks) since the files start with silence. 
- _inFileA.FastForward(100); - std::string fileNameA = webrtc::test::OutputPath() + "testisac_a.pcm"; - std::string fileNameB = webrtc::test::OutputPath() + "testisac_b.pcm"; - _outFileA.Open(fileNameA, 32000, "wb"); - _outFileB.Open(fileNameB, 32000, "wb"); - - while (!_inFileA.EndOfFile()) { - Run10ms(); - } - - _inFileA.Close(); - _outFileA.Close(); - _outFileB.Close(); -} - -void ISACTest::Perform() { - Setup(); - - int16_t testNr = 0; - ACMTestISACConfig wbISACConfig; - ACMTestISACConfig swbISACConfig; - - SetISACConfigDefault(wbISACConfig); - SetISACConfigDefault(swbISACConfig); - - wbISACConfig.currentRateBitPerSec = -1; - swbISACConfig.currentRateBitPerSec = -1; - testNr++; - EncodeDecode(testNr, wbISACConfig, swbISACConfig); - - SetISACConfigDefault(wbISACConfig); - SetISACConfigDefault(swbISACConfig); - testNr++; - EncodeDecode(testNr, wbISACConfig, swbISACConfig); - - testNr++; - SwitchingSamplingRate(testNr, 4); -} - -void ISACTest::Run10ms() { - AudioFrame audioFrame; - EXPECT_GT(_inFileA.Read10MsData(audioFrame), 0); - EXPECT_GE(_acmA->Add10MsData(audioFrame), 0); - EXPECT_GE(_acmB->Add10MsData(audioFrame), 0); - bool muted; - EXPECT_EQ(0, _acmA->PlayoutData10Ms(32000, &audioFrame, &muted)); - ASSERT_FALSE(muted); - _outFileA.Write10MsData(audioFrame); - EXPECT_EQ(0, _acmB->PlayoutData10Ms(32000, &audioFrame, &muted)); - ASSERT_FALSE(muted); - _outFileB.Write10MsData(audioFrame); -} - -void ISACTest::EncodeDecode(int testNr, - ACMTestISACConfig& wbISACConfig, - ACMTestISACConfig& swbISACConfig) { - // Files in Side A and B - _inFileA.Open(file_name_swb_, 32000, "rb", true); - _inFileB.Open(file_name_swb_, 32000, "rb", true); - - std::string file_name_out; - rtc::StringBuilder file_stream_a; - rtc::StringBuilder file_stream_b; - file_stream_a << webrtc::test::OutputPath(); - file_stream_b << webrtc::test::OutputPath(); - file_stream_a << "out_iSACTest_A_" << testNr << ".pcm"; - file_stream_b << "out_iSACTest_B_" << testNr << ".pcm"; - file_name_out = file_stream_a.str(); - _outFileA.Open(file_name_out, 32000, "wb"); - file_name_out = file_stream_b.str(); - _outFileB.Open(file_name_out, 32000, "wb"); - - // Side A is sending super-wideband, and side B is sending wideband. 
- _acmA->SetEncoder(AudioEncoderIsacFloat::MakeAudioEncoder( - TweakConfig(*AudioEncoderIsacFloat::SdpToConfig(kISAC32kFormat), - swbISACConfig), - kISAC32kPayloadType)); - _acmB->SetEncoder(AudioEncoderIsacFloat::MakeAudioEncoder( - TweakConfig(*AudioEncoderIsacFloat::SdpToConfig(kISAC16kFormat), - wbISACConfig), - kISAC16kPayloadType)); - - _channel_A2B->ResetStats(); - _channel_B2A->ResetStats(); - - while (!(_inFileA.EndOfFile() || _inFileA.Rewinded())) { - Run10ms(); - } - - _channel_A2B->ResetStats(); - _channel_B2A->ResetStats(); - - _outFileA.Close(); - _outFileB.Close(); - _inFileA.Close(); - _inFileB.Close(); -} - -void ISACTest::SwitchingSamplingRate(int testNr, int maxSampRateChange) { - // Files in Side A - _inFileA.Open(file_name_swb_, 32000, "rb"); - _inFileB.Open(file_name_swb_, 32000, "rb"); - - std::string file_name_out; - rtc::StringBuilder file_stream_a; - rtc::StringBuilder file_stream_b; - file_stream_a << webrtc::test::OutputPath(); - file_stream_b << webrtc::test::OutputPath(); - file_stream_a << "out_iSACTest_A_" << testNr << ".pcm"; - file_stream_b << "out_iSACTest_B_" << testNr << ".pcm"; - file_name_out = file_stream_a.str(); - _outFileA.Open(file_name_out, 32000, "wb"); - file_name_out = file_stream_b.str(); - _outFileB.Open(file_name_out, 32000, "wb"); - - // Start with side A sending super-wideband and side B seding wideband. - // Toggle sending wideband/super-wideband in this test. - _acmA->SetEncoder(AudioEncoderIsacFloat::MakeAudioEncoder( - *AudioEncoderIsacFloat::SdpToConfig(kISAC32kFormat), - kISAC32kPayloadType)); - _acmB->SetEncoder(AudioEncoderIsacFloat::MakeAudioEncoder( - *AudioEncoderIsacFloat::SdpToConfig(kISAC16kFormat), - kISAC16kPayloadType)); - - int numSendCodecChanged = 0; - while (numSendCodecChanged < (maxSampRateChange << 1)) { - Run10ms(); - if (_inFileA.EndOfFile()) { - if (_inFileA.SamplingFrequency() == 16000) { - // Switch side A to send super-wideband. - _inFileA.Close(); - _inFileA.Open(file_name_swb_, 32000, "rb"); - _acmA->SetEncoder(AudioEncoderIsacFloat::MakeAudioEncoder( - *AudioEncoderIsacFloat::SdpToConfig(kISAC32kFormat), - kISAC32kPayloadType)); - } else { - // Switch side A to send wideband. - _inFileA.Close(); - _inFileA.Open(file_name_swb_, 32000, "rb"); - _acmA->SetEncoder(AudioEncoderIsacFloat::MakeAudioEncoder( - *AudioEncoderIsacFloat::SdpToConfig(kISAC16kFormat), - kISAC16kPayloadType)); - } - numSendCodecChanged++; - } - - if (_inFileB.EndOfFile()) { - if (_inFileB.SamplingFrequency() == 16000) { - // Switch side B to send super-wideband. - _inFileB.Close(); - _inFileB.Open(file_name_swb_, 32000, "rb"); - _acmB->SetEncoder(AudioEncoderIsacFloat::MakeAudioEncoder( - *AudioEncoderIsacFloat::SdpToConfig(kISAC32kFormat), - kISAC32kPayloadType)); - } else { - // Switch side B to send wideband. - _inFileB.Close(); - _inFileB.Open(file_name_swb_, 32000, "rb"); - _acmB->SetEncoder(AudioEncoderIsacFloat::MakeAudioEncoder( - *AudioEncoderIsacFloat::SdpToConfig(kISAC16kFormat), - kISAC16kPayloadType)); - } - numSendCodecChanged++; - } - } - _outFileA.Close(); - _outFileB.Close(); - _inFileA.Close(); - _inFileB.Close(); -} - -} // namespace webrtc diff --git a/third_party/libwebrtc/modules/audio_coding/test/iSACTest.h b/third_party/libwebrtc/modules/audio_coding/test/iSACTest.h deleted file mode 100644 index f6efeeac1cf0..000000000000 --- a/third_party/libwebrtc/modules/audio_coding/test/iSACTest.h +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_CODING_TEST_ISACTEST_H_ -#define MODULES_AUDIO_CODING_TEST_ISACTEST_H_ - -#include - -#include - -#include "modules/audio_coding/include/audio_coding_module.h" -#include "modules/audio_coding/test/Channel.h" -#include "modules/audio_coding/test/PCMFile.h" - -namespace webrtc { - -struct ACMTestISACConfig { - int32_t currentRateBitPerSec; - int16_t currentFrameSizeMsec; - int16_t encodingMode; - uint32_t initRateBitPerSec; - int16_t initFrameSizeInMsec; - bool enforceFrameSize; -}; - -class ISACTest { - public: - ISACTest(); - ~ISACTest(); - - void Perform(); - - private: - void Setup(); - - void Run10ms(); - - void EncodeDecode(int testNr, - ACMTestISACConfig& wbISACConfig, - ACMTestISACConfig& swbISACConfig); - - void SwitchingSamplingRate(int testNr, int maxSampRateChange); - - std::unique_ptr _acmA; - std::unique_ptr _acmB; - - std::unique_ptr _channel_A2B; - std::unique_ptr _channel_B2A; - - PCMFile _inFileA; - PCMFile _inFileB; - - PCMFile _outFileA; - PCMFile _outFileB; - - std::string file_name_swb_; -}; - -} // namespace webrtc - -#endif // MODULES_AUDIO_CODING_TEST_ISACTEST_H_ diff --git a/third_party/libwebrtc/modules/audio_coding/webrtc_multiopus_gn/moz.build b/third_party/libwebrtc/modules/audio_coding/webrtc_multiopus_gn/moz.build index 438ca4e4e42f..1eff483cac09 100644 --- a/third_party/libwebrtc/modules/audio_coding/webrtc_multiopus_gn/moz.build +++ b/third_party/libwebrtc/modules/audio_coding/webrtc_multiopus_gn/moz.build @@ -69,7 +69,6 @@ if CONFIG["OS_TARGET"] == "Android": if CONFIG["OS_TARGET"] == "Darwin": - DEFINES["WEBRTC_CODEC_ISAC"] = True DEFINES["WEBRTC_ENABLE_AVX2"] = True DEFINES["WEBRTC_MAC"] = True DEFINES["WEBRTC_POSIX"] = True @@ -99,7 +98,6 @@ if CONFIG["OS_TARGET"] == "OpenBSD": DEFINES["USE_OZONE"] = "1" DEFINES["USE_X11"] = "1" DEFINES["WEBRTC_BSD"] = True - DEFINES["WEBRTC_CODEC_ISAC"] = True DEFINES["WEBRTC_ENABLE_AVX2"] = True DEFINES["WEBRTC_POSIX"] = True DEFINES["_FILE_OFFSET_BITS"] = "64" @@ -116,7 +114,6 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["PSAPI_VERSION"] = "2" DEFINES["UNICODE"] = True DEFINES["USE_AURA"] = "1" - DEFINES["WEBRTC_CODEC_ISAC"] = True DEFINES["WEBRTC_ENABLE_AVX2"] = True DEFINES["WEBRTC_WIN"] = True DEFINES["WIN32"] = True @@ -153,13 +150,8 @@ if CONFIG["CPU_ARCH"] == "arm": DEFINES["WEBRTC_ARCH_ARM"] = True DEFINES["WEBRTC_ARCH_ARM_V7"] = True - DEFINES["WEBRTC_CODEC_ISACFX"] = True DEFINES["WEBRTC_HAS_NEON"] = True -if CONFIG["CPU_ARCH"] == "ppc64": - - DEFINES["WEBRTC_CODEC_ISAC"] = True - if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Android": DEFINES["_DEBUG"] = True @@ -184,10 +176,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux": DEFINES["USE_X11"] = "1" -if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Android": - - DEFINES["WEBRTC_CODEC_ISAC"] = True - if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Android": OS_LIBS += [ @@ -201,19 +189,12 @@ if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Android": "-msse2" ] - DEFINES["WEBRTC_CODEC_ISAC"] = True - OS_LIBS += [ "android_support" ] -if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Android": - - DEFINES["WEBRTC_CODEC_ISAC"] = 
True - if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Linux": - DEFINES["WEBRTC_CODEC_ISAC"] = True DEFINES["WEBRTC_ENABLE_AVX2"] = True DEFINES["_GNU_SOURCE"] = True @@ -228,13 +209,11 @@ if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux": "-msse2" ] - DEFINES["WEBRTC_CODEC_ISAC"] = True DEFINES["WEBRTC_ENABLE_AVX2"] = True DEFINES["_GNU_SOURCE"] = True if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Linux": - DEFINES["WEBRTC_CODEC_ISAC"] = True DEFINES["WEBRTC_ENABLE_AVX2"] = True DEFINES["_GNU_SOURCE"] = True diff --git a/third_party/libwebrtc/modules/audio_coding/webrtc_opus_gn/moz.build b/third_party/libwebrtc/modules/audio_coding/webrtc_opus_gn/moz.build index c9cb5de2f7b2..2913ddd3f624 100644 --- a/third_party/libwebrtc/modules/audio_coding/webrtc_opus_gn/moz.build +++ b/third_party/libwebrtc/modules/audio_coding/webrtc_opus_gn/moz.build @@ -69,7 +69,6 @@ if CONFIG["OS_TARGET"] == "Android": if CONFIG["OS_TARGET"] == "Darwin": - DEFINES["WEBRTC_CODEC_ISAC"] = True DEFINES["WEBRTC_ENABLE_AVX2"] = True DEFINES["WEBRTC_MAC"] = True DEFINES["WEBRTC_POSIX"] = True @@ -103,7 +102,6 @@ if CONFIG["OS_TARGET"] == "OpenBSD": DEFINES["USE_OZONE"] = "1" DEFINES["USE_X11"] = "1" DEFINES["WEBRTC_BSD"] = True - DEFINES["WEBRTC_CODEC_ISAC"] = True DEFINES["WEBRTC_ENABLE_AVX2"] = True DEFINES["WEBRTC_POSIX"] = True DEFINES["_FILE_OFFSET_BITS"] = "64" @@ -120,7 +118,6 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["PSAPI_VERSION"] = "2" DEFINES["UNICODE"] = True DEFINES["USE_AURA"] = "1" - DEFINES["WEBRTC_CODEC_ISAC"] = True DEFINES["WEBRTC_ENABLE_AVX2"] = True DEFINES["WEBRTC_WIN"] = True DEFINES["WIN32"] = True @@ -160,13 +157,8 @@ if CONFIG["CPU_ARCH"] == "arm": DEFINES["WEBRTC_ARCH_ARM"] = True DEFINES["WEBRTC_ARCH_ARM_V7"] = True - DEFINES["WEBRTC_CODEC_ISACFX"] = True DEFINES["WEBRTC_HAS_NEON"] = True -if CONFIG["CPU_ARCH"] == "ppc64": - - DEFINES["WEBRTC_CODEC_ISAC"] = True - if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Android": DEFINES["_DEBUG"] = True @@ -191,10 +183,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux": DEFINES["USE_X11"] = "1" -if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Android": - - DEFINES["WEBRTC_CODEC_ISAC"] = True - if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Android": OS_LIBS += [ @@ -208,19 +196,12 @@ if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Android": "-msse2" ] - DEFINES["WEBRTC_CODEC_ISAC"] = True - OS_LIBS += [ "android_support" ] -if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Android": - - DEFINES["WEBRTC_CODEC_ISAC"] = True - if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Linux": - DEFINES["WEBRTC_CODEC_ISAC"] = True DEFINES["WEBRTC_ENABLE_AVX2"] = True DEFINES["_GNU_SOURCE"] = True @@ -235,13 +216,11 @@ if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux": "-msse2" ] - DEFINES["WEBRTC_CODEC_ISAC"] = True DEFINES["WEBRTC_ENABLE_AVX2"] = True DEFINES["_GNU_SOURCE"] = True if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Linux": - DEFINES["WEBRTC_CODEC_ISAC"] = True DEFINES["WEBRTC_ENABLE_AVX2"] = True DEFINES["_GNU_SOURCE"] = True diff --git a/third_party/libwebrtc/modules/audio_coding/webrtc_opus_wrapper_gn/moz.build b/third_party/libwebrtc/modules/audio_coding/webrtc_opus_wrapper_gn/moz.build index 555c485b765b..bfa798809d55 100644 --- a/third_party/libwebrtc/modules/audio_coding/webrtc_opus_wrapper_gn/moz.build +++ 
b/third_party/libwebrtc/modules/audio_coding/webrtc_opus_wrapper_gn/moz.build @@ -68,7 +68,6 @@ if CONFIG["OS_TARGET"] == "Android": if CONFIG["OS_TARGET"] == "Darwin": - DEFINES["WEBRTC_CODEC_ISAC"] = True DEFINES["WEBRTC_ENABLE_AVX2"] = True DEFINES["WEBRTC_MAC"] = True DEFINES["WEBRTC_POSIX"] = True @@ -98,7 +97,6 @@ if CONFIG["OS_TARGET"] == "OpenBSD": DEFINES["USE_OZONE"] = "1" DEFINES["USE_X11"] = "1" DEFINES["WEBRTC_BSD"] = True - DEFINES["WEBRTC_CODEC_ISAC"] = True DEFINES["WEBRTC_ENABLE_AVX2"] = True DEFINES["WEBRTC_POSIX"] = True DEFINES["_FILE_OFFSET_BITS"] = "64" @@ -115,7 +113,6 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["PSAPI_VERSION"] = "2" DEFINES["UNICODE"] = True DEFINES["USE_AURA"] = "1" - DEFINES["WEBRTC_CODEC_ISAC"] = True DEFINES["WEBRTC_ENABLE_AVX2"] = True DEFINES["WEBRTC_WIN"] = True DEFINES["WIN32"] = True @@ -152,13 +149,8 @@ if CONFIG["CPU_ARCH"] == "arm": DEFINES["WEBRTC_ARCH_ARM"] = True DEFINES["WEBRTC_ARCH_ARM_V7"] = True - DEFINES["WEBRTC_CODEC_ISACFX"] = True DEFINES["WEBRTC_HAS_NEON"] = True -if CONFIG["CPU_ARCH"] == "ppc64": - - DEFINES["WEBRTC_CODEC_ISAC"] = True - if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Android": DEFINES["_DEBUG"] = True @@ -183,10 +175,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux": DEFINES["USE_X11"] = "1" -if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Android": - - DEFINES["WEBRTC_CODEC_ISAC"] = True - if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Android": OS_LIBS += [ @@ -200,19 +188,12 @@ if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Android": "-msse2" ] - DEFINES["WEBRTC_CODEC_ISAC"] = True - OS_LIBS += [ "android_support" ] -if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Android": - - DEFINES["WEBRTC_CODEC_ISAC"] = True - if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Linux": - DEFINES["WEBRTC_CODEC_ISAC"] = True DEFINES["WEBRTC_ENABLE_AVX2"] = True DEFINES["_GNU_SOURCE"] = True @@ -227,13 +208,11 @@ if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux": "-msse2" ] - DEFINES["WEBRTC_CODEC_ISAC"] = True DEFINES["WEBRTC_ENABLE_AVX2"] = True DEFINES["_GNU_SOURCE"] = True if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Linux": - DEFINES["WEBRTC_CODEC_ISAC"] = True DEFINES["WEBRTC_ENABLE_AVX2"] = True DEFINES["_GNU_SOURCE"] = True diff --git a/third_party/libwebrtc/modules/audio_mixer/frame_combiner.cc b/third_party/libwebrtc/modules/audio_mixer/frame_combiner.cc index abfc5e3ad913..96c62f6b0d26 100644 --- a/third_party/libwebrtc/modules/audio_mixer/frame_combiner.cc +++ b/third_party/libwebrtc/modules/audio_mixer/frame_combiner.cc @@ -164,8 +164,6 @@ void FrameCombiner::Combine(rtc::ArrayView mix_list, AudioFrame* audio_frame_for_mixing) { RTC_DCHECK(audio_frame_for_mixing); - LogMixingStats(mix_list, sample_rate, number_of_streams); - SetAudioFrameFields(mix_list, number_of_channels, sample_rate, number_of_streams, audio_frame_for_mixing); @@ -212,32 +210,4 @@ void FrameCombiner::Combine(rtc::ArrayView mix_list, InterleaveToAudioFrame(mixing_buffer_view, audio_frame_for_mixing); } -void FrameCombiner::LogMixingStats( - rtc::ArrayView mix_list, - int sample_rate, - size_t number_of_streams) const { - // Log every second. 
- uma_logging_counter_++; - if (uma_logging_counter_ > 1000 / AudioMixerImpl::kFrameDurationInMs) { - uma_logging_counter_ = 0; - RTC_HISTOGRAM_COUNTS_100("WebRTC.Audio.AudioMixer.NumIncomingStreams", - static_cast(number_of_streams)); - RTC_HISTOGRAM_COUNTS_LINEAR( - "WebRTC.Audio.AudioMixer.NumIncomingActiveStreams2", - rtc::dchecked_cast(mix_list.size()), /*min=*/1, /*max=*/16, - /*bucket_count=*/16); - - using NativeRate = AudioProcessing::NativeRate; - static constexpr NativeRate native_rates[] = { - NativeRate::kSampleRate8kHz, NativeRate::kSampleRate16kHz, - NativeRate::kSampleRate32kHz, NativeRate::kSampleRate48kHz}; - const auto* rate_position = std::lower_bound( - std::begin(native_rates), std::end(native_rates), sample_rate); - RTC_HISTOGRAM_ENUMERATION( - "WebRTC.Audio.AudioMixer.MixingRate", - std::distance(std::begin(native_rates), rate_position), - arraysize(native_rates)); - } -} - } // namespace webrtc diff --git a/third_party/libwebrtc/modules/audio_mixer/frame_combiner.h b/third_party/libwebrtc/modules/audio_mixer/frame_combiner.h index 9ddf81e41e4e..4c858e1d999b 100644 --- a/third_party/libwebrtc/modules/audio_mixer/frame_combiner.h +++ b/third_party/libwebrtc/modules/audio_mixer/frame_combiner.h @@ -47,15 +47,10 @@ class FrameCombiner { kMaximumNumberOfChannels>; private: - void LogMixingStats(rtc::ArrayView mix_list, - int sample_rate, - size_t number_of_streams) const; - std::unique_ptr data_dumper_; std::unique_ptr mixing_buffer_; Limiter limiter_; const bool use_limiter_; - mutable int uma_logging_counter_ = 0; }; } // namespace webrtc diff --git a/third_party/libwebrtc/modules/audio_processing/BUILD.gn b/third_party/libwebrtc/modules/audio_processing/BUILD.gn index 8edf6fe3c53b..3e6b201ab62a 100644 --- a/third_party/libwebrtc/modules/audio_processing/BUILD.gn +++ b/third_party/libwebrtc/modules/audio_processing/BUILD.gn @@ -138,11 +138,15 @@ rtc_library("gain_controller2") { "../../rtc_base:logging", "../../rtc_base:stringutils", "../../system_wrappers:field_trial", - "agc2:adaptive_digital", + "agc2:adaptive_digital_gain_controller", + "agc2:common", "agc2:cpu_features", "agc2:fixed_digital", "agc2:gain_applier", "agc2:input_volume_controller", + "agc2:noise_level_estimator", + "agc2:saturation_protector", + "agc2:speech_level_estimator", "agc2:vad_wrapper", ] } @@ -193,6 +197,7 @@ rtc_library("audio_processing") { "../../rtc_base:sanitizer", "../../rtc_base:swap_queue", "../../rtc_base:timeutils", + "../../rtc_base/experiments:field_trial_parser", "../../rtc_base/synchronization:mutex", "../../rtc_base/system:rtc_export", "../../system_wrappers", @@ -416,12 +421,14 @@ if (rtc_include_tests) { "../audio_coding:neteq_input_audio_tools", "aec_dump:mock_aec_dump_unittests", "agc:agc_unittests", - "agc2:adaptive_digital_unittests", + "agc2:adaptive_digital_gain_controller_unittest", "agc2:biquad_filter_unittests", "agc2:fixed_digital_unittests", + "agc2:gain_applier_unittest", "agc2:input_volume_controller_unittests", "agc2:input_volume_stats_reporter_unittests", "agc2:noise_estimator_unittests", + "agc2:saturation_protector_unittest", "agc2:speech_level_estimator_unittest", "agc2:test_utils", "agc2:vad_wrapper_unittests", diff --git a/third_party/libwebrtc/modules/audio_processing/OWNERS b/third_party/libwebrtc/modules/audio_processing/OWNERS index ca9bc46323cc..f5dc59ea352f 100644 --- a/third_party/libwebrtc/modules/audio_processing/OWNERS +++ b/third_party/libwebrtc/modules/audio_processing/OWNERS @@ -1,8 +1,8 @@ -aleloi@webrtc.org alessiob@webrtc.org 
+devicentepena@webrtc.org gustaf@webrtc.org henrik.lundin@webrtc.org ivoc@webrtc.org -minyue@webrtc.org +lionelk@webrtc.org peah@webrtc.org saza@webrtc.org diff --git a/third_party/libwebrtc/modules/audio_processing/agc/BUILD.gn b/third_party/libwebrtc/modules/audio_processing/agc/BUILD.gn index cd41461abba3..75bef1450f68 100644 --- a/third_party/libwebrtc/modules/audio_processing/agc/BUILD.gn +++ b/third_party/libwebrtc/modules/audio_processing/agc/BUILD.gn @@ -36,6 +36,7 @@ rtc_library("agc") { "../../../system_wrappers:metrics", "../agc2:clipping_predictor", "../agc2:gain_map", + "../agc2:input_volume_stats_reporter", "../vad", ] absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] diff --git a/third_party/libwebrtc/modules/audio_processing/agc/agc_manager_direct.cc b/third_party/libwebrtc/modules/audio_processing/agc/agc_manager_direct.cc index acff3e8d351c..b8ad4a8bb99e 100644 --- a/third_party/libwebrtc/modules/audio_processing/agc/agc_manager_direct.cc +++ b/third_party/libwebrtc/modules/audio_processing/agc/agc_manager_direct.cc @@ -17,6 +17,7 @@ #include "common_audio/include/audio_util.h" #include "modules/audio_processing/agc/gain_control.h" #include "modules/audio_processing/agc2/gain_map_internal.h" +#include "modules/audio_processing/agc2/input_volume_stats_reporter.h" #include "modules/audio_processing/include/audio_frame_view.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -407,22 +408,12 @@ void MonoAgc::UpdateGain(int rms_error_db) { int old_level = level_; SetLevel(LevelFromGainError(residual_gain, level_, min_mic_level_)); if (old_level != level_) { - // level_ was updated by SetLevel; log the new value. - RTC_HISTOGRAM_COUNTS_LINEAR("WebRTC.Audio.AgcSetLevel", level_, 1, - kMaxMicLevel, 50); // Reset the AGC since the level has changed. agc_->Reset(); } } void MonoAgc::UpdateCompressor() { - calls_since_last_gain_log_++; - if (calls_since_last_gain_log_ == 100) { - calls_since_last_gain_log_ = 0; - RTC_HISTOGRAM_COUNTS_LINEAR("WebRTC.Audio.Agc.DigitalGainApplied", - compression_, 0, kMaxCompressionGain, - kMaxCompressionGain + 1); - } if (compression_ == target_compression_) { return; } @@ -447,9 +438,6 @@ void MonoAgc::UpdateCompressor() { // Set the new compression gain. if (new_compression != compression_) { - RTC_HISTOGRAM_COUNTS_LINEAR("WebRTC.Audio.Agc.DigitalGainUpdated", - new_compression, 0, kMaxCompressionGain, - kMaxCompressionGain + 1); compression_ = new_compression; compression_accumulator_ = new_compression; new_compression_to_set_ = compression_; @@ -637,6 +625,7 @@ void AgcManagerDirect::Process(const AudioBuffer& audio_buffer, absl::optional speech_probability, absl::optional speech_level_dbfs) { AggregateChannelLevels(); + const int volume_after_clipping_handling = recommended_input_volume_; if (!capture_output_used_) { return; @@ -659,6 +648,12 @@ void AgcManagerDirect::Process(const AudioBuffer& audio_buffer, } AggregateChannelLevels(); + if (volume_after_clipping_handling != recommended_input_volume_) { + // The recommended input volume was adjusted in order to match the target + // level. 
+ UpdateHistogramOnRecommendedInputVolumeChangeToMatchTarget( + recommended_input_volume_); + } } absl::optional AgcManagerDirect::GetDigitalComressionGain() { diff --git a/third_party/libwebrtc/modules/audio_processing/agc/agc_manager_direct.h b/third_party/libwebrtc/modules/audio_processing/agc/agc_manager_direct.h index d1314c66bd32..adb2f5a63fe0 100644 --- a/third_party/libwebrtc/modules/audio_processing/agc/agc_manager_direct.h +++ b/third_party/libwebrtc/modules/audio_processing/agc/agc_manager_direct.h @@ -254,7 +254,6 @@ class MonoAgc { bool capture_output_used_ = true; bool check_volume_on_next_process_ = true; bool startup_ = true; - int calls_since_last_gain_log_ = 0; // TODO(bugs.webrtc.org/7494): Create a separate member for the applied // input volume. diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/BUILD.gn b/third_party/libwebrtc/modules/audio_processing/agc2/BUILD.gn index cdea022ea8df..bd59ad3dae63 100644 --- a/third_party/libwebrtc/modules/audio_processing/agc2/BUILD.gn +++ b/third_party/libwebrtc/modules/audio_processing/agc2/BUILD.gn @@ -8,13 +8,6 @@ import("../../../webrtc.gni") -group("agc2") { - deps = [ - ":adaptive_digital", - ":fixed_digital", - ] -} - rtc_library("speech_level_estimator") { sources = [ "speech_level_estimator.cc", @@ -39,12 +32,35 @@ rtc_library("speech_level_estimator") { ] } -rtc_library("adaptive_digital") { +rtc_library("adaptive_digital_gain_controller") { sources = [ - "adaptive_digital_gain_applier.cc", - "adaptive_digital_gain_applier.h", "adaptive_digital_gain_controller.cc", "adaptive_digital_gain_controller.h", + ] + + visibility = [ + "..:gain_controller2", + "./*", + ] + + configs += [ "..:apm_debug_dump" ] + + deps = [ + ":common", + ":gain_applier", + "..:api", + "..:apm_logging", + "..:audio_frame_view", + "../../../common_audio", + "../../../rtc_base:checks", + "../../../rtc_base:logging", + "../../../rtc_base:safe_minmax", + "../../../system_wrappers:metrics", + ] +} + +rtc_library("saturation_protector") { + sources = [ "saturation_protector.cc", "saturation_protector.h", "saturation_protector_buffer.cc", @@ -60,19 +76,10 @@ rtc_library("adaptive_digital") { deps = [ ":common", - ":gain_applier", - ":noise_level_estimator", - ":speech_level_estimator", - "..:api", "..:apm_logging", - "..:audio_frame_view", - "../../../api:array_view", - "../../../common_audio", "../../../rtc_base:checks", - "../../../rtc_base:logging", "../../../rtc_base:safe_compare", "../../../rtc_base:safe_minmax", - "../../../system_wrappers:metrics", ] absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] @@ -201,6 +208,7 @@ rtc_library("input_volume_controller") { deps = [ ":clipping_predictor", ":gain_map", + ":input_volume_stats_reporter", "..:api", "..:audio_buffer", "..:audio_frame_view", @@ -223,7 +231,6 @@ rtc_library("noise_level_estimator") { "noise_level_estimator.cc", "noise_level_estimator.h", ] - visibility = [ "./*" ] deps = [ ":biquad_filter", "..:apm_logging", @@ -233,6 +240,11 @@ rtc_library("noise_level_estimator") { "../../../system_wrappers", ] + visibility = [ + "..:gain_controller2", + "./*", + ] + configs += [ "..:apm_debug_dump" ] } @@ -298,27 +310,51 @@ rtc_library("speech_level_estimator_unittest") { ] } -rtc_library("adaptive_digital_unittests") { +rtc_library("adaptive_digital_gain_controller_unittest") { testonly = true configs += [ "..:apm_debug_dump" ] - sources = [ - "adaptive_digital_gain_applier_unittest.cc", - "gain_applier_unittest.cc", - "saturation_protector_buffer_unittest.cc", - 
"saturation_protector_unittest.cc", - ] + sources = [ "adaptive_digital_gain_controller_unittest.cc" ] + deps = [ - ":adaptive_digital", + ":adaptive_digital_gain_controller", ":common", - ":gain_applier", ":test_utils", "..:api", "..:apm_logging", "..:audio_frame_view", - "../../../api:array_view", "../../../common_audio", - "../../../rtc_base:checks", + "../../../rtc_base:gunit_helpers", + "../../../test:test_support", + ] +} + +rtc_library("gain_applier_unittest") { + testonly = true + configs += [ "..:apm_debug_dump" ] + + sources = [ "gain_applier_unittest.cc" ] + deps = [ + ":gain_applier", + ":test_utils", + "..:audio_frame_view", + "../../../rtc_base:gunit_helpers", + "../../../test:test_support", + ] +} + +rtc_library("saturation_protector_unittest") { + testonly = true + configs += [ "..:apm_debug_dump" ] + + sources = [ + "saturation_protector_buffer_unittest.cc", + "saturation_protector_unittest.cc", + ] + deps = [ + ":common", + ":saturation_protector", + "..:apm_logging", "../../../rtc_base:gunit_helpers", "../../../test:test_support", ] diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/adaptive_digital_gain_applier.cc b/third_party/libwebrtc/modules/audio_processing/agc2/adaptive_digital_gain_applier.cc deleted file mode 100644 index a34f598874d5..000000000000 --- a/third_party/libwebrtc/modules/audio_processing/agc2/adaptive_digital_gain_applier.cc +++ /dev/null @@ -1,267 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_processing/agc2/adaptive_digital_gain_applier.h" - -#include - -#include "common_audio/include/audio_util.h" -#include "modules/audio_processing/agc2/agc2_common.h" -#include "modules/audio_processing/logging/apm_data_dumper.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" -#include "rtc_base/numerics/safe_minmax.h" -#include "system_wrappers/include/metrics.h" - -namespace webrtc { -namespace { - -using AdaptiveDigitalConfig = - AudioProcessing::Config::GainController2::AdaptiveDigital; - -constexpr int kHeadroomHistogramMin = 0; -constexpr int kHeadroomHistogramMax = 50; -constexpr int kGainDbHistogramMax = 30; - -// Computes the gain for `input_level_dbfs` to reach `-config.headroom_db`. -// Clamps the gain in [0, `config.max_gain_db`]. `config.headroom_db` is a -// safety margin to allow transient peaks to exceed the target peak level -// without clipping. -float ComputeGainDb(float input_level_dbfs, - const AdaptiveDigitalConfig& config) { - // If the level is very low, apply the maximum gain. - if (input_level_dbfs < -(config.headroom_db + config.max_gain_db)) { - return config.max_gain_db; - } - // We expect to end up here most of the time: the level is below - // -headroom, but we can boost it to -headroom. - if (input_level_dbfs < -config.headroom_db) { - return -config.headroom_db - input_level_dbfs; - } - // The level is too high and we can't boost. - RTC_DCHECK_GE(input_level_dbfs, -config.headroom_db); - return 0.0f; -} - -// Returns `target_gain_db` if applying such a gain to `input_noise_level_dbfs` -// does not exceed `max_output_noise_level_dbfs`. 
Otherwise lowers and returns -// `target_gain_db` so that the output noise level equals -// `max_output_noise_level_dbfs`. -float LimitGainByNoise(float target_gain_db, - float input_noise_level_dbfs, - float max_output_noise_level_dbfs, - ApmDataDumper& apm_data_dumper) { - const float max_allowed_gain_db = - max_output_noise_level_dbfs - input_noise_level_dbfs; - apm_data_dumper.DumpRaw("agc2_adaptive_gain_applier_max_allowed_gain_db", - max_allowed_gain_db); - return std::min(target_gain_db, std::max(max_allowed_gain_db, 0.0f)); -} - -float LimitGainByLowConfidence(float target_gain_db, - float last_gain_db, - float limiter_audio_level_dbfs, - bool estimate_is_confident) { - if (estimate_is_confident || - limiter_audio_level_dbfs <= kLimiterThresholdForAgcGainDbfs) { - return target_gain_db; - } - const float limiter_level_dbfs_before_gain = - limiter_audio_level_dbfs - last_gain_db; - - // Compute a new gain so that `limiter_level_dbfs_before_gain` + - // `new_target_gain_db` is not great than `kLimiterThresholdForAgcGainDbfs`. - const float new_target_gain_db = std::max( - kLimiterThresholdForAgcGainDbfs - limiter_level_dbfs_before_gain, 0.0f); - return std::min(new_target_gain_db, target_gain_db); -} - -// Computes how the gain should change during this frame. -// Return the gain difference in db to 'last_gain_db'. -float ComputeGainChangeThisFrameDb(float target_gain_db, - float last_gain_db, - bool gain_increase_allowed, - float max_gain_decrease_db, - float max_gain_increase_db) { - RTC_DCHECK_GT(max_gain_decrease_db, 0); - RTC_DCHECK_GT(max_gain_increase_db, 0); - float target_gain_difference_db = target_gain_db - last_gain_db; - if (!gain_increase_allowed) { - target_gain_difference_db = std::min(target_gain_difference_db, 0.0f); - } - return rtc::SafeClamp(target_gain_difference_db, -max_gain_decrease_db, - max_gain_increase_db); -} - -// Copies the (multichannel) audio samples from `src` into `dst`. 
-void CopyAudio(AudioFrameView src, - std::vector>& dst) { - RTC_DCHECK_GT(src.num_channels(), 0); - RTC_DCHECK_GT(src.samples_per_channel(), 0); - RTC_DCHECK_EQ(dst.size(), src.num_channels()); - for (int c = 0; c < src.num_channels(); ++c) { - rtc::ArrayView channel_view = src.channel(c); - RTC_DCHECK_EQ(channel_view.size(), src.samples_per_channel()); - RTC_DCHECK_EQ(dst[c].size(), src.samples_per_channel()); - std::copy(channel_view.begin(), channel_view.end(), dst[c].begin()); - } -} - -} // namespace - -AdaptiveDigitalGainApplier::AdaptiveDigitalGainApplier( - ApmDataDumper* apm_data_dumper, - const AudioProcessing::Config::GainController2::AdaptiveDigital& config, - int sample_rate_hz, - int num_channels) - : apm_data_dumper_(apm_data_dumper), - gain_applier_( - /*hard_clip_samples=*/false, - /*initial_gain_factor=*/DbToRatio(config.initial_gain_db)), - config_(config), - max_gain_change_db_per_10ms_(config_.max_gain_change_db_per_second * - kFrameDurationMs / 1000.0f), - calls_since_last_gain_log_(0), - frames_to_gain_increase_allowed_( - config_.adjacent_speech_frames_threshold), - last_gain_db_(config_.initial_gain_db) { - RTC_DCHECK_GT(max_gain_change_db_per_10ms_, 0.0f); - RTC_DCHECK_GE(frames_to_gain_increase_allowed_, 1); - RTC_DCHECK_GE(config_.max_output_noise_level_dbfs, -90.0f); - RTC_DCHECK_LE(config_.max_output_noise_level_dbfs, 0.0f); - Initialize(sample_rate_hz, num_channels); -} - -void AdaptiveDigitalGainApplier::Initialize(int sample_rate_hz, - int num_channels) { - if (!config_.dry_run) { - return; - } - RTC_DCHECK_GT(sample_rate_hz, 0); - RTC_DCHECK_GT(num_channels, 0); - int frame_size = rtc::CheckedDivExact(sample_rate_hz, 100); - bool sample_rate_changed = - dry_run_frame_.empty() || // Handle initialization. - dry_run_frame_[0].size() != static_cast(frame_size); - bool num_channels_changed = - dry_run_channels_.size() != static_cast(num_channels); - if (sample_rate_changed || num_channels_changed) { - // Resize the multichannel audio vector and update the channel pointers. - dry_run_frame_.resize(num_channels); - dry_run_channels_.resize(num_channels); - for (int c = 0; c < num_channels; ++c) { - dry_run_frame_[c].resize(frame_size); - dry_run_channels_[c] = dry_run_frame_[c].data(); - } - } -} - -void AdaptiveDigitalGainApplier::Process(const FrameInfo& info, - AudioFrameView frame) { - RTC_DCHECK_GE(info.speech_level_dbfs, -150.0f); - RTC_DCHECK_GE(frame.num_channels(), 1); - RTC_DCHECK( - frame.samples_per_channel() == 80 || frame.samples_per_channel() == 160 || - frame.samples_per_channel() == 320 || frame.samples_per_channel() == 480) - << "`frame` does not look like a 10 ms frame for an APM supported sample " - "rate"; - - // Compute the input level used to select the desired gain. - RTC_DCHECK_GT(info.headroom_db, 0.0f); - const float input_level_dbfs = info.speech_level_dbfs + info.headroom_db; - - const float target_gain_db = LimitGainByLowConfidence( - LimitGainByNoise(ComputeGainDb(input_level_dbfs, config_), - info.noise_rms_dbfs, config_.max_output_noise_level_dbfs, - *apm_data_dumper_), - last_gain_db_, info.limiter_envelope_dbfs, info.speech_level_reliable); - - // Forbid increasing the gain until enough adjacent speech frames are - // observed. 
- bool first_confident_speech_frame = false; - if (info.speech_probability < kVadConfidenceThreshold) { - frames_to_gain_increase_allowed_ = config_.adjacent_speech_frames_threshold; - } else if (frames_to_gain_increase_allowed_ > 0) { - frames_to_gain_increase_allowed_--; - first_confident_speech_frame = frames_to_gain_increase_allowed_ == 0; - } - apm_data_dumper_->DumpRaw( - "agc2_adaptive_gain_applier_frames_to_gain_increase_allowed", - frames_to_gain_increase_allowed_); - - const bool gain_increase_allowed = frames_to_gain_increase_allowed_ == 0; - - float max_gain_increase_db = max_gain_change_db_per_10ms_; - if (first_confident_speech_frame) { - // No gain increase happened while waiting for a long enough speech - // sequence. Therefore, temporarily allow a faster gain increase. - RTC_DCHECK(gain_increase_allowed); - max_gain_increase_db *= config_.adjacent_speech_frames_threshold; - } - - const float gain_change_this_frame_db = ComputeGainChangeThisFrameDb( - target_gain_db, last_gain_db_, gain_increase_allowed, - /*max_gain_decrease_db=*/max_gain_change_db_per_10ms_, - max_gain_increase_db); - - apm_data_dumper_->DumpRaw("agc2_adaptive_gain_applier_want_to_change_by_db", - target_gain_db - last_gain_db_); - apm_data_dumper_->DumpRaw("agc2_adaptive_gain_applier_will_change_by_db", - gain_change_this_frame_db); - - // Optimization: avoid calling math functions if gain does not - // change. - if (gain_change_this_frame_db != 0.f) { - gain_applier_.SetGainFactor( - DbToRatio(last_gain_db_ + gain_change_this_frame_db)); - } - - // Modify `frame` only if not running in "dry run" mode. - if (!config_.dry_run) { - gain_applier_.ApplyGain(frame); - } else { - // Copy `frame` so that `ApplyGain()` is called (on a copy). - CopyAudio(frame, dry_run_frame_); - RTC_DCHECK(!dry_run_channels_.empty()); - AudioFrameView frame_copy(&dry_run_channels_[0], - frame.num_channels(), - frame.samples_per_channel()); - gain_applier_.ApplyGain(frame_copy); - } - - // Remember that the gain has changed for the next iteration. - last_gain_db_ = last_gain_db_ + gain_change_this_frame_db; - apm_data_dumper_->DumpRaw("agc2_adaptive_gain_applier_applied_gain_db", - last_gain_db_); - - // Log every 10 seconds. - calls_since_last_gain_log_++; - if (calls_since_last_gain_log_ == 1000) { - calls_since_last_gain_log_ = 0; - RTC_HISTOGRAM_COUNTS_LINEAR("WebRTC.Audio.Agc2.EstimatedSpeechLevel", - -info.speech_level_dbfs, 0, 100, 101); - RTC_HISTOGRAM_COUNTS_LINEAR("WebRTC.Audio.Agc2.EstimatedNoiseLevel", - -info.noise_rms_dbfs, 0, 100, 101); - RTC_HISTOGRAM_COUNTS_LINEAR( - "WebRTC.Audio.Agc2.Headroom", info.headroom_db, kHeadroomHistogramMin, - kHeadroomHistogramMax, - kHeadroomHistogramMax - kHeadroomHistogramMin + 1); - RTC_HISTOGRAM_COUNTS_LINEAR("WebRTC.Audio.Agc2.DigitalGainApplied", - last_gain_db_, 0, kGainDbHistogramMax, - kGainDbHistogramMax + 1); - RTC_LOG(LS_INFO) << "AGC2 adaptive digital" - << " | speech_dbfs: " << info.speech_level_dbfs - << " | noise_dbfs: " << info.noise_rms_dbfs - << " | headroom_db: " << info.headroom_db - << " | gain_db: " << last_gain_db_; - } -} - -} // namespace webrtc diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/adaptive_digital_gain_applier.h b/third_party/libwebrtc/modules/audio_processing/agc2/adaptive_digital_gain_applier.h deleted file mode 100644 index dc84c1e23885..000000000000 --- a/third_party/libwebrtc/modules/audio_processing/agc2/adaptive_digital_gain_applier.h +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. 
All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_PROCESSING_AGC2_ADAPTIVE_DIGITAL_GAIN_APPLIER_H_ -#define MODULES_AUDIO_PROCESSING_AGC2_ADAPTIVE_DIGITAL_GAIN_APPLIER_H_ - -#include - -#include "modules/audio_processing/agc2/gain_applier.h" -#include "modules/audio_processing/include/audio_frame_view.h" -#include "modules/audio_processing/include/audio_processing.h" - -namespace webrtc { - -class ApmDataDumper; - -// TODO(bugs.webrtc.org/7494): Split into `GainAdaptor` and `GainApplier`. -// Selects the target digital gain, decides when and how quickly to adapt to the -// target and applies the current gain to 10 ms frames. -class AdaptiveDigitalGainApplier { - public: - // Information about a frame to process. - struct FrameInfo { - float speech_probability; // Probability of speech in the [0, 1] range. - float speech_level_dbfs; // Estimated speech level (dBFS). - bool speech_level_reliable; // True with reliable speech level estimation. - float noise_rms_dbfs; // Estimated noise RMS level (dBFS). - float headroom_db; // Headroom (dB). - float limiter_envelope_dbfs; // Envelope level from the limiter (dBFS). - }; - - AdaptiveDigitalGainApplier( - ApmDataDumper* apm_data_dumper, - const AudioProcessing::Config::GainController2::AdaptiveDigital& config, - int sample_rate_hz, - int num_channels); - AdaptiveDigitalGainApplier(const AdaptiveDigitalGainApplier&) = delete; - AdaptiveDigitalGainApplier& operator=(const AdaptiveDigitalGainApplier&) = - delete; - - void Initialize(int sample_rate_hz, int num_channels); - - // Analyzes `info`, updates the digital gain and applies it to a 10 ms - // `frame`. Supports any sample rate supported by APM. - void Process(const FrameInfo& info, AudioFrameView frame); - - private: - ApmDataDumper* const apm_data_dumper_; - GainApplier gain_applier_; - - const AudioProcessing::Config::GainController2::AdaptiveDigital config_; - const float max_gain_change_db_per_10ms_; - - int calls_since_last_gain_log_; - int frames_to_gain_increase_allowed_; - float last_gain_db_; - - std::vector> dry_run_frame_; - std::vector dry_run_channels_; -}; - -} // namespace webrtc - -#endif // MODULES_AUDIO_PROCESSING_AGC2_ADAPTIVE_DIGITAL_GAIN_APPLIER_H_ diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/adaptive_digital_gain_controller.cc b/third_party/libwebrtc/modules/audio_processing/agc2/adaptive_digital_gain_controller.cc index c396ee044a00..e8edab602cc7 100644 --- a/third_party/libwebrtc/modules/audio_processing/agc2/adaptive_digital_gain_controller.cc +++ b/third_party/libwebrtc/modules/audio_processing/agc2/adaptive_digital_gain_controller.cc @@ -13,29 +13,91 @@ #include #include "common_audio/include/audio_util.h" +#include "modules/audio_processing/agc2/agc2_common.h" #include "modules/audio_processing/logging/apm_data_dumper.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "rtc_base/numerics/safe_minmax.h" +#include "system_wrappers/include/metrics.h" namespace webrtc { namespace { -// Peak and RMS audio levels in dBFS. 
-struct AudioLevels { - float peak_dbfs; - float rms_dbfs; -}; +using AdaptiveDigitalConfig = + AudioProcessing::Config::GainController2::AdaptiveDigital; -// Computes the audio levels for the first channel in `frame`. -AudioLevels ComputeAudioLevels(AudioFrameView frame) { - float peak = 0.0f; - float rms = 0.0f; - for (const auto& x : frame.channel(0)) { - peak = std::max(std::fabs(x), peak); - rms += x * x; +constexpr int kHeadroomHistogramMin = 0; +constexpr int kHeadroomHistogramMax = 50; +constexpr int kGainDbHistogramMax = 30; + +// Computes the gain for `input_level_dbfs` to reach `-config.headroom_db`. +// Clamps the gain in [0, `config.max_gain_db`]. `config.headroom_db` is a +// safety margin to allow transient peaks to exceed the target peak level +// without clipping. +float ComputeGainDb(float input_level_dbfs, + const AdaptiveDigitalConfig& config) { + // If the level is very low, apply the maximum gain. + if (input_level_dbfs < -(config.headroom_db + config.max_gain_db)) { + return config.max_gain_db; } - return {FloatS16ToDbfs(peak), - FloatS16ToDbfs(std::sqrt(rms / frame.samples_per_channel()))}; + // We expect to end up here most of the time: the level is below + // -headroom, but we can boost it to -headroom. + if (input_level_dbfs < -config.headroom_db) { + return -config.headroom_db - input_level_dbfs; + } + // The level is too high and we can't boost. + RTC_DCHECK_GE(input_level_dbfs, -config.headroom_db); + return 0.0f; +} + +// Returns `target_gain_db` if applying such a gain to `input_noise_level_dbfs` +// does not exceed `max_output_noise_level_dbfs`. Otherwise lowers and returns +// `target_gain_db` so that the output noise level equals +// `max_output_noise_level_dbfs`. +float LimitGainByNoise(float target_gain_db, + float input_noise_level_dbfs, + float max_output_noise_level_dbfs, + ApmDataDumper& apm_data_dumper) { + const float max_allowed_gain_db = + max_output_noise_level_dbfs - input_noise_level_dbfs; + apm_data_dumper.DumpRaw("agc2_adaptive_gain_applier_max_allowed_gain_db", + max_allowed_gain_db); + return std::min(target_gain_db, std::max(max_allowed_gain_db, 0.0f)); +} + +float LimitGainByLowConfidence(float target_gain_db, + float last_gain_db, + float limiter_audio_level_dbfs, + bool estimate_is_confident) { + if (estimate_is_confident || + limiter_audio_level_dbfs <= kLimiterThresholdForAgcGainDbfs) { + return target_gain_db; + } + const float limiter_level_dbfs_before_gain = + limiter_audio_level_dbfs - last_gain_db; + + // Compute a new gain so that `limiter_level_dbfs_before_gain` + + // `new_target_gain_db` is not great than `kLimiterThresholdForAgcGainDbfs`. + const float new_target_gain_db = std::max( + kLimiterThresholdForAgcGainDbfs - limiter_level_dbfs_before_gain, 0.0f); + return std::min(new_target_gain_db, target_gain_db); +} + +// Computes how the gain should change during this frame. +// Return the gain difference in db to 'last_gain_db'. 
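For orientation, the ComputeGainDb helper added above is piecewise in the input level: maximum gain for very quiet speech, a linear boost up to -headroom_db, and no boost beyond that. The standalone sketch below reproduces those three regions; the headroom_db = 6 and max_gain_db = 30 values are illustrative assumptions, not values read from the APM config defaults.

// Sketch of the three ComputeGainDb regions, with assumed headroom_db = 6 dB
// and max_gain_db = 30 dB (illustrative only).
#include <cstdio>

float ComputeGainDbSketch(float input_level_dbfs, float headroom_db,
                          float max_gain_db) {
  // Very quiet input: apply the maximum gain.
  if (input_level_dbfs < -(headroom_db + max_gain_db)) return max_gain_db;
  // Usual case: boost the level up to -headroom_db.
  if (input_level_dbfs < -headroom_db) return -headroom_db - input_level_dbfs;
  // Already at or above -headroom_db: no boost.
  return 0.0f;
}

int main() {
  std::printf("%.1f\n", ComputeGainDbSketch(-50.0f, 6.0f, 30.0f));  // 30.0
  std::printf("%.1f\n", ComputeGainDbSketch(-20.0f, 6.0f, 30.0f));  // 14.0
  std::printf("%.1f\n", ComputeGainDbSketch(-3.0f, 6.0f, 30.0f));   //  0.0
}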
+float ComputeGainChangeThisFrameDb(float target_gain_db, + float last_gain_db, + bool gain_increase_allowed, + float max_gain_decrease_db, + float max_gain_increase_db) { + RTC_DCHECK_GT(max_gain_decrease_db, 0); + RTC_DCHECK_GT(max_gain_increase_db, 0); + float target_gain_difference_db = target_gain_db - last_gain_db; + if (!gain_increase_allowed) { + target_gain_difference_db = std::min(target_gain_difference_db, 0.0f); + } + return rtc::SafeClamp(target_gain_difference_db, -max_gain_decrease_db, + max_gain_increase_db); } } // namespace @@ -43,72 +105,112 @@ AudioLevels ComputeAudioLevels(AudioFrameView frame) { AdaptiveDigitalGainController::AdaptiveDigitalGainController( ApmDataDumper* apm_data_dumper, const AudioProcessing::Config::GainController2::AdaptiveDigital& config, - int sample_rate_hz, - int num_channels) - : speech_level_estimator_(apm_data_dumper, config), - gain_controller_(apm_data_dumper, config, sample_rate_hz, num_channels), - apm_data_dumper_(apm_data_dumper), - noise_level_estimator_(CreateNoiseFloorEstimator(apm_data_dumper)), - saturation_protector_( - CreateSaturationProtector(kSaturationProtectorInitialHeadroomDb, - config.adjacent_speech_frames_threshold, - apm_data_dumper)) { - RTC_DCHECK(apm_data_dumper); - RTC_DCHECK(noise_level_estimator_); - RTC_DCHECK(saturation_protector_); + int adjacent_speech_frames_threshold) + : apm_data_dumper_(apm_data_dumper), + gain_applier_( + /*hard_clip_samples=*/false, + /*initial_gain_factor=*/DbToRatio(config.initial_gain_db)), + config_(config), + adjacent_speech_frames_threshold_(adjacent_speech_frames_threshold), + max_gain_change_db_per_10ms_(config_.max_gain_change_db_per_second * + kFrameDurationMs / 1000.0f), + calls_since_last_gain_log_(0), + frames_to_gain_increase_allowed_(adjacent_speech_frames_threshold), + last_gain_db_(config_.initial_gain_db) { + RTC_DCHECK_GT(max_gain_change_db_per_10ms_, 0.0f); + RTC_DCHECK_GE(frames_to_gain_increase_allowed_, 1); + RTC_DCHECK_GE(config_.max_output_noise_level_dbfs, -90.0f); + RTC_DCHECK_LE(config_.max_output_noise_level_dbfs, 0.0f); } -AdaptiveDigitalGainController::~AdaptiveDigitalGainController() = default; +void AdaptiveDigitalGainController::Process(const FrameInfo& info, + AudioFrameView frame) { + RTC_DCHECK_GE(info.speech_level_dbfs, -150.0f); + RTC_DCHECK_GE(frame.num_channels(), 1); + RTC_DCHECK( + frame.samples_per_channel() == 80 || frame.samples_per_channel() == 160 || + frame.samples_per_channel() == 320 || frame.samples_per_channel() == 480) + << "`frame` does not look like a 10 ms frame for an APM supported sample " + "rate"; -void AdaptiveDigitalGainController::Initialize(int sample_rate_hz, - int num_channels) { - gain_controller_.Initialize(sample_rate_hz, num_channels); -} + // Compute the input level used to select the desired gain. 
+ RTC_DCHECK_GT(info.headroom_db, 0.0f); + const float input_level_dbfs = info.speech_level_dbfs + info.headroom_db; -void AdaptiveDigitalGainController::Process(AudioFrameView frame, - float speech_probability, - float limiter_envelope) { - AudioLevels levels = ComputeAudioLevels(frame); - apm_data_dumper_->DumpRaw("agc2_input_rms_dbfs", levels.rms_dbfs); - apm_data_dumper_->DumpRaw("agc2_input_peak_dbfs", levels.peak_dbfs); + const float target_gain_db = LimitGainByLowConfidence( + LimitGainByNoise(ComputeGainDb(input_level_dbfs, config_), + info.noise_rms_dbfs, config_.max_output_noise_level_dbfs, + *apm_data_dumper_), + last_gain_db_, info.limiter_envelope_dbfs, info.speech_level_reliable); - AdaptiveDigitalGainApplier::FrameInfo info; + // Forbid increasing the gain until enough adjacent speech frames are + // observed. + bool first_confident_speech_frame = false; + if (info.speech_probability < kVadConfidenceThreshold) { + frames_to_gain_increase_allowed_ = adjacent_speech_frames_threshold_; + } else if (frames_to_gain_increase_allowed_ > 0) { + frames_to_gain_increase_allowed_--; + first_confident_speech_frame = frames_to_gain_increase_allowed_ == 0; + } + apm_data_dumper_->DumpRaw( + "agc2_adaptive_gain_applier_frames_to_gain_increase_allowed", + frames_to_gain_increase_allowed_); - info.speech_probability = speech_probability; + const bool gain_increase_allowed = frames_to_gain_increase_allowed_ == 0; - speech_level_estimator_.Update(levels.rms_dbfs, levels.peak_dbfs, - info.speech_probability); - info.speech_level_dbfs = speech_level_estimator_.level_dbfs(); - info.speech_level_reliable = speech_level_estimator_.IsConfident(); - apm_data_dumper_->DumpRaw("agc2_speech_level_dbfs", info.speech_level_dbfs); - apm_data_dumper_->DumpRaw("agc2_speech_level_reliable", - info.speech_level_reliable); + float max_gain_increase_db = max_gain_change_db_per_10ms_; + if (first_confident_speech_frame) { + // No gain increase happened while waiting for a long enough speech + // sequence. Therefore, temporarily allow a faster gain increase. + RTC_DCHECK(gain_increase_allowed); + max_gain_increase_db *= adjacent_speech_frames_threshold_; + } - info.noise_rms_dbfs = noise_level_estimator_->Analyze(frame); - apm_data_dumper_->DumpRaw("agc2_noise_rms_dbfs", info.noise_rms_dbfs); + const float gain_change_this_frame_db = ComputeGainChangeThisFrameDb( + target_gain_db, last_gain_db_, gain_increase_allowed, + /*max_gain_decrease_db=*/max_gain_change_db_per_10ms_, + max_gain_increase_db); - saturation_protector_->Analyze(info.speech_probability, levels.peak_dbfs, - info.speech_level_dbfs); - info.headroom_db = saturation_protector_->HeadroomDb(); - apm_data_dumper_->DumpRaw("agc2_headroom_db", info.headroom_db); + apm_data_dumper_->DumpRaw("agc2_adaptive_gain_applier_want_to_change_by_db", + target_gain_db - last_gain_db_); + apm_data_dumper_->DumpRaw("agc2_adaptive_gain_applier_will_change_by_db", + gain_change_this_frame_db); - info.limiter_envelope_dbfs = FloatS16ToDbfs(limiter_envelope); - apm_data_dumper_->DumpRaw("agc2_limiter_envelope_dbfs", - info.limiter_envelope_dbfs); + // Optimization: avoid calling math functions if gain does not + // change. 
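To make the rate limiting above concrete, the sketch below replays the per-frame clamp together with the adjacent-speech-frames gate: the gain is frozen until enough consecutive speech frames arrive, and the first confident frame is granted a proportionally larger catch-up step. The 3 dB/s rate, 12-frame threshold, 8 dB starting gain and 14 dB target are illustrative assumptions, not values taken from this patch.

// Standalone sketch of the gain rate limiting and speech gating above,
// with assumed parameters (3 dB/s, 12-frame threshold, 10 ms frames).
#include <algorithm>
#include <cstdio>

int main() {
  const float max_change_per_10ms_db = 3.0f * 10.0f / 1000.0f;  // 0.03 dB.
  const int adjacent_speech_frames_threshold = 12;
  float last_gain_db = 8.0f;               // e.g. the initial gain.
  const float target_gain_db = 14.0f;      // Desired gain, far above current.
  int frames_to_gain_increase_allowed = adjacent_speech_frames_threshold;
  // Feed 12 consecutive high-probability speech frames.
  for (int frame = 0; frame < adjacent_speech_frames_threshold; ++frame) {
    bool first_confident_speech_frame = false;
    if (frames_to_gain_increase_allowed > 0) {
      --frames_to_gain_increase_allowed;
      first_confident_speech_frame = frames_to_gain_increase_allowed == 0;
    }
    const bool gain_increase_allowed = frames_to_gain_increase_allowed == 0;
    float max_increase_db = max_change_per_10ms_db;
    if (first_confident_speech_frame) {
      // Catch up: allow a step as large as the frames spent waiting.
      max_increase_db *= adjacent_speech_frames_threshold;
    }
    float diff_db = target_gain_db - last_gain_db;
    if (!gain_increase_allowed) diff_db = std::min(diff_db, 0.0f);
    diff_db = std::clamp(diff_db, -max_change_per_10ms_db, max_increase_db);
    last_gain_db += diff_db;
    std::printf("frame %2d  gain %.2f dB\n", frame, last_gain_db);
  }
  // Gain holds at 8.00 dB for the first 11 frames, then jumps to 8.36 dB
  // (0.03 dB * 12) on the first confident speech frame.
}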
+ if (gain_change_this_frame_db != 0.f) { + gain_applier_.SetGainFactor( + DbToRatio(last_gain_db_ + gain_change_this_frame_db)); + } - gain_controller_.Process(info, frame); -} + gain_applier_.ApplyGain(frame); -void AdaptiveDigitalGainController::HandleInputGainChange() { - speech_level_estimator_.Reset(); - saturation_protector_->Reset(); -} + // Remember that the gain has changed for the next iteration. + last_gain_db_ = last_gain_db_ + gain_change_this_frame_db; + apm_data_dumper_->DumpRaw("agc2_adaptive_gain_applier_applied_gain_db", + last_gain_db_); -absl::optional -AdaptiveDigitalGainController::GetSpeechLevelDbfsIfConfident() const { - return speech_level_estimator_.IsConfident() - ? absl::optional(speech_level_estimator_.level_dbfs()) - : absl::nullopt; + // Log every 10 seconds. + calls_since_last_gain_log_++; + if (calls_since_last_gain_log_ == 1000) { + calls_since_last_gain_log_ = 0; + RTC_HISTOGRAM_COUNTS_LINEAR("WebRTC.Audio.Agc2.EstimatedSpeechLevel", + -info.speech_level_dbfs, 0, 100, 101); + RTC_HISTOGRAM_COUNTS_LINEAR("WebRTC.Audio.Agc2.EstimatedNoiseLevel", + -info.noise_rms_dbfs, 0, 100, 101); + RTC_HISTOGRAM_COUNTS_LINEAR( + "WebRTC.Audio.Agc2.Headroom", info.headroom_db, kHeadroomHistogramMin, + kHeadroomHistogramMax, + kHeadroomHistogramMax - kHeadroomHistogramMin + 1); + RTC_HISTOGRAM_COUNTS_LINEAR("WebRTC.Audio.Agc2.DigitalGainApplied", + last_gain_db_, 0, kGainDbHistogramMax, + kGainDbHistogramMax + 1); + RTC_LOG(LS_INFO) << "AGC2 adaptive digital" + << " | speech_dbfs: " << info.speech_level_dbfs + << " | noise_dbfs: " << info.noise_rms_dbfs + << " | headroom_db: " << info.headroom_db + << " | gain_db: " << last_gain_db_; + } } } // namespace webrtc diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/adaptive_digital_gain_controller.h b/third_party/libwebrtc/modules/audio_processing/agc2/adaptive_digital_gain_controller.h index 78c508836b80..01335e79db20 100644 --- a/third_party/libwebrtc/modules/audio_processing/agc2/adaptive_digital_gain_controller.h +++ b/third_party/libwebrtc/modules/audio_processing/agc2/adaptive_digital_gain_controller.h @@ -11,56 +11,54 @@ #ifndef MODULES_AUDIO_PROCESSING_AGC2_ADAPTIVE_DIGITAL_GAIN_CONTROLLER_H_ #define MODULES_AUDIO_PROCESSING_AGC2_ADAPTIVE_DIGITAL_GAIN_CONTROLLER_H_ -#include +#include -#include "absl/types/optional.h" -#include "modules/audio_processing/agc2/adaptive_digital_gain_applier.h" -#include "modules/audio_processing/agc2/noise_level_estimator.h" -#include "modules/audio_processing/agc2/saturation_protector.h" -#include "modules/audio_processing/agc2/speech_level_estimator.h" +#include "modules/audio_processing/agc2/gain_applier.h" #include "modules/audio_processing/include/audio_frame_view.h" #include "modules/audio_processing/include/audio_processing.h" namespace webrtc { + class ApmDataDumper; -// Gain controller that adapts and applies a variable digital gain to meet the -// target level, which is determined by the given configuration. +// Selects the target digital gain, decides when and how quickly to adapt to the +// target and applies the current gain to 10 ms frames. class AdaptiveDigitalGainController { public: + // Information about a frame to process. + struct FrameInfo { + float speech_probability; // Probability of speech in the [0, 1] range. + float speech_level_dbfs; // Estimated speech level (dBFS). + bool speech_level_reliable; // True with reliable speech level estimation. + float noise_rms_dbfs; // Estimated noise RMS level (dBFS). + float headroom_db; // Headroom (dB). 
+ // TODO(bugs.webrtc.org/7494): Remove `limiter_envelope_dbfs`. + float limiter_envelope_dbfs; // Envelope level from the limiter (dBFS). + }; + AdaptiveDigitalGainController( ApmDataDumper* apm_data_dumper, const AudioProcessing::Config::GainController2::AdaptiveDigital& config, - int sample_rate_hz, - int num_channels); + int adjacent_speech_frames_threshold); AdaptiveDigitalGainController(const AdaptiveDigitalGainController&) = delete; AdaptiveDigitalGainController& operator=( const AdaptiveDigitalGainController&) = delete; - ~AdaptiveDigitalGainController(); - // Detects and handles changes of sample rate and or number of channels. - void Initialize(int sample_rate_hz, int num_channels); - - // Analyzes `frame`, adapts the current digital gain and applies it to - // `frame`. - // TODO(bugs.webrtc.org/7494): Remove `limiter_envelope`. - void Process(AudioFrameView frame, - float speech_probability, - float limiter_envelope); - - // Handles a gain change applied to the input signal (e.g., analog gain). - void HandleInputGainChange(); - - // Returns the most recent speech level (dBFs) if the estimator is confident. - // Otherwise returns absl::nullopt. - absl::optional GetSpeechLevelDbfsIfConfident() const; + // Analyzes `info`, updates the digital gain and applies it to a 10 ms + // `frame`. Supports any sample rate supported by APM. + void Process(const FrameInfo& info, AudioFrameView frame); private: - SpeechLevelEstimator speech_level_estimator_; - AdaptiveDigitalGainApplier gain_controller_; ApmDataDumper* const apm_data_dumper_; - std::unique_ptr noise_level_estimator_; - std::unique_ptr saturation_protector_; + GainApplier gain_applier_; + + const AudioProcessing::Config::GainController2::AdaptiveDigital config_; + const int adjacent_speech_frames_threshold_; + const float max_gain_change_db_per_10ms_; + + int calls_since_last_gain_log_; + int frames_to_gain_increase_allowed_; + float last_gain_db_; }; } // namespace webrtc diff --git a/third_party/libwebrtc/api/audio_codecs/isac/audio_decoder_isac_float_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/agc2/adaptive_digital_gain_controller_gn/moz.build similarity index 89% rename from third_party/libwebrtc/api/audio_codecs/isac/audio_decoder_isac_float_gn/moz.build rename to third_party/libwebrtc/modules/audio_processing/agc2/adaptive_digital_gain_controller_gn/moz.build index 09b0eb1ab0d1..b19c3f9e3e74 100644 --- a/third_party/libwebrtc/api/audio_codecs/isac/audio_decoder_isac_float_gn/moz.build +++ b/third_party/libwebrtc/modules/audio_processing/agc2/adaptive_digital_gain_controller_gn/moz.build @@ -12,6 +12,7 @@ AllowCompilerWarnings() DEFINES["ABSL_ALLOCATOR_NOTHROW"] = "1" DEFINES["RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY"] = True DEFINES["RTC_ENABLE_VP9"] = True +DEFINES["WEBRTC_APM_DEBUG_DUMP"] = "0" DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0" DEFINES["WEBRTC_LIBRARY_IMPL"] = True DEFINES["WEBRTC_MOZILLA_BUILD"] = True @@ -31,7 +32,7 @@ LOCAL_INCLUDES += [ ] UNIFIED_SOURCES += [ - "/third_party/libwebrtc/api/audio_codecs/isac/audio_decoder_isac_float.cc" + "/third_party/libwebrtc/modules/audio_processing/agc2/adaptive_digital_gain_controller.cc" ] if not CONFIG["MOZ_DEBUG"]: @@ -88,7 +89,6 @@ if CONFIG["OS_TARGET"] == "Linux": DEFINES["__STDC_FORMAT_MACROS"] = True OS_LIBS += [ - "m", "rt" ] @@ -145,6 +145,16 @@ if CONFIG["CPU_ARCH"] == "aarch64": DEFINES["WEBRTC_ARCH_ARM64"] = True DEFINES["WEBRTC_HAS_NEON"] = True +if CONFIG["CPU_ARCH"] == "arm": + + CXXFLAGS += [ + "-mfpu=neon" + ] + + DEFINES["WEBRTC_ARCH_ARM"] = 
True + DEFINES["WEBRTC_ARCH_ARM_V7"] = True + DEFINES["WEBRTC_HAS_NEON"] = True + if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Android": DEFINES["_DEBUG"] = True @@ -169,6 +179,13 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux": DEFINES["USE_X11"] = "1" +if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Android": + + OS_LIBS += [ + "android_support", + "unwind" + ] + if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Android": CXXFLAGS += [ @@ -184,6 +201,11 @@ if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Linux": DEFINES["WEBRTC_ENABLE_AVX2"] = True DEFINES["_GNU_SOURCE"] = True +if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Linux": + + DEFINES["WEBRTC_ENABLE_AVX2"] = True + DEFINES["_GNU_SOURCE"] = True + if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux": CXXFLAGS += [ @@ -198,4 +220,4 @@ if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Linux": DEFINES["WEBRTC_ENABLE_AVX2"] = True DEFINES["_GNU_SOURCE"] = True -Library("audio_decoder_isac_float_gn") +Library("adaptive_digital_gain_controller_gn") diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/adaptive_digital_gain_applier_unittest.cc b/third_party/libwebrtc/modules/audio_processing/agc2/adaptive_digital_gain_controller_unittest.cc similarity index 61% rename from third_party/libwebrtc/modules/audio_processing/agc2/adaptive_digital_gain_applier_unittest.cc rename to third_party/libwebrtc/modules/audio_processing/agc2/adaptive_digital_gain_controller_unittest.cc index ea7485f512b3..e95cbb5067d1 100644 --- a/third_party/libwebrtc/modules/audio_processing/agc2/adaptive_digital_gain_applier_unittest.cc +++ b/third_party/libwebrtc/modules/audio_processing/agc2/adaptive_digital_gain_controller_unittest.cc @@ -8,7 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "modules/audio_processing/agc2/adaptive_digital_gain_applier.h" +#include "modules/audio_processing/agc2/adaptive_digital_gain_controller.h" #include #include @@ -48,28 +48,26 @@ using AdaptiveDigitalConfig = constexpr AdaptiveDigitalConfig kDefaultConfig{}; -// Helper to create initialized `AdaptiveDigitalGainApplier` objects. +// Helper to create initialized `AdaptiveDigitalGainController` objects. struct GainApplierHelper { GainApplierHelper(const AdaptiveDigitalConfig& config, - int sample_rate_hz, - int num_channels) + int adjacent_speech_frames_threshold) : apm_data_dumper(0), - gain_applier( - std::make_unique(&apm_data_dumper, - config, - sample_rate_hz, - num_channels)) {} + gain_applier(std::make_unique( + &apm_data_dumper, + config, + adjacent_speech_frames_threshold)) {} ApmDataDumper apm_data_dumper; - std::unique_ptr gain_applier; + std::unique_ptr gain_applier; }; // Returns a `FrameInfo` sample to simulate noiseless speech detected with // maximum probability and with level, headroom and limiter envelope chosen // so that the resulting gain equals the default initial adaptive digital gain // i.e., no gain adaptation is expected. 
-AdaptiveDigitalGainApplier::FrameInfo GetFrameInfoToNotAdapt( +AdaptiveDigitalGainController::FrameInfo GetFrameInfoToNotAdapt( const AdaptiveDigitalConfig& config) { - AdaptiveDigitalGainApplier::FrameInfo info; + AdaptiveDigitalGainController::FrameInfo info; info.speech_probability = kMaxSpeechProbability; info.speech_level_dbfs = -config.initial_gain_db - config.headroom_db; info.speech_level_reliable = true; @@ -79,8 +77,9 @@ AdaptiveDigitalGainApplier::FrameInfo GetFrameInfoToNotAdapt( return info; } -TEST(GainController2AdaptiveGainApplier, GainApplierShouldNotCrash) { - GainApplierHelper helper(kDefaultConfig, /*sample_rate_hz=*/48000, kStereo); +TEST(GainController2AdaptiveDigitalGainControllerTest, + GainApplierShouldNotCrash) { + GainApplierHelper helper(kDefaultConfig, kAdjacentSpeechFramesThreshold); // Make one call with reasonable audio level values and settings. VectorFloatFrame fake_audio(kStereo, kFrameLen10ms48kHz, 10000.0f); helper.gain_applier->Process(GetFrameInfoToNotAdapt(kDefaultConfig), @@ -88,15 +87,15 @@ TEST(GainController2AdaptiveGainApplier, GainApplierShouldNotCrash) { } // Checks that the maximum allowed gain is applied. -TEST(GainController2AdaptiveGainApplier, MaxGainApplied) { +TEST(GainController2AdaptiveDigitalGainControllerTest, MaxGainApplied) { constexpr int kNumFramesToAdapt = static_cast(kDefaultConfig.max_gain_db / GetMaxGainChangePerFrameDb( kDefaultConfig.max_gain_change_db_per_second)) + kNumExtraFrames; - GainApplierHelper helper(kDefaultConfig, /*sample_rate_hz=*/8000, kMono); - AdaptiveDigitalGainApplier::FrameInfo info = + GainApplierHelper helper(kDefaultConfig, kAdjacentSpeechFramesThreshold); + AdaptiveDigitalGainController::FrameInfo info = GetFrameInfoToNotAdapt(kDefaultConfig); info.speech_level_dbfs = -60.0f; float applied_gain; @@ -109,8 +108,8 @@ TEST(GainController2AdaptiveGainApplier, MaxGainApplied) { EXPECT_NEAR(applied_gain_db, kDefaultConfig.max_gain_db, 0.1f); } -TEST(GainController2AdaptiveGainApplier, GainDoesNotChangeFast) { - GainApplierHelper helper(kDefaultConfig, /*sample_rate_hz=*/8000, kMono); +TEST(GainController2AdaptiveDigitalGainControllerTest, GainDoesNotChangeFast) { + GainApplierHelper helper(kDefaultConfig, kAdjacentSpeechFramesThreshold); constexpr float initial_level_dbfs = -25.0f; constexpr float kMaxGainChangeDbPerFrame = @@ -125,7 +124,7 @@ TEST(GainController2AdaptiveGainApplier, GainDoesNotChangeFast) { for (int i = 0; i < kNumFramesToAdapt; ++i) { SCOPED_TRACE(i); VectorFloatFrame fake_audio(kMono, kFrameLen10ms8kHz, 1.0f); - AdaptiveDigitalGainApplier::FrameInfo info = + AdaptiveDigitalGainController::FrameInfo info = GetFrameInfoToNotAdapt(kDefaultConfig); info.speech_level_dbfs = initial_level_dbfs; helper.gain_applier->Process(info, fake_audio.float_frame_view()); @@ -139,7 +138,7 @@ TEST(GainController2AdaptiveGainApplier, GainDoesNotChangeFast) { for (int i = 0; i < kNumFramesToAdapt; ++i) { SCOPED_TRACE(i); VectorFloatFrame fake_audio(kMono, kFrameLen10ms8kHz, 1.0f); - AdaptiveDigitalGainApplier::FrameInfo info = + AdaptiveDigitalGainController::FrameInfo info = GetFrameInfoToNotAdapt(kDefaultConfig); info.speech_level_dbfs = 0.f; helper.gain_applier->Process(info, fake_audio.float_frame_view()); @@ -150,13 +149,13 @@ TEST(GainController2AdaptiveGainApplier, GainDoesNotChangeFast) { } } -TEST(GainController2AdaptiveGainApplier, GainIsRampedInAFrame) { - GainApplierHelper helper(kDefaultConfig, /*sample_rate_hz=*/48000, kMono); +TEST(GainController2AdaptiveDigitalGainControllerTest, 
GainIsRampedInAFrame) { + GainApplierHelper helper(kDefaultConfig, kAdjacentSpeechFramesThreshold); constexpr float initial_level_dbfs = -25.0f; VectorFloatFrame fake_audio(kMono, kFrameLen10ms48kHz, 1.0f); - AdaptiveDigitalGainApplier::FrameInfo info = + AdaptiveDigitalGainController::FrameInfo info = GetFrameInfoToNotAdapt(kDefaultConfig); info.speech_level_dbfs = initial_level_dbfs; helper.gain_applier->Process(info, fake_audio.float_frame_view()); @@ -176,8 +175,8 @@ TEST(GainController2AdaptiveGainApplier, GainIsRampedInAFrame) { EXPECT_LE(maximal_difference, max_change_per_sample); } -TEST(GainController2AdaptiveGainApplier, NoiseLimitsGain) { - GainApplierHelper helper(kDefaultConfig, /*sample_rate_hz=*/48000, kMono); +TEST(GainController2AdaptiveDigitalGainControllerTest, NoiseLimitsGain) { + GainApplierHelper helper(kDefaultConfig, kAdjacentSpeechFramesThreshold); constexpr float initial_level_dbfs = -25.0f; constexpr int num_initial_frames = @@ -190,7 +189,7 @@ TEST(GainController2AdaptiveGainApplier, NoiseLimitsGain) { for (int i = 0; i < num_initial_frames + num_frames; ++i) { VectorFloatFrame fake_audio(kMono, kFrameLen10ms48kHz, 1.0f); - AdaptiveDigitalGainApplier::FrameInfo info = + AdaptiveDigitalGainController::FrameInfo info = GetFrameInfoToNotAdapt(kDefaultConfig); info.speech_level_dbfs = initial_level_dbfs; info.noise_rms_dbfs = kWithNoiseDbfs; @@ -207,19 +206,20 @@ TEST(GainController2AdaptiveGainApplier, NoiseLimitsGain) { } } -TEST(GainController2GainApplier, CanHandlePositiveSpeechLevels) { - GainApplierHelper helper(kDefaultConfig, /*sample_rate_hz=*/48000, kStereo); +TEST(GainController2AdaptiveDigitalGainControllerTest, + CanHandlePositiveSpeechLevels) { + GainApplierHelper helper(kDefaultConfig, kAdjacentSpeechFramesThreshold); // Make one call with positive audio level values and settings. 
VectorFloatFrame fake_audio(kStereo, kFrameLen10ms48kHz, 10000.0f); - AdaptiveDigitalGainApplier::FrameInfo info = + AdaptiveDigitalGainController::FrameInfo info = GetFrameInfoToNotAdapt(kDefaultConfig); info.speech_level_dbfs = 5.0f; helper.gain_applier->Process(info, fake_audio.float_frame_view()); } -TEST(GainController2GainApplier, AudioLevelLimitsGain) { - GainApplierHelper helper(kDefaultConfig, /*sample_rate_hz=*/48000, kMono); +TEST(GainController2AdaptiveDigitalGainControllerTest, AudioLevelLimitsGain) { + GainApplierHelper helper(kDefaultConfig, kAdjacentSpeechFramesThreshold); constexpr float initial_level_dbfs = -25.0f; constexpr int num_initial_frames = @@ -232,7 +232,7 @@ TEST(GainController2GainApplier, AudioLevelLimitsGain) { for (int i = 0; i < num_initial_frames + num_frames; ++i) { VectorFloatFrame fake_audio(kMono, kFrameLen10ms48kHz, 1.0f); - AdaptiveDigitalGainApplier::FrameInfo info = + AdaptiveDigitalGainController::FrameInfo info = GetFrameInfoToNotAdapt(kDefaultConfig); info.speech_level_dbfs = initial_level_dbfs; info.limiter_envelope_dbfs = 1.0f; @@ -250,23 +250,23 @@ TEST(GainController2GainApplier, AudioLevelLimitsGain) { } } -class AdaptiveDigitalGainApplierTest : public ::testing::TestWithParam { +class AdaptiveDigitalGainControllerParametrizedTest + : public ::testing::TestWithParam { protected: int adjacent_speech_frames_threshold() const { return GetParam(); } }; -TEST_P(AdaptiveDigitalGainApplierTest, +TEST_P(AdaptiveDigitalGainControllerParametrizedTest, DoNotIncreaseGainWithTooFewSpeechFrames) { - AdaptiveDigitalConfig config; - config.adjacent_speech_frames_threshold = adjacent_speech_frames_threshold(); - GainApplierHelper helper(config, /*sample_rate_hz=*/48000, kMono); + GainApplierHelper helper(kDefaultConfig, adjacent_speech_frames_threshold()); // Lower the speech level so that the target gain will be increased. - AdaptiveDigitalGainApplier::FrameInfo info = GetFrameInfoToNotAdapt(config); + AdaptiveDigitalGainController::FrameInfo info = + GetFrameInfoToNotAdapt(kDefaultConfig); info.speech_level_dbfs -= 12.0f; float prev_gain = 0.0f; - for (int i = 0; i < config.adjacent_speech_frames_threshold; ++i) { + for (int i = 0; i < adjacent_speech_frames_threshold(); ++i) { SCOPED_TRACE(i); VectorFloatFrame audio(kMono, kFrameLen10ms48kHz, 1.0f); helper.gain_applier->Process(info, audio.float_frame_view()); @@ -278,17 +278,17 @@ TEST_P(AdaptiveDigitalGainApplierTest, } } -TEST_P(AdaptiveDigitalGainApplierTest, IncreaseGainWithEnoughSpeechFrames) { - AdaptiveDigitalConfig config; - config.adjacent_speech_frames_threshold = adjacent_speech_frames_threshold(); - GainApplierHelper helper(config, /*sample_rate_hz=*/48000, kMono); +TEST_P(AdaptiveDigitalGainControllerParametrizedTest, + IncreaseGainWithEnoughSpeechFrames) { + GainApplierHelper helper(kDefaultConfig, adjacent_speech_frames_threshold()); // Lower the speech level so that the target gain will be increased. 
- AdaptiveDigitalGainApplier::FrameInfo info = GetFrameInfoToNotAdapt(config); + AdaptiveDigitalGainController::FrameInfo info = + GetFrameInfoToNotAdapt(kDefaultConfig); info.speech_level_dbfs -= 12.0f; float prev_gain = 0.0f; - for (int i = 0; i < config.adjacent_speech_frames_threshold; ++i) { + for (int i = 0; i < adjacent_speech_frames_threshold(); ++i) { SCOPED_TRACE(i); VectorFloatFrame audio(kMono, kFrameLen10ms48kHz, 1.0f); helper.gain_applier->Process(info, audio.float_frame_view()); @@ -303,71 +303,10 @@ TEST_P(AdaptiveDigitalGainApplierTest, IncreaseGainWithEnoughSpeechFrames) { EXPECT_GT(audio.float_frame_view().channel(0)[0], prev_gain); } -INSTANTIATE_TEST_SUITE_P(GainController2, - AdaptiveDigitalGainApplierTest, - ::testing::Values(1, 7, 31)); - -// Checks that the input is never modified when running in dry run mode. -TEST(GainController2GainApplier, DryRunDoesNotChangeInput) { - AdaptiveDigitalConfig config; - config.dry_run = true; - GainApplierHelper helper(config, /*sample_rate_hz=*/8000, kMono); - - // Simulate an input signal with log speech level. - AdaptiveDigitalGainApplier::FrameInfo info = GetFrameInfoToNotAdapt(config); - info.speech_level_dbfs = -60.0f; - const int num_frames_to_adapt = - static_cast( - config.max_gain_db / - GetMaxGainChangePerFrameDb(config.max_gain_change_db_per_second)) + - kNumExtraFrames; - constexpr float kPcmSamples = 123.456f; - // Run the gain applier and check that the PCM samples are not modified. - for (int i = 0; i < num_frames_to_adapt; ++i) { - SCOPED_TRACE(i); - VectorFloatFrame fake_audio(kMono, kFrameLen10ms8kHz, kPcmSamples); - helper.gain_applier->Process(info, fake_audio.float_frame_view()); - EXPECT_FLOAT_EQ(fake_audio.float_frame_view().channel(0)[0], kPcmSamples); - } -} - -// Checks that no sample is modified before and after the sample rate changes. -TEST(GainController2GainApplier, DryRunHandlesSampleRateChange) { - AdaptiveDigitalConfig config; - config.dry_run = true; - GainApplierHelper helper(config, /*sample_rate_hz=*/8000, kMono); - - AdaptiveDigitalGainApplier::FrameInfo info = GetFrameInfoToNotAdapt(config); - info.speech_level_dbfs = -60.0f; - constexpr float kPcmSamples = 123.456f; - VectorFloatFrame fake_audio_8k(kMono, kFrameLen10ms8kHz, kPcmSamples); - helper.gain_applier->Process(info, fake_audio_8k.float_frame_view()); - EXPECT_FLOAT_EQ(fake_audio_8k.float_frame_view().channel(0)[0], kPcmSamples); - helper.gain_applier->Initialize(/*sample_rate_hz=*/48000, kMono); - VectorFloatFrame fake_audio_48k(kMono, kFrameLen10ms48kHz, kPcmSamples); - helper.gain_applier->Process(info, fake_audio_48k.float_frame_view()); - EXPECT_FLOAT_EQ(fake_audio_48k.float_frame_view().channel(0)[0], kPcmSamples); -} - -// Checks that no sample is modified before and after the number of channels -// changes. 
-TEST(GainController2GainApplier, DryRunHandlesNumChannelsChange) { - AdaptiveDigitalConfig config; - config.dry_run = true; - GainApplierHelper helper(config, /*sample_rate_hz=*/8000, kMono); - - AdaptiveDigitalGainApplier::FrameInfo info = GetFrameInfoToNotAdapt(config); - info.speech_level_dbfs = -60.0f; - constexpr float kPcmSamples = 123.456f; - VectorFloatFrame fake_audio_8k(kMono, kFrameLen10ms8kHz, kPcmSamples); - helper.gain_applier->Process(info, fake_audio_8k.float_frame_view()); - EXPECT_FLOAT_EQ(fake_audio_8k.float_frame_view().channel(0)[0], kPcmSamples); - VectorFloatFrame fake_audio_48k(kStereo, kFrameLen10ms8kHz, kPcmSamples); - helper.gain_applier->Initialize(/*sample_rate_hz=*/8000, kStereo); - helper.gain_applier->Process(info, fake_audio_48k.float_frame_view()); - EXPECT_FLOAT_EQ(fake_audio_48k.float_frame_view().channel(0)[0], kPcmSamples); - EXPECT_FLOAT_EQ(fake_audio_48k.float_frame_view().channel(1)[0], kPcmSamples); -} +INSTANTIATE_TEST_SUITE_P( + GainController2, + AdaptiveDigitalGainControllerParametrizedTest, + ::testing::Values(1, 7, 31, kAdjacentSpeechFramesThreshold)); } // namespace } // namespace webrtc diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/agc2_common.h b/third_party/libwebrtc/modules/audio_processing/agc2/agc2_common.h index 4af85527b826..4597bcd0159a 100644 --- a/third_party/libwebrtc/modules/audio_processing/agc2/agc2_common.h +++ b/third_party/libwebrtc/modules/audio_processing/agc2/agc2_common.h @@ -29,11 +29,16 @@ constexpr int kMaximalNumberOfSamplesPerChannel = 480; // At what limiter levels should we start decreasing the adaptive digital gain. constexpr float kLimiterThresholdForAgcGainDbfs = -1.0f; -// This is the threshold for speech. Speech frames are used for updating the -// speech level, measuring the amount of speech, and decide when to allow target -// gain changes. +// Number of milliseconds to wait to periodically reset the VAD. +constexpr int kVadResetPeriodMs = 1500; + +// Speech probability threshold to detect speech activity. constexpr float kVadConfidenceThreshold = 0.95f; +// Minimum number of adjacent speech frames having a sufficiently high speech +// probability to reliably detect speech activity. +constexpr int kAdjacentSpeechFramesThreshold = 12; + // Number of milliseconds of speech frames to observe to make the estimator // confident. constexpr float kLevelEstimatorTimeToConfidenceMs = 400; diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/clipping_predictor.cc b/third_party/libwebrtc/modules/audio_processing/agc2/clipping_predictor.cc index 2bf5fb2e328b..fd759c63e851 100644 --- a/third_party/libwebrtc/modules/audio_processing/agc2/clipping_predictor.cc +++ b/third_party/libwebrtc/modules/audio_processing/agc2/clipping_predictor.cc @@ -25,30 +25,31 @@ namespace { constexpr int kClippingPredictorMaxGainChange = 15; -// Estimates the new level from the gain error; a copy of the function -// `LevelFromGainError` in agc_manager_direct.cc. -int LevelFromGainError(int gain_error, - int level, - int min_mic_level, - int max_mic_level) { - RTC_DCHECK_GE(level, 0); - RTC_DCHECK_LE(level, max_mic_level); - if (gain_error == 0) { - return level; +// Returns an input volume in the [`min_input_volume`, `max_input_volume`] range +// that reduces `gain_error_db`, which is a gain error estimated when +// `input_volume` was applied, according to a fixed gain map. 
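The ComputeVolumeUpdate helper introduced in this hunk walks the fixed gain map until the accumulated gain change covers the requested error. The sketch below reproduces that walk using only the first 16 kGainMap entries quoted later in this patch; the volume bounds passed in main() are illustrative.

// Sketch of ComputeVolumeUpdate(): step through the gain map until the
// accumulated gain change covers gain_error_db. Only the first 16 entries
// of kGainMap are reproduced here.
#include <cstdio>

constexpr int kGainMapHead[] = {-56, -54, -52, -50, -48, -47, -45, -43,
                                -42, -40, -38, -37, -35, -34, -33, -31};

int ComputeVolumeUpdateSketch(int gain_error_db, int input_volume,
                              int min_input_volume, int max_input_volume) {
  if (gain_error_db == 0) return input_volume;
  int new_volume = input_volume;
  if (gain_error_db > 0) {
    while (kGainMapHead[new_volume] - kGainMapHead[input_volume] <
               gain_error_db &&
           new_volume < max_input_volume) {
      ++new_volume;
    }
  } else {
    while (kGainMapHead[new_volume] - kGainMapHead[input_volume] >
               gain_error_db &&
           new_volume > min_input_volume) {
      --new_volume;
    }
  }
  return new_volume;
}

int main() {
  // +5 dB requested at volume 10 (-38 dB in the map): the walk stops at
  // volume 14 (-33 dB), the first entry at least 5 dB above the start.
  std::printf("%d\n", ComputeVolumeUpdateSketch(5, 10, 0, 15));  // Prints 14.
}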
+int ComputeVolumeUpdate(int gain_error_db, + int input_volume, + int min_input_volume, + int max_input_volume) { + RTC_DCHECK_GE(input_volume, 0); + RTC_DCHECK_LE(input_volume, max_input_volume); + if (gain_error_db == 0) { + return input_volume; } - int new_level = level; - if (gain_error > 0) { - while (kGainMap[new_level] - kGainMap[level] < gain_error && - new_level < max_mic_level) { - ++new_level; + int new_volume = input_volume; + if (gain_error_db > 0) { + while (kGainMap[new_volume] - kGainMap[input_volume] < gain_error_db && + new_volume < max_input_volume) { + ++new_volume; } } else { - while (kGainMap[new_level] - kGainMap[level] > gain_error && - new_level > min_mic_level) { - --new_level; + while (kGainMap[new_volume] - kGainMap[input_volume] > gain_error_db && + new_volume > min_input_volume) { + --new_volume; } } - return new_level; + return new_volume; } float ComputeCrestFactor(const ClippingPredictorLevelBuffer::Level& level) { @@ -298,8 +299,8 @@ class ClippingPeakPredictor : public ClippingPredictor { rtc::SafeClamp(-static_cast(std::ceil(estimate_db.value())), -kClippingPredictorMaxGainChange, 0); step = - std::max(level - LevelFromGainError(estimated_gain_change, level, - min_mic_level, max_mic_level), + std::max(level - ComputeVolumeUpdate(estimated_gain_change, level, + min_mic_level, max_mic_level), default_step); } const int new_level = @@ -354,10 +355,10 @@ std::unique_ptr CreateClippingPredictor( const AudioProcessing::Config::GainController1::AnalogGainController:: ClippingPredictor& config) { if (!config.enabled) { - RTC_LOG(LS_INFO) << "[agc] Clipping prediction disabled."; + RTC_LOG(LS_INFO) << "[AGC2] Clipping prediction disabled."; return nullptr; } - RTC_LOG(LS_INFO) << "[agc] Clipping prediction enabled."; + RTC_LOG(LS_INFO) << "[AGC2] Clipping prediction enabled."; using ClippingPredictorMode = AudioProcessing::Config::GainController1:: AnalogGainController::ClippingPredictor::Mode; switch (config.mode) { diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/gain_map_internal.h b/third_party/libwebrtc/modules/audio_processing/agc2/gain_map_internal.h index 75e421899ff1..7c669fc9dda6 100644 --- a/third_party/libwebrtc/modules/audio_processing/agc2/gain_map_internal.h +++ b/third_party/libwebrtc/modules/audio_processing/agc2/gain_map_internal.h @@ -13,8 +13,14 @@ namespace webrtc { -static const int kGainMapSize = 256; -// Uses parameters: si = 2, sf = 0.25, D = 8/256 +static constexpr int kGainMapSize = 256; +// Maps input volumes, which are values in the [0, 255] range, to gains in dB. +// The values below are generated with numpy as follows: +// SI = 2 # Initial slope. +// SF = 0.25 # Final slope. +// D = 8/256 # Quantization factor. +// x = np.linspace(0, 255, 256) # Input volumes. 
+// y = (SF * x + (SI - SF) * (1 - np.exp(-D*x)) / D - 56).round() static const int kGainMap[kGainMapSize] = { -56, -54, -52, -50, -48, -47, -45, -43, -42, -40, -38, -37, -35, -34, -33, -31, -30, -29, -27, -26, -25, -24, -23, -22, -20, -19, -18, -17, -16, -15, diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/input_volume_controller.cc b/third_party/libwebrtc/modules/audio_processing/agc2/input_volume_controller.cc index 76d81d8c5968..bcc650fb3e9b 100644 --- a/third_party/libwebrtc/modules/audio_processing/agc2/input_volume_controller.cc +++ b/third_party/libwebrtc/modules/audio_processing/agc2/input_volume_controller.cc @@ -15,6 +15,7 @@ #include "api/array_view.h" #include "modules/audio_processing/agc2/gain_map_internal.h" +#include "modules/audio_processing/agc2/input_volume_stats_reporter.h" #include "modules/audio_processing/include/audio_frame_view.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -26,23 +27,16 @@ namespace webrtc { namespace { -// Amount of error we tolerate in the microphone level (presumably due to OS -// quantization) before we assume the user has manually adjusted the microphone. -constexpr int kLevelQuantizationSlack = 25; +// Amount of error we tolerate in the microphone input volume (presumably due to +// OS quantization) before we assume the user has manually adjusted the volume. +constexpr int kVolumeQuantizationSlack = 25; -constexpr int kMaxMicLevel = 255; -static_assert(kGainMapSize > kMaxMicLevel, "gain map too small"); -constexpr int kMinMicLevel = 12; +constexpr int kMaxInputVolume = 255; +static_assert(kGainMapSize > kMaxInputVolume, "gain map too small"); -// Prevent very large microphone level changes. -constexpr int kMaxResidualGainChange = 15; - -// Target speech level (dBFs) and speech probability threshold used to compute -// the RMS error in `GetSpeechLevelErrorDb()`. -// TODO(webrtc:7494): Move these to a config and pass in the ctor with -// kUpdateInputVolumeWaitFrames = 100. -constexpr float kSpeechProbabilitySilenceThreshold = 0.5f; -constexpr int kUpdateInputVolumeWaitFrames = 0; +// Maximum absolute RMS error. +constexpr int KMaxAbsRmsErrorDbfs = 15; +static_assert(KMaxAbsRmsErrorDbfs > 0, ""); using Agc1ClippingPredictorConfig = AudioProcessing::Config::GainController1:: AnalogGainController::ClippingPredictor; @@ -56,50 +50,31 @@ Agc1ClippingPredictorConfig CreateClippingPredictorConfig(bool enabled) { return config; } -// If the "WebRTC-Audio-2ndAgcMinMicLevelExperiment" field trial is specified, -// parses it and returns a value between 0 and 255 depending on the field-trial -// string. Returns an unspecified value if the field trial is not specified, if -// disabled or if it cannot be parsed. Example: -// 'WebRTC-Audio-2ndAgcMinMicLevelExperiment/Enabled-80' => returns 80. 
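The numpy recipe quoted just above for generating kGainMap can be sanity-checked directly; the sketch below evaluates the same curve in C++ and reproduces the first table entries.

// Sketch: reproduce the kGainMap generation recipe in C++ and compare
// against the first table entries (-56, -54, -52, -50, ...).
#include <cmath>
#include <cstdio>

int main() {
  const double si = 2.0;         // Initial slope.
  const double sf = 0.25;        // Final slope.
  const double d = 8.0 / 256.0;  // Quantization factor.
  for (int x = 0; x < 4; ++x) {
    const double y =
        sf * x + (si - sf) * (1.0 - std::exp(-d * x)) / d - 56.0;
    std::printf("x=%d  gain=%d dB\n", x, static_cast<int>(std::lround(y)));
  }
  // Prints -56, -54, -52, -50, matching the start of kGainMap.
}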
-absl::optional GetMinMicLevelOverride() { - constexpr char kMinMicLevelFieldTrial[] = - "WebRTC-Audio-2ndAgcMinMicLevelExperiment"; - if (!webrtc::field_trial::IsEnabled(kMinMicLevelFieldTrial)) { - return absl::nullopt; - } - const auto field_trial_string = - webrtc::field_trial::FindFullName(kMinMicLevelFieldTrial); - int min_mic_level = -1; - sscanf(field_trial_string.c_str(), "Enabled-%d", &min_mic_level); - if (min_mic_level >= 0 && min_mic_level <= 255) { - return min_mic_level; - } else { - RTC_LOG(LS_WARNING) << "[agc] Invalid parameter for " - << kMinMicLevelFieldTrial << ", ignored."; - return absl::nullopt; - } -} - -int LevelFromGainError(int gain_error, int level, int min_mic_level) { - RTC_DCHECK_GE(level, 0); - RTC_DCHECK_LE(level, kMaxMicLevel); - if (gain_error == 0) { - return level; +// Returns an input volume in the [`min_input_volume`, `kMaxInputVolume`] range +// that reduces `gain_error_db`, which is a gain error estimated when +// `input_volume` was applied, according to a fixed gain map. +int ComputeVolumeUpdate(int gain_error_db, + int input_volume, + int min_input_volume) { + RTC_DCHECK_GE(input_volume, 0); + RTC_DCHECK_LE(input_volume, kMaxInputVolume); + if (gain_error_db == 0) { + return input_volume; } - int new_level = level; - if (gain_error > 0) { - while (kGainMap[new_level] - kGainMap[level] < gain_error && - new_level < kMaxMicLevel) { - ++new_level; + int new_volume = input_volume; + if (gain_error_db > 0) { + while (kGainMap[new_volume] - kGainMap[input_volume] < gain_error_db && + new_volume < kMaxInputVolume) { + ++new_volume; } } else { - while (kGainMap[new_level] - kGainMap[level] > gain_error && - new_level > min_mic_level) { - --new_level; + while (kGainMap[new_volume] - kGainMap[input_volume] > gain_error_db && + new_volume > min_input_volume) { + --new_volume; } } - return new_level; + return new_volume; } // Returns the proportion of samples in the buffer which are at full-scale @@ -123,68 +98,83 @@ float ComputeClippedRatio(const float* const* audio, } void LogClippingMetrics(int clipping_rate) { - RTC_LOG(LS_INFO) << "Input clipping rate: " << clipping_rate << "%"; + RTC_LOG(LS_INFO) << "[AGC2] Input clipping rate: " << clipping_rate << "%"; RTC_HISTOGRAM_COUNTS_LINEAR(/*name=*/"WebRTC.Audio.Agc.InputClippingRate", /*sample=*/clipping_rate, /*min=*/0, /*max=*/100, /*bucket_count=*/50); } -// Computes the speech level error in dB. The value of `speech_level_dbfs` is -// required to be in the range [-90.0f, 30.0f] and `speech_probability` in the -// range [0.0f, 1.0f]. Returns a positive value when the speech level is below -// the target range and a negative value when the speech level is above the -// target range. -int GetSpeechLevelErrorDb(float speech_level_dbfs, - float speech_probability, - int target_range_min_dbfs, - int target_range_max_dbfs) { +// Compares `speech_level_dbfs` to the [`target_range_min_dbfs`, +// `target_range_max_dbfs`] range and returns the error to be compensated via +// input volume adjustment. Returns a positive value when the level is below +// the range, a negative value when the level is above the range, zero +// otherwise. 
+int GetSpeechLevelRmsErrorDb(float speech_level_dbfs, + int target_range_min_dbfs, + int target_range_max_dbfs) { constexpr float kMinSpeechLevelDbfs = -90.0f; constexpr float kMaxSpeechLevelDbfs = 30.0f; RTC_DCHECK_GE(speech_level_dbfs, kMinSpeechLevelDbfs); RTC_DCHECK_LE(speech_level_dbfs, kMaxSpeechLevelDbfs); - RTC_DCHECK_GE(speech_probability, 0.0f); - RTC_DCHECK_LE(speech_probability, 1.0f); - - // TODO(webrtc:7494): Replace with the use of `SpeechProbabilityBuffer`. - if (speech_probability < kSpeechProbabilitySilenceThreshold) { - return 0; - } - - // Ensure the speech level is in the range [-90.0f, 30.0f]. speech_level_dbfs = rtc::SafeClamp( speech_level_dbfs, kMinSpeechLevelDbfs, kMaxSpeechLevelDbfs); - // Compute the speech level distance to the target range - // [`target_range_min_dbfs`, `target_range_max_dbfs`]. - int rms_error_dbfs = 0; + int rms_error_db = 0; if (speech_level_dbfs > target_range_max_dbfs) { - rms_error_dbfs = std::round(target_range_max_dbfs - speech_level_dbfs); + rms_error_db = std::round(target_range_max_dbfs - speech_level_dbfs); } else if (speech_level_dbfs < target_range_min_dbfs) { - rms_error_dbfs = std::round(target_range_min_dbfs - speech_level_dbfs); + rms_error_db = std::round(target_range_min_dbfs - speech_level_dbfs); } - return rms_error_dbfs; + return rms_error_db; } } // namespace -MonoInputVolumeController::MonoInputVolumeController(int clipped_level_min, - int min_mic_level) - : min_mic_level_(min_mic_level), - max_level_(kMaxMicLevel), - clipped_level_min_(clipped_level_min) {} +MonoInputVolumeController::MonoInputVolumeController( + int min_input_volume_after_clipping, + int min_input_volume, + int update_input_volume_wait_frames, + float speech_probability_threshold, + float speech_ratio_threshold) + : min_input_volume_(min_input_volume), + min_input_volume_after_clipping_(min_input_volume_after_clipping), + max_input_volume_(kMaxInputVolume), + update_input_volume_wait_frames_( + std::max(update_input_volume_wait_frames, 1)), + speech_probability_threshold_(speech_probability_threshold), + speech_ratio_threshold_(speech_ratio_threshold) { + RTC_DCHECK_GE(min_input_volume_, 0); + RTC_DCHECK_LE(min_input_volume_, 255); + RTC_DCHECK_GE(min_input_volume_after_clipping_, 0); + RTC_DCHECK_LE(min_input_volume_after_clipping_, 255); + RTC_DCHECK_GE(max_input_volume_, 0); + RTC_DCHECK_LE(max_input_volume_, 255); + RTC_DCHECK_GE(update_input_volume_wait_frames_, 0); + RTC_DCHECK_GE(speech_probability_threshold_, 0.0f); + RTC_DCHECK_LE(speech_probability_threshold_, 1.0f); + RTC_DCHECK_GE(speech_ratio_threshold_, 0.0f); + RTC_DCHECK_LE(speech_ratio_threshold_, 1.0f); +} MonoInputVolumeController::~MonoInputVolumeController() = default; void MonoInputVolumeController::Initialize() { - max_level_ = kMaxMicLevel; + max_input_volume_ = kMaxInputVolume; capture_output_used_ = true; check_volume_on_next_process_ = true; - frames_since_update_gain_ = 0; + frames_since_update_input_volume_ = 0; + speech_frames_since_update_input_volume_ = 0; is_first_frame_ = true; } -void MonoInputVolumeController::Process(absl::optional rms_error_dbfs) { +// A speeh segment is considered active if at least +// `update_input_volume_wait_frames_` new frames have been processed since the +// previous update and the ratio of non-silence frames (i.e., frames with a +// `speech_probability` higher than `speech_probability_threshold_`) is at least +// `speech_ratio_threshold_`. 
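// Editor's note (defaults assumed from InputVolumeController::Config further
// below): with update_input_volume_wait_frames_ == 100,
// speech_probability_threshold_ == 0.7f and speech_ratio_threshold_ == 0.9f,
// the recommended input volume is re-evaluated at most once every 100 processed
// frames (roughly one second of 10 ms frames), and only when at least 90 of
// those frames carried a speech probability of 0.7 or higher and a valid
// `rms_error_db` was supplied.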
+void MonoInputVolumeController::Process(absl::optional rms_error_db, + float speech_probability) { if (check_volume_on_next_process_) { check_volume_on_next_process_ = false; // We have to wait until the first process call to check the volume, @@ -192,85 +182,112 @@ void MonoInputVolumeController::Process(absl::optional rms_error_dbfs) { CheckVolumeAndReset(); } - if (rms_error_dbfs.has_value() && !is_first_frame_ && - frames_since_update_gain_ >= kUpdateInputVolumeWaitFrames) { - UpdateInputVolume(*rms_error_dbfs); + // Count frames with a high speech probability as speech. + if (speech_probability >= speech_probability_threshold_) { + ++speech_frames_since_update_input_volume_; + } + + // Reset the counters and maybe update the input volume. + if (++frames_since_update_input_volume_ >= update_input_volume_wait_frames_) { + const float speech_ratio = + static_cast(speech_frames_since_update_input_volume_) / + static_cast(update_input_volume_wait_frames_); + + // Always reset the counters regardless of whether the volume changes or + // not. + frames_since_update_input_volume_ = 0; + speech_frames_since_update_input_volume_ = 0; + + // Update the input volume if allowed. + if (!is_first_frame_ && speech_ratio >= speech_ratio_threshold_ && + rms_error_db.has_value()) { + UpdateInputVolume(*rms_error_db); + } } is_first_frame_ = false; - if (frames_since_update_gain_ < kUpdateInputVolumeWaitFrames) { - ++frames_since_update_gain_; - } } void MonoInputVolumeController::HandleClipping(int clipped_level_step) { RTC_DCHECK_GT(clipped_level_step, 0); - // Always decrease the maximum level, even if the current level is below - // threshold. - SetMaxLevel(std::max(clipped_level_min_, max_level_ - clipped_level_step)); + // Always decrease the maximum input volume, even if the current input volume + // is below threshold. + SetMaxLevel(std::max(min_input_volume_after_clipping_, + max_input_volume_ - clipped_level_step)); if (log_to_histograms_) { RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.AgcClippingAdjustmentAllowed", - level_ - clipped_level_step >= clipped_level_min_); + last_recommended_input_volume_ - clipped_level_step >= + min_input_volume_after_clipping_); } - if (level_ > clipped_level_min_) { - // Don't try to adjust the level if we're already below the limit. As - // a consequence, if the user has brought the level above the limit, we - // will still not react until the postproc updates the level. - SetLevel(std::max(clipped_level_min_, level_ - clipped_level_step)); - frames_since_update_gain_ = 0; + if (last_recommended_input_volume_ > min_input_volume_after_clipping_) { + // Don't try to adjust the input volume if we're already below the limit. As + // a consequence, if the user has brought the input volume above the limit, + // we will still not react until the postproc updates the input volume. 
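// Editor's sketch (example values assumed, defaults from
// InputVolumeController::Config further below): with clipped_level_step == 15
// and min_input_volume_after_clipping == 70 (Config::clipped_level_min), a
// clipping event at a recommended volume of 200 lowers the maximum allowed
// volume by 15 and the recommended volume to 185; repeated clipping keeps
// stepping both down until they bottom out at 70.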
+ SetInputVolume( + std::max(min_input_volume_after_clipping_, + last_recommended_input_volume_ - clipped_level_step)); + frames_since_update_input_volume_ = 0; + speech_frames_since_update_input_volume_ = 0; is_first_frame_ = false; } } -void MonoInputVolumeController::SetLevel(int new_level) { - int voe_level = recommended_input_volume_; - if (voe_level == 0) { +void MonoInputVolumeController::SetInputVolume(int new_volume) { + int applied_input_volume = recommended_input_volume_; + if (applied_input_volume == 0) { RTC_DLOG(LS_INFO) - << "[agc] VolumeCallbacks returned level=0, taking no action."; + << "[AGC2] The applied input volume is zero, taking no action."; return; } - if (voe_level < 0 || voe_level > kMaxMicLevel) { - RTC_LOG(LS_ERROR) << "VolumeCallbacks returned an invalid level=" - << voe_level; + if (applied_input_volume < 0 || applied_input_volume > kMaxInputVolume) { + RTC_LOG(LS_ERROR) << "[AGC2] Invalid value for the applied input volume: " + << applied_input_volume; return; } - // Detect manual input volume adjustments by checking if the current level - // `voe_level` is outside of the `[level_ - kLevelQuantizationSlack, level_ + - // kLevelQuantizationSlack]` range where `level_` is the last input volume - // known by this gain controller. - if (voe_level > level_ + kLevelQuantizationSlack || - voe_level < level_ - kLevelQuantizationSlack) { - RTC_DLOG(LS_INFO) << "[agc] Mic volume was manually adjusted. Updating " - "stored level from " - << level_ << " to " << voe_level; - level_ = voe_level; + // Detect manual input volume adjustments by checking if the + // `applied_input_volume` is outside of the `[last_recommended_input_volume_ - + // kVolumeQuantizationSlack, last_recommended_input_volume_ + + // kVolumeQuantizationSlack]` range. + if (applied_input_volume > + last_recommended_input_volume_ + kVolumeQuantizationSlack || + applied_input_volume < + last_recommended_input_volume_ - kVolumeQuantizationSlack) { + RTC_DLOG(LS_INFO) + << "[AGC2] The input volume was manually adjusted. Updating " + "stored input volume from " + << last_recommended_input_volume_ << " to " << applied_input_volume; + last_recommended_input_volume_ = applied_input_volume; // Always allow the user to increase the volume. - if (level_ > max_level_) { - SetMaxLevel(level_); + if (last_recommended_input_volume_ > max_input_volume_) { + SetMaxLevel(last_recommended_input_volume_); } // Take no action in this case, since we can't be sure when the volume // was manually adjusted. 
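// Editor's note (illustrative values assumed): with kVolumeQuantizationSlack ==
// 25 and a last recommended volume of 120, an applied volume outside [95, 145]
// is treated as a manual user adjustment: the stored volume is re-synced to the
// applied value and no new recommendation is made for this frame. Smaller
// deviations are attributed to OS volume quantization and the controller
// proceeds with its own recommendation.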
- frames_since_update_gain_ = 0; + frames_since_update_input_volume_ = 0; + speech_frames_since_update_input_volume_ = 0; is_first_frame_ = false; return; } - new_level = std::min(new_level, max_level_); - if (new_level == level_) { + new_volume = std::min(new_volume, max_input_volume_); + if (new_volume == last_recommended_input_volume_) { return; } - recommended_input_volume_ = new_level; - RTC_DLOG(LS_INFO) << "[agc] voe_level=" << voe_level << ", level_=" << level_ - << ", new_level=" << new_level; - level_ = new_level; + recommended_input_volume_ = new_volume; + RTC_DLOG(LS_INFO) << "[AGC2] Applied input volume: " << applied_input_volume + << " | last recommended input volume: " + << last_recommended_input_volume_ + << " | newly recommended input volume: " << new_volume; + last_recommended_input_volume_ = new_volume; } -void MonoInputVolumeController::SetMaxLevel(int level) { - RTC_DCHECK_GE(level, clipped_level_min_); - max_level_ = level; - RTC_DLOG(LS_INFO) << "[agc] max_level_=" << max_level_; +void MonoInputVolumeController::SetMaxLevel(int input_volume) { + RTC_DCHECK_GE(input_volume, min_input_volume_after_clipping_); + max_input_volume_ = input_volume; + RTC_DLOG(LS_INFO) << "[AGC2] Maximum input volume updated: " + << max_input_volume_; } void MonoInputVolumeController::HandleCaptureOutputUsedChange( @@ -287,60 +304,57 @@ void MonoInputVolumeController::HandleCaptureOutputUsedChange( } int MonoInputVolumeController::CheckVolumeAndReset() { - int level = recommended_input_volume_; + int input_volume = recommended_input_volume_; // Reasons for taking action at startup: // 1) A person starting a call is expected to be heard. - // 2) Independent of interpretation of `level` == 0 we should raise it so the - // AGC can do its job properly. - if (level == 0 && !startup_) { + // 2) Independent of interpretation of `input_volume` == 0 we should raise it + // so the AGC can do its job properly. + if (input_volume == 0 && !startup_) { RTC_DLOG(LS_INFO) - << "[agc] VolumeCallbacks returned level=0, taking no action."; + << "[AGC2] The applied input volume is zero, taking no action."; return 0; } - if (level < 0 || level > kMaxMicLevel) { - RTC_LOG(LS_ERROR) << "[agc] VolumeCallbacks returned an invalid level=" - << level; + if (input_volume < 0 || input_volume > kMaxInputVolume) { + RTC_LOG(LS_ERROR) << "[AGC2] Invalid value for the applied input volume: " + << input_volume; return -1; } - RTC_DLOG(LS_INFO) << "[agc] Initial GetMicVolume()=" << level; + RTC_DLOG(LS_INFO) << "[AGC2] Initial input volume: " << input_volume; - if (level < min_mic_level_) { - level = min_mic_level_; - RTC_DLOG(LS_INFO) << "[agc] Initial volume too low, raising to " << level; - recommended_input_volume_ = level; + if (input_volume < min_input_volume_) { + input_volume = min_input_volume_; + RTC_DLOG(LS_INFO) + << "[AGC2] The initial input volume is too low, raising to " + << input_volume; + recommended_input_volume_ = input_volume; } - level_ = level; + last_recommended_input_volume_ = input_volume; startup_ = false; - frames_since_update_gain_ = 0; + frames_since_update_input_volume_ = 0; + speech_frames_since_update_input_volume_ = 0; is_first_frame_ = true; return 0; } -void MonoInputVolumeController::UpdateInputVolume(int rms_error_dbfs) { - // Always reset the counter regardless of whether the gain is changed - // or not. 
- frames_since_update_gain_ = 0; - - const int residual_gain = rtc::SafeClamp( - rms_error_dbfs, -kMaxResidualGainChange, kMaxResidualGainChange); - - RTC_DLOG(LS_INFO) << "[agc] rms_error_dbfs=" << rms_error_dbfs - << ", residual_gain=" << residual_gain; - - if (residual_gain == 0) { +void MonoInputVolumeController::UpdateInputVolume(int rms_error_db) { + RTC_DLOG(LS_INFO) << "[AGC2] RMS error: " << rms_error_db << " dB"; + // Prevent too large microphone input volume changes by clamping the RMS + // error. + rms_error_db = + rtc::SafeClamp(rms_error_db, -KMaxAbsRmsErrorDbfs, KMaxAbsRmsErrorDbfs); + if (rms_error_db == 0) { return; } - - SetLevel(LevelFromGainError(residual_gain, level_, min_mic_level_)); + SetInputVolume(ComputeVolumeUpdate( + rms_error_db, last_recommended_input_volume_, min_input_volume_)); } InputVolumeController::InputVolumeController(int num_capture_channels, const Config& config) - : analog_controller_enabled_(config.enabled), - num_capture_channels_(num_capture_channels), - min_mic_level_override_(GetMinMicLevelOverride()), + : num_capture_channels_(num_capture_channels), + min_input_volume_(config.min_input_volume), capture_output_used_(true), clipped_level_step_(config.clipped_level_step), clipped_ratio_threshold_(config.clipped_ratio_threshold), @@ -358,17 +372,15 @@ InputVolumeController::InputVolumeController(int num_capture_channels, target_range_max_dbfs_(config.target_range_max_dbfs), target_range_min_dbfs_(config.target_range_min_dbfs), channel_controllers_(num_capture_channels) { - RTC_LOG(LS_INFO) << "[agc] analog controller enabled: " - << (analog_controller_enabled_ ? "yes" : "no"); - const int min_mic_level = min_mic_level_override_.value_or(kMinMicLevel); - RTC_LOG(LS_INFO) << "[agc] Min mic level: " << min_mic_level - << " (overridden: " - << (min_mic_level_override_.has_value() ? "yes" : "no") - << ")"; + RTC_LOG(LS_INFO) + << "[AGC2] Input volume controller enabled. Minimum input volume: " + << min_input_volume_; for (auto& controller : channel_controllers_) { controller = std::make_unique( - config.clipped_level_min, min_mic_level); + config.clipped_level_min, min_input_volume_, + config.update_input_volume_wait_frames, + config.speech_probability_threshold, config.speech_ratio_threshold); } RTC_DCHECK(!channel_controllers_.empty()); @@ -383,7 +395,6 @@ InputVolumeController::InputVolumeController(int num_capture_channels, InputVolumeController::~InputVolumeController() {} void InputVolumeController::Initialize() { - RTC_DLOG(LS_INFO) << "InputVolumeController::Initialize"; for (auto& controller : channel_controllers_) { controller->Initialize(); } @@ -392,9 +403,18 @@ void InputVolumeController::Initialize() { AggregateChannelLevels(); clipping_rate_log_ = 0.0f; clipping_rate_log_counter_ = 0; + + applied_input_volume_ = absl::nullopt; } -void InputVolumeController::AnalyzePreProcess(const AudioBuffer& audio_buffer) { +void InputVolumeController::AnalyzeInputAudio(int applied_input_volume, + const AudioBuffer& audio_buffer) { + RTC_DCHECK_GE(applied_input_volume, 0); + RTC_DCHECK_LE(applied_input_volume, 255); + + SetAppliedInputVolume(applied_input_volume); + + RTC_DCHECK_EQ(audio_buffer.num_channels(), channel_controllers_.size()); const float* const* audio = audio_buffer.channels_const(); size_t samples_per_channel = audio_buffer.num_frames(); RTC_DCHECK(audio); @@ -413,10 +433,10 @@ void InputVolumeController::AnalyzePreProcess(const AudioBuffer& audio_buffer) { // Check for clipped samples. 
We do this in the preprocessing phase in order // to catch clipped echo as well. // - // If we find a sufficiently clipped frame, drop the current microphone level - // and enforce a new maximum level, dropped the same amount from the current - // maximum. This harsh treatment is an effort to avoid repeated clipped echo - // events. + // If we find a sufficiently clipped frame, drop the current microphone + // input volume and enforce a new maximum input volume, dropped the same + // amount from the current maximum. This harsh treatment is an effort to avoid + // repeated clipped echo events. float clipped_ratio = ComputeClippedRatio(audio, num_capture_channels_, samples_per_channel); clipping_rate_log_ = std::max(clipped_ratio, clipping_rate_log_); @@ -440,25 +460,30 @@ void InputVolumeController::AnalyzePreProcess(const AudioBuffer& audio_buffer) { for (int channel = 0; channel < num_capture_channels_; ++channel) { const auto step = clipping_predictor_->EstimateClippedLevelStep( channel, recommended_input_volume_, clipped_level_step_, - channel_controllers_[channel]->clipped_level_min(), kMaxMicLevel); + channel_controllers_[channel]->min_input_volume_after_clipping(), + kMaxInputVolume); if (step.has_value()) { predicted_step = std::max(predicted_step, step.value()); clipping_predicted = true; } } } + if (clipping_detected) { - RTC_DLOG(LS_INFO) << "[agc] Clipping detected. clipped_ratio=" - << clipped_ratio; + RTC_DLOG(LS_INFO) << "[AGC2] Clipping detected (ratio: " << clipped_ratio + << ")"; } + int step = clipped_level_step_; if (clipping_predicted) { predicted_step = std::max(predicted_step, clipped_level_step_); - RTC_DLOG(LS_INFO) << "[agc] Clipping predicted. step=" << predicted_step; + RTC_DLOG(LS_INFO) << "[AGC2] Clipping predicted (volume down step: " + << predicted_step << ")"; if (use_clipping_predictor_step_) { step = predicted_step; } } + if (clipping_detected || (clipping_predicted && use_clipping_predictor_step_)) { for (auto& state_ch : channel_controllers_) { @@ -469,29 +494,47 @@ void InputVolumeController::AnalyzePreProcess(const AudioBuffer& audio_buffer) { clipping_predictor_->Reset(); } } + AggregateChannelLevels(); } -void InputVolumeController::Process(absl::optional speech_probability, - absl::optional speech_level_dbfs) { - AggregateChannelLevels(); - - if (!capture_output_used_) { - return; +absl::optional InputVolumeController::RecommendInputVolume( + float speech_probability, + absl::optional speech_level_dbfs) { + // Only process if applied input volume is set. + if (!applied_input_volume_.has_value()) { + RTC_LOG(LS_ERROR) << "[AGC2] Applied input volume not set."; + return absl::nullopt; } - absl::optional rms_error_dbfs; - if (speech_probability.has_value() && speech_level_dbfs.has_value()) { - rms_error_dbfs = - GetSpeechLevelErrorDb(*speech_level_dbfs, *speech_probability, - target_range_min_dbfs_, target_range_max_dbfs_); + AggregateChannelLevels(); + const int volume_after_clipping_handling = recommended_input_volume_; + + if (!capture_output_used_) { + return applied_input_volume_; + } + + absl::optional rms_error_db; + if (speech_level_dbfs.has_value()) { + // Compute the error for all frames (both speech and non-speech frames). 
+ rms_error_db = GetSpeechLevelRmsErrorDb( + *speech_level_dbfs, target_range_min_dbfs_, target_range_max_dbfs_); } for (auto& controller : channel_controllers_) { - controller->Process(rms_error_dbfs); + controller->Process(rms_error_db, speech_probability); } AggregateChannelLevels(); + if (volume_after_clipping_handling != recommended_input_volume_) { + // The recommended input volume was adjusted in order to match the target + // level. + UpdateHistogramOnRecommendedInputVolumeChangeToMatchTarget( + recommended_input_volume_); + } + + applied_input_volume_ = absl::nullopt; + return recommended_input_volume(); } void InputVolumeController::HandleCaptureOutputUsedChange( @@ -499,16 +542,15 @@ void InputVolumeController::HandleCaptureOutputUsedChange( for (auto& controller : channel_controllers_) { controller->HandleCaptureOutputUsedChange(capture_output_used); } + capture_output_used_ = capture_output_used; } -void InputVolumeController::set_stream_analog_level(int level) { - if (!analog_controller_enabled_) { - recommended_input_volume_ = level; - } +void InputVolumeController::SetAppliedInputVolume(int input_volume) { + applied_input_volume_ = input_volume; for (auto& controller : channel_controllers_) { - controller->set_stream_analog_level(level); + controller->set_stream_analog_level(input_volume); } AggregateChannelLevels(); @@ -519,21 +561,20 @@ void InputVolumeController::AggregateChannelLevels() { channel_controllers_[0]->recommended_analog_level(); channel_controlling_gain_ = 0; for (size_t ch = 1; ch < channel_controllers_.size(); ++ch) { - int level = channel_controllers_[ch]->recommended_analog_level(); - if (level < new_recommended_input_volume) { - new_recommended_input_volume = level; + int input_volume = channel_controllers_[ch]->recommended_analog_level(); + if (input_volume < new_recommended_input_volume) { + new_recommended_input_volume = input_volume; channel_controlling_gain_ = static_cast(ch); } } - if (min_mic_level_override_.has_value() && new_recommended_input_volume > 0) { + // Enforce the minimum input volume when a recommendation is made. + if (applied_input_volume_.has_value() && *applied_input_volume_ > 0) { new_recommended_input_volume = - std::max(new_recommended_input_volume, *min_mic_level_override_); + std::max(new_recommended_input_volume, min_input_volume_); } - if (analog_controller_enabled_) { - recommended_input_volume_ = new_recommended_input_volume; - } + recommended_input_volume_ = new_recommended_input_volume; } } // namespace webrtc diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/input_volume_controller.h b/third_party/libwebrtc/modules/audio_processing/agc2/input_volume_controller.h index 941dd59b7060..40eae8879e26 100644 --- a/third_party/libwebrtc/modules/audio_processing/agc2/input_volume_controller.h +++ b/third_party/libwebrtc/modules/audio_processing/agc2/input_volume_controller.h @@ -35,17 +35,16 @@ class InputVolumeController final { public: // Config for the constructor. struct Config { - bool enabled = false; - // TODO(bugs.webrtc.org/1275566): Describe `startup_min_volume`. - int startup_min_volume = 0; - // Lowest analog microphone level that will be applied in response to - // clipping. + // Minimum input volume that can be recommended. Not enforced when the + // applied input volume is zero outside startup. + int min_input_volume = 20; + // Lowest input volume level that will be applied in response to clipping. int clipped_level_min = 70; - // Amount the microphone level is lowered with every clipping event. 
- // Limited to (0, 255]. + // Amount input volume level is lowered with every clipping event. Limited + // to (0, 255]. int clipped_level_step = 15; // Proportion of clipped samples required to declare a clipping event. - // Limited to (0.f, 1.f). + // Limited to (0.0f, 1.0f). float clipped_ratio_threshold = 0.1f; // Time in frames to wait after a clipping event before checking again. // Limited to values higher than 0. @@ -56,13 +55,17 @@ class InputVolumeController final { // [`target_range_min_dbfs`, `target_range_max_dbfs`], no input volume // adjustments are done based on the speech level. For speech levels below // and above the range, the targets `target_range_min_dbfs` and - // `target_range_max_dbfs` are used, respectively. The example values - // `target_range_max_dbfs` -18 and `target_range_min_dbfs` -48 refer to a - // configuration where the zero-digital-gain target is -18 dBFS and the - // digital gain control is expected to compensate for speech level errors - // up to -30 dB. - int target_range_max_dbfs = -18; - int target_range_min_dbfs = -48; + // `target_range_max_dbfs` are used, respectively. + int target_range_max_dbfs = -30; + int target_range_min_dbfs = -50; + // Number of wait frames between the recommended input volume updates. + int update_input_volume_wait_frames = 100; + // Speech probability threshold: speech probabilities below the threshold + // are considered silence. Limited to [0.0f, 1.0f]. + float speech_probability_threshold = 0.7f; + // Minimum speech frame ratio for volume updates to be allowed. Limited to + // [0.0f, 1.0f]. + float speech_ratio_threshold = 0.9f; }; // Ctor. `num_capture_channels` specifies the number of channels for the audio @@ -77,31 +80,24 @@ class InputVolumeController final { // TODO(webrtc:7494): Integrate initialization into ctor and remove. void Initialize(); - // Sets the applied input volume. - void set_stream_analog_level(int level); + // Analyzes `audio_buffer` before `RecommendInputVolume()` is called so tha + // the analysis can be performed before digital processing operations take + // place (e.g., echo cancellation). The analysis consists of input clipping + // detection and prediction (if enabled). + void AnalyzeInputAudio(int applied_input_volume, + const AudioBuffer& audio_buffer); - // TODO(bugs.webrtc.org/7494): Add argument for the applied input volume and - // remove `set_stream_analog_level()`. - // Analyzes `audio` before `Process()` is called so that the analysis can be - // performed before digital processing operations take place (e.g., echo - // cancellation). The analysis consists of input clipping detection and - // prediction (if enabled). Must be called after `set_stream_analog_level()`. - void AnalyzePreProcess(const AudioBuffer& audio_buffer); - - // Adjusts the recommended input volume upwards/downwards based on - // `speech_level_dbfs`. Must be called after `AnalyzePreProcess()`. The value - // of `speech_probability` is expected to be in the range [0.0f, 1.0f] and - // `speech_level_dbfs` in the the range [-90.f, 30.0f]. - void Process(absl::optional speech_probability, - absl::optional speech_level_dbfs); - - // TODO(bugs.webrtc.org/7494): Return recommended input volume and remove - // `recommended_analog_level()`. - // Returns the recommended input volume. If the input volume contoller is - // disabled, returns the input volume set via the latest - // `set_stream_analog_level()` call. Must be called after - // `AnalyzePreProcess()` and `Process()`. 
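// Editor's sketch (assumed caller code, not part of the patch): with the
// AnalyzeInputAudio()/RecommendInputVolume() declarations in this header, a
// capture-side caller drives the controller once per 10 ms frame roughly as
// follows; ReadMicVolume() and ApplyMicVolume() are hypothetical platform
// helpers, and speech_probability / speech_level_dbfs come from the digital
// gain controller after echo cancellation and noise suppression:
//
//   int applied = ReadMicVolume();                        // current device volume
//   controller.AnalyzeInputAudio(applied, audio_buffer);  // clipping detection, pre-AEC/NS
//   absl::optional<int> recommended =
//       controller.RecommendInputVolume(speech_probability, speech_level_dbfs);
//   if (recommended.has_value()) {
//     ApplyMicVolume(*recommended);                       // becomes next frame's applied volume
//   }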
- int recommended_analog_level() const { return recommended_input_volume_; } + // Adjusts the recommended input volume upwards/downwards based on the result + // of `AnalyzeInputAudio()` and on `speech_level_dbfs` (if specified). Must + // be called after `AnalyzeInputAudio()`. The value of `speech_probability` + // is expected to be in the range [0, 1] and `speech_level_dbfs` in the range + // [-90, 30] and both should be estimated after echo cancellation and noise + // suppression are applied. Returns a non-empty input volume recommendation if + // available. If `capture_output_used_` is true, returns the applied input + // volume. + absl::optional RecommendInputVolume( + float speech_probability, + absl::optional speech_level_dbfs); // Stores whether the capture output will be used or not. Call when the // capture stream output has been flagged to be used/not-used. If unused, the @@ -118,40 +114,48 @@ class InputVolumeController final { return use_clipping_predictor_step_; } + // Only use for testing: Use `RecommendInputVolume()` elsewhere. + // Returns the value of a member variable, needed for testing + // `AnalyzeInputAudio()`. + int recommended_input_volume() const { return recommended_input_volume_; } + + // Only use for testing. + bool capture_output_used() const { return capture_output_used_; } + private: friend class InputVolumeControllerTestHelper; + FRIEND_TEST_ALL_PREFIXES(InputVolumeControllerTest, MinInputVolumeDefault); + FRIEND_TEST_ALL_PREFIXES(InputVolumeControllerTest, MinInputVolumeDisabled); FRIEND_TEST_ALL_PREFIXES(InputVolumeControllerTest, - AgcMinMicLevelExperimentDefault); + MinInputVolumeOutOfRangeAbove); FRIEND_TEST_ALL_PREFIXES(InputVolumeControllerTest, - AgcMinMicLevelExperimentDisabled); - FRIEND_TEST_ALL_PREFIXES(InputVolumeControllerTest, - AgcMinMicLevelExperimentOutOfRangeAbove); - FRIEND_TEST_ALL_PREFIXES(InputVolumeControllerTest, - AgcMinMicLevelExperimentOutOfRangeBelow); - FRIEND_TEST_ALL_PREFIXES(InputVolumeControllerTest, - AgcMinMicLevelExperimentEnabled50); + MinInputVolumeOutOfRangeBelow); + FRIEND_TEST_ALL_PREFIXES(InputVolumeControllerTest, MinInputVolumeEnabled50); FRIEND_TEST_ALL_PREFIXES(InputVolumeControllerParametrizedTest, ClippingParametersVerified); - void AggregateChannelLevels(); + // Sets the applied input volume and resets the recommended input volume. + void SetAppliedInputVolume(int level); - const bool analog_controller_enabled_; + void AggregateChannelLevels(); const int num_capture_channels_; - // If not empty, the value is used to override the minimum input volume. - const absl::optional min_mic_level_override_; + // Minimum input volume that can be recommended. + const int min_input_volume_; - // TODO(bugs.webrtc.org/7494): Create a separate member for the applied input - // volume. // TODO(bugs.webrtc.org/7494): Once // `AudioProcessingImpl::recommended_stream_analog_level()` becomes a trivial // getter, leave uninitialized. - // Recommended input volume. After `set_stream_analog_level()` is called it - // holds the observed input volume. Possibly updated by `AnalyzePreProcess()` - // and `Process()`; after these calls, holds the recommended input volume. + // Recommended input volume. After `SetAppliedInputVolume()` is called it + // holds holds the observed input volume. Possibly updated by + // `AnalyzePreProcess()` and `Process()`; after these calls, holds the + // recommended input volume. int recommended_input_volume_ = 0; + // Applied input volume. 
After `SetAppliedInputVolume()` is called it holds + // the current applied volume. + absl::optional applied_input_volume_; bool capture_output_used_; @@ -181,7 +185,11 @@ class InputVolumeController final { // convention. class MonoInputVolumeController { public: - MonoInputVolumeController(int clipped_level_min, int min_mic_level); + MonoInputVolumeController(int min_input_volume_after_clipping, + int min_input_volume, + int update_input_volume_wait_frames, + float speech_probability_threshold, + float speech_ratio_threshold); ~MonoInputVolumeController(); MonoInputVolumeController(const MonoInputVolumeController&) = delete; MonoInputVolumeController& operator=(const MonoInputVolumeController&) = @@ -191,32 +199,38 @@ class MonoInputVolumeController { void HandleCaptureOutputUsedChange(bool capture_output_used); // Sets the current input volume. - void set_stream_analog_level(int level) { recommended_input_volume_ = level; } + void set_stream_analog_level(int input_volume) { + recommended_input_volume_ = input_volume; + } // Lowers the recommended input volume in response to clipping based on the // suggested reduction `clipped_level_step`. Must be called after // `set_stream_analog_level()`. void HandleClipping(int clipped_level_step); - // Adjusts the recommended input volume upwards/downwards depending on whether - // `rms_error_dbfs` is positive or negative. Must be called after - // `HandleClipping()`. - void Process(absl::optional rms_error_dbfs); + // TODO(bugs.webrtc.org/7494): Rename, audio not passed to the method anymore. + // Adjusts the recommended input volume upwards/downwards depending on the + // result of `HandleClipping()` and on `rms_error_dbfs`. Updates are only + // allowed for active speech segments and when `rms_error_dbfs` is not empty. + // Must be called after `HandleClipping()`. + void Process(absl::optional rms_error_dbfs, float speech_probability); // Returns the recommended input volume. Must be called after `Process()`. int recommended_analog_level() const { return recommended_input_volume_; } void ActivateLogging() { log_to_histograms_ = true; } - int clipped_level_min() const { return clipped_level_min_; } + int min_input_volume_after_clipping() const { + return min_input_volume_after_clipping_; + } // Only used for testing. - int min_mic_level() const { return min_mic_level_; } + int min_input_volume() const { return min_input_volume_; } private: // Sets a new input volume, after first checking that it hasn't been updated // by the user, in which case no action is taken. - void SetLevel(int new_level); + void SetInputVolume(int new_volume); // Sets the maximum input volume that the input volume controller is allowed // to apply. The volume must be at least `kClippedLevelMin`. @@ -229,10 +243,11 @@ class MonoInputVolumeController { // action and cache the updated level. void UpdateInputVolume(int rms_error_dbfs); - const int min_mic_level_; + const int min_input_volume_; + const int min_input_volume_after_clipping_; + int max_input_volume_; - int level_ = 0; - int max_level_; + int last_recommended_input_volume_ = 0; bool capture_output_used_ = true; bool check_volume_on_next_process_ = true; @@ -248,11 +263,18 @@ class MonoInputVolumeController { bool log_to_histograms_ = false; - const int clipped_level_min_; - - // Frames since the last `UpdateInputVolume()` call. - int frames_since_update_gain_ = 0; + // Counters for frames and speech frames since the last update in the + // recommended input volume. 
+ const int update_input_volume_wait_frames_; + int frames_since_update_input_volume_ = 0; + int speech_frames_since_update_input_volume_ = 0; bool is_first_frame_ = true; + + // Speech probability threshold for a frame to be considered speech (instead + // of silence). Limited to [0.0f, 1.0f]. + const float speech_probability_threshold_; + // Minimum ratio of speech frames. Limited to [0.0f, 1.0f]. + const float speech_ratio_threshold_; }; } // namespace webrtc diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/input_volume_controller_unittest.cc b/third_party/libwebrtc/modules/audio_processing/agc2/input_volume_controller_unittest.cc index 68544d62bbe1..638cfd1df3df 100644 --- a/third_party/libwebrtc/modules/audio_processing/agc2/input_volume_controller_unittest.cc +++ b/third_party/libwebrtc/modules/audio_processing/agc2/input_volume_controller_unittest.cc @@ -14,11 +14,11 @@ #include #include #include -#include #include #include "rtc_base/numerics/safe_minmax.h" #include "rtc_base/strings/string_builder.h" +#include "system_wrappers/include/metrics.h" #include "test/field_trial.h" #include "test/gmock.h" #include "test/gtest.h" @@ -38,12 +38,15 @@ constexpr int kNumChannels = 1; constexpr int kInitialInputVolume = 128; constexpr int kClippedMin = 165; // Arbitrary, but different from the default. constexpr float kAboveClippedThreshold = 0.2f; -constexpr int kMinMicLevel = 12; +constexpr int kMinMicLevel = 20; constexpr int kClippedLevelStep = 15; constexpr float kClippedRatioThreshold = 0.1f; constexpr int kClippedWaitFrames = 300; constexpr float kHighSpeechProbability = 0.7f; +constexpr float kLowSpeechProbability = 0.1f; constexpr float kSpeechLevel = -25.0f; +constexpr float kSpeechProbabilityThreshold = 0.5f; +constexpr float kSpeechRatioThreshold = 0.8f; constexpr float kMinSample = std::numeric_limits::min(); constexpr float kMaxSample = std::numeric_limits::max(); @@ -53,18 +56,16 @@ using ClippingPredictorConfig = AudioProcessing::Config::GainController1:: using InputVolumeControllerConfig = InputVolumeController::Config; -constexpr InputVolumeControllerConfig kDefaultInputVolumeControllerConfig{}; constexpr ClippingPredictorConfig kDefaultClippingPredictorConfig{}; std::unique_ptr CreateInputVolumeController( - int startup_min_volume, - int clipped_level_step, - float clipped_ratio_threshold, - int clipped_wait_frames, - bool enable_clipping_predictor = false) { + int clipped_level_step = kClippedLevelStep, + float clipped_ratio_threshold = kClippedRatioThreshold, + int clipped_wait_frames = kClippedWaitFrames, + bool enable_clipping_predictor = false, + int update_input_volume_wait_frames = 0) { InputVolumeControllerConfig config{ - .enabled = true, - .startup_min_volume = startup_min_volume, + .min_input_volume = kMinMicLevel, .clipped_level_min = kClippedMin, .clipped_level_step = clipped_level_step, .clipped_ratio_threshold = clipped_ratio_threshold, @@ -72,81 +73,15 @@ std::unique_ptr CreateInputVolumeController( .enable_clipping_predictor = enable_clipping_predictor, .target_range_max_dbfs = -18, .target_range_min_dbfs = -30, + .update_input_volume_wait_frames = update_input_volume_wait_frames, + .speech_probability_threshold = kSpeechProbabilityThreshold, + .speech_ratio_threshold = kSpeechRatioThreshold, }; return std::make_unique(/*num_capture_channels=*/1, config); } -// Deprecated. -// TODO(bugs.webrtc.org/7494): Delete this helper, use -// `InputVolumeControllerTestHelper::CallAgcSequence()` instead. 
-// Calls `AnalyzePreProcess()` on `manager` `num_calls` times. `peak_ratio` is a -// value in [0, 1] which determines the amplitude of the samples (1 maps to full -// scale). The first half of the calls is made on frames which are half filled -// with zeros in order to simulate a signal with different crest factors. -void CallPreProcessAudioBuffer(int num_calls, - float peak_ratio, - InputVolumeController& manager) { - RTC_DCHECK_LE(peak_ratio, 1.0f); - AudioBuffer audio_buffer(kSampleRateHz, kNumChannels, kSampleRateHz, - kNumChannels, kSampleRateHz, kNumChannels); - const int num_channels = audio_buffer.num_channels(); - const int num_frames = audio_buffer.num_frames(); - - // Make half of the calls with half zeroed frames. - for (int ch = 0; ch < num_channels; ++ch) { - // 50% of the samples in one frame are zero. - for (int i = 0; i < num_frames; i += 2) { - audio_buffer.channels()[ch][i] = peak_ratio * 32767.0f; - audio_buffer.channels()[ch][i + 1] = 0.0f; - } - } - for (int n = 0; n < num_calls / 2; ++n) { - manager.AnalyzePreProcess(audio_buffer); - } - - // Make the remaining half of the calls with frames whose samples are all set. - for (int ch = 0; ch < num_channels; ++ch) { - for (int i = 0; i < num_frames; ++i) { - audio_buffer.channels()[ch][i] = peak_ratio * 32767.0f; - } - } - for (int n = 0; n < num_calls - num_calls / 2; ++n) { - manager.AnalyzePreProcess(audio_buffer); - } -} - -constexpr char kMinMicLevelFieldTrial[] = - "WebRTC-Audio-2ndAgcMinMicLevelExperiment"; - -std::string GetAgcMinMicLevelExperimentFieldTrial(const std::string& value) { - char field_trial_buffer[64]; - rtc::SimpleStringBuilder builder(field_trial_buffer); - builder << kMinMicLevelFieldTrial << "/" << value << "/"; - return builder.str(); -} - -std::string GetAgcMinMicLevelExperimentFieldTrialEnabled( - int enabled_value, - const std::string& suffix = "") { - RTC_DCHECK_GE(enabled_value, 0); - RTC_DCHECK_LE(enabled_value, 255); - char field_trial_buffer[64]; - rtc::SimpleStringBuilder builder(field_trial_buffer); - builder << kMinMicLevelFieldTrial << "/Enabled-" << enabled_value << suffix - << "/"; - return builder.str(); -} - -std::string GetAgcMinMicLevelExperimentFieldTrial( - absl::optional min_mic_level) { - if (min_mic_level.has_value()) { - return GetAgcMinMicLevelExperimentFieldTrialEnabled(*min_mic_level); - } - return GetAgcMinMicLevelExperimentFieldTrial("Disabled"); -} - // (Over)writes `samples_value` for the samples in `audio_buffer`. // When `clipped_ratio`, a value in [0, 1], is greater than 0, the corresponding // fraction of the frame is set to a full scale value to simulate clipping. @@ -171,17 +106,19 @@ void WriteAudioBufferSamples(float samples_value, } } -// Deprecated. -// TODO(bugs.webrtc.org/7494): Delete this helper, use -// `InputVolumeControllerTestHelper::CallAgcSequence()` instead. -void CallPreProcessAndProcess(int num_calls, - const AudioBuffer& audio_buffer, - absl::optional speech_probability, - absl::optional speech_level, - InputVolumeController& manager) { - for (int n = 0; n < num_calls; ++n) { - manager.AnalyzePreProcess(audio_buffer); - manager.Process(speech_probability, speech_level); +// (Over)writes samples in `audio_buffer`. Alternates samples `samples_value` +// and zero. 
+void WriteAlternatingAudioBufferSamples(float samples_value, + AudioBuffer& audio_buffer) { + RTC_DCHECK_GE(samples_value, kMinSample); + RTC_DCHECK_LE(samples_value, kMaxSample); + const int num_channels = audio_buffer.num_channels(); + const int num_frames = audio_buffer.num_frames(); + for (int ch = 0; ch < num_channels; ++ch) { + for (int i = 0; i < num_frames; i += 2) { + audio_buffer.channels()[ch][i] = samples_value; + audio_buffer.channels()[ch][i + 1] = 0.0f; + } } } @@ -210,16 +147,19 @@ class SpeechSamplesReader { } // Reads `num_frames` 10 ms frames from the beginning of the PCM file, applies - // `gain_db` and feeds the frames into `agc` by calling `AnalyzePreProcess()` - // and `Process()` for each frame. Reads the number of 10 ms frames available - // in the PCM file if `num_frames` is too large - i.e., does not loop. - // `speech_probability` and `speech_level` are passed to - // `Process()`. - void Feed(int num_frames, - int gain_db, - absl::optional speech_probability, - absl::optional speech_level, - InputVolumeController& agc) { + // `gain_db` and feeds the frames into `controller` by calling + // `AnalyzeInputAudio()` and `RecommendInputVolume()` for each frame. Reads + // the number of 10 ms frames available in the PCM file if `num_frames` is too + // large - i.e., does not loop. `speech_probability` and `speech_level_dbfs` + // are passed to `RecommendInputVolume()`. + int Feed(int num_frames, + int applied_input_volume, + int gain_db, + float speech_probability, + absl::optional speech_level_dbfs, + InputVolumeController& controller) { + RTC_DCHECK(controller.capture_output_used()); + float gain = std::pow(10.0f, gain_db / 20.0f); // From dB to linear gain. is_.seekg(0, is_.beg); // Start from the beginning of the PCM file. @@ -236,10 +176,17 @@ class SpeechSamplesReader { return rtc::SafeClamp(static_cast(v) * gain, kMinSample, kMaxSample); }); + controller.AnalyzeInputAudio(applied_input_volume, audio_buffer_); + const auto recommended_input_volume = controller.RecommendInputVolume( + speech_probability, speech_level_dbfs); - agc.AnalyzePreProcess(audio_buffer_); - agc.Process(speech_probability, speech_level); + // Expect no errors: Applied volume set for every frame; + // `RecommendInputVolume()` returns a non-empty value. + EXPECT_TRUE(recommended_input_volume.has_value()); + + applied_input_volume = *recommended_input_volume; } + return applied_input_volume; } private: @@ -249,14 +196,24 @@ class SpeechSamplesReader { const std::streamsize buffer_num_bytes_; }; +// Runs the MonoInputVolumeControl processing sequence following the API +// contract. Returns the updated recommended input volume. +float UpdateRecommendedInputVolume(MonoInputVolumeController& mono_controller, + int applied_input_volume, + float speech_probability, + absl::optional rms_error_dbfs) { + mono_controller.set_stream_analog_level(applied_input_volume); + EXPECT_EQ(mono_controller.recommended_analog_level(), applied_input_volume); + mono_controller.Process(rms_error_dbfs, speech_probability); + return mono_controller.recommended_analog_level(); +} + } // namespace // TODO(bugs.webrtc.org/12874): Use constexpr struct with designated // initializers once fixed. 
constexpr InputVolumeControllerConfig GetInputVolumeControllerTestConfig() { InputVolumeControllerConfig config{ - .enabled = true, - .startup_min_volume = kInitialInputVolume, .clipped_level_min = kClippedMin, .clipped_level_step = kClippedLevelStep, .clipped_ratio_threshold = kClippedRatioThreshold, @@ -264,16 +221,13 @@ constexpr InputVolumeControllerConfig GetInputVolumeControllerTestConfig() { .enable_clipping_predictor = kDefaultClippingPredictorConfig.enabled, .target_range_max_dbfs = -18, .target_range_min_dbfs = -30, + .update_input_volume_wait_frames = 0, + .speech_probability_threshold = 0.5f, + .speech_ratio_threshold = 1.0f, }; return config; } -constexpr InputVolumeControllerConfig GetDisabledInputVolumeControllerConfig() { - InputVolumeControllerConfig config = GetInputVolumeControllerTestConfig(); - config.enabled = false; - return config; -} - // Helper class that provides an `InputVolumeController` instance with an // `AudioBuffer` instance and `CallAgcSequence()`, a helper method that runs the // `InputVolumeController` instance on the `AudioBuffer` one by sticking to the @@ -281,16 +235,17 @@ constexpr InputVolumeControllerConfig GetDisabledInputVolumeControllerConfig() { class InputVolumeControllerTestHelper { public: // Ctor. Initializes `audio_buffer` with zeros. - InputVolumeControllerTestHelper() + // TODO(bugs.webrtc.org/7494): Remove the default argument. + InputVolumeControllerTestHelper(const InputVolumeController::Config& config = + GetInputVolumeControllerTestConfig()) : audio_buffer(kSampleRateHz, kNumChannels, kSampleRateHz, kNumChannels, kSampleRateHz, kNumChannels), - manager(/*num_capture_channels=*/1, - GetInputVolumeControllerTestConfig()) { - manager.Initialize(); + controller(/*num_capture_channels=*/1, config) { + controller.Initialize(); WriteAudioBufferSamples(/*samples_value=*/0.0f, /*clipped_ratio=*/0.0f, audio_buffer); } @@ -298,445 +253,424 @@ class InputVolumeControllerTestHelper { // Calls the sequence of `InputVolumeController` methods according to the API // contract, namely: // - Sets the applied input volume; - // - Uses `audio_buffer` to call `AnalyzePreProcess()` and `Process()`; + // - Uses `audio_buffer` to call `AnalyzeInputAudio()` and + // `RecommendInputVolume()`; // Returns the recommended input volume. - int CallAgcSequence(int applied_input_volume, - absl::optional speech_probability, - absl::optional speech_level) { - manager.set_stream_analog_level(applied_input_volume); - manager.AnalyzePreProcess(audio_buffer); - manager.Process(speech_probability, speech_level); - - return manager.recommended_analog_level(); - } - - // Deprecated. - // TODO(bugs.webrtc.org/7494): Let the caller write `audio_buffer` and use - // `CallAgcSequence()`. - void CallProcess(int num_calls, - absl::optional speech_probability, - absl::optional speech_level) { + absl::optional CallAgcSequence(int applied_input_volume, + float speech_probability, + absl::optional speech_level_dbfs, + int num_calls = 1) { + RTC_DCHECK_GE(num_calls, 1); + absl::optional volume = applied_input_volume; for (int i = 0; i < num_calls; ++i) { - manager.Process(speech_probability, speech_level); + // Repeat the initial volume if `RecommendInputVolume()` doesn't return a + // value. + controller.AnalyzeInputAudio(volume.value_or(applied_input_volume), + audio_buffer); + volume = controller.RecommendInputVolume(speech_probability, + speech_level_dbfs); + + // Allow deviation from the API contract: `RecommendInputVolume()` doesn't + // return a recommended input volume. 
+ if (volume.has_value()) { + EXPECT_EQ(*volume, controller.recommended_input_volume()); + } } + return volume; } // Deprecated. // TODO(bugs.webrtc.org/7494): Let the caller write `audio_buffer` and use // `CallAgcSequence()`. - void CallPreProc(int num_calls, float clipped_ratio) { + int CallRecommendInputVolume(int num_calls, + int initial_volume, + float speech_probability, + absl::optional speech_level_dbfs) { + RTC_DCHECK(controller.capture_output_used()); + + // Create non-clipping audio for `AnalyzeInputAudio()`. + WriteAlternatingAudioBufferSamples(0.1f * kMaxSample, audio_buffer); + int volume = initial_volume; + for (int i = 0; i < num_calls; ++i) { + controller.AnalyzeInputAudio(volume, audio_buffer); + const auto recommended_input_volume = controller.RecommendInputVolume( + speech_probability, speech_level_dbfs); + + // Expect no errors: Applied volume set for every frame; + // `RecommendInputVolume()` returns a non-empty value. + EXPECT_TRUE(recommended_input_volume.has_value()); + + volume = *recommended_input_volume; + } + return volume; + } + + // Deprecated. + // TODO(bugs.webrtc.org/7494): Let the caller write `audio_buffer` and use + // `CallAgcSequence()`. + void CallAnalyzeInputAudio(int num_calls, float clipped_ratio) { + RTC_DCHECK(controller.capture_output_used()); + RTC_DCHECK_GE(clipped_ratio, 0.0f); RTC_DCHECK_LE(clipped_ratio, 1.0f); WriteAudioBufferSamples(/*samples_value=*/0.0f, clipped_ratio, audio_buffer); for (int i = 0; i < num_calls; ++i) { - manager.AnalyzePreProcess(audio_buffer); - } - } - - // Deprecated. - // TODO(bugs.webrtc.org/7494): Let the caller write `audio_buffer` and use - // `CallAgcSequence()`. - void CallPreProcForChangingAudio(int num_calls, float peak_ratio) { - RTC_DCHECK_GE(peak_ratio, 0.0f); - RTC_DCHECK_LE(peak_ratio, 1.0f); - const float samples_value = peak_ratio * 32767.0f; - - // Make half of the calls on a frame where the samples alternate - // `sample_values` and zeros. - WriteAudioBufferSamples(samples_value, /*clipped_ratio=*/0.0f, - audio_buffer); - for (size_t ch = 0; ch < audio_buffer.num_channels(); ++ch) { - for (size_t k = 1; k < audio_buffer.num_frames(); k += 2) { - audio_buffer.channels()[ch][k] = 0.0f; - } - } - for (int i = 0; i < num_calls / 2; ++i) { - manager.AnalyzePreProcess(audio_buffer); - } - - // Make half of thecalls on a frame where all the samples equal - // `sample_values`. 
- WriteAudioBufferSamples(samples_value, /*clipped_ratio=*/0.0f, - audio_buffer); - for (int i = 0; i < num_calls - num_calls / 2; ++i) { - manager.AnalyzePreProcess(audio_buffer); + controller.AnalyzeInputAudio(controller.recommended_input_volume(), + audio_buffer); } } AudioBuffer audio_buffer; - InputVolumeController manager; + InputVolumeController controller; }; class InputVolumeControllerParametrizedTest - : public ::testing::TestWithParam, bool>> { - protected: - InputVolumeControllerParametrizedTest() - : field_trials_( - GetAgcMinMicLevelExperimentFieldTrial(std::get<0>(GetParam()))) {} + : public ::testing::TestWithParam {}; - bool IsMinMicLevelOverridden() const { - return std::get<0>(GetParam()).has_value(); - } - int GetMinMicLevel() const { - return std::get<0>(GetParam()).value_or(kMinMicLevel); - } +TEST_P(InputVolumeControllerParametrizedTest, + StartupMinVolumeConfigurationRespectedWhenAppliedInputVolumeAboveMin) { + InputVolumeControllerTestHelper helper( + /*config=*/{.min_input_volume = GetParam()}); - bool RmsErrorHasValue() const { return std::get<1>(GetParam()); } - absl::optional GetValueOrEmpty(float value) const { - return RmsErrorHasValue() ? absl::optional(value) : absl::nullopt; - } + EXPECT_EQ(*helper.CallAgcSequence(/*applied_input_volume=*/128, + /*speech_probability=*/0.9f, + /*speech_level_dbfs=*/-80), + 128); +} - private: - test::ScopedFieldTrials field_trials_; -}; - -INSTANTIATE_TEST_SUITE_P( - , +TEST_P( InputVolumeControllerParametrizedTest, - ::testing::Combine(testing::Values(absl::nullopt, 12, 20), - testing::Values(true))); + StartupMinVolumeConfigurationRespectedWhenAppliedInputVolumeMaybeBelowMin) { + InputVolumeControllerTestHelper helper( + /*config=*/{.min_input_volume = GetParam()}); -// Checks that when the analog controller is disabled, no downward adaptation -// takes place. -// TODO(webrtc:7494): Revisit the test after moving the number of update wait -// frames to AMP config. The test passes but internally the gain update timing -// differs. -TEST_P(InputVolumeControllerParametrizedTest, - DisabledAnalogAgcDoesNotAdaptDownwards) { - InputVolumeController manager_no_analog_agc( - kNumChannels, GetDisabledInputVolumeControllerConfig()); - manager_no_analog_agc.Initialize(); - InputVolumeController manager_with_analog_agc( - kNumChannels, GetInputVolumeControllerTestConfig()); - manager_with_analog_agc.Initialize(); - - AudioBuffer audio_buffer(kSampleRateHz, kNumChannels, kSampleRateHz, - kNumChannels, kSampleRateHz, kNumChannels); - - constexpr int kInputVolume = 250; - static_assert(kInputVolume > kInitialInputVolume, "Increase `kInputVolume`."); - manager_no_analog_agc.set_stream_analog_level(kInputVolume); - manager_with_analog_agc.set_stream_analog_level(kInputVolume); - - // Make a first call with input that doesn't clip in order to let the - // controller read the input volume. That is needed because clipping input - // causes the controller to stay in idle state for - // `InputVolumeControllerConfig::clipped_wait_frames` frames. - WriteAudioBufferSamples(/*samples_value=*/0.0f, /*clipping_ratio=*/0.0f, - audio_buffer); - manager_no_analog_agc.AnalyzePreProcess(audio_buffer); - manager_with_analog_agc.AnalyzePreProcess(audio_buffer); - manager_no_analog_agc.Process(GetValueOrEmpty(kHighSpeechProbability), - GetValueOrEmpty(-18.0f)); - manager_with_analog_agc.Process(GetValueOrEmpty(kHighSpeechProbability), - GetValueOrEmpty(-18.0f)); - - // Feed clipping input to trigger a downward adapation of the analog level. 
- WriteAudioBufferSamples(/*samples_value=*/0.0f, /*clipping_ratio=*/0.2f, - audio_buffer); - manager_no_analog_agc.AnalyzePreProcess(audio_buffer); - manager_with_analog_agc.AnalyzePreProcess(audio_buffer); - manager_no_analog_agc.Process(GetValueOrEmpty(kHighSpeechProbability), - GetValueOrEmpty(-10.0f)); - manager_with_analog_agc.Process(GetValueOrEmpty(kHighSpeechProbability), - GetValueOrEmpty(-10.0f)); - - // Check that no adaptation occurs when the analog controller is disabled - // and make sure that the test triggers a downward adaptation otherwise. - EXPECT_EQ(manager_no_analog_agc.recommended_analog_level(), kInputVolume); - ASSERT_LT(manager_with_analog_agc.recommended_analog_level(), kInputVolume); -} - -// Checks that when the analog controller is disabled, no upward adaptation -// takes place. -// TODO(webrtc:7494): Revisit the test after moving the number of update wait -// frames to APM config. The test passes but internally the gain update timing -// differs. -TEST_P(InputVolumeControllerParametrizedTest, - DisabledAnalogAgcDoesNotAdaptUpwards) { - InputVolumeController manager_no_analog_agc( - kNumChannels, GetDisabledInputVolumeControllerConfig()); - manager_no_analog_agc.Initialize(); - InputVolumeController manager_with_analog_agc( - kNumChannels, GetInputVolumeControllerTestConfig()); - manager_with_analog_agc.Initialize(); - - constexpr int kInputVolume = kInitialInputVolume; - manager_no_analog_agc.set_stream_analog_level(kInputVolume); - manager_with_analog_agc.set_stream_analog_level(kInputVolume); - - // Feed speech with low energy to trigger an upward adapation of the analog - // level. - constexpr int kNumFrames = 125; - constexpr int kGainDb = -20; - SpeechSamplesReader reader; - reader.Feed(kNumFrames, kGainDb, GetValueOrEmpty(kHighSpeechProbability), - GetValueOrEmpty(-42.0f), manager_no_analog_agc); - reader.Feed(kNumFrames, kGainDb, GetValueOrEmpty(kHighSpeechProbability), - GetValueOrEmpty(-42.0f), manager_with_analog_agc); - - // Check that no adaptation occurs when the analog controller is disabled - // and make sure that the test triggers an upward adaptation otherwise. - EXPECT_EQ(manager_no_analog_agc.recommended_analog_level(), kInputVolume); - ASSERT_GT(manager_with_analog_agc.recommended_analog_level(), kInputVolume); + EXPECT_GE(*helper.CallAgcSequence(/*applied_input_volume=*/10, + /*speech_probability=*/0.9f, + /*speech_level_dbfs=*/-80), + 10); } TEST_P(InputVolumeControllerParametrizedTest, - StartupMinVolumeConfigurationIsRespected) { - InputVolumeControllerTestHelper helper; + StartupMinVolumeRespectedWhenAppliedVolumeNonZero) { + const int kMinInputVolume = GetParam(); + InputVolumeControllerTestHelper helper( + /*config=*/{.min_input_volume = kMinInputVolume, + .target_range_min_dbfs = -30, + .update_input_volume_wait_frames = 1, + .speech_probability_threshold = 0.5f, + .speech_ratio_threshold = 0.5f}); - helper.CallAgcSequence(kInitialInputVolume, - GetValueOrEmpty(kHighSpeechProbability), - GetValueOrEmpty(kSpeechLevel)); + // Volume change possible; speech level below the digital gain window. 
+ int volume = *helper.CallAgcSequence(/*applied_input_volume=*/1, + /*speech_probability=*/0.9f, + /*speech_level_dbfs=*/-80); - EXPECT_EQ(kInitialInputVolume, helper.manager.recommended_analog_level()); + EXPECT_EQ(volume, kMinInputVolume); +} + +TEST_P(InputVolumeControllerParametrizedTest, + MinVolumeRepeatedlyRespectedWhenAppliedVolumeNonZero) { + const int kMinInputVolume = GetParam(); + InputVolumeControllerTestHelper helper( + /*config=*/{.min_input_volume = kMinInputVolume, + .target_range_min_dbfs = -30, + .update_input_volume_wait_frames = 1, + .speech_probability_threshold = 0.5f, + .speech_ratio_threshold = 0.5f}); + + // Volume change possible; speech level below the digital gain window. + for (int i = 0; i < 100; ++i) { + const int volume = *helper.CallAgcSequence(/*applied_input_volume=*/1, + /*speech_probability=*/0.9f, + /*speech_level_dbfs=*/-80); + EXPECT_GE(volume, kMinInputVolume); + } +} + +TEST_P(InputVolumeControllerParametrizedTest, + StartupMinVolumeRespectedOnceWhenAppliedVolumeZero) { + const int kMinInputVolume = GetParam(); + InputVolumeControllerTestHelper helper( + /*config=*/{.min_input_volume = kMinInputVolume, + .target_range_min_dbfs = -30, + .update_input_volume_wait_frames = 1, + .speech_probability_threshold = 0.5f, + .speech_ratio_threshold = 0.5f}); + + int volume = *helper.CallAgcSequence(/*applied_input_volume=*/0, + /*speech_probability=*/0.9f, + /*speech_level_dbfs=*/-80); + + EXPECT_EQ(volume, kMinInputVolume); + + // No change of volume regardless of a speech level below the digital gain + // window; applied volume is zero. + volume = *helper.CallAgcSequence(/*applied_input_volume=*/0, + /*speech_probability=*/0.9f, + /*speech_level_dbfs=*/-80); + + EXPECT_EQ(volume, 0); } TEST_P(InputVolumeControllerParametrizedTest, MicVolumeResponseToRmsError) { - const auto speech_probability = GetValueOrEmpty(kHighSpeechProbability); - - InputVolumeControllerTestHelper helper; - helper.CallAgcSequence(kInitialInputVolume, speech_probability, - GetValueOrEmpty(kSpeechLevel)); + InputVolumeControllerConfig config = GetInputVolumeControllerTestConfig(); + config.min_input_volume = GetParam(); + InputVolumeControllerTestHelper helper(config); + int volume = *helper.CallAgcSequence(kInitialInputVolume, + kHighSpeechProbability, kSpeechLevel); // Inside the digital gain's window; no change of volume. - helper.CallProcess(/*num_calls=*/1, speech_probability, - GetValueOrEmpty(-23.0f)); + volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, -23.0f); // Inside the digital gain's window; no change of volume. - helper.CallProcess(/*num_calls=*/1, speech_probability, - GetValueOrEmpty(-28.0f)); + volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, -28.0f); // Above the digital gain's window; volume should be increased. - helper.CallProcess(/*num_calls=*/1, speech_probability, - GetValueOrEmpty(-29.0f)); - EXPECT_EQ(128, helper.manager.recommended_analog_level()); + volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, -29.0f); + EXPECT_EQ(volume, 128); - helper.CallProcess(/*num_calls=*/1, speech_probability, - GetValueOrEmpty(-38.0f)); - EXPECT_EQ(156, helper.manager.recommended_analog_level()); + volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, -38.0f); + EXPECT_EQ(volume, 156); // Inside the digital gain's window; no change of volume. 
- helper.CallProcess(/*num_calls=*/1, speech_probability, - GetValueOrEmpty(-23.0f)); - helper.CallProcess(/*num_calls=*/1, speech_probability, - GetValueOrEmpty(-18.0f)); + volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, -23.0f); + volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, -18.0f); // Below the digial gain's window; volume should be decreased. - helper.CallProcess(/*num_calls=*/1, speech_probability, - GetValueOrEmpty(-17.0f)); - EXPECT_EQ(155, helper.manager.recommended_analog_level()); + volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, -17.0f); + EXPECT_EQ(volume, 155); - helper.CallProcess(/*num_calls=*/1, speech_probability, - GetValueOrEmpty(-17.0f)); - EXPECT_EQ(151, helper.manager.recommended_analog_level()); + volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, -17.0f); + EXPECT_EQ(volume, 151); - helper.CallProcess(/*num_calls=*/1, speech_probability, - GetValueOrEmpty(-9.0f)); - EXPECT_EQ(119, helper.manager.recommended_analog_level()); + volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, -9.0f); + EXPECT_EQ(volume, 119); } TEST_P(InputVolumeControllerParametrizedTest, MicVolumeIsLimited) { - const auto speech_probability = GetValueOrEmpty(kHighSpeechProbability); - - InputVolumeControllerTestHelper helper; - helper.CallAgcSequence(kInitialInputVolume, speech_probability, - GetValueOrEmpty(kSpeechLevel)); + InputVolumeControllerConfig config = GetInputVolumeControllerTestConfig(); + const int min_input_volume = GetParam(); + config.min_input_volume = min_input_volume; + InputVolumeControllerTestHelper helper(config); + int volume = *helper.CallAgcSequence(kInitialInputVolume, + kHighSpeechProbability, kSpeechLevel); // Maximum upwards change is limited. - helper.CallProcess(/*num_calls=*/1, speech_probability, - GetValueOrEmpty(-48.0f)); - EXPECT_EQ(183, helper.manager.recommended_analog_level()); + volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, -48.0f); + EXPECT_EQ(volume, 183); - helper.CallProcess(/*num_calls=*/1, speech_probability, - GetValueOrEmpty(-48.0f)); - EXPECT_EQ(243, helper.manager.recommended_analog_level()); + volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, -48.0f); + EXPECT_EQ(volume, 243); // Won't go higher than the maximum. - helper.CallProcess(/*num_calls=*/1, speech_probability, - GetValueOrEmpty(-48.0f)); - EXPECT_EQ(255, helper.manager.recommended_analog_level()); + volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, -48.0f); + EXPECT_EQ(volume, 255); - helper.CallProcess(/*num_calls=*/1, speech_probability, - GetValueOrEmpty(-17.0f)); - EXPECT_EQ(254, helper.manager.recommended_analog_level()); + volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, -17.0f); + EXPECT_EQ(volume, 254); // Maximum downwards change is limited. 
- helper.CallProcess(/*num_calls=*/1, speech_probability, - GetValueOrEmpty(22.0f)); - EXPECT_EQ(194, helper.manager.recommended_analog_level()); + volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, 22.0f); + EXPECT_EQ(volume, 194); - helper.CallProcess(/*num_calls=*/1, speech_probability, - GetValueOrEmpty(22.0f)); - EXPECT_EQ(137, helper.manager.recommended_analog_level()); + volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, 22.0f); + EXPECT_EQ(volume, 137); - helper.CallProcess(/*num_calls=*/1, speech_probability, - GetValueOrEmpty(22.0f)); - EXPECT_EQ(88, helper.manager.recommended_analog_level()); + volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, 22.0f); + EXPECT_EQ(volume, 88); - helper.CallProcess(/*num_calls=*/1, speech_probability, - GetValueOrEmpty(22.0f)); - EXPECT_EQ(54, helper.manager.recommended_analog_level()); + volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, 22.0f); + EXPECT_EQ(volume, 54); - helper.CallProcess(/*num_calls=*/1, speech_probability, - GetValueOrEmpty(22.0f)); - EXPECT_EQ(33, helper.manager.recommended_analog_level()); + volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, 22.0f); + EXPECT_EQ(volume, 33); // Won't go lower than the minimum. - helper.CallProcess(/*num_calls=*/1, speech_probability, - GetValueOrEmpty(22.0f)); - EXPECT_EQ(std::max(18, GetMinMicLevel()), - helper.manager.recommended_analog_level()); + volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, 22.0f); + EXPECT_EQ(volume, std::max(18, min_input_volume)); - helper.CallProcess(/*num_calls=*/1, speech_probability, - GetValueOrEmpty(22.0f)); - EXPECT_EQ(std::max(12, GetMinMicLevel()), - helper.manager.recommended_analog_level()); + volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, 22.0f); + EXPECT_EQ(volume, std::max(12, min_input_volume)); } TEST_P(InputVolumeControllerParametrizedTest, NoActionWhileMuted) { - InputVolumeControllerTestHelper helper; - helper.CallAgcSequence(kInitialInputVolume, - GetValueOrEmpty(kHighSpeechProbability), - GetValueOrEmpty(kSpeechLevel)); + InputVolumeControllerTestHelper helper_1( + /*config=*/{.min_input_volume = GetParam()}); + InputVolumeControllerTestHelper helper_2( + /*config=*/{.min_input_volume = GetParam()}); - helper.manager.HandleCaptureOutputUsedChange(false); - helper.manager.Process(GetValueOrEmpty(kHighSpeechProbability), - GetValueOrEmpty(kSpeechLevel)); + int volume_1 = *helper_1.CallAgcSequence(/*applied_input_volume=*/255, + kHighSpeechProbability, kSpeechLevel, + /*num_calls=*/1); + int volume_2 = *helper_2.CallAgcSequence(/*applied_input_volume=*/255, + kHighSpeechProbability, kSpeechLevel, + /*num_calls=*/1); + + EXPECT_EQ(volume_1, 255); + EXPECT_EQ(volume_2, 255); + + helper_2.controller.HandleCaptureOutputUsedChange(false); + + WriteAlternatingAudioBufferSamples(kMaxSample, helper_1.audio_buffer); + WriteAlternatingAudioBufferSamples(kMaxSample, helper_2.audio_buffer); + + volume_1 = + *helper_1.CallAgcSequence(volume_1, kHighSpeechProbability, kSpeechLevel, + /*num_calls=*/1); + volume_2 = + *helper_2.CallAgcSequence(volume_2, kHighSpeechProbability, kSpeechLevel, + /*num_calls=*/1); + + EXPECT_LT(volume_1, 255); + EXPECT_EQ(volume_2, 255); } TEST_P(InputVolumeControllerParametrizedTest, UnmutingChecksVolumeWithoutRaising) { - 
InputVolumeControllerTestHelper helper; - helper.CallAgcSequence(kInitialInputVolume, - GetValueOrEmpty(kHighSpeechProbability), - GetValueOrEmpty(kSpeechLevel)); + InputVolumeControllerTestHelper helper( + /*config=*/{.min_input_volume = GetParam()}); + helper.CallAgcSequence(kInitialInputVolume, kHighSpeechProbability, + kSpeechLevel); - helper.manager.HandleCaptureOutputUsedChange(false); - helper.manager.HandleCaptureOutputUsedChange(true); + helper.controller.HandleCaptureOutputUsedChange(false); + helper.controller.HandleCaptureOutputUsedChange(true); constexpr int kInputVolume = 127; - helper.manager.set_stream_analog_level(kInputVolume); // SetMicVolume should not be called. - helper.CallProcess(/*num_calls=*/1, GetValueOrEmpty(kHighSpeechProbability), - GetValueOrEmpty(kSpeechLevel)); - EXPECT_EQ(127, helper.manager.recommended_analog_level()); + EXPECT_EQ( + helper.CallRecommendInputVolume(/*num_calls=*/1, kInputVolume, + kHighSpeechProbability, kSpeechLevel), + kInputVolume); } TEST_P(InputVolumeControllerParametrizedTest, UnmutingRaisesTooLowVolume) { - InputVolumeControllerTestHelper helper; - helper.CallAgcSequence(kInitialInputVolume, - GetValueOrEmpty(kHighSpeechProbability), - GetValueOrEmpty(kSpeechLevel)); + const int min_input_volume = GetParam(); + InputVolumeControllerTestHelper helper( + /*config=*/{.min_input_volume = min_input_volume}); + helper.CallAgcSequence(kInitialInputVolume, kHighSpeechProbability, + kSpeechLevel); - helper.manager.HandleCaptureOutputUsedChange(false); - helper.manager.HandleCaptureOutputUsedChange(true); + helper.controller.HandleCaptureOutputUsedChange(false); + helper.controller.HandleCaptureOutputUsedChange(true); constexpr int kInputVolume = 11; - helper.manager.set_stream_analog_level(kInputVolume); - helper.CallProcess(/*num_calls=*/1, GetValueOrEmpty(kHighSpeechProbability), - GetValueOrEmpty(kSpeechLevel)); - EXPECT_EQ(GetMinMicLevel(), helper.manager.recommended_analog_level()); + EXPECT_EQ( + helper.CallRecommendInputVolume(/*num_calls=*/1, kInputVolume, + kHighSpeechProbability, kSpeechLevel), + min_input_volume); } TEST_P(InputVolumeControllerParametrizedTest, ManualLevelChangeResultsInNoSetMicCall) { - const auto speech_probability = GetValueOrEmpty(kHighSpeechProbability); - - InputVolumeControllerTestHelper helper; - helper.CallAgcSequence(kInitialInputVolume, speech_probability, - GetValueOrEmpty(kSpeechLevel)); + InputVolumeControllerConfig config = GetInputVolumeControllerTestConfig(); + config.min_input_volume = GetParam(); + InputVolumeControllerTestHelper helper(config); + int volume = *helper.CallAgcSequence(kInitialInputVolume, + kHighSpeechProbability, kSpeechLevel); // GetMicVolume returns a value outside of the quantization slack, indicating // a manual volume change. - ASSERT_NE(helper.manager.recommended_analog_level(), 154); - helper.manager.set_stream_analog_level(154); - helper.CallProcess(/*num_calls=*/1, speech_probability, - GetValueOrEmpty(-29.0f)); - EXPECT_EQ(154, helper.manager.recommended_analog_level()); + ASSERT_NE(volume, 154); + volume = helper.CallRecommendInputVolume( + /*num_calls=*/1, /*initial_volume=*/154, kHighSpeechProbability, -29.0f); + EXPECT_EQ(volume, 154); // Do the same thing, except downwards now. 
- helper.manager.set_stream_analog_level(100); - helper.CallProcess(/*num_calls=*/1, speech_probability, - GetValueOrEmpty(-17.0f)); - EXPECT_EQ(100, helper.manager.recommended_analog_level()); + volume = helper.CallRecommendInputVolume( + /*num_calls=*/1, /*initial_volume=*/100, kHighSpeechProbability, -17.0f); + EXPECT_EQ(volume, 100); // And finally verify the AGC continues working without a manual change. - helper.CallProcess(/*num_calls=*/1, speech_probability, - GetValueOrEmpty(-17.0f)); - EXPECT_EQ(99, helper.manager.recommended_analog_level()); + volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, -17.0f); + EXPECT_EQ(volume, 99); } TEST_P(InputVolumeControllerParametrizedTest, RecoveryAfterManualLevelChangeFromMax) { - const auto speech_probability = GetValueOrEmpty(kHighSpeechProbability); - - InputVolumeControllerTestHelper helper; - helper.CallAgcSequence(kInitialInputVolume, speech_probability, - GetValueOrEmpty(kSpeechLevel)); + InputVolumeControllerConfig config = GetInputVolumeControllerTestConfig(); + config.min_input_volume = GetParam(); + InputVolumeControllerTestHelper helper(config); + int volume = *helper.CallAgcSequence(kInitialInputVolume, + kHighSpeechProbability, kSpeechLevel); // Force the mic up to max volume. Takes a few steps due to the residual // gain limitation. - helper.CallProcess(/*num_calls=*/1, speech_probability, - GetValueOrEmpty(-48.0f)); - EXPECT_EQ(183, helper.manager.recommended_analog_level()); - helper.CallProcess(/*num_calls=*/1, speech_probability, - GetValueOrEmpty(-48.0f)); - EXPECT_EQ(243, helper.manager.recommended_analog_level()); - helper.CallProcess(/*num_calls=*/1, speech_probability, - GetValueOrEmpty(-48.0f)); - EXPECT_EQ(255, helper.manager.recommended_analog_level()); + volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, -48.0f); + EXPECT_EQ(volume, 183); + volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, -48.0f); + EXPECT_EQ(volume, 243); + volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, -48.0f); + EXPECT_EQ(volume, 255); // Manual change does not result in SetMicVolume call. - helper.manager.set_stream_analog_level(50); - helper.CallProcess(/*num_calls=*/1, speech_probability, - GetValueOrEmpty(-17.0f)); - EXPECT_EQ(50, helper.manager.recommended_analog_level()); + volume = helper.CallRecommendInputVolume( + /*num_calls=*/1, /*initial_volume=*/50, kHighSpeechProbability, -17.0f); + EXPECT_EQ(helper.controller.recommended_input_volume(), 50); // Continues working as usual afterwards. - helper.CallProcess(/*num_calls=*/1, speech_probability, - GetValueOrEmpty(-38.0f)); + volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, -38.0f); - EXPECT_EQ(65, helper.manager.recommended_analog_level()); + EXPECT_EQ(volume, 65); } -// Checks that, when the min mic level override is not specified, AGC ramps up -// towards the minimum mic level after the mic level is manually set below the -// minimum gain to enforce. +// Checks that the minimum input volume is enforced during the upward adjustment +// of the input volume. TEST_P(InputVolumeControllerParametrizedTest, - RecoveryAfterManualLevelChangeBelowMinWithoutMinMicLevelOverride) { - if (IsMinMicLevelOverridden()) { - GTEST_SKIP() << "Skipped. 
Min mic level overridden."; - } + EnforceMinInputVolumeDuringUpwardsAdjustment) { + const int min_input_volume = GetParam(); + InputVolumeControllerConfig config = GetInputVolumeControllerTestConfig(); + config.min_input_volume = min_input_volume; + InputVolumeControllerTestHelper helper(config); + int volume = *helper.CallAgcSequence(kInitialInputVolume, + kHighSpeechProbability, kSpeechLevel); - const auto speech_probability = GetValueOrEmpty(kHighSpeechProbability); + // Manual change below min, but strictly positive, otherwise no action will be + // taken. + volume = helper.CallRecommendInputVolume( + /*num_calls=*/1, /*initial_volume=*/1, kHighSpeechProbability, -17.0f); - InputVolumeControllerTestHelper helper; - helper.CallAgcSequence(kInitialInputVolume, speech_probability, - GetValueOrEmpty(kSpeechLevel)); + // Trigger an upward adjustment of the input volume. + EXPECT_EQ(volume, min_input_volume); + volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, -29.0f); + EXPECT_EQ(volume, min_input_volume); + volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, -30.0f); + EXPECT_EQ(volume, min_input_volume); - // Manual change below min, but strictly positive, otherwise AGC won't take - // any action. - helper.manager.set_stream_analog_level(1); - helper.CallProcess(/*num_calls=*/1, speech_probability, - GetValueOrEmpty(-17.0f)); - EXPECT_EQ(1, helper.manager.recommended_analog_level()); - - // Continues working as usual afterwards. - helper.CallProcess(/*num_calls=*/1, speech_probability, - GetValueOrEmpty(-29.0f)); - EXPECT_EQ(1, helper.manager.recommended_analog_level()); - - helper.CallProcess(/*num_calls=*/1, speech_probability, - GetValueOrEmpty(-48.0f)); - EXPECT_EQ(10, helper.manager.recommended_analog_level()); - - helper.CallProcess(/*num_calls=*/1, speech_probability, - GetValueOrEmpty(-38.0f)); - EXPECT_EQ(16, helper.manager.recommended_analog_level()); + // After a number of consistently low speech level observations, the input + // volume is eventually raised above the minimum. + volume = helper.CallRecommendInputVolume(/*num_calls=*/10, volume, + kHighSpeechProbability, -38.0f); + EXPECT_GT(volume, min_input_volume); } // Checks that, when the min mic level override is specified, AGC immediately @@ -744,500 +678,200 @@ TEST_P(InputVolumeControllerParametrizedTest, // minimum gain to enforce. TEST_P(InputVolumeControllerParametrizedTest, RecoveryAfterManualLevelChangeBelowMin) { - if (!IsMinMicLevelOverridden()) { - GTEST_SKIP() << "Skipped. Min mic level not overridden."; - } - - const auto speech_probability = GetValueOrEmpty(kHighSpeechProbability); - - InputVolumeControllerTestHelper helper; - helper.CallAgcSequence(kInitialInputVolume, speech_probability, - GetValueOrEmpty(kSpeechLevel)); + const int min_input_volume = GetParam(); + InputVolumeControllerTestHelper helper( + /*config=*/{.min_input_volume = min_input_volume}); + int volume = *helper.CallAgcSequence(kInitialInputVolume, + kHighSpeechProbability, kSpeechLevel); // Manual change below min, but strictly positive, otherwise // AGC won't take any action. 
- helper.manager.set_stream_analog_level(1); - helper.CallProcess(/*num_calls=*/1, speech_probability, - GetValueOrEmpty(-17.0f)); - EXPECT_EQ(GetMinMicLevel(), helper.manager.recommended_analog_level()); + volume = helper.CallRecommendInputVolume( + /*num_calls=*/1, /*initial_volume=*/1, kHighSpeechProbability, -17.0f); + EXPECT_EQ(volume, min_input_volume); } TEST_P(InputVolumeControllerParametrizedTest, NoClippingHasNoImpact) { - InputVolumeControllerTestHelper helper; - helper.CallAgcSequence(kInitialInputVolume, - GetValueOrEmpty(kHighSpeechProbability), - GetValueOrEmpty(kSpeechLevel)); + InputVolumeControllerTestHelper helper( + /*config=*/{.min_input_volume = GetParam()}); + helper.CallAgcSequence(kInitialInputVolume, kHighSpeechProbability, + kSpeechLevel); - helper.CallPreProc(/*num_calls=*/100, /*clipped_ratio=*/0); - EXPECT_EQ(128, helper.manager.recommended_analog_level()); + helper.CallAnalyzeInputAudio(/*num_calls=*/100, /*clipped_ratio=*/0); + EXPECT_EQ(helper.controller.recommended_input_volume(), 128); } TEST_P(InputVolumeControllerParametrizedTest, ClippingUnderThresholdHasNoImpact) { - InputVolumeControllerTestHelper helper; - helper.CallAgcSequence(kInitialInputVolume, - GetValueOrEmpty(kHighSpeechProbability), - GetValueOrEmpty(kSpeechLevel)); + InputVolumeControllerTestHelper helper( + /*config=*/{.min_input_volume = GetParam()}); + helper.CallAgcSequence(kInitialInputVolume, kHighSpeechProbability, + kSpeechLevel); - helper.CallPreProc(/*num_calls=*/1, /*clipped_ratio=*/0.099); - EXPECT_EQ(128, helper.manager.recommended_analog_level()); + helper.CallAnalyzeInputAudio(/*num_calls=*/1, /*clipped_ratio=*/0.099); + EXPECT_EQ(helper.controller.recommended_input_volume(), 128); } TEST_P(InputVolumeControllerParametrizedTest, ClippingLowersVolume) { - InputVolumeControllerTestHelper helper; - helper.CallAgcSequence(/*applied_input_volume=*/255, - GetValueOrEmpty(kHighSpeechProbability), - GetValueOrEmpty(kSpeechLevel)); + InputVolumeControllerTestHelper helper( + /*config=*/{.min_input_volume = GetParam()}); + helper.CallAgcSequence(/*applied_input_volume=*/255, kHighSpeechProbability, + kSpeechLevel); - helper.CallPreProc(/*num_calls=*/1, /*clipped_ratio=*/0.2); - EXPECT_EQ(240, helper.manager.recommended_analog_level()); + helper.CallAnalyzeInputAudio(/*num_calls=*/1, /*clipped_ratio=*/0.2); + EXPECT_EQ(helper.controller.recommended_input_volume(), 240); } TEST_P(InputVolumeControllerParametrizedTest, WaitingPeriodBetweenClippingChecks) { - InputVolumeControllerTestHelper helper; - helper.CallAgcSequence(/*applied_input_volume=*/255, - GetValueOrEmpty(kHighSpeechProbability), - GetValueOrEmpty(kSpeechLevel)); + InputVolumeControllerTestHelper helper( + /*config=*/{.min_input_volume = GetParam()}); + helper.CallAgcSequence(/*applied_input_volume=*/255, kHighSpeechProbability, + kSpeechLevel); - helper.CallPreProc(/*num_calls=*/1, /*clipped_ratio=*/kAboveClippedThreshold); - EXPECT_EQ(240, helper.manager.recommended_analog_level()); + helper.CallAnalyzeInputAudio(/*num_calls=*/1, + /*clipped_ratio=*/kAboveClippedThreshold); + EXPECT_EQ(helper.controller.recommended_input_volume(), 240); - helper.CallPreProc(/*num_calls=*/300, - /*clipped_ratio=*/kAboveClippedThreshold); - EXPECT_EQ(240, helper.manager.recommended_analog_level()); + helper.CallAnalyzeInputAudio(/*num_calls=*/300, + /*clipped_ratio=*/kAboveClippedThreshold); + EXPECT_EQ(helper.controller.recommended_input_volume(), 240); - helper.CallPreProc(/*num_calls=*/1, /*clipped_ratio=*/kAboveClippedThreshold); - 
EXPECT_EQ(225, helper.manager.recommended_analog_level()); + helper.CallAnalyzeInputAudio(/*num_calls=*/1, + /*clipped_ratio=*/kAboveClippedThreshold); + EXPECT_EQ(helper.controller.recommended_input_volume(), 225); } TEST_P(InputVolumeControllerParametrizedTest, ClippingLoweringIsLimited) { - InputVolumeControllerTestHelper helper; - helper.CallAgcSequence(/*applied_input_volume=*/180, - GetValueOrEmpty(kHighSpeechProbability), - GetValueOrEmpty(kSpeechLevel)); + InputVolumeControllerConfig config = GetInputVolumeControllerTestConfig(); + config.min_input_volume = GetParam(); + InputVolumeControllerTestHelper helper(config); + helper.CallAgcSequence(/*applied_input_volume=*/180, kHighSpeechProbability, + kSpeechLevel); - helper.CallPreProc(/*num_calls=*/1, /*clipped_ratio=*/kAboveClippedThreshold); - EXPECT_EQ(kClippedMin, helper.manager.recommended_analog_level()); + helper.CallAnalyzeInputAudio(/*num_calls=*/1, + /*clipped_ratio=*/kAboveClippedThreshold); + EXPECT_EQ(helper.controller.recommended_input_volume(), kClippedMin); - helper.CallPreProc(/*num_calls=*/1000, - /*clipped_ratio=*/kAboveClippedThreshold); - EXPECT_EQ(kClippedMin, helper.manager.recommended_analog_level()); + helper.CallAnalyzeInputAudio(/*num_calls=*/1000, + /*clipped_ratio=*/kAboveClippedThreshold); + EXPECT_EQ(helper.controller.recommended_input_volume(), kClippedMin); } TEST_P(InputVolumeControllerParametrizedTest, ClippingMaxIsRespectedWhenEqualToLevel) { - const auto speech_probability = GetValueOrEmpty(kHighSpeechProbability); + InputVolumeControllerTestHelper helper( + /*config=*/{.min_input_volume = GetParam()}); + helper.CallAgcSequence(/*applied_input_volume=*/255, kHighSpeechProbability, + kSpeechLevel); - InputVolumeControllerTestHelper helper; - helper.CallAgcSequence(/*applied_input_volume=*/255, speech_probability, - GetValueOrEmpty(kSpeechLevel)); + helper.CallAnalyzeInputAudio(/*num_calls=*/1, + /*clipped_ratio=*/kAboveClippedThreshold); + EXPECT_EQ(helper.controller.recommended_input_volume(), 240); - helper.CallPreProc(/*num_calls=*/1, /*clipped_ratio=*/kAboveClippedThreshold); - EXPECT_EQ(240, helper.manager.recommended_analog_level()); - - helper.CallProcess(/*num_calls=*/10, speech_probability, - GetValueOrEmpty(-48.0f)); - EXPECT_EQ(240, helper.manager.recommended_analog_level()); + helper.CallRecommendInputVolume(/*num_calls=*/10, /*initial_volume=*/240, + kHighSpeechProbability, -48.0f); + EXPECT_EQ(helper.controller.recommended_input_volume(), 240); } TEST_P(InputVolumeControllerParametrizedTest, ClippingMaxIsRespectedWhenHigherThanLevel) { - const auto speech_probability = GetValueOrEmpty(kHighSpeechProbability); + InputVolumeControllerConfig config = GetInputVolumeControllerTestConfig(); + config.min_input_volume = GetParam(); + InputVolumeControllerTestHelper helper(config); + helper.CallAgcSequence(/*applied_input_volume=*/200, kHighSpeechProbability, + kSpeechLevel); - InputVolumeControllerTestHelper helper; - helper.CallAgcSequence(/*applied_input_volume=*/200, speech_probability, - GetValueOrEmpty(kSpeechLevel)); + helper.CallAnalyzeInputAudio(/*num_calls=*/1, + /*clipped_ratio=*/kAboveClippedThreshold); + int volume = helper.controller.recommended_input_volume(); + EXPECT_EQ(volume, 185); - helper.CallPreProc(/*num_calls=*/1, /*clipped_ratio=*/kAboveClippedThreshold); - EXPECT_EQ(185, helper.manager.recommended_analog_level()); - - helper.CallProcess(/*num_calls=*/1, speech_probability, - GetValueOrEmpty(-58.0f)); - EXPECT_EQ(240, helper.manager.recommended_analog_level()); - 
helper.CallProcess(/*num_calls=*/10, speech_probability,
-                     GetValueOrEmpty(-58.0f));
-  EXPECT_EQ(240, helper.manager.recommended_analog_level());
+  volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume,
+                                           kHighSpeechProbability, -58.0f);
+  EXPECT_EQ(volume, 240);
+  volume = helper.CallRecommendInputVolume(/*num_calls=*/10, volume,
+                                           kHighSpeechProbability, -58.0f);
+  EXPECT_EQ(volume, 240);
 }
 
 TEST_P(InputVolumeControllerParametrizedTest,
        UserCanRaiseVolumeAfterClipping) {
-  const auto speech_probability = GetValueOrEmpty(kHighSpeechProbability);
+  InputVolumeControllerConfig config = GetInputVolumeControllerTestConfig();
+  config.min_input_volume = GetParam();
+  InputVolumeControllerTestHelper helper(config);
+  helper.CallAgcSequence(/*applied_input_volume=*/225, kHighSpeechProbability,
+                         kSpeechLevel);
 
-  InputVolumeControllerTestHelper helper;
-  helper.CallAgcSequence(/*applied_input_volume=*/225, speech_probability,
-                         GetValueOrEmpty(kSpeechLevel));
-
-  helper.CallPreProc(/*num_calls=*/1, /*clipped_ratio=*/kAboveClippedThreshold);
-  EXPECT_EQ(210, helper.manager.recommended_analog_level());
+  helper.CallAnalyzeInputAudio(/*num_calls=*/1,
+                               /*clipped_ratio=*/kAboveClippedThreshold);
+  EXPECT_EQ(helper.controller.recommended_input_volume(), 210);
 
   // User changed the volume.
-  helper.manager.set_stream_analog_level(250);
-  helper.CallProcess(/*num_calls=*/1, speech_probability,
-                     GetValueOrEmpty(-32.0f));
-  EXPECT_EQ(250, helper.manager.recommended_analog_level());
+  int volume = helper.CallRecommendInputVolume(
+      /*num_calls=*/1, /*initial_volume=*/250, kHighSpeechProbability, -32.0f);
+  EXPECT_EQ(volume, 250);
 
   // Move down...
-  helper.CallProcess(/*num_calls=*/1, speech_probability,
-                     GetValueOrEmpty(-8.0f));
-  EXPECT_EQ(210, helper.manager.recommended_analog_level());
+  volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume,
+                                           kHighSpeechProbability, -8.0f);
+  EXPECT_EQ(volume, 210);
 
   // And back up to the new max established by the user.
-  helper.CallProcess(/*num_calls=*/1, speech_probability,
-                     GetValueOrEmpty(-58.0f));
-  EXPECT_EQ(250, helper.manager.recommended_analog_level());
+  volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume,
+                                           kHighSpeechProbability, -58.0f);
+  EXPECT_EQ(volume, 250);
 
   // Will not move above new maximum. 
- helper.CallProcess(/*num_calls=*/1, speech_probability, - GetValueOrEmpty(-48.0f)); - EXPECT_EQ(250, helper.manager.recommended_analog_level()); + volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, -48.0f); + EXPECT_EQ(volume, 250); } TEST_P(InputVolumeControllerParametrizedTest, ClippingDoesNotPullLowVolumeBackUp) { - InputVolumeControllerTestHelper helper; - helper.CallAgcSequence(/*applied_input_volume=*/80, - GetValueOrEmpty(kHighSpeechProbability), - GetValueOrEmpty(kSpeechLevel)); + InputVolumeControllerConfig config = GetInputVolumeControllerTestConfig(); + config.min_input_volume = GetParam(); + InputVolumeControllerTestHelper helper(config); + helper.CallAgcSequence(/*applied_input_volume=*/80, kHighSpeechProbability, + kSpeechLevel); - int initial_volume = helper.manager.recommended_analog_level(); - helper.CallPreProc(/*num_calls=*/1, /*clipped_ratio=*/kAboveClippedThreshold); - EXPECT_EQ(initial_volume, helper.manager.recommended_analog_level()); + int initial_volume = helper.controller.recommended_input_volume(); + helper.CallAnalyzeInputAudio(/*num_calls=*/1, + /*clipped_ratio=*/kAboveClippedThreshold); + EXPECT_EQ(helper.controller.recommended_input_volume(), initial_volume); } TEST_P(InputVolumeControllerParametrizedTest, TakesNoActionOnZeroMicVolume) { - InputVolumeControllerTestHelper helper; - helper.CallAgcSequence(kInitialInputVolume, - GetValueOrEmpty(kHighSpeechProbability), - GetValueOrEmpty(kSpeechLevel)); + InputVolumeControllerTestHelper helper( + /*config=*/{.min_input_volume = GetParam()}); + helper.CallAgcSequence(kInitialInputVolume, kHighSpeechProbability, + kSpeechLevel); - helper.manager.set_stream_analog_level(0); - helper.CallProcess(/*num_calls=*/10, GetValueOrEmpty(kHighSpeechProbability), - GetValueOrEmpty(-48.0f)); - EXPECT_EQ(0, helper.manager.recommended_analog_level()); + EXPECT_EQ( + helper.CallRecommendInputVolume(/*num_calls=*/10, /*initial_volume=*/0, + kHighSpeechProbability, -48.0f), + 0); } TEST_P(InputVolumeControllerParametrizedTest, ClippingDetectionLowersVolume) { - InputVolumeControllerTestHelper helper; - helper.CallAgcSequence(/*applied_input_volume=*/255, - GetValueOrEmpty(kHighSpeechProbability), - GetValueOrEmpty(kSpeechLevel)); + InputVolumeControllerConfig config = GetInputVolumeControllerTestConfig(); + config.min_input_volume = GetParam(); + InputVolumeControllerTestHelper helper(config); + int volume = *helper.CallAgcSequence(/*applied_input_volume=*/255, + kHighSpeechProbability, kSpeechLevel, + /*num_calls=*/1); - EXPECT_EQ(255, helper.manager.recommended_analog_level()); - helper.CallPreProcForChangingAudio(/*num_calls=*/100, /*peak_ratio=*/0.99f); - EXPECT_EQ(255, helper.manager.recommended_analog_level()); - helper.CallPreProcForChangingAudio(/*num_calls=*/100, /*peak_ratio=*/1.0f); - EXPECT_EQ(240, helper.manager.recommended_analog_level()); -} + EXPECT_EQ(volume, 255); -TEST_P(InputVolumeControllerParametrizedTest, - DisabledClippingPredictorDoesNotLowerVolume) { - InputVolumeControllerTestHelper helper; - helper.CallAgcSequence(/*applied_input_volume=*/255, - GetValueOrEmpty(kHighSpeechProbability), - GetValueOrEmpty(kSpeechLevel)); + WriteAlternatingAudioBufferSamples(0.99f * kMaxSample, helper.audio_buffer); + volume = *helper.CallAgcSequence(volume, kHighSpeechProbability, kSpeechLevel, + /*num_calls=*/100); - EXPECT_FALSE(helper.manager.clipping_predictor_enabled()); - EXPECT_EQ(255, helper.manager.recommended_analog_level()); - 
helper.CallPreProcForChangingAudio(/*num_calls=*/100, /*peak_ratio=*/0.99f); - EXPECT_EQ(255, helper.manager.recommended_analog_level()); - helper.CallPreProcForChangingAudio(/*num_calls=*/100, /*peak_ratio=*/0.99f); - EXPECT_EQ(255, helper.manager.recommended_analog_level()); -} + EXPECT_EQ(volume, 255); -TEST(InputVolumeControllerTest, AgcMinMicLevelExperimentDefault) { - std::unique_ptr manager = - CreateInputVolumeController(kInitialInputVolume, kClippedLevelStep, - kClippedRatioThreshold, kClippedWaitFrames); - EXPECT_EQ(manager->channel_controllers_[0]->min_mic_level(), kMinMicLevel); -} + WriteAlternatingAudioBufferSamples(kMaxSample, helper.audio_buffer); + volume = *helper.CallAgcSequence(volume, kHighSpeechProbability, kSpeechLevel, + /*num_calls=*/100); -TEST(InputVolumeControllerTest, AgcMinMicLevelExperimentDisabled) { - for (const std::string& field_trial_suffix : {"", "_20220210"}) { - test::ScopedFieldTrials field_trial( - GetAgcMinMicLevelExperimentFieldTrial("Disabled" + field_trial_suffix)); - std::unique_ptr manager = - CreateInputVolumeController(kInitialInputVolume, kClippedLevelStep, - kClippedRatioThreshold, kClippedWaitFrames); - - EXPECT_EQ(manager->channel_controllers_[0]->min_mic_level(), kMinMicLevel); - } -} - -// Checks that a field-trial parameter outside of the valid range [0,255] is -// ignored. -TEST(InputVolumeControllerTest, AgcMinMicLevelExperimentOutOfRangeAbove) { - test::ScopedFieldTrials field_trial( - GetAgcMinMicLevelExperimentFieldTrial("Enabled-256")); - std::unique_ptr manager = - CreateInputVolumeController(kInitialInputVolume, kClippedLevelStep, - kClippedRatioThreshold, kClippedWaitFrames); - EXPECT_EQ(manager->channel_controllers_[0]->min_mic_level(), kMinMicLevel); -} - -// Checks that a field-trial parameter outside of the valid range [0,255] is -// ignored. -TEST(InputVolumeControllerTest, AgcMinMicLevelExperimentOutOfRangeBelow) { - test::ScopedFieldTrials field_trial( - GetAgcMinMicLevelExperimentFieldTrial("Enabled--1")); - std::unique_ptr manager = - CreateInputVolumeController(kInitialInputVolume, kClippedLevelStep, - kClippedRatioThreshold, kClippedWaitFrames); - EXPECT_EQ(manager->channel_controllers_[0]->min_mic_level(), kMinMicLevel); -} - -// Verifies that a valid experiment changes the minimum microphone level. The -// start volume is larger than the min level and should therefore not be -// changed. -TEST(InputVolumeControllerTest, AgcMinMicLevelExperimentEnabled50) { - constexpr int kMinMicLevelOverride = 50; - for (const std::string& field_trial_suffix : {"", "_20220210"}) { - SCOPED_TRACE(field_trial_suffix); - test::ScopedFieldTrials field_trial( - GetAgcMinMicLevelExperimentFieldTrialEnabled(kMinMicLevelOverride, - field_trial_suffix)); - std::unique_ptr manager = - CreateInputVolumeController(kInitialInputVolume, kClippedLevelStep, - kClippedRatioThreshold, kClippedWaitFrames); - - EXPECT_EQ(manager->channel_controllers_[0]->min_mic_level(), - kMinMicLevelOverride); - } -} - -// Checks that, when the "WebRTC-Audio-AgcMinMicLevelExperiment" field trial is -// specified with a valid value, the mic level never gets lowered beyond the -// override value in the presence of clipping. -TEST(InputVolumeControllerTest, - AgcMinMicLevelExperimentCheckMinLevelWithClipping) { - constexpr int kMinMicLevelOverride = 250; - - // Create and initialize two AGCs by specifying and leaving unspecified the - // relevant field trial. 
- const auto factory = []() { - std::unique_ptr manager = - CreateInputVolumeController(kInitialInputVolume, kClippedLevelStep, - kClippedRatioThreshold, kClippedWaitFrames); - manager->Initialize(); - manager->set_stream_analog_level(kInitialInputVolume); - return manager; - }; - std::unique_ptr manager = factory(); - std::unique_ptr manager_with_rms; - { - test::ScopedFieldTrials field_trial( - GetAgcMinMicLevelExperimentFieldTrialEnabled(kMinMicLevelOverride)); - manager_with_rms = factory(); - } - - // Create a test input signal which containts 80% of clipped samples. - AudioBuffer audio_buffer(kSampleRateHz, 1, kSampleRateHz, 1, kSampleRateHz, - 1); - WriteAudioBufferSamples(/*samples_value=*/4000.0f, /*clipped_ratio=*/0.8f, - audio_buffer); - - // Simulate 4 seconds of clipping; it is expected to trigger a downward - // adjustment of the analog gain. - CallPreProcessAndProcess(/*num_calls=*/400, audio_buffer, - /*speech_probability=*/absl::nullopt, - /*speech_level=*/absl::nullopt, *manager); - CallPreProcessAndProcess(/*num_calls=*/400, audio_buffer, - /*speech_probability=*/absl::nullopt, - /*speech_level=*/absl::nullopt, *manager_with_rms); - - // Make sure that an adaptation occurred. - ASSERT_GT(manager->recommended_analog_level(), 0); - - // Check that the test signal triggers a larger downward adaptation for - // `manager`, which is allowed to reach a lower gain. - EXPECT_GT(manager_with_rms->recommended_analog_level(), - manager->recommended_analog_level()); - // Check that the gain selected by `manager_with_rms` equals the minimum - // value overridden via field trial. - EXPECT_EQ(manager_with_rms->recommended_analog_level(), kMinMicLevelOverride); -} - -// Checks that, when the "WebRTC-Audio-AgcMinMicLevelExperiment" field trial is -// specified with a valid value, the mic level never gets lowered beyond the -// override value in the presence of clipping when RMS error is not empty. -// TODO(webrtc:7494): Revisit the test after moving the number of update wait -// frames to APM config. The test passes but internally the gain update timing -// differs. -TEST(InputVolumeControllerTest, - AgcMinMicLevelExperimentCheckMinLevelWithClippingWithRmsError) { - constexpr int kMinMicLevelOverride = 250; - - // Create and initialize two AGCs by specifying and leaving unspecified the - // relevant field trial. - const auto factory = []() { - std::unique_ptr manager = - CreateInputVolumeController(kInitialInputVolume, kClippedLevelStep, - kClippedRatioThreshold, kClippedWaitFrames); - manager->Initialize(); - manager->set_stream_analog_level(kInitialInputVolume); - return manager; - }; - std::unique_ptr manager = factory(); - std::unique_ptr manager_with_rms; - { - test::ScopedFieldTrials field_trial( - GetAgcMinMicLevelExperimentFieldTrialEnabled(kMinMicLevelOverride)); - manager_with_rms = factory(); - } - - // Create a test input signal which containts 80% of clipped samples. - AudioBuffer audio_buffer(kSampleRateHz, 1, kSampleRateHz, 1, kSampleRateHz, - 1); - WriteAudioBufferSamples(/*samples_value=*/4000.0f, /*clipped_ratio=*/0.8f, - audio_buffer); - - // Simulate 4 seconds of clipping; it is expected to trigger a downward - // adjustment of the analog gain. - CallPreProcessAndProcess( - /*num_calls=*/400, audio_buffer, - /*speech_probability=*/0.7f, - /*speech_level=*/-18.0f, *manager); - CallPreProcessAndProcess( - /*num_calls=*/400, audio_buffer, - /*speech_probability=*/0.7f, - /*speech_level=*/-18.0f, *manager_with_rms); - - // Make sure that an adaptation occurred. 
- ASSERT_GT(manager->recommended_analog_level(), 0); - - // Check that the test signal triggers a larger downward adaptation for - // `manager`, which is allowed to reach a lower gain. - EXPECT_GT(manager_with_rms->recommended_analog_level(), - manager->recommended_analog_level()); - // Check that the gain selected by `manager_with_rms` equals the minimum - // value overridden via field trial. - EXPECT_EQ(manager_with_rms->recommended_analog_level(), kMinMicLevelOverride); -} - -// Checks that, when the "WebRTC-Audio-AgcMinMicLevelExperiment" field trial is -// specified with a value lower than the `clipped_level_min`, the behavior of -// the analog gain controller is the same as that obtained when the field trial -// is not specified. -TEST(InputVolumeControllerTest, - AgcMinMicLevelExperimentCompareMicLevelWithClipping) { - // Create and initialize two AGCs by specifying and leaving unspecified the - // relevant field trial. - const auto factory = []() { - // Use a large clipped level step to more quickly decrease the analog gain - // with clipping. - InputVolumeControllerConfig config = kDefaultInputVolumeControllerConfig; - config.enabled = true; - config.startup_min_volume = kInitialInputVolume; - config.clipped_level_step = 64; - config.clipped_ratio_threshold = kClippedRatioThreshold; - config.clipped_wait_frames = kClippedWaitFrames; - auto controller = std::make_unique( - /*num_capture_channels=*/1, config); - controller->Initialize(); - controller->set_stream_analog_level(kInitialInputVolume); - return controller; - }; - std::unique_ptr manager = factory(); - std::unique_ptr manager_with_rms; - { - constexpr int kMinMicLevelOverride = 20; - static_assert(kDefaultInputVolumeControllerConfig.clipped_level_min >= - kMinMicLevelOverride, - "Use a lower override value."); - test::ScopedFieldTrials field_trial( - GetAgcMinMicLevelExperimentFieldTrialEnabled(kMinMicLevelOverride)); - manager_with_rms = factory(); - } - - // Create a test input signal which containts 80% of clipped samples. - AudioBuffer audio_buffer(kSampleRateHz, 1, kSampleRateHz, 1, kSampleRateHz, - 1); - WriteAudioBufferSamples(/*samples_value=*/4000.0f, /*clipped_ratio=*/0.8f, - audio_buffer); - - // Simulate 4 seconds of clipping; it is expected to trigger a downward - // adjustment of the analog gain. - CallPreProcessAndProcess(/*num_calls=*/400, audio_buffer, - /*speech_probability=*/absl::nullopt, - /*speech_level=*/absl::nullopt, *manager); - CallPreProcessAndProcess(/*num_calls=*/400, audio_buffer, - /*speech_probability=*/absl::nullopt, - /*speech_level=*/absl::nullopt, *manager_with_rms); - - // Make sure that an adaptation occurred. - ASSERT_GT(manager->recommended_analog_level(), 0); - - // Check that the selected analog gain is the same for both controllers and - // that it equals the minimum level reached when clipping is handled. That is - // expected because the minimum microphone level override is less than the - // minimum level used when clipping is detected. - EXPECT_EQ(manager->recommended_analog_level(), - manager_with_rms->recommended_analog_level()); - EXPECT_EQ(manager_with_rms->recommended_analog_level(), - kDefaultInputVolumeControllerConfig.clipped_level_min); -} - -// Checks that, when the "WebRTC-Audio-AgcMinMicLevelExperiment" field trial is -// specified with a value lower than the `clipped_level_min`, the behavior of -// the analog gain controller is the same as that obtained when the field trial -// is not specified. 
-// TODO(webrtc:7494): Revisit the test after moving the number of update wait -// frames to APM config. The test passes but internally the gain update timing -// differs. -TEST(InputVolumeControllerTest, - AgcMinMicLevelExperimentCompareMicLevelWithClippingWithRmsError) { - // Create and initialize two AGCs by specifying and leaving unspecified the - // relevant field trial. - const auto factory = []() { - // Use a large clipped level step to more quickly decrease the analog gain - // with clipping. - InputVolumeControllerConfig config = kDefaultInputVolumeControllerConfig; - config.enabled = true; - config.startup_min_volume = kInitialInputVolume; - config.clipped_level_step = 64; - config.clipped_ratio_threshold = kClippedRatioThreshold; - config.clipped_wait_frames = kClippedWaitFrames; - auto controller = std::make_unique( - /*num_capture_channels=*/1, config); - controller->Initialize(); - controller->set_stream_analog_level(kInitialInputVolume); - return controller; - }; - std::unique_ptr manager = factory(); - std::unique_ptr manager_with_rms; - { - constexpr int kMinMicLevelOverride = 20; - static_assert(kDefaultInputVolumeControllerConfig.clipped_level_min >= - kMinMicLevelOverride, - "Use a lower override value."); - test::ScopedFieldTrials field_trial( - GetAgcMinMicLevelExperimentFieldTrialEnabled(kMinMicLevelOverride)); - manager_with_rms = factory(); - } - - // Create a test input signal which containts 80% of clipped samples. - AudioBuffer audio_buffer(kSampleRateHz, 1, kSampleRateHz, 1, kSampleRateHz, - 1); - WriteAudioBufferSamples(/*samples_value=*/4000.0f, /*clipped_ratio=*/0.8f, - audio_buffer); - - CallPreProcessAndProcess( - /*num_calls=*/400, audio_buffer, - /*speech_probability=*/0.7f, - /*speech_level=*/-18.0f, *manager); - CallPreProcessAndProcess( - /*num_calls=*/400, audio_buffer, - /*speech_probability=*/0.7f, - /*speech_level=*/-18.0f, *manager_with_rms); - - // Make sure that an adaptation occurred. - ASSERT_GT(manager->recommended_analog_level(), 0); - - // Check that the selected analog gain is the same for both controllers and - // that it equals the minimum level reached when clipping is handled. That is - // expected because the minimum microphone level override is less than the - // minimum level used when clipping is detected. - EXPECT_EQ(manager->recommended_analog_level(), - manager_with_rms->recommended_analog_level()); - EXPECT_EQ(manager_with_rms->recommended_analog_level(), - kDefaultInputVolumeControllerConfig.clipped_level_min); + EXPECT_EQ(volume, 240); } // TODO(bugs.webrtc.org/12774): Test the bahavior of `clipped_level_step`. @@ -1245,220 +879,909 @@ TEST(InputVolumeControllerTest, // TODO(bugs.webrtc.org/12774): Test the bahavior of `clipped_wait_frames`. // Verifies that configurable clipping parameters are initialized as intended. TEST_P(InputVolumeControllerParametrizedTest, ClippingParametersVerified) { - if (RmsErrorHasValue()) { - GTEST_SKIP() << "Skipped. 
RMS error does not affect the test.";
-  }
-
-  std::unique_ptr<InputVolumeController> manager =
-      CreateInputVolumeController(kInitialInputVolume, kClippedLevelStep,
-                                  kClippedRatioThreshold, kClippedWaitFrames);
-  manager->Initialize();
-  EXPECT_EQ(manager->clipped_level_step_, kClippedLevelStep);
-  EXPECT_EQ(manager->clipped_ratio_threshold_, kClippedRatioThreshold);
-  EXPECT_EQ(manager->clipped_wait_frames_, kClippedWaitFrames);
-  std::unique_ptr<InputVolumeController> manager_custom =
-      CreateInputVolumeController(kInitialInputVolume,
-                                  /*clipped_level_step=*/10,
+  std::unique_ptr<InputVolumeController> controller =
+      CreateInputVolumeController(kClippedLevelStep, kClippedRatioThreshold,
+                                  kClippedWaitFrames);
+  controller->Initialize();
+  EXPECT_EQ(controller->clipped_level_step_, kClippedLevelStep);
+  EXPECT_EQ(controller->clipped_ratio_threshold_, kClippedRatioThreshold);
+  EXPECT_EQ(controller->clipped_wait_frames_, kClippedWaitFrames);
+  std::unique_ptr<InputVolumeController> controller_custom =
+      CreateInputVolumeController(/*clipped_level_step=*/10,
                                   /*clipped_ratio_threshold=*/0.2f,
                                   /*clipped_wait_frames=*/50);
-  manager_custom->Initialize();
-  EXPECT_EQ(manager_custom->clipped_level_step_, 10);
-  EXPECT_EQ(manager_custom->clipped_ratio_threshold_, 0.2f);
-  EXPECT_EQ(manager_custom->clipped_wait_frames_, 50);
+  controller_custom->Initialize();
+  EXPECT_EQ(controller_custom->clipped_level_step_, 10);
+  EXPECT_EQ(controller_custom->clipped_ratio_threshold_, 0.2f);
+  EXPECT_EQ(controller_custom->clipped_wait_frames_, 50);
 }
 
 TEST_P(InputVolumeControllerParametrizedTest,
        DisableClippingPredictorDisablesClippingPredictor) {
-  if (RmsErrorHasValue()) {
-    GTEST_SKIP() << "Skipped. RMS error does not affect the test.";
-  }
+  std::unique_ptr<InputVolumeController> controller =
+      CreateInputVolumeController(kClippedLevelStep, kClippedRatioThreshold,
+                                  kClippedWaitFrames,
+                                  /*enable_clipping_predictor=*/false);
+  controller->Initialize();
 
-  std::unique_ptr<InputVolumeController> manager = CreateInputVolumeController(
-      kInitialInputVolume, kClippedLevelStep, kClippedRatioThreshold,
-      kClippedWaitFrames, /*enable_clipping_predictor=*/false);
-  manager->Initialize();
-
-  EXPECT_FALSE(manager->clipping_predictor_enabled());
-  EXPECT_FALSE(manager->use_clipping_predictor_step());
-}
-
-TEST_P(InputVolumeControllerParametrizedTest,
-       ClippingPredictorDisabledByDefault) {
-  if (RmsErrorHasValue()) {
-    GTEST_SKIP() << "Skipped. RMS error does not affect the test.";
-  }
-
-  constexpr ClippingPredictorConfig kDefaultConfig;
-  EXPECT_FALSE(kDefaultConfig.enabled);
+  EXPECT_FALSE(controller->clipping_predictor_enabled());
+  EXPECT_FALSE(controller->use_clipping_predictor_step());
 }
 
 TEST_P(InputVolumeControllerParametrizedTest,
        EnableClippingPredictorEnablesClippingPredictor) {
-  if (RmsErrorHasValue()) {
-    GTEST_SKIP() << "Skipped. 
RMS error does not affect the test.";
-  }
+  std::unique_ptr<InputVolumeController> controller =
+      CreateInputVolumeController(kClippedLevelStep, kClippedRatioThreshold,
+                                  kClippedWaitFrames,
+                                  /*enable_clipping_predictor=*/true);
+  controller->Initialize();
 
-  std::unique_ptr<InputVolumeController> manager = CreateInputVolumeController(
-      kInitialInputVolume, kClippedLevelStep, kClippedRatioThreshold,
-      kClippedWaitFrames, /*enable_clipping_predictor=*/true);
-  manager->Initialize();
-
-  EXPECT_TRUE(manager->clipping_predictor_enabled());
-  EXPECT_TRUE(manager->use_clipping_predictor_step());
+  EXPECT_TRUE(controller->clipping_predictor_enabled());
+  EXPECT_TRUE(controller->use_clipping_predictor_step());
 }
 
 TEST_P(InputVolumeControllerParametrizedTest,
        DisableClippingPredictorDoesNotLowerVolume) {
-  AudioBuffer audio_buffer(kSampleRateHz, kNumChannels, kSampleRateHz,
-                           kNumChannels, kSampleRateHz, kNumChannels);
-
+  int volume = 255;
   InputVolumeControllerConfig config = GetInputVolumeControllerTestConfig();
   config.enable_clipping_predictor = false;
-  InputVolumeController manager(/*num_capture_channels=*/1, config);
-  manager.Initialize();
-  manager.set_stream_analog_level(/*level=*/255);
-  EXPECT_FALSE(manager.clipping_predictor_enabled());
-  EXPECT_FALSE(manager.use_clipping_predictor_step());
-  EXPECT_EQ(manager.recommended_analog_level(), 255);
-  manager.Process(GetValueOrEmpty(kHighSpeechProbability),
-                  GetValueOrEmpty(kSpeechLevel));
-  CallPreProcessAudioBuffer(/*num_calls=*/10, /*peak_ratio=*/0.99f, manager);
-  EXPECT_EQ(manager.recommended_analog_level(), 255);
-  CallPreProcessAudioBuffer(/*num_calls=*/300, /*peak_ratio=*/0.99f, manager);
-  EXPECT_EQ(manager.recommended_analog_level(), 255);
-  CallPreProcessAudioBuffer(/*num_calls=*/10, /*peak_ratio=*/0.99f, manager);
-  EXPECT_EQ(manager.recommended_analog_level(), 255);
+  auto helper = InputVolumeControllerTestHelper(config);
+  helper.controller.Initialize();
+
+  EXPECT_FALSE(helper.controller.clipping_predictor_enabled());
+  EXPECT_FALSE(helper.controller.use_clipping_predictor_step());
+
+  // Expect no change if clipping prediction is enabled.
+  for (int j = 0; j < 31; ++j) {
+    WriteAlternatingAudioBufferSamples(0.99f * kMaxSample, helper.audio_buffer);
+    volume =
+        *helper.CallAgcSequence(volume, kLowSpeechProbability, kSpeechLevel,
+                                /*num_calls=*/5);
+
+    WriteAudioBufferSamples(0.99f * kMaxSample, /*clipped_ratio=*/0.0f,
+                            helper.audio_buffer);
+    volume =
+        *helper.CallAgcSequence(volume, kLowSpeechProbability, kSpeechLevel,
+                                /*num_calls=*/5);
+
+    EXPECT_EQ(volume, 255);
+  }
 }
 
+// TODO(bugs.webrtc.org/7494): Split into several smaller tests. 
TEST_P(InputVolumeControllerParametrizedTest, UsedClippingPredictionsProduceLowerAnalogLevels) { - AudioBuffer audio_buffer(kSampleRateHz, kNumChannels, kSampleRateHz, - kNumChannels, kSampleRateHz, kNumChannels); - - InputVolumeControllerConfig config_with_prediction = - GetInputVolumeControllerTestConfig(); - config_with_prediction.enable_clipping_predictor = true; - - InputVolumeControllerConfig config_without_prediction = - GetInputVolumeControllerTestConfig(); - - config_without_prediction.enable_clipping_predictor = false; - InputVolumeController manager_without_prediction(/*num_capture_channels=*/1, - config_without_prediction); - InputVolumeController manager_with_prediction(/*num_capture_channels=*/1, - config_with_prediction); - - manager_with_prediction.Initialize(); - manager_without_prediction.Initialize(); - constexpr int kInitialLevel = 255; - constexpr float kClippingPeakRatio = 1.0f; constexpr float kCloseToClippingPeakRatio = 0.99f; - constexpr float kZeroPeakRatio = 0.0f; - manager_with_prediction.set_stream_analog_level(kInitialLevel); - manager_without_prediction.set_stream_analog_level(kInitialLevel); + int volume_1 = kInitialLevel; + int volume_2 = kInitialLevel; - manager_with_prediction.Process(GetValueOrEmpty(kHighSpeechProbability), - GetValueOrEmpty(kSpeechLevel)); - manager_without_prediction.Process(GetValueOrEmpty(kHighSpeechProbability), - GetValueOrEmpty(kSpeechLevel)); + // Create two helpers, one with clipping prediction and one without. + auto config_1 = GetInputVolumeControllerTestConfig(); + auto config_2 = GetInputVolumeControllerTestConfig(); + config_1.enable_clipping_predictor = true; + config_2.enable_clipping_predictor = false; + auto helper_1 = InputVolumeControllerTestHelper(config_1); + auto helper_2 = InputVolumeControllerTestHelper(config_2); + helper_1.controller.Initialize(); + helper_2.controller.Initialize(); - EXPECT_TRUE(manager_with_prediction.clipping_predictor_enabled()); - EXPECT_FALSE(manager_without_prediction.clipping_predictor_enabled()); - EXPECT_TRUE(manager_with_prediction.use_clipping_predictor_step()); - EXPECT_EQ(manager_with_prediction.recommended_analog_level(), kInitialLevel); - EXPECT_EQ(manager_without_prediction.recommended_analog_level(), - kInitialLevel); + EXPECT_TRUE(helper_1.controller.clipping_predictor_enabled()); + EXPECT_FALSE(helper_2.controller.clipping_predictor_enabled()); + EXPECT_TRUE(helper_1.controller.use_clipping_predictor_step()); - // Expect a change in the analog level when the prediction step is used. - CallPreProcessAudioBuffer(/*num_calls=*/10, kCloseToClippingPeakRatio, - manager_with_prediction); - CallPreProcessAudioBuffer(/*num_calls=*/10, kCloseToClippingPeakRatio, - manager_without_prediction); - EXPECT_EQ(manager_with_prediction.recommended_analog_level(), - kInitialLevel - kClippedLevelStep); - EXPECT_EQ(manager_without_prediction.recommended_analog_level(), - kInitialLevel); + // Expect a change if clipping prediction is enabled. 
+ WriteAlternatingAudioBufferSamples(kCloseToClippingPeakRatio * kMaxSample, + helper_1.audio_buffer); + WriteAlternatingAudioBufferSamples(kCloseToClippingPeakRatio * kMaxSample, + helper_2.audio_buffer); + volume_1 = *helper_1.CallAgcSequence(volume_1, kLowSpeechProbability, + kSpeechLevel, 5); + volume_2 = *helper_2.CallAgcSequence(volume_2, kLowSpeechProbability, + kSpeechLevel, 5); + + WriteAudioBufferSamples(kCloseToClippingPeakRatio * kMaxSample, + /*clipped_ratio=*/0.0f, helper_1.audio_buffer); + WriteAudioBufferSamples(kCloseToClippingPeakRatio * kMaxSample, + /*clipped_ratio=*/0.0f, helper_2.audio_buffer); + volume_1 = *helper_1.CallAgcSequence(volume_1, kLowSpeechProbability, + kSpeechLevel, 5); + volume_2 = *helper_2.CallAgcSequence(volume_2, kLowSpeechProbability, + kSpeechLevel, 5); + + EXPECT_EQ(volume_1, kInitialLevel - kClippedLevelStep); + EXPECT_EQ(volume_2, kInitialLevel); // Expect no change during waiting. - CallPreProcessAudioBuffer(kClippedWaitFrames, kCloseToClippingPeakRatio, - manager_with_prediction); - CallPreProcessAudioBuffer(kClippedWaitFrames, kCloseToClippingPeakRatio, - manager_without_prediction); - EXPECT_EQ(manager_with_prediction.recommended_analog_level(), - kInitialLevel - kClippedLevelStep); - EXPECT_EQ(manager_without_prediction.recommended_analog_level(), - kInitialLevel); + for (int i = 0; i < kClippedWaitFrames / 10; ++i) { + WriteAlternatingAudioBufferSamples(kCloseToClippingPeakRatio * kMaxSample, + helper_1.audio_buffer); + WriteAlternatingAudioBufferSamples(kCloseToClippingPeakRatio * kMaxSample, + helper_2.audio_buffer); + volume_1 = *helper_1.CallAgcSequence(volume_1, kLowSpeechProbability, + kSpeechLevel, 5); + volume_2 = *helper_2.CallAgcSequence(volume_2, kLowSpeechProbability, + kSpeechLevel, 5); + + WriteAudioBufferSamples(kCloseToClippingPeakRatio * kMaxSample, + /*clipped_ratio=*/0.0f, helper_1.audio_buffer); + WriteAudioBufferSamples(kCloseToClippingPeakRatio * kMaxSample, + /*clipped_ratio=*/0.0f, helper_2.audio_buffer); + volume_1 = *helper_1.CallAgcSequence(volume_1, kLowSpeechProbability, + kSpeechLevel, 5); + volume_2 = *helper_2.CallAgcSequence(volume_2, kLowSpeechProbability, + kSpeechLevel, 5); + + EXPECT_EQ(volume_1, kInitialLevel - kClippedLevelStep); + EXPECT_EQ(volume_2, kInitialLevel); + } // Expect a change when the prediction step is used. 
- CallPreProcessAudioBuffer(/*num_calls=*/10, kCloseToClippingPeakRatio, - manager_with_prediction); - CallPreProcessAudioBuffer(/*num_calls=*/10, kCloseToClippingPeakRatio, - manager_without_prediction); - EXPECT_EQ(manager_with_prediction.recommended_analog_level(), - kInitialLevel - 2 * kClippedLevelStep); - EXPECT_EQ(manager_without_prediction.recommended_analog_level(), - kInitialLevel); + WriteAlternatingAudioBufferSamples(kCloseToClippingPeakRatio * kMaxSample, + helper_1.audio_buffer); + WriteAlternatingAudioBufferSamples(kCloseToClippingPeakRatio * kMaxSample, + helper_2.audio_buffer); + volume_1 = *helper_1.CallAgcSequence(volume_1, kLowSpeechProbability, + kSpeechLevel, 5); + volume_2 = *helper_2.CallAgcSequence(volume_2, kLowSpeechProbability, + kSpeechLevel, 5); + + WriteAudioBufferSamples(kCloseToClippingPeakRatio * kMaxSample, + /*clipped_ratio=*/0.0f, helper_1.audio_buffer); + WriteAudioBufferSamples(kCloseToClippingPeakRatio * kMaxSample, + /*clipped_ratio=*/0.0f, helper_2.audio_buffer); + volume_1 = *helper_1.CallAgcSequence(volume_1, kLowSpeechProbability, + kSpeechLevel, 5); + volume_2 = *helper_2.CallAgcSequence(volume_2, kLowSpeechProbability, + kSpeechLevel, 5); + + EXPECT_EQ(volume_1, kInitialLevel - 2 * kClippedLevelStep); + EXPECT_EQ(volume_2, kInitialLevel); // Expect no change when clipping is not detected or predicted. - CallPreProcessAudioBuffer(2 * kClippedWaitFrames, kZeroPeakRatio, - manager_with_prediction); - CallPreProcessAudioBuffer(2 * kClippedWaitFrames, kZeroPeakRatio, - manager_without_prediction); - EXPECT_EQ(manager_with_prediction.recommended_analog_level(), - kInitialLevel - 2 * kClippedLevelStep); - EXPECT_EQ(manager_without_prediction.recommended_analog_level(), - kInitialLevel); + for (int i = 0; i < 2 * kClippedWaitFrames / 10; ++i) { + WriteAlternatingAudioBufferSamples(/*samples_value=*/0.0f, + helper_1.audio_buffer); + WriteAlternatingAudioBufferSamples(/*samples_value=*/0.0f, + helper_2.audio_buffer); + volume_1 = *helper_1.CallAgcSequence(volume_1, kLowSpeechProbability, + kSpeechLevel, 5); + volume_2 = *helper_2.CallAgcSequence(volume_2, kLowSpeechProbability, + kSpeechLevel, 5); + + WriteAudioBufferSamples(/*samples_value=*/0.0f, /*clipped_ratio=*/0.0f, + helper_1.audio_buffer); + WriteAudioBufferSamples(/*samples_value=*/0.0f, /*clipped_ratio=*/0.0f, + helper_2.audio_buffer); + volume_1 = *helper_1.CallAgcSequence(volume_1, kLowSpeechProbability, + kSpeechLevel, 5); + volume_2 = *helper_2.CallAgcSequence(volume_2, kLowSpeechProbability, + kSpeechLevel, 5); + } + + EXPECT_EQ(volume_1, kInitialLevel - 2 * kClippedLevelStep); + EXPECT_EQ(volume_2, kInitialLevel); // Expect a change for clipping frames. 
- CallPreProcessAudioBuffer(/*num_calls=*/1, kClippingPeakRatio, - manager_with_prediction); - CallPreProcessAudioBuffer(/*num_calls=*/1, kClippingPeakRatio, - manager_without_prediction); - EXPECT_EQ(manager_with_prediction.recommended_analog_level(), - kInitialLevel - 3 * kClippedLevelStep); - EXPECT_EQ(manager_without_prediction.recommended_analog_level(), - kInitialLevel - kClippedLevelStep); + WriteAlternatingAudioBufferSamples(kMaxSample, helper_1.audio_buffer); + WriteAlternatingAudioBufferSamples(kMaxSample, helper_2.audio_buffer); + volume_1 = *helper_1.CallAgcSequence(volume_1, kLowSpeechProbability, + kSpeechLevel, 1); + volume_2 = *helper_2.CallAgcSequence(volume_2, kLowSpeechProbability, + kSpeechLevel, 1); + + EXPECT_EQ(volume_1, kInitialLevel - 3 * kClippedLevelStep); + EXPECT_EQ(volume_2, kInitialLevel - kClippedLevelStep); // Expect no change during waiting. - CallPreProcessAudioBuffer(kClippedWaitFrames, kClippingPeakRatio, - manager_with_prediction); - CallPreProcessAudioBuffer(kClippedWaitFrames, kClippingPeakRatio, - manager_without_prediction); - EXPECT_EQ(manager_with_prediction.recommended_analog_level(), - kInitialLevel - 3 * kClippedLevelStep); - EXPECT_EQ(manager_without_prediction.recommended_analog_level(), - kInitialLevel - kClippedLevelStep); + for (int i = 0; i < kClippedWaitFrames / 10; ++i) { + WriteAlternatingAudioBufferSamples(kMaxSample, helper_1.audio_buffer); + WriteAlternatingAudioBufferSamples(kMaxSample, helper_2.audio_buffer); + volume_1 = *helper_1.CallAgcSequence(volume_1, kLowSpeechProbability, + kSpeechLevel, 5); + volume_2 = *helper_2.CallAgcSequence(volume_2, kLowSpeechProbability, + kSpeechLevel, 5); + + WriteAudioBufferSamples(kMaxSample, /*clipped_ratio=*/1.0f, + helper_1.audio_buffer); + WriteAudioBufferSamples(kMaxSample, /*clipped_ratio=*/1.0f, + helper_2.audio_buffer); + volume_1 = *helper_1.CallAgcSequence(volume_1, kLowSpeechProbability, + kSpeechLevel, 5); + volume_2 = *helper_2.CallAgcSequence(volume_2, kLowSpeechProbability, + kSpeechLevel, 5); + } + + EXPECT_EQ(volume_1, kInitialLevel - 3 * kClippedLevelStep); + EXPECT_EQ(volume_2, kInitialLevel - kClippedLevelStep); // Expect a change for clipping frames. - CallPreProcessAudioBuffer(/*num_calls=*/1, kClippingPeakRatio, - manager_with_prediction); - CallPreProcessAudioBuffer(/*num_calls=*/1, kClippingPeakRatio, - manager_without_prediction); - EXPECT_EQ(manager_with_prediction.recommended_analog_level(), - kInitialLevel - 4 * kClippedLevelStep); - EXPECT_EQ(manager_without_prediction.recommended_analog_level(), - kInitialLevel - 2 * kClippedLevelStep); + WriteAlternatingAudioBufferSamples(kMaxSample, helper_1.audio_buffer); + WriteAlternatingAudioBufferSamples(kMaxSample, helper_2.audio_buffer); + volume_1 = *helper_1.CallAgcSequence(volume_1, kLowSpeechProbability, + kSpeechLevel, 1); + volume_2 = *helper_2.CallAgcSequence(volume_2, kLowSpeechProbability, + kSpeechLevel, 1); + + EXPECT_EQ(volume_1, kInitialLevel - 4 * kClippedLevelStep); + EXPECT_EQ(volume_2, kInitialLevel - 2 * kClippedLevelStep); } // Checks that passing an empty speech level has no effect on the input volume. 
TEST_P(InputVolumeControllerParametrizedTest, EmptyRmsErrorHasNoEffect) { - InputVolumeController manager(kNumChannels, - GetInputVolumeControllerTestConfig()); - manager.Initialize(); - - constexpr int kInputVolume = kInitialInputVolume; - manager.set_stream_analog_level(kInputVolume); + InputVolumeController controller(kNumChannels, + GetInputVolumeControllerTestConfig()); + controller.Initialize(); // Feed speech with low energy that would trigger an upward adapation of - // the analog level if an speech level and RMS values were not empty. + // the analog level if an speech level was not low and the RMS level empty. constexpr int kNumFrames = 125; constexpr int kGainDb = -20; SpeechSamplesReader reader; - reader.Feed(kNumFrames, kGainDb, absl::nullopt, absl::nullopt, manager); + int volume = reader.Feed(kNumFrames, kInitialInputVolume, kGainDb, + kLowSpeechProbability, absl::nullopt, controller); // Check that no adaptation occurs. - ASSERT_EQ(manager.recommended_analog_level(), kInputVolume); + ASSERT_EQ(volume, kInitialInputVolume); +} + +// Checks that the recommended input volume is not updated unless enough +// frames have been processed after the previous update. +TEST(InputVolumeControllerTest, UpdateInputVolumeWaitFramesIsEffective) { + constexpr int kInputVolume = kInitialInputVolume; + std::unique_ptr controller_wait_0 = + CreateInputVolumeController(kClippedLevelStep, kClippedRatioThreshold, + kClippedWaitFrames, + /*enable_clipping_predictor=*/false, + /*update_input_volume_wait_frames=*/0); + std::unique_ptr controller_wait_100 = + CreateInputVolumeController(kClippedLevelStep, kClippedRatioThreshold, + kClippedWaitFrames, + /*enable_clipping_predictor=*/false, + /*update_input_volume_wait_frames=*/100); + controller_wait_0->Initialize(); + controller_wait_100->Initialize(); + + SpeechSamplesReader reader_1; + SpeechSamplesReader reader_2; + int volume_wait_0 = reader_1.Feed( + /*num_frames=*/99, kInputVolume, /*gain_db=*/0, kHighSpeechProbability, + /*speech_level_dbfs=*/-42.0f, *controller_wait_0); + int volume_wait_100 = reader_2.Feed( + /*num_frames=*/99, kInputVolume, /*gain_db=*/0, kHighSpeechProbability, + /*speech_level_dbfs=*/-42.0f, *controller_wait_100); + + // Check that adaptation only occurs if enough frames have been processed. + ASSERT_GT(volume_wait_0, kInputVolume); + ASSERT_EQ(volume_wait_100, kInputVolume); + + volume_wait_0 = + reader_1.Feed(/*num_frames=*/1, volume_wait_0, + /*gain_db=*/0, kHighSpeechProbability, + /*speech_level_dbfs=*/-42.0f, *controller_wait_0); + volume_wait_100 = + reader_2.Feed(/*num_frames=*/1, volume_wait_100, + /*gain_db=*/0, kHighSpeechProbability, + /*speech_level_dbfs=*/-42.0f, *controller_wait_100); + + // Check that adaptation only occurs when enough frames have been processed. + ASSERT_GT(volume_wait_0, kInputVolume); + ASSERT_GT(volume_wait_100, kInputVolume); +} + +INSTANTIATE_TEST_SUITE_P(, + InputVolumeControllerParametrizedTest, + ::testing::Values(12, 20)); + +TEST(InputVolumeControllerTest, + MinInputVolumeEnforcedWithClippingWhenAboveClippedLevelMin) { + InputVolumeControllerTestHelper helper( + /*config=*/{.min_input_volume = 80, .clipped_level_min = 70}); + + // Trigger a downward adjustment caused by clipping input. Use a low speech + // probability to limit the volume changes to clipping handling. 
+ WriteAudioBufferSamples(/*samples_value=*/4000.0f, /*clipped_ratio=*/0.8f, + helper.audio_buffer); + constexpr int kNumCalls = 800; + helper.CallAgcSequence(/*applied_input_volume=*/100, kLowSpeechProbability, + /*speech_level_dbfs=*/-18.0f, kNumCalls); + + EXPECT_EQ(helper.controller.recommended_input_volume(), 80); +} + +TEST(InputVolumeControllerTest, + ClippedlevelMinEnforcedWithClippingWhenAboveMinInputVolume) { + InputVolumeControllerTestHelper helper( + /*config=*/{.min_input_volume = 70, .clipped_level_min = 80}); + + // Trigger a downward adjustment caused by clipping input. Use a low speech + // probability to limit the volume changes to clipping handling. + WriteAudioBufferSamples(/*samples_value=*/4000.0f, /*clipped_ratio=*/0.8f, + helper.audio_buffer); + constexpr int kNumCalls = 800; + helper.CallAgcSequence(/*applied_input_volume=*/100, kLowSpeechProbability, + /*speech_level_dbfs=*/-18.0f, kNumCalls); + + EXPECT_EQ(helper.controller.recommended_input_volume(), 80); +} + +TEST(InputVolumeControllerTest, SpeechRatioThresholdIsEffective) { + constexpr int kInputVolume = kInitialInputVolume; + // Create two input volume controllers with 10 frames between volume updates + // and the minimum speech ratio of 0.8 and speech probability threshold 0.5. + std::unique_ptr controller_1 = + CreateInputVolumeController(kClippedLevelStep, kClippedRatioThreshold, + kClippedWaitFrames, + /*enable_clipping_predictor=*/false, + /*update_input_volume_wait_frames=*/10); + std::unique_ptr controller_2 = + CreateInputVolumeController(kClippedLevelStep, kClippedRatioThreshold, + kClippedWaitFrames, + /*enable_clipping_predictor=*/false, + /*update_input_volume_wait_frames=*/10); + controller_1->Initialize(); + controller_2->Initialize(); + + SpeechSamplesReader reader_1; + SpeechSamplesReader reader_2; + + int volume_1 = reader_1.Feed(/*num_frames=*/1, kInputVolume, /*gain_db=*/0, + /*speech_probability=*/0.7f, + /*speech_level_dbfs=*/-42.0f, *controller_1); + int volume_2 = reader_2.Feed(/*num_frames=*/1, kInputVolume, /*gain_db=*/0, + /*speech_probability=*/0.4f, + /*speech_level_dbfs=*/-42.0f, *controller_2); + + ASSERT_EQ(volume_1, kInputVolume); + ASSERT_EQ(volume_2, kInputVolume); + + volume_1 = reader_1.Feed(/*num_frames=*/2, volume_1, /*gain_db=*/0, + /*speech_probability=*/0.4f, + /*speech_level_dbfs=*/-42.0f, *controller_1); + volume_2 = reader_2.Feed(/*num_frames=*/2, volume_2, /*gain_db=*/0, + /*speech_probability=*/0.4f, + /*speech_level_dbfs=*/-42.0f, *controller_2); + + ASSERT_EQ(volume_1, kInputVolume); + ASSERT_EQ(volume_2, kInputVolume); + + volume_1 = reader_1.Feed( + /*num_frames=*/7, volume_1, /*gain_db=*/0, + /*speech_probability=*/0.7f, /*speech_level_dbfs=*/-42.0f, *controller_1); + volume_2 = reader_2.Feed( + /*num_frames=*/7, volume_2, /*gain_db=*/0, + /*speech_probability=*/0.7f, /*speech_level_dbfs=*/-42.0f, *controller_2); + + ASSERT_GT(volume_1, kInputVolume); + ASSERT_EQ(volume_2, kInputVolume); +} + +TEST(InputVolumeControllerTest, SpeechProbabilityThresholdIsEffective) { + constexpr int kInputVolume = kInitialInputVolume; + // Create two input volume controllers with the exact same settings and + // 10 frames between volume updates. 
+ std::unique_ptr controller_1 = + CreateInputVolumeController(kClippedLevelStep, kClippedRatioThreshold, + kClippedWaitFrames, + /*enable_clipping_predictor=*/false, + /*update_input_volume_wait_frames=*/10); + std::unique_ptr controller_2 = + CreateInputVolumeController(kClippedLevelStep, kClippedRatioThreshold, + kClippedWaitFrames, + /*enable_clipping_predictor=*/false, + /*update_input_volume_wait_frames=*/10); + controller_1->Initialize(); + controller_2->Initialize(); + + SpeechSamplesReader reader_1; + SpeechSamplesReader reader_2; + + // Process with two sets of inputs: Use `reader_1` to process inputs + // that make the volume to be adjusted after enough frames have been + // processsed and `reader_2` to process inputs that won't make the volume + // to be adjusted. + int volume_1 = reader_1.Feed(/*num_frames=*/1, kInputVolume, /*gain_db=*/0, + /*speech_probability=*/0.5f, + /*speech_level_dbfs=*/-42.0f, *controller_1); + int volume_2 = reader_2.Feed(/*num_frames=*/1, kInputVolume, /*gain_db=*/0, + /*speech_probability=*/0.49f, + /*speech_level_dbfs=*/-42.0f, *controller_2); + + ASSERT_EQ(volume_1, kInputVolume); + ASSERT_EQ(volume_2, kInputVolume); + + reader_1.Feed(/*num_frames=*/2, volume_1, /*gain_db=*/0, + /*speech_probability=*/0.49f, /*speech_level_dbfs=*/-42.0f, + *controller_1); + reader_2.Feed(/*num_frames=*/2, volume_2, /*gain_db=*/0, + /*speech_probability=*/0.49f, /*speech_level_dbfs=*/-42.0f, + *controller_2); + + ASSERT_EQ(volume_1, kInputVolume); + ASSERT_EQ(volume_2, kInputVolume); + + volume_1 = reader_1.Feed( + /*num_frames=*/7, volume_1, /*gain_db=*/0, + /*speech_probability=*/0.5f, /*speech_level_dbfs=*/-42.0f, *controller_1); + volume_2 = reader_2.Feed( + /*num_frames=*/7, volume_2, /*gain_db=*/0, + /*speech_probability=*/0.5f, /*speech_level_dbfs=*/-42.0f, *controller_2); + + ASSERT_GT(volume_1, kInputVolume); + ASSERT_EQ(volume_2, kInputVolume); +} + +TEST(InputVolumeControllerTest, + DoNotLogRecommendedInputVolumeOnChangeToMatchTarget) { + metrics::Reset(); + + SpeechSamplesReader reader; + auto controller = CreateInputVolumeController(); + controller->Initialize(); + // Trigger a downward volume change by inputting audio that clips. Pass a + // speech level that falls in the target range to make sure that the + // adaptation is not made to match the target range. + constexpr int kStartupVolume = 255; + const int volume = reader.Feed(/*num_frames=*/14, kStartupVolume, + /*gain_db=*/50, kHighSpeechProbability, + /*speech_level_dbfs=*/-20.0f, *controller); + ASSERT_LT(volume, kStartupVolume); + EXPECT_METRIC_THAT( + metrics::Samples( + "WebRTC.Audio.Apm.RecommendedInputVolume.OnChangeToMatchTarget"), + ::testing::IsEmpty()); +} + +TEST(InputVolumeControllerTest, + LogRecommendedInputVolumeOnUpwardChangeToMatchTarget) { + metrics::Reset(); + + SpeechSamplesReader reader; + auto controller = CreateInputVolumeController(); + controller->Initialize(); + constexpr int kStartupVolume = 100; + // Trigger an upward volume change by inputting audio that does not clip and + // by passing a speech level below the target range. 
+ const int volume = reader.Feed(/*num_frames=*/14, kStartupVolume, + /*gain_db=*/-6, kHighSpeechProbability, + /*speech_level_dbfs=*/-50.0f, *controller); + ASSERT_GT(volume, kStartupVolume); + EXPECT_METRIC_THAT( + metrics::Samples( + "WebRTC.Audio.Apm.RecommendedInputVolume.OnChangeToMatchTarget"), + ::testing::Not(::testing::IsEmpty())); +} + +TEST(InputVolumeControllerTest, + LogRecommendedInputVolumeOnDownwardChangeToMatchTarget) { + metrics::Reset(); + + SpeechSamplesReader reader; + auto controller = CreateInputVolumeController(); + controller->Initialize(); + constexpr int kStartupVolume = 100; + // Trigger a downward volume change by inputting audio that does not clip and + // by passing a speech level above the target range. + const int volume = reader.Feed(/*num_frames=*/14, kStartupVolume, + /*gain_db=*/-6, kHighSpeechProbability, + /*speech_level_dbfs=*/-5.0f, *controller); + ASSERT_LT(volume, kStartupVolume); + EXPECT_METRIC_THAT( + metrics::Samples( + "WebRTC.Audio.Apm.RecommendedInputVolume.OnChangeToMatchTarget"), + ::testing::Not(::testing::IsEmpty())); +} + +TEST(MonoInputVolumeControllerTest, CheckHandleClippingLowersVolume) { + constexpr int kInitialInputVolume = 100; + constexpr int kInputVolumeStep = 29; + MonoInputVolumeController mono_controller( + /*clipped_level_min=*/70, + /*min_mic_level=*/32, + /*update_input_volume_wait_frames=*/3, kHighSpeechProbability, + kSpeechRatioThreshold); + mono_controller.Initialize(); + + UpdateRecommendedInputVolume(mono_controller, kInitialInputVolume, + kLowSpeechProbability, + /*rms_error_dbfs*/ -10.0f); + + mono_controller.HandleClipping(kInputVolumeStep); + + EXPECT_EQ(mono_controller.recommended_analog_level(), + kInitialInputVolume - kInputVolumeStep); +} + +TEST(MonoInputVolumeControllerTest, + CheckProcessNegativeRmsErrorDecreasesInputVolume) { + constexpr int kInitialInputVolume = 100; + MonoInputVolumeController mono_controller( + /*clipped_level_min=*/64, + /*min_mic_level=*/32, + /*update_input_volume_wait_frames=*/3, kHighSpeechProbability, + kSpeechRatioThreshold); + mono_controller.Initialize(); + + int volume = UpdateRecommendedInputVolume( + mono_controller, kInitialInputVolume, kHighSpeechProbability, -10.0f); + volume = UpdateRecommendedInputVolume(mono_controller, volume, + kHighSpeechProbability, -10.0f); + volume = UpdateRecommendedInputVolume(mono_controller, volume, + kHighSpeechProbability, -10.0f); + + EXPECT_LT(volume, kInitialInputVolume); +} + +TEST(MonoInputVolumeControllerTest, + CheckProcessPositiveRmsErrorIncreasesInputVolume) { + constexpr int kInitialInputVolume = 100; + MonoInputVolumeController mono_controller( + /*clipped_level_min=*/64, + /*min_mic_level=*/32, + /*update_input_volume_wait_frames=*/3, kHighSpeechProbability, + kSpeechRatioThreshold); + mono_controller.Initialize(); + + int volume = UpdateRecommendedInputVolume( + mono_controller, kInitialInputVolume, kHighSpeechProbability, 10.0f); + volume = UpdateRecommendedInputVolume(mono_controller, volume, + kHighSpeechProbability, 10.0f); + volume = UpdateRecommendedInputVolume(mono_controller, volume, + kHighSpeechProbability, 10.0f); + + EXPECT_GT(volume, kInitialInputVolume); +} + +TEST(MonoInputVolumeControllerTest, + CheckProcessNegativeRmsErrorDecreasesInputVolumeWithLimit) { + constexpr int kInitialInputVolume = 100; + MonoInputVolumeController mono_controller_1( + /*clipped_level_min=*/64, + /*min_mic_level=*/32, + /*update_input_volume_wait_frames=*/2, kHighSpeechProbability, + kSpeechRatioThreshold); + 
MonoInputVolumeController mono_controller_2( + /*clipped_level_min=*/64, + /*min_mic_level=*/32, + /*update_input_volume_wait_frames=*/2, kHighSpeechProbability, + kSpeechRatioThreshold); + MonoInputVolumeController mono_controller_3( + /*clipped_level_min=*/64, + /*min_mic_level=*/32, + /*update_input_volume_wait_frames=*/2, + /*speech_probability_threshold=*/0.7, + /*speech_ratio_threshold=*/0.8); + mono_controller_1.Initialize(); + mono_controller_2.Initialize(); + mono_controller_3.Initialize(); + + // Process RMS errors in the range + // [`-kMaxResidualGainChange`, `kMaxResidualGainChange`]. + int volume_1 = UpdateRecommendedInputVolume( + mono_controller_1, kInitialInputVolume, kHighSpeechProbability, -14.0f); + volume_1 = UpdateRecommendedInputVolume(mono_controller_1, volume_1, + kHighSpeechProbability, -14.0f); + // Process RMS errors outside the range + // [`-kMaxResidualGainChange`, `kMaxResidualGainChange`]. + int volume_2 = UpdateRecommendedInputVolume( + mono_controller_2, kInitialInputVolume, kHighSpeechProbability, -15.0f); + int volume_3 = UpdateRecommendedInputVolume( + mono_controller_3, kInitialInputVolume, kHighSpeechProbability, -30.0f); + volume_2 = UpdateRecommendedInputVolume(mono_controller_2, volume_2, + kHighSpeechProbability, -15.0f); + volume_3 = UpdateRecommendedInputVolume(mono_controller_3, volume_3, + kHighSpeechProbability, -30.0f); + + EXPECT_LT(volume_1, kInitialInputVolume); + EXPECT_LT(volume_2, volume_1); + EXPECT_EQ(volume_2, volume_3); +} + +TEST(MonoInputVolumeControllerTest, + CheckProcessPositiveRmsErrorIncreasesInputVolumeWithLimit) { + constexpr int kInitialInputVolume = 100; + MonoInputVolumeController mono_controller_1( + /*clipped_level_min=*/64, + /*min_mic_level=*/32, + /*update_input_volume_wait_frames=*/2, kHighSpeechProbability, + kSpeechRatioThreshold); + MonoInputVolumeController mono_controller_2( + /*clipped_level_min=*/64, + /*min_mic_level=*/32, + /*update_input_volume_wait_frames=*/2, kHighSpeechProbability, + kSpeechRatioThreshold); + MonoInputVolumeController mono_controller_3( + /*clipped_level_min=*/64, + /*min_mic_level=*/32, + /*update_input_volume_wait_frames=*/2, kHighSpeechProbability, + kSpeechRatioThreshold); + mono_controller_1.Initialize(); + mono_controller_2.Initialize(); + mono_controller_3.Initialize(); + + // Process RMS errors in the range + // [`-kMaxResidualGainChange`, `kMaxResidualGainChange`]. + int volume_1 = UpdateRecommendedInputVolume( + mono_controller_1, kInitialInputVolume, kHighSpeechProbability, 14.0f); + volume_1 = UpdateRecommendedInputVolume(mono_controller_1, volume_1, + kHighSpeechProbability, 14.0f); + // Process RMS errors outside the range + // [`-kMaxResidualGainChange`, `kMaxResidualGainChange`]. 
+ int volume_2 = UpdateRecommendedInputVolume( + mono_controller_2, kInitialInputVolume, kHighSpeechProbability, 15.0f); + int volume_3 = UpdateRecommendedInputVolume( + mono_controller_3, kInitialInputVolume, kHighSpeechProbability, 30.0f); + volume_2 = UpdateRecommendedInputVolume(mono_controller_2, volume_2, + kHighSpeechProbability, 15.0f); + volume_3 = UpdateRecommendedInputVolume(mono_controller_3, volume_3, + kHighSpeechProbability, 30.0f); + + EXPECT_GT(volume_1, kInitialInputVolume); + EXPECT_GT(volume_2, volume_1); + EXPECT_EQ(volume_2, volume_3); +} + +TEST(MonoInputVolumeControllerTest, + CheckProcessRmsErrorDecreasesInputVolumeRepeatedly) { + constexpr int kInitialInputVolume = 100; + MonoInputVolumeController mono_controller( + /*clipped_level_min=*/64, + /*min_mic_level=*/32, + /*update_input_volume_wait_frames=*/2, kHighSpeechProbability, + kSpeechRatioThreshold); + mono_controller.Initialize(); + + int volume_before = UpdateRecommendedInputVolume( + mono_controller, kInitialInputVolume, kHighSpeechProbability, -10.0f); + volume_before = UpdateRecommendedInputVolume(mono_controller, volume_before, + kHighSpeechProbability, -10.0f); + + EXPECT_LT(volume_before, kInitialInputVolume); + + int volume_after = UpdateRecommendedInputVolume( + mono_controller, volume_before, kHighSpeechProbability, -10.0f); + volume_after = UpdateRecommendedInputVolume(mono_controller, volume_after, + kHighSpeechProbability, -10.0f); + + EXPECT_LT(volume_after, volume_before); +} + +TEST(MonoInputVolumeControllerTest, + CheckProcessPositiveRmsErrorIncreasesInputVolumeRepeatedly) { + constexpr int kInitialInputVolume = 100; + MonoInputVolumeController mono_controller( + /*clipped_level_min=*/64, + /*min_mic_level=*/32, + /*update_input_volume_wait_frames=*/2, kHighSpeechProbability, + kSpeechRatioThreshold); + mono_controller.Initialize(); + + int volume_before = UpdateRecommendedInputVolume( + mono_controller, kInitialInputVolume, kHighSpeechProbability, 10.0f); + volume_before = UpdateRecommendedInputVolume(mono_controller, volume_before, + kHighSpeechProbability, 10.0f); + + EXPECT_GT(volume_before, kInitialInputVolume); + + int volume_after = UpdateRecommendedInputVolume( + mono_controller, volume_before, kHighSpeechProbability, 10.0f); + volume_after = UpdateRecommendedInputVolume(mono_controller, volume_after, + kHighSpeechProbability, 10.0f); + + EXPECT_GT(volume_after, volume_before); +} + +TEST(MonoInputVolumeControllerTest, CheckClippedLevelMinIsEffective) { + constexpr int kInitialInputVolume = 100; + constexpr int kClippedLevelMin = 70; + MonoInputVolumeController mono_controller_1( + kClippedLevelMin, + /*min_mic_level=*/84, + /*update_input_volume_wait_frames=*/2, kHighSpeechProbability, + kSpeechRatioThreshold); + MonoInputVolumeController mono_controller_2( + kClippedLevelMin, + /*min_mic_level=*/84, + /*update_input_volume_wait_frames=*/2, kHighSpeechProbability, + kSpeechRatioThreshold); + mono_controller_1.Initialize(); + mono_controller_2.Initialize(); + + // Process one frame to reset the state for `HandleClipping()`. 
+ EXPECT_EQ(UpdateRecommendedInputVolume(mono_controller_1, kInitialInputVolume, + kLowSpeechProbability, -10.0f), + kInitialInputVolume); + EXPECT_EQ(UpdateRecommendedInputVolume(mono_controller_2, kInitialInputVolume, + kLowSpeechProbability, -10.0f), + kInitialInputVolume); + + mono_controller_1.HandleClipping(29); + mono_controller_2.HandleClipping(31); + + EXPECT_EQ(mono_controller_2.recommended_analog_level(), kClippedLevelMin); + EXPECT_LT(mono_controller_2.recommended_analog_level(), + mono_controller_1.recommended_analog_level()); +} + +TEST(MonoInputVolumeControllerTest, CheckMinMicLevelIsEffective) { + constexpr int kInitialInputVolume = 100; + constexpr int kMinMicLevel = 64; + MonoInputVolumeController mono_controller_1( + /*clipped_level_min=*/64, kMinMicLevel, + /*update_input_volume_wait_frames=*/2, kHighSpeechProbability, + kSpeechRatioThreshold); + MonoInputVolumeController mono_controller_2( + /*clipped_level_min=*/64, kMinMicLevel, + /*update_input_volume_wait_frames=*/2, kHighSpeechProbability, + kSpeechRatioThreshold); + mono_controller_1.Initialize(); + mono_controller_2.Initialize(); + + int volume_1 = UpdateRecommendedInputVolume( + mono_controller_1, kInitialInputVolume, kHighSpeechProbability, -10.0f); + int volume_2 = UpdateRecommendedInputVolume( + mono_controller_2, kInitialInputVolume, kHighSpeechProbability, -10.0f); + + EXPECT_EQ(volume_1, kInitialInputVolume); + EXPECT_EQ(volume_2, kInitialInputVolume); + + volume_1 = UpdateRecommendedInputVolume(mono_controller_1, volume_1, + kHighSpeechProbability, -10.0f); + volume_2 = UpdateRecommendedInputVolume(mono_controller_2, volume_2, + kHighSpeechProbability, -30.0f); + + EXPECT_LT(volume_1, kInitialInputVolume); + EXPECT_LT(volume_2, volume_1); + EXPECT_EQ(volume_2, kMinMicLevel); +} + +TEST(MonoInputVolumeControllerTest, + CheckUpdateInputVolumeWaitFramesIsEffective) { + constexpr int kInitialInputVolume = 100; + MonoInputVolumeController mono_controller_1( + /*clipped_level_min=*/64, + /*min_mic_level=*/84, + /*update_input_volume_wait_frames=*/1, kHighSpeechProbability, + kSpeechRatioThreshold); + MonoInputVolumeController mono_controller_2( + /*clipped_level_min=*/64, + /*min_mic_level=*/84, + /*update_input_volume_wait_frames=*/3, kHighSpeechProbability, + kSpeechRatioThreshold); + mono_controller_1.Initialize(); + mono_controller_2.Initialize(); + + int volume_1 = UpdateRecommendedInputVolume( + mono_controller_1, kInitialInputVolume, kHighSpeechProbability, -10.0f); + int volume_2 = UpdateRecommendedInputVolume( + mono_controller_2, kInitialInputVolume, kHighSpeechProbability, -10.0f); + + EXPECT_EQ(volume_1, kInitialInputVolume); + EXPECT_EQ(volume_2, kInitialInputVolume); + + volume_1 = UpdateRecommendedInputVolume(mono_controller_1, volume_1, + kHighSpeechProbability, -10.0f); + volume_2 = UpdateRecommendedInputVolume(mono_controller_2, volume_2, + kHighSpeechProbability, -10.0f); + + EXPECT_LT(volume_1, kInitialInputVolume); + EXPECT_EQ(volume_2, kInitialInputVolume); + + volume_2 = UpdateRecommendedInputVolume(mono_controller_2, volume_2, + kHighSpeechProbability, -10.0f); + + EXPECT_LT(volume_2, kInitialInputVolume); +} + +TEST(MonoInputVolumeControllerTest, + CheckSpeechProbabilityThresholdIsEffective) { + constexpr int kInitialInputVolume = 100; + constexpr float kSpeechProbabilityThreshold = 0.8f; + MonoInputVolumeController mono_controller_1( + /*clipped_level_min=*/64, + /*min_mic_level=*/84, + /*update_input_volume_wait_frames=*/2, kSpeechProbabilityThreshold, + kSpeechRatioThreshold); + 
MonoInputVolumeController mono_controller_2( + /*clipped_level_min=*/64, + /*min_mic_level=*/84, + /*update_input_volume_wait_frames=*/2, kSpeechProbabilityThreshold, + kSpeechRatioThreshold); + mono_controller_1.Initialize(); + mono_controller_2.Initialize(); + + int volume_1 = + UpdateRecommendedInputVolume(mono_controller_1, kInitialInputVolume, + kSpeechProbabilityThreshold, -10.0f); + int volume_2 = + UpdateRecommendedInputVolume(mono_controller_2, kInitialInputVolume, + kSpeechProbabilityThreshold, -10.0f); + + EXPECT_EQ(volume_1, kInitialInputVolume); + EXPECT_EQ(volume_2, kInitialInputVolume); + + volume_1 = UpdateRecommendedInputVolume( + mono_controller_1, volume_1, kSpeechProbabilityThreshold - 0.1f, -10.0f); + volume_2 = UpdateRecommendedInputVolume(mono_controller_2, volume_2, + kSpeechProbabilityThreshold, -10.0f); + + EXPECT_EQ(volume_1, kInitialInputVolume); + EXPECT_LT(volume_2, volume_1); +} + +TEST(MonoInputVolumeControllerTest, CheckSpeechRatioThresholdIsEffective) { + constexpr int kInitialInputVolume = 100; + MonoInputVolumeController mono_controller_1( + /*clipped_level_min=*/64, + /*min_mic_level=*/84, + /*update_input_volume_wait_frames=*/4, kHighSpeechProbability, + /*speech_ratio_threshold=*/0.75f); + MonoInputVolumeController mono_controller_2( + /*clipped_level_min=*/64, + /*min_mic_level=*/84, + /*update_input_volume_wait_frames=*/4, kHighSpeechProbability, + /*speech_ratio_threshold=*/0.75f); + mono_controller_1.Initialize(); + mono_controller_2.Initialize(); + + int volume_1 = UpdateRecommendedInputVolume( + mono_controller_1, kInitialInputVolume, kHighSpeechProbability, -10.0f); + int volume_2 = UpdateRecommendedInputVolume( + mono_controller_2, kInitialInputVolume, kHighSpeechProbability, -10.0f); + + volume_1 = UpdateRecommendedInputVolume(mono_controller_1, volume_1, + kHighSpeechProbability, -10.0f); + volume_2 = UpdateRecommendedInputVolume(mono_controller_2, volume_2, + kHighSpeechProbability, -10.0f); + + volume_1 = UpdateRecommendedInputVolume(mono_controller_1, volume_1, + kLowSpeechProbability, -10.0f); + volume_2 = UpdateRecommendedInputVolume(mono_controller_2, volume_2, + kLowSpeechProbability, -10.0f); + + EXPECT_EQ(volume_1, kInitialInputVolume); + EXPECT_EQ(volume_2, kInitialInputVolume); + + volume_1 = UpdateRecommendedInputVolume(mono_controller_1, volume_1, + kLowSpeechProbability, -10.0f); + volume_2 = UpdateRecommendedInputVolume(mono_controller_2, volume_2, + kHighSpeechProbability, -10.0f); + + EXPECT_EQ(volume_1, kInitialInputVolume); + EXPECT_LT(volume_2, volume_1); +} + +TEST(MonoInputVolumeControllerTest, + CheckProcessEmptyRmsErrorDoesNotLowerVolume) { + constexpr int kInitialInputVolume = 100; + MonoInputVolumeController mono_controller_1( + /*clipped_level_min=*/64, + /*min_mic_level=*/84, + /*update_input_volume_wait_frames=*/2, kHighSpeechProbability, + kSpeechRatioThreshold); + MonoInputVolumeController mono_controller_2( + /*clipped_level_min=*/64, + /*min_mic_level=*/84, + /*update_input_volume_wait_frames=*/2, kHighSpeechProbability, + kSpeechRatioThreshold); + mono_controller_1.Initialize(); + mono_controller_2.Initialize(); + + int volume_1 = UpdateRecommendedInputVolume( + mono_controller_1, kInitialInputVolume, kHighSpeechProbability, -10.0f); + int volume_2 = UpdateRecommendedInputVolume( + mono_controller_2, kInitialInputVolume, kHighSpeechProbability, -10.0f); + + EXPECT_EQ(volume_1, kInitialInputVolume); + EXPECT_EQ(volume_2, kInitialInputVolume); + + volume_1 = UpdateRecommendedInputVolume( + mono_controller_1, 
volume_1, kHighSpeechProbability, absl::nullopt); + volume_2 = UpdateRecommendedInputVolume(mono_controller_2, volume_2, + kHighSpeechProbability, -10.0f); + + EXPECT_EQ(volume_1, kInitialInputVolume); + EXPECT_LT(volume_2, volume_1); } } // namespace webrtc diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/input_volume_stats_reporter.cc b/third_party/libwebrtc/modules/audio_processing/agc2/input_volume_stats_reporter.cc index cf6149eb492d..05624b1f9227 100644 --- a/third_party/libwebrtc/modules/audio_processing/agc2/input_volume_stats_reporter.cc +++ b/third_party/libwebrtc/modules/audio_processing/agc2/input_volume_stats_reporter.cc @@ -50,6 +50,16 @@ constexpr absl::string_view MetricNamePrefix( } } +metrics::Histogram* CreateVolumeHistogram(InputVolumeType input_volume_type) { + char buffer[64]; + rtc::SimpleStringBuilder builder(buffer); + builder << MetricNamePrefix(input_volume_type) << "OnChange"; + return metrics::HistogramFactoryGetCountsLinear(/*name=*/builder.str(), + /*min=*/1, + /*max=*/kMaxInputVolume, + /*bucket_count=*/50); +} + metrics::Histogram* CreateRateHistogram(InputVolumeType input_volume_type, absl::string_view name) { char buffer[64]; @@ -76,7 +86,8 @@ metrics::Histogram* CreateAverageHistogram(InputVolumeType input_volume_type, InputVolumeStatsReporter::InputVolumeStatsReporter(InputVolumeType type) : histograms_( - {.decrease_rate = CreateRateHistogram(type, "DecreaseRate"), + {.on_volume_change = CreateVolumeHistogram(type), + .decrease_rate = CreateRateHistogram(type, "DecreaseRate"), .decrease_average = CreateAverageHistogram(type, "DecreaseAverage"), .increase_rate = CreateRateHistogram(type, "IncreaseRate"), .increase_average = CreateAverageHistogram(type, "IncreaseAverage"), @@ -101,6 +112,9 @@ void InputVolumeStatsReporter::UpdateStatistics(int input_volume) { RTC_DCHECK_LE(input_volume, kMaxInputVolume); if (previous_input_volume_.has_value() && input_volume != previous_input_volume_.value()) { + // Update stats when the input volume changes. + metrics::HistogramAdd(histograms_.on_volume_change, input_volume); + // Update stats that are periodically logged. const int volume_change = input_volume - previous_input_volume_.value(); if (volume_change < 0) { ++volume_update_stats_.num_decreases; @@ -148,4 +162,10 @@ void InputVolumeStatsReporter::LogVolumeUpdateStats() const { } } +void UpdateHistogramOnRecommendedInputVolumeChangeToMatchTarget(int volume) { + RTC_HISTOGRAM_COUNTS_LINEAR( + "WebRTC.Audio.Apm.RecommendedInputVolume.OnChangeToMatchTarget", volume, + 1, kMaxInputVolume, 50); +} + } // namespace webrtc diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/input_volume_stats_reporter.h b/third_party/libwebrtc/modules/audio_processing/agc2/input_volume_stats_reporter.h index 4df5a85a0cc4..31b110031c1f 100644 --- a/third_party/libwebrtc/modules/audio_processing/agc2/input_volume_stats_reporter.h +++ b/third_party/libwebrtc/modules/audio_processing/agc2/input_volume_stats_reporter.h @@ -65,6 +65,7 @@ class InputVolumeStatsReporter { // Histograms. 
struct Histograms { + metrics::Histogram* const on_volume_change; metrics::Histogram* const decrease_rate; metrics::Histogram* const decrease_average; metrics::Histogram* const increase_rate; @@ -72,8 +73,9 @@ class InputVolumeStatsReporter { metrics::Histogram* const update_rate; metrics::Histogram* const update_average; bool AllPointersSet() const { - return !!decrease_rate && !!decrease_average && !!increase_rate && - !!increase_average && !!update_rate && !!update_average; + return !!on_volume_change && !!decrease_rate && !!decrease_average && + !!increase_rate && !!increase_average && !!update_rate && + !!update_average; } } histograms_; @@ -83,6 +85,12 @@ class InputVolumeStatsReporter { int log_volume_update_stats_counter_ = 0; absl::optional previous_input_volume_ = absl::nullopt; }; + +// Updates the histogram that keeps track of recommended input volume changes +// required in order to match the target level in the input volume adaptation +// process. +void UpdateHistogramOnRecommendedInputVolumeChangeToMatchTarget(int volume); + } // namespace webrtc #endif // MODULES_AUDIO_PROCESSING_AGC2_INPUT_VOLUME_STATS_REPORTER_H_ diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/input_volume_stats_reporter_unittest.cc b/third_party/libwebrtc/modules/audio_processing/agc2/input_volume_stats_reporter_unittest.cc index a3e2ccaf91c6..e762c1fb59f3 100644 --- a/third_party/libwebrtc/modules/audio_processing/agc2/input_volume_stats_reporter_unittest.cc +++ b/third_party/libwebrtc/modules/audio_processing/agc2/input_volume_stats_reporter_unittest.cc @@ -31,6 +31,10 @@ class InputVolumeStatsReporterTest protected: InputVolumeType InputVolumeType() const { return GetParam(); } + std::string VolumeLabel() const { + return (rtc::StringBuilder(kLabelPrefix) << VolumeTypeLabel() << "OnChange") + .str(); + } std::string DecreaseRateLabel() const { return (rtc::StringBuilder(kLabelPrefix) << VolumeTypeLabel() << "DecreaseRate") @@ -73,7 +77,13 @@ class InputVolumeStatsReporterTest } }; -TEST_P(InputVolumeStatsReporterTest, CheckLogVolumeUpdateStatsEmpty) { +TEST_P(InputVolumeStatsReporterTest, CheckVolumeOnChangeIsEmpty) { + InputVolumeStatsReporter stats_reporter(InputVolumeType()); + stats_reporter.UpdateStatistics(10); + EXPECT_METRIC_THAT(metrics::Samples(VolumeLabel()), ::testing::ElementsAre()); +} + +TEST_P(InputVolumeStatsReporterTest, CheckRateAverageStatsEmpty) { InputVolumeStatsReporter stats_reporter(InputVolumeType()); constexpr int kInputVolume = 10; stats_reporter.UpdateStatistics(kInputVolume); @@ -96,20 +106,33 @@ TEST_P(InputVolumeStatsReporterTest, CheckLogVolumeUpdateStatsEmpty) { ::testing::ElementsAre()); } -TEST_P(InputVolumeStatsReporterTest, CheckLogVolumeUpdateStatsNotEmpty) { +TEST_P(InputVolumeStatsReporterTest, CheckSamples) { InputVolumeStatsReporter stats_reporter(InputVolumeType()); - constexpr int kInputVolume = 10; - stats_reporter.UpdateStatistics(kInputVolume); + + constexpr int kInputVolume1 = 10; + stats_reporter.UpdateStatistics(kInputVolume1); // Update until periodic logging. + constexpr int kInputVolume2 = 12; for (int i = 0; i < kFramesIn60Seconds; i += 2) { - stats_reporter.UpdateStatistics(kInputVolume + 2); - stats_reporter.UpdateStatistics(kInputVolume); + stats_reporter.UpdateStatistics(kInputVolume2); + stats_reporter.UpdateStatistics(kInputVolume1); } // Update until periodic logging. 
+ constexpr int kInputVolume3 = 13; for (int i = 0; i < kFramesIn60Seconds; i += 2) { - stats_reporter.UpdateStatistics(kInputVolume + 3); - stats_reporter.UpdateStatistics(kInputVolume); + stats_reporter.UpdateStatistics(kInputVolume3); + stats_reporter.UpdateStatistics(kInputVolume1); } + + // Check volume changes stats. + EXPECT_METRIC_THAT( + metrics::Samples(VolumeLabel()), + ::testing::ElementsAre( + ::testing::Pair(kInputVolume1, kFramesIn60Seconds), + ::testing::Pair(kInputVolume2, kFramesIn60Seconds / 2), + ::testing::Pair(kInputVolume3, kFramesIn60Seconds / 2))); + + // Check volume change rate stats. EXPECT_METRIC_THAT( metrics::Samples(UpdateRateLabel()), ::testing::ElementsAre(::testing::Pair(kFramesIn60Seconds - 1, 1), @@ -121,6 +144,8 @@ TEST_P(InputVolumeStatsReporterTest, CheckLogVolumeUpdateStatsNotEmpty) { EXPECT_METRIC_THAT( metrics::Samples(IncreaseRateLabel()), ::testing::ElementsAre(::testing::Pair(kFramesIn60Seconds / 2, 2))); + + // Check volume change average stats. EXPECT_METRIC_THAT( metrics::Samples(UpdateAverageLabel()), ::testing::ElementsAre(::testing::Pair(2, 1), ::testing::Pair(3, 1))); diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/noise_level_estimator.cc b/third_party/libwebrtc/modules/audio_processing/agc2/noise_level_estimator.cc index 9fb1c24b6545..691513b5094a 100644 --- a/third_party/libwebrtc/modules/audio_processing/agc2/noise_level_estimator.cc +++ b/third_party/libwebrtc/modules/audio_processing/agc2/noise_level_estimator.cc @@ -72,6 +72,7 @@ class NoiseFloorEstimator : public NoiseLevelEstimator { "noise levels."); NoiseFloorEstimator(ApmDataDumper* data_dumper) : data_dumper_(data_dumper) { + RTC_DCHECK(data_dumper_); // Initially assume that 48 kHz will be used. `Analyze()` will detect the // used sample rate and call `Initialize()` again if needed. Initialize(/*sample_rate_hz=*/48000); @@ -91,8 +92,9 @@ class NoiseFloorEstimator : public NoiseLevelEstimator { const float frame_energy = FrameEnergy(frame); if (frame_energy <= min_noise_energy_) { // Ignore frames when muted or below the minimum measurable energy. - data_dumper_->DumpRaw("agc2_noise_floor_estimator_preliminary_level", - noise_energy_); + if (data_dumper_) + data_dumper_->DumpRaw("agc2_noise_floor_estimator_preliminary_level", + noise_energy_); return EnergyToDbfs(noise_energy_, static_cast(frame.samples_per_channel())); } @@ -104,8 +106,9 @@ class NoiseFloorEstimator : public NoiseLevelEstimator { preliminary_noise_energy_ = frame_energy; preliminary_noise_energy_set_ = true; } - data_dumper_->DumpRaw("agc2_noise_floor_estimator_preliminary_level", - preliminary_noise_energy_); + if (data_dumper_) + data_dumper_->DumpRaw("agc2_noise_floor_estimator_preliminary_level", + preliminary_noise_energy_); if (counter_ == 0) { // Full period observed. 
@@ -128,8 +131,13 @@ class NoiseFloorEstimator : public NoiseLevelEstimator { noise_energy_ = std::min(noise_energy_, preliminary_noise_energy_); counter_--; } - return EnergyToDbfs(noise_energy_, - static_cast(frame.samples_per_channel())); + + float noise_rms_dbfs = EnergyToDbfs( + noise_energy_, static_cast(frame.samples_per_channel())); + if (data_dumper_) + data_dumper_->DumpRaw("agc2_noise_rms_dbfs", noise_rms_dbfs); + + return noise_rms_dbfs; } private: diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/adaptive_digital_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/agc2/saturation_protector_gn/moz.build similarity index 96% rename from third_party/libwebrtc/modules/audio_processing/agc2/adaptive_digital_gn/moz.build rename to third_party/libwebrtc/modules/audio_processing/agc2/saturation_protector_gn/moz.build index 6431293d8009..0b754d4fbdde 100644 --- a/third_party/libwebrtc/modules/audio_processing/agc2/adaptive_digital_gn/moz.build +++ b/third_party/libwebrtc/modules/audio_processing/agc2/saturation_protector_gn/moz.build @@ -32,8 +32,6 @@ LOCAL_INCLUDES += [ ] UNIFIED_SOURCES += [ - "/third_party/libwebrtc/modules/audio_processing/agc2/adaptive_digital_gain_applier.cc", - "/third_party/libwebrtc/modules/audio_processing/agc2/adaptive_digital_gain_controller.cc", "/third_party/libwebrtc/modules/audio_processing/agc2/saturation_protector.cc", "/third_party/libwebrtc/modules/audio_processing/agc2/saturation_protector_buffer.cc" ] @@ -223,4 +221,4 @@ if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Linux": DEFINES["WEBRTC_ENABLE_AVX2"] = True DEFINES["_GNU_SOURCE"] = True -Library("adaptive_digital_gn") +Library("saturation_protector_gn") diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/speech_level_estimator.cc b/third_party/libwebrtc/modules/audio_processing/agc2/speech_level_estimator.cc index 8e234f7d7f65..7bf3252116fa 100644 --- a/third_party/libwebrtc/modules/audio_processing/agc2/speech_level_estimator.cc +++ b/third_party/libwebrtc/modules/audio_processing/agc2/speech_level_estimator.cc @@ -46,12 +46,15 @@ float SpeechLevelEstimator::LevelEstimatorState::Ratio::GetRatio() const { SpeechLevelEstimator::SpeechLevelEstimator( ApmDataDumper* apm_data_dumper, - const AudioProcessing::Config::GainController2::AdaptiveDigital& config) + const AudioProcessing::Config::GainController2::AdaptiveDigital& config, + int adjacent_speech_frames_threshold) : apm_data_dumper_(apm_data_dumper), initial_speech_level_dbfs_(GetInitialSpeechLevelEstimateDbfs(config)), - adjacent_speech_frames_threshold_( - config.adjacent_speech_frames_threshold), - level_dbfs_(initial_speech_level_dbfs_) { + adjacent_speech_frames_threshold_(adjacent_speech_frames_threshold), + level_dbfs_(initial_speech_level_dbfs_), + // TODO(bugs.webrtc.org/7494): Remove init below when AGC2 input volume + // controller temporal dependency removed. + is_confident_(false) { RTC_DCHECK(apm_data_dumper_); RTC_DCHECK_GE(adjacent_speech_frames_threshold_, 1); Reset(); @@ -110,23 +113,26 @@ void SpeechLevelEstimator::Update(float rms_dbfs, level_dbfs_ = ClampLevelEstimateDbfs(level_dbfs); } } + UpdateIsConfident(); DumpDebugData(); } -bool SpeechLevelEstimator::IsConfident() const { +void SpeechLevelEstimator::UpdateIsConfident() { if (adjacent_speech_frames_threshold_ == 1) { // Ignore `reliable_state_` when a single frame is enough to update the // level estimate (because it is not used). 
- return preliminary_state_.time_to_confidence_ms == 0; + is_confident_ = preliminary_state_.time_to_confidence_ms == 0; + return; } // Once confident, it remains confident. RTC_DCHECK(reliable_state_.time_to_confidence_ms != 0 || preliminary_state_.time_to_confidence_ms == 0); // During the first long enough speech sequence, `reliable_state_` must be // ignored since `preliminary_state_` is used. - return reliable_state_.time_to_confidence_ms == 0 || - (num_adjacent_speech_frames_ >= adjacent_speech_frames_threshold_ && - preliminary_state_.time_to_confidence_ms == 0); + is_confident_ = + reliable_state_.time_to_confidence_ms == 0 || + (num_adjacent_speech_frames_ >= adjacent_speech_frames_threshold_ && + preliminary_state_.time_to_confidence_ms == 0); } void SpeechLevelEstimator::Reset() { @@ -144,6 +150,10 @@ void SpeechLevelEstimator::ResetLevelEstimatorState( } void SpeechLevelEstimator::DumpDebugData() const { + if (!apm_data_dumper_) + return; + apm_data_dumper_->DumpRaw("agc2_speech_level_dbfs", level_dbfs_); + apm_data_dumper_->DumpRaw("agc2_speech_level_is_confident", is_confident_); apm_data_dumper_->DumpRaw( "agc2_adaptive_level_estimator_num_adjacent_speech_frames", num_adjacent_speech_frames_); diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/speech_level_estimator.h b/third_party/libwebrtc/modules/audio_processing/agc2/speech_level_estimator.h index 25e949119cd5..4d9f106ba9e0 100644 --- a/third_party/libwebrtc/modules/audio_processing/agc2/speech_level_estimator.h +++ b/third_party/libwebrtc/modules/audio_processing/agc2/speech_level_estimator.h @@ -28,7 +28,8 @@ class SpeechLevelEstimator { public: SpeechLevelEstimator( ApmDataDumper* apm_data_dumper, - const AudioProcessing::Config::GainController2::AdaptiveDigital& config); + const AudioProcessing::Config::GainController2::AdaptiveDigital& config, + int adjacent_speech_frames_threshold); SpeechLevelEstimator(const SpeechLevelEstimator&) = delete; SpeechLevelEstimator& operator=(const SpeechLevelEstimator&) = delete; @@ -37,7 +38,7 @@ class SpeechLevelEstimator { // Returns the estimated speech plus noise level. float level_dbfs() const { return level_dbfs_; } // Returns true if the estimator is confident on its current estimate. 
- bool IsConfident() const; + bool is_confident() const { return is_confident_; } void Reset(); @@ -58,6 +59,8 @@ class SpeechLevelEstimator { }; static_assert(std::is_trivially_copyable::value, ""); + void UpdateIsConfident(); + void ResetLevelEstimatorState(LevelEstimatorState& state) const; void DumpDebugData() const; @@ -69,6 +72,7 @@ class SpeechLevelEstimator { LevelEstimatorState preliminary_state_; LevelEstimatorState reliable_state_; float level_dbfs_; + bool is_confident_; int num_adjacent_speech_frames_; }; diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/speech_level_estimator_unittest.cc b/third_party/libwebrtc/modules/audio_processing/agc2/speech_level_estimator_unittest.cc index 57208de01464..e1c5f85434cb 100644 --- a/third_party/libwebrtc/modules/audio_processing/agc2/speech_level_estimator_unittest.cc +++ b/third_party/libwebrtc/modules/audio_processing/agc2/speech_level_estimator_unittest.cc @@ -42,13 +42,6 @@ void RunOnConstantLevel(int num_iterations, } } -constexpr AdaptiveDigitalConfig GetAdaptiveDigitalConfig( - int adjacent_speech_frames_threshold) { - AdaptiveDigitalConfig config; - config.adjacent_speech_frames_threshold = adjacent_speech_frames_threshold; - return config; -} - constexpr float kNoSpeechProbability = 0.0f; constexpr float kLowSpeechProbability = kVadConfidenceThreshold / 2.0f; constexpr float kMaxSpeechProbability = 1.0f; @@ -59,7 +52,8 @@ struct TestLevelEstimator { : data_dumper(0), estimator(std::make_unique( &data_dumper, - GetAdaptiveDigitalConfig(adjacent_speech_frames_threshold))), + AdaptiveDigitalConfig{}, + adjacent_speech_frames_threshold)), initial_speech_level_dbfs(estimator->level_dbfs()), level_rms_dbfs(initial_speech_level_dbfs / 2.0f), level_peak_dbfs(initial_speech_level_dbfs / 3.0f) { @@ -99,7 +93,7 @@ TEST(GainController2SpeechLevelEstimator, IsNotConfident) { level_estimator.level_rms_dbfs, level_estimator.level_peak_dbfs, kMaxSpeechProbability, *level_estimator.estimator); - EXPECT_FALSE(level_estimator.estimator->IsConfident()); + EXPECT_FALSE(level_estimator.estimator->is_confident()); } // Checks that the level controller becomes confident when enough speech frames @@ -110,7 +104,7 @@ TEST(GainController2SpeechLevelEstimator, IsConfident) { level_estimator.level_rms_dbfs, level_estimator.level_peak_dbfs, kMaxSpeechProbability, *level_estimator.estimator); - EXPECT_TRUE(level_estimator.estimator->IsConfident()); + EXPECT_TRUE(level_estimator.estimator->is_confident()); } // Checks that the estimated level is not affected by the level of non-speech @@ -156,7 +150,7 @@ TEST(GainController2SpeechLevelEstimator, ConvergenceSpeedAfterConfidence) { // No estimate change should occur, but confidence is achieved. ASSERT_FLOAT_EQ(level_estimator.estimator->level_dbfs(), level_estimator.initial_speech_level_dbfs); - ASSERT_TRUE(level_estimator.estimator->IsConfident()); + ASSERT_TRUE(level_estimator.estimator->is_confident()); // After confidence. constexpr float kConvergenceTimeAfterConfidenceNumFrames = 600; // 6 seconds. 
static_assert( diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/vad_wrapper.cc b/third_party/libwebrtc/modules/audio_processing/agc2/vad_wrapper.cc index 91448f8d861a..af6325dea7ac 100644 --- a/third_party/libwebrtc/modules/audio_processing/agc2/vad_wrapper.cc +++ b/third_party/libwebrtc/modules/audio_processing/agc2/vad_wrapper.cc @@ -52,6 +52,13 @@ class MonoVadImpl : public VoiceActivityDetectorWrapper::MonoVad { } // namespace +VoiceActivityDetectorWrapper::VoiceActivityDetectorWrapper( + const AvailableCpuFeatures& cpu_features, + int sample_rate_hz) + : VoiceActivityDetectorWrapper(kVadResetPeriodMs, + cpu_features, + sample_rate_hz) {} + VoiceActivityDetectorWrapper::VoiceActivityDetectorWrapper( int vad_reset_period_ms, const AvailableCpuFeatures& cpu_features, diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/vad_wrapper.h b/third_party/libwebrtc/modules/audio_processing/agc2/vad_wrapper.h index 6df0ead2716c..459c47163032 100644 --- a/third_party/libwebrtc/modules/audio_processing/agc2/vad_wrapper.h +++ b/third_party/libwebrtc/modules/audio_processing/agc2/vad_wrapper.h @@ -40,6 +40,10 @@ class VoiceActivityDetectorWrapper { virtual float Analyze(rtc::ArrayView frame) = 0; }; + // Ctor. Uses `cpu_features` to instantiate the default VAD. + VoiceActivityDetectorWrapper(const AvailableCpuFeatures& cpu_features, + int sample_rate_hz); + // Ctor. `vad_reset_period_ms` indicates the period in milliseconds to call // `MonoVad::Reset()`; it must be equal to or greater than the duration of two // frames. Uses `cpu_features` to instantiate the default VAD. diff --git a/third_party/libwebrtc/modules/audio_processing/audio_buffer.h b/third_party/libwebrtc/modules/audio_processing/audio_buffer.h index d866b8bce552..b9ea3000a2b6 100644 --- a/third_party/libwebrtc/modules/audio_processing/audio_buffer.h +++ b/third_party/libwebrtc/modules/audio_processing/audio_buffer.h @@ -32,7 +32,7 @@ enum Band { kBand0To8kHz = 0, kBand8To16kHz = 1, kBand16To24kHz = 2 }; class AudioBuffer { public: static const int kSplitBandSize = 160; - static const size_t kMaxSampleRate = 384000; + static const int kMaxSampleRate = 384000; AudioBuffer(size_t input_rate, size_t input_num_channels, size_t buffer_rate, diff --git a/third_party/libwebrtc/modules/audio_processing/audio_processing_impl.cc b/third_party/libwebrtc/modules/audio_processing/audio_processing_impl.cc index a0415e2bc39c..b0f5692d52ea 100644 --- a/third_party/libwebrtc/modules/audio_processing/audio_processing_impl.cc +++ b/third_party/libwebrtc/modules/audio_processing/audio_processing_impl.cc @@ -12,6 +12,7 @@ #include #include +#include #include #include #include @@ -30,6 +31,7 @@ #include "modules/audio_processing/logging/apm_data_dumper.h" #include "modules/audio_processing/optionally_built_submodule_creators.h" #include "rtc_base/checks.h" +#include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/logging.h" #include "rtc_base/time_utils.h" #include "rtc_base/trace_event.h" @@ -67,29 +69,6 @@ bool UseSetupSpecificDefaultAec3Congfig() { "WebRTC-Aec3SetupSpecificDefaultConfigDefaultsKillSwitch"); } -// If the "WebRTC-Audio-TransientSuppressorVadMode" field trial is unspecified, -// returns `TransientSuppressor::VadMode::kDefault`, otherwise parses the field -// trial and returns the specified mode: -// - WebRTC-Audio-TransientSuppressorVadMode/Enabled-Default returns `kDefault`; -// - WebRTC-Audio-TransientSuppressorVadMode/Enabled-RnnVad returns `kRnnVad`; -// - 
WebRTC-Audio-TransientSuppressorVadMode/Enabled-NoVad returns `kNoVad`. -TransientSuppressor::VadMode GetTransientSuppressorVadMode() { - constexpr char kFieldTrial[] = "WebRTC-Audio-TransientSuppressorVadMode"; - std::string full_name = webrtc::field_trial::FindFullName(kFieldTrial); - if (full_name.empty() || absl::EndsWith(full_name, "-Default")) { - return TransientSuppressor::VadMode::kDefault; - } - if (absl::EndsWith(full_name, "-RnnVad")) { - return TransientSuppressor::VadMode::kRnnVad; - } - if (absl::EndsWith(full_name, "-NoVad")) { - return TransientSuppressor::VadMode::kNoVad; - } - // Fallback to default. - RTC_LOG(LS_WARNING) << "Invalid parameter for " << kFieldTrial; - return TransientSuppressor::VadMode::kDefault; -} - // Identify the native processing rate that best handles a sample rate. int SuitableProcessRate(int minimum_rate, int max_splitting_rate, @@ -143,6 +122,199 @@ void PackRenderAudioBufferForEchoDetector(const AudioBuffer& audio, audio.channels_const()[0] + audio.num_frames()); } +// Options for gracefully handling processing errors. +enum class FormatErrorOutputOption { + kOutputExactCopyOfInput, + kOutputBroadcastCopyOfFirstInputChannel, + kOutputSilence, + kDoNothing +}; + +enum class AudioFormatValidity { + // Format is supported by APM. + kValidAndSupported, + // Format has a reasonable interpretation but is not supported. + kValidButUnsupportedSampleRate, + // The remaining enums values signal that the audio does not have a reasonable + // interpretation and cannot be used. + kInvalidSampleRate, + kInvalidChannelCount +}; + +AudioFormatValidity ValidateAudioFormat(const StreamConfig& config) { + if (config.sample_rate_hz() < 0) + return AudioFormatValidity::kInvalidSampleRate; + if (config.num_channels() == 0) + return AudioFormatValidity::kInvalidChannelCount; + + // Format has a reasonable interpretation, but may still be unsupported. + if (config.sample_rate_hz() < 8000 || + config.sample_rate_hz() > AudioBuffer::kMaxSampleRate) + return AudioFormatValidity::kValidButUnsupportedSampleRate; + + // Format is fully supported. + return AudioFormatValidity::kValidAndSupported; +} + +int AudioFormatValidityToErrorCode(AudioFormatValidity validity) { + switch (validity) { + case AudioFormatValidity::kValidAndSupported: + return AudioProcessing::kNoError; + case AudioFormatValidity::kValidButUnsupportedSampleRate: // fall-through + case AudioFormatValidity::kInvalidSampleRate: + return AudioProcessing::kBadSampleRateError; + case AudioFormatValidity::kInvalidChannelCount: + return AudioProcessing::kBadNumberChannelsError; + } + RTC_DCHECK(false); +} + +// Returns an AudioProcessing::Error together with the best possible option for +// output audio content. 
+std::pair ChooseErrorOutputOption( + const StreamConfig& input_config, + const StreamConfig& output_config) { + AudioFormatValidity input_validity = ValidateAudioFormat(input_config); + AudioFormatValidity output_validity = ValidateAudioFormat(output_config); + + if (input_validity == AudioFormatValidity::kValidAndSupported && + output_validity == AudioFormatValidity::kValidAndSupported && + (output_config.num_channels() == 1 || + output_config.num_channels() == input_config.num_channels())) { + return {AudioProcessing::kNoError, FormatErrorOutputOption::kDoNothing}; + } + + int error_code = AudioFormatValidityToErrorCode(input_validity); + if (error_code == AudioProcessing::kNoError) { + error_code = AudioFormatValidityToErrorCode(output_validity); + } + if (error_code == AudioProcessing::kNoError) { + // The individual formats are valid but there is some error - must be + // channel mismatch. + error_code = AudioProcessing::kBadNumberChannelsError; + } + + FormatErrorOutputOption output_option; + if (output_validity != AudioFormatValidity::kValidAndSupported && + output_validity != AudioFormatValidity::kValidButUnsupportedSampleRate) { + // The output format is uninterpretable: cannot do anything. + output_option = FormatErrorOutputOption::kDoNothing; + } else if (input_validity != AudioFormatValidity::kValidAndSupported && + input_validity != + AudioFormatValidity::kValidButUnsupportedSampleRate) { + // The input format is uninterpretable: cannot use it, must output silence. + output_option = FormatErrorOutputOption::kOutputSilence; + } else if (input_config.sample_rate_hz() != output_config.sample_rate_hz()) { + // Sample rates do not match: Cannot copy input into output, output silence. + // Note: If the sample rates are in a supported range, we could resample. + // However, that would significantly increase complexity of this error + // handling code. + output_option = FormatErrorOutputOption::kOutputSilence; + } else if (input_config.num_channels() != output_config.num_channels()) { + // Channel counts do not match: We cannot easily map input channels to + // output channels. + output_option = + FormatErrorOutputOption::kOutputBroadcastCopyOfFirstInputChannel; + } else { + // The formats match exactly. + RTC_DCHECK(input_config == output_config); + output_option = FormatErrorOutputOption::kOutputExactCopyOfInput; + } + return std::make_pair(error_code, output_option); +} + +// Checks if the audio format is supported. If not, the output is populated in a +// best-effort manner and an APM error code is returned. 
+int HandleUnsupportedAudioFormats(const int16_t* const src, + const StreamConfig& input_config, + const StreamConfig& output_config, + int16_t* const dest) { + RTC_DCHECK(src); + RTC_DCHECK(dest); + + auto [error_code, output_option] = + ChooseErrorOutputOption(input_config, output_config); + if (error_code == AudioProcessing::kNoError) + return AudioProcessing::kNoError; + + const size_t num_output_channels = output_config.num_channels(); + switch (output_option) { + case FormatErrorOutputOption::kOutputSilence: + memset(dest, 0, output_config.num_samples() * sizeof(int16_t)); + break; + case FormatErrorOutputOption::kOutputBroadcastCopyOfFirstInputChannel: + for (size_t i = 0; i < output_config.num_frames(); ++i) { + int16_t sample = src[input_config.num_channels() * i]; + for (size_t ch = 0; ch < num_output_channels; ++ch) { + dest[ch + num_output_channels * i] = sample; + } + } + break; + case FormatErrorOutputOption::kOutputExactCopyOfInput: + memcpy(dest, src, output_config.num_samples() * sizeof(int16_t)); + break; + case FormatErrorOutputOption::kDoNothing: + break; + } + return error_code; +} + +// Checks if the audio format is supported. If not, the output is populated in a +// best-effort manner and an APM error code is returned. +int HandleUnsupportedAudioFormats(const float* const* src, + const StreamConfig& input_config, + const StreamConfig& output_config, + float* const* dest) { + RTC_DCHECK(src); + RTC_DCHECK(dest); + for (size_t i = 0; i < input_config.num_channels(); ++i) { + RTC_DCHECK(src[i]); + } + for (size_t i = 0; i < output_config.num_channels(); ++i) { + RTC_DCHECK(dest[i]); + } + + auto [error_code, output_option] = + ChooseErrorOutputOption(input_config, output_config); + if (error_code == AudioProcessing::kNoError) + return AudioProcessing::kNoError; + + const size_t num_output_channels = output_config.num_channels(); + switch (output_option) { + case FormatErrorOutputOption::kOutputSilence: + for (size_t ch = 0; ch < num_output_channels; ++ch) { + memset(dest[ch], 0, output_config.num_frames() * sizeof(float)); + } + break; + case FormatErrorOutputOption::kOutputBroadcastCopyOfFirstInputChannel: + for (size_t ch = 0; ch < num_output_channels; ++ch) { + memcpy(dest[ch], src[0], output_config.num_frames() * sizeof(float)); + } + break; + case FormatErrorOutputOption::kOutputExactCopyOfInput: + for (size_t ch = 0; ch < num_output_channels; ++ch) { + memcpy(dest[ch], src[ch], output_config.num_frames() * sizeof(float)); + } + break; + case FormatErrorOutputOption::kDoNothing: + break; + } + return error_code; +} + +using DownmixMethod = AudioProcessing::Config::Pipeline::DownmixMethod; + +void SetDownmixMethod(AudioBuffer& buffer, DownmixMethod method) { + switch (method) { + case DownmixMethod::kAverageChannels: + buffer.set_downmixing_by_averaging(); + break; + case DownmixMethod::kUseFirstChannel: + buffer.set_downmixing_to_specific_channel(/*channel=*/0); + break; + } +} + constexpr int kUnspecifiedDataDumpInputVolume = -100; } // namespace @@ -150,6 +322,230 @@ constexpr int kUnspecifiedDataDumpInputVolume = -100; // Throughout webrtc, it's assumed that success is represented by zero. 
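// ---------------------------------------------------------------------------
// Editorial sketch, not part of the patch above. It illustrates the graceful
// format-error handling introduced by ValidateAudioFormat(),
// ChooseErrorOutputOption() and HandleUnsupportedAudioFormats(). The expected
// outcomes below are derived by tracing those helpers; whether ProcessStream()
// actually routes invalid formats through them depends on wiring outside this
// hunk, so treat the DCHECK as an assumption rather than a guarantee.
//
// Worked examples of the decision logic:
//   input 48 kHz stereo, output 4 kHz mono
//     -> output format is kValidButUnsupportedSampleRate
//     -> error  = kBadSampleRateError
//     -> option = kOutputSilence (rates differ, so input cannot be copied)
//   input 48 kHz stereo, output 48 kHz 3-channel
//     -> both formats individually valid and supported
//     -> error  = kBadNumberChannelsError (channel-count mismatch)
//     -> option = kOutputBroadcastCopyOfFirstInputChannel
// ---------------------------------------------------------------------------
#include <cstdint>
#include <vector>

#include "modules/audio_processing/include/audio_processing.h"
#include "rtc_base/checks.h"

// Hypothetical caller-level check for the first example above.
void SketchUnsupportedOutputRate(webrtc::AudioProcessing& apm) {
  const webrtc::StreamConfig input_config(/*sample_rate_hz=*/48000,
                                          /*num_channels=*/2);
  const webrtc::StreamConfig output_config(/*sample_rate_hz=*/4000,
                                           /*num_channels=*/1);
  // Non-zero samples so that a silenced output is observable.
  std::vector<int16_t> src(input_config.num_samples(), 1000);
  std::vector<int16_t> dest(output_config.num_samples(), 1000);

  const int error =
      apm.ProcessStream(src.data(), input_config, output_config, dest.data());

  // Expected under the helpers above: an error code instead of a crash, and a
  // best-effort (here: silenced) output buffer.
  RTC_DCHECK_EQ(error, webrtc::AudioProcessing::kBadSampleRateError);
}
// ---------------------------------------------------------------------------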
static_assert(AudioProcessing::kNoError == 0, "kNoError must be zero"); +absl::optional +AudioProcessingImpl::GetGainController2ExperimentParams() { + constexpr char kFieldTrialName[] = "WebRTC-Audio-GainController2"; + + if (!field_trial::IsEnabled(kFieldTrialName)) { + return absl::nullopt; + } + + FieldTrialFlag enabled("Enabled", false); + + // Whether the gain control should switch to AGC2. Enabled by default. + FieldTrialParameter switch_to_agc2("switch_to_agc2", true); + + // AGC2 input volume controller configuration. + constexpr InputVolumeController::Config kDefaultInputVolumeControllerConfig; + FieldTrialConstrained min_input_volume( + "min_input_volume", kDefaultInputVolumeControllerConfig.min_input_volume, + 0, 255); + FieldTrialConstrained clipped_level_min( + "clipped_level_min", + kDefaultInputVolumeControllerConfig.clipped_level_min, 0, 255); + FieldTrialConstrained clipped_level_step( + "clipped_level_step", + kDefaultInputVolumeControllerConfig.clipped_level_step, 0, 255); + FieldTrialConstrained clipped_ratio_threshold( + "clipped_ratio_threshold", + kDefaultInputVolumeControllerConfig.clipped_ratio_threshold, 0, 1); + FieldTrialConstrained clipped_wait_frames( + "clipped_wait_frames", + kDefaultInputVolumeControllerConfig.clipped_wait_frames, 0, + absl::nullopt); + FieldTrialParameter enable_clipping_predictor( + "enable_clipping_predictor", + kDefaultInputVolumeControllerConfig.enable_clipping_predictor); + FieldTrialConstrained target_range_max_dbfs( + "target_range_max_dbfs", + kDefaultInputVolumeControllerConfig.target_range_max_dbfs, -90, 30); + FieldTrialConstrained target_range_min_dbfs( + "target_range_min_dbfs", + kDefaultInputVolumeControllerConfig.target_range_min_dbfs, -90, 30); + FieldTrialConstrained update_input_volume_wait_frames( + "update_input_volume_wait_frames", + kDefaultInputVolumeControllerConfig.update_input_volume_wait_frames, 0, + absl::nullopt); + FieldTrialConstrained speech_probability_threshold( + "speech_probability_threshold", + kDefaultInputVolumeControllerConfig.speech_probability_threshold, 0, 1); + FieldTrialConstrained speech_ratio_threshold( + "speech_ratio_threshold", + kDefaultInputVolumeControllerConfig.speech_ratio_threshold, 0, 1); + + // AGC2 adaptive digital controller configuration. + constexpr AudioProcessing::Config::GainController2::AdaptiveDigital + kDefaultAdaptiveDigitalConfig; + FieldTrialConstrained headroom_db( + "headroom_db", kDefaultAdaptiveDigitalConfig.headroom_db, 0, + absl::nullopt); + FieldTrialConstrained max_gain_db( + "max_gain_db", kDefaultAdaptiveDigitalConfig.max_gain_db, 0, + absl::nullopt); + FieldTrialConstrained initial_gain_db( + "initial_gain_db", kDefaultAdaptiveDigitalConfig.initial_gain_db, 0, + absl::nullopt); + FieldTrialConstrained max_gain_change_db_per_second( + "max_gain_change_db_per_second", + kDefaultAdaptiveDigitalConfig.max_gain_change_db_per_second, 0, + absl::nullopt); + FieldTrialConstrained max_output_noise_level_dbfs( + "max_output_noise_level_dbfs", + kDefaultAdaptiveDigitalConfig.max_output_noise_level_dbfs, absl::nullopt, + 0); + + // Transient suppressor. + FieldTrialParameter disallow_transient_suppressor_usage( + "disallow_transient_suppressor_usage", false); + + // Field-trial based override for the input volume controller and adaptive + // digital configs. 
+ ParseFieldTrial( + {&enabled, &switch_to_agc2, &min_input_volume, &clipped_level_min, + &clipped_level_step, &clipped_ratio_threshold, &clipped_wait_frames, + &enable_clipping_predictor, &target_range_max_dbfs, + &target_range_min_dbfs, &update_input_volume_wait_frames, + &speech_probability_threshold, &speech_ratio_threshold, &headroom_db, + &max_gain_db, &initial_gain_db, &max_gain_change_db_per_second, + &max_output_noise_level_dbfs, &disallow_transient_suppressor_usage}, + field_trial::FindFullName(kFieldTrialName)); + // Checked already by `IsEnabled()` before parsing, therefore always true. + RTC_DCHECK(enabled); + + const bool do_not_change_agc_config = !switch_to_agc2.Get(); + if (do_not_change_agc_config && !disallow_transient_suppressor_usage.Get()) { + // Return an unspecifed value since, in this case, both the AGC2 and TS + // configurations won't be adjusted. + return absl::nullopt; + } + using Params = AudioProcessingImpl::GainController2ExperimentParams; + if (do_not_change_agc_config) { + // Return a value that leaves the AGC2 config unchanged and that always + // disables TS. + return Params{.agc2_config = absl::nullopt, + .disallow_transient_suppressor_usage = true}; + } + // Return a value that switches all the gain control to AGC2. + return Params{ + .agc2_config = + Params::Agc2Config{ + .input_volume_controller = + { + .min_input_volume = min_input_volume.Get(), + .clipped_level_min = clipped_level_min.Get(), + .clipped_level_step = clipped_level_step.Get(), + .clipped_ratio_threshold = + static_cast(clipped_ratio_threshold.Get()), + .clipped_wait_frames = clipped_wait_frames.Get(), + .enable_clipping_predictor = + enable_clipping_predictor.Get(), + .target_range_max_dbfs = target_range_max_dbfs.Get(), + .target_range_min_dbfs = target_range_min_dbfs.Get(), + .update_input_volume_wait_frames = + update_input_volume_wait_frames.Get(), + .speech_probability_threshold = static_cast( + speech_probability_threshold.Get()), + .speech_ratio_threshold = + static_cast(speech_ratio_threshold.Get()), + }, + .adaptive_digital_controller = + { + .enabled = false, + .headroom_db = static_cast(headroom_db.Get()), + .max_gain_db = static_cast(max_gain_db.Get()), + .initial_gain_db = + static_cast(initial_gain_db.Get()), + .max_gain_change_db_per_second = static_cast( + max_gain_change_db_per_second.Get()), + .max_output_noise_level_dbfs = + static_cast(max_output_noise_level_dbfs.Get()), + }}, + .disallow_transient_suppressor_usage = + disallow_transient_suppressor_usage.Get()}; +} + +AudioProcessing::Config AudioProcessingImpl::AdjustConfig( + const AudioProcessing::Config& config, + const absl::optional& + experiment_params) { + if (!experiment_params.has_value() || + (!experiment_params->agc2_config.has_value() && + !experiment_params->disallow_transient_suppressor_usage)) { + // When the experiment parameters are unspecified or when the AGC and TS + // configuration are not overridden, return the unmodified configuration. + return config; + } + + AudioProcessing::Config adjusted_config = config; + + // Override the transient suppressor configuration. + if (experiment_params->disallow_transient_suppressor_usage) { + adjusted_config.transient_suppression.enabled = false; + } + + // Override the auto gain control configuration if the AGC1 analog gain + // controller is active and `experiment_params->agc2_config` is specified. 
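The trial string parsed above uses the usual comma-separated key:value form. As a sketch of how the override is enabled in practice, the new unit tests further down in this patch do the following (the particular keys and values here are illustrative and taken from those tests):

  webrtc::test::ScopedFieldTrials field_trials(
      "WebRTC-Audio-GainController2/Enabled,switch_to_agc2:true,"
      "min_input_volume:123,headroom_db:10/");
  // Any APM created while the trial is in scope picks up the override via
  // GetGainController2ExperimentParams() and AdjustConfig().
  auto apm = AudioProcessingBuilder().Create();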
+ const bool agc1_analog_enabled = + config.gain_controller1.enabled && + (config.gain_controller1.mode == + AudioProcessing::Config::GainController1::kAdaptiveAnalog || + config.gain_controller1.analog_gain_controller.enabled); + if (agc1_analog_enabled && experiment_params->agc2_config.has_value()) { + // Check that the unadjusted AGC config meets the preconditions. + const bool hybrid_agc_config_detected = + config.gain_controller1.enabled && + config.gain_controller1.analog_gain_controller.enabled && + !config.gain_controller1.analog_gain_controller + .enable_digital_adaptive && + config.gain_controller2.enabled && + config.gain_controller2.adaptive_digital.enabled; + const bool full_agc1_config_detected = + config.gain_controller1.enabled && + config.gain_controller1.analog_gain_controller.enabled && + config.gain_controller1.analog_gain_controller + .enable_digital_adaptive && + !config.gain_controller2.enabled; + const bool one_and_only_one_input_volume_controller = + hybrid_agc_config_detected != full_agc1_config_detected; + if (!one_and_only_one_input_volume_controller || + config.gain_controller2.input_volume_controller.enabled) { + RTC_LOG(LS_ERROR) << "Cannot adjust AGC config (precondition failed)"; + if (!one_and_only_one_input_volume_controller) + RTC_LOG(LS_ERROR) + << "One and only one input volume controller must be enabled."; + if (config.gain_controller2.input_volume_controller.enabled) + RTC_LOG(LS_ERROR) + << "The AGC2 input volume controller must be disabled."; + } else { + adjusted_config.gain_controller1.enabled = false; + adjusted_config.gain_controller1.analog_gain_controller.enabled = false; + + adjusted_config.gain_controller2.enabled = true; + adjusted_config.gain_controller2.input_volume_controller.enabled = true; + adjusted_config.gain_controller2.adaptive_digital = + experiment_params->agc2_config->adaptive_digital_controller; + adjusted_config.gain_controller2.adaptive_digital.enabled = true; + } + } + + return adjusted_config; +} + +TransientSuppressor::VadMode AudioProcessingImpl::GetTransientSuppressorVadMode( + const absl::optional& + params) { + if (params.has_value() && params->agc2_config.has_value() && + !params->disallow_transient_suppressor_usage) { + // When the experiment is active, the gain control switches to AGC2 and TS + // can be active, use the RNN VAD to control TS. This choice will also + // disable the internal RNN VAD in AGC2. + return TransientSuppressor::VadMode::kRnnVad; + } + // If TS is disabled, the returned value does not matter. If enabled, use the + // default VAD. 
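The "one and only one input volume controller" precondition above corresponds to the two configurations the new parametrized tests label "Full AGC1" and "Hybrid AGC". For reference, these are the shapes used in the test instantiations below:

  // Full AGC1: analog controller plus digital adaptation in AGC1, AGC2 off.
  AudioProcessing::Config full_agc1{
      .gain_controller1 = {.enabled = true,
                           .analog_gain_controller =
                               {.enabled = true,
                                .enable_digital_adaptive = true}},
      .gain_controller2 = {.enabled = false}};
  // Hybrid AGC: AGC1 analog only, digital adaptation delegated to AGC2.
  AudioProcessing::Config hybrid_agc{
      .gain_controller1 = {.enabled = true,
                           .analog_gain_controller =
                               {.enabled = true,
                                .enable_digital_adaptive = false}},
      .gain_controller2 = {.enabled = true,
                           .adaptive_digital = {.enabled = true}}};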
+ return TransientSuppressor::VadMode::kDefault; +} + AudioProcessingImpl::SubmoduleStates::SubmoduleStates( bool capture_post_processor_enabled, bool render_pre_processor_enabled, @@ -267,15 +663,17 @@ AudioProcessingImpl::AudioProcessingImpl( : data_dumper_(new ApmDataDumper(instance_count_.fetch_add(1) + 1)), use_setup_specific_default_aec3_config_( UseSetupSpecificDefaultAec3Congfig()), + gain_controller2_experiment_params_(GetGainController2ExperimentParams()), use_denormal_disabler_( !field_trial::IsEnabled("WebRTC-ApmDenormalDisablerKillSwitch")), - transient_suppressor_vad_mode_(GetTransientSuppressorVadMode()), + transient_suppressor_vad_mode_( + GetTransientSuppressorVadMode(gain_controller2_experiment_params_)), capture_runtime_settings_(RuntimeSettingQueueSize()), render_runtime_settings_(RuntimeSettingQueueSize()), capture_runtime_settings_enqueuer_(&capture_runtime_settings_), render_runtime_settings_enqueuer_(&render_runtime_settings_), echo_control_factory_(std::move(echo_control_factory)), - config_(config), + config_(AdjustConfig(config, gain_controller2_experiment_params_)), submodule_states_(!!capture_post_processor, !!render_pre_processor, !!capture_analyzer), @@ -305,9 +703,11 @@ AudioProcessingImpl::AudioProcessingImpl( << !!submodules_.capture_post_processor << "\nRender pre processor: " << !!submodules_.render_pre_processor; - RTC_LOG(LS_INFO) << "Denormal disabler: " - << (DenormalDisabler::IsSupported() ? "supported" - : "unsupported"); + if (!DenormalDisabler::IsSupported()) { + RTC_LOG(LS_INFO) << "Denormal disabler unsupported"; + } + + RTC_LOG(LS_INFO) << "AudioProcessing: " << config_.ToString(); // Mark Echo Controller enabled if a factory is injected. capture_nonlocked_.echo_controller_enabled = @@ -330,18 +730,23 @@ int AudioProcessingImpl::Initialize(const ProcessingConfig& processing_config) { // Run in a single-threaded manner during initialization. MutexLock lock_render(&mutex_render_); MutexLock lock_capture(&mutex_capture_); - return InitializeLocked(processing_config); + InitializeLocked(processing_config); + return kNoError; } -int AudioProcessingImpl::MaybeInitializeRender( - const ProcessingConfig& processing_config) { - // Called from both threads. Thread check is therefore not possible. 
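Because the constructor now stores the field-trial-adjusted configuration (config_(AdjustConfig(...)) above), the override becomes observable through GetConfig(). This is the pattern the new Agc2FieldTrial* tests rely on; sketched here, with `original` standing for any of the AGC configurations those tests exercise:

  auto adjusted =
      AudioProcessingBuilder().SetConfig(original).Create()->GetConfig();
  // With "switch_to_agc2:true" active, `adjusted` has AGC1 disabled and AGC2
  // (including its input volume controller) enabled.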
+void AudioProcessingImpl::MaybeInitializeRender( + const StreamConfig& input_config, + const StreamConfig& output_config) { + ProcessingConfig processing_config = formats_.api_format; + processing_config.reverse_input_stream() = input_config; + processing_config.reverse_output_stream() = output_config; + if (processing_config == formats_.api_format) { - return kNoError; + return; } MutexLock lock_capture(&mutex_capture_); - return InitializeLocked(processing_config); + InitializeLocked(processing_config); } void AudioProcessingImpl::InitializeLocked() { @@ -381,6 +786,8 @@ void AudioProcessingImpl::InitializeLocked() { formats_.api_format.output_stream().num_channels(), formats_.api_format.output_stream().sample_rate_hz(), formats_.api_format.output_stream().num_channels())); + SetDownmixMethod(*capture_.capture_audio, + config_.pipeline.capture_downmix_method); if (capture_nonlocked_.capture_processing_format.sample_rate_hz() < formats_.api_format.output_stream().sample_rate_hz() && @@ -392,6 +799,8 @@ void AudioProcessingImpl::InitializeLocked() { formats_.api_format.output_stream().num_channels(), formats_.api_format.output_stream().sample_rate_hz(), formats_.api_format.output_stream().num_channels())); + SetDownmixMethod(*capture_.capture_fullband_audio, + config_.pipeline.capture_downmix_method); } else { capture_.capture_fullband_audio.reset(); } @@ -416,25 +825,9 @@ void AudioProcessingImpl::InitializeLocked() { } } -int AudioProcessingImpl::InitializeLocked(const ProcessingConfig& config) { +void AudioProcessingImpl::InitializeLocked(const ProcessingConfig& config) { UpdateActiveSubmoduleStates(); - for (const auto& stream : config.streams) { - if (stream.num_channels() > 0 && stream.sample_rate_hz() <= 0) { - return kBadSampleRateError; - } - } - - const size_t num_in_channels = config.input_stream().num_channels(); - const size_t num_out_channels = config.output_stream().num_channels(); - - // Need at least one input channel. - // Need either one output channel or as many outputs as there are inputs. - if (num_in_channels == 0 || - !(num_out_channels == 1 || num_out_channels == num_in_channels)) { - return kBadNumberChannelsError; - } - formats_.api_format = config; // Choose maximum rate to use for the split filtering. @@ -508,50 +901,59 @@ int AudioProcessingImpl::InitializeLocked(const ProcessingConfig& config) { } InitializeLocked(); - return kNoError; } void AudioProcessingImpl::ApplyConfig(const AudioProcessing::Config& config) { - RTC_LOG(LS_INFO) << "AudioProcessing::ApplyConfig: " << config.ToString(); - // Run in a single-threaded manner when applying the settings. 
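SetDownmixMethod() above is driven by the new pipeline option. A minimal sketch of selecting it through the public config, using the field names introduced by this patch; the choice takes effect when the capture pipeline is (re)initialized and SetDownmixMethod() is applied to the capture buffers:

  auto apm = AudioProcessingBuilder().Create();
  AudioProcessing::Config cfg = apm->GetConfig();
  cfg.pipeline.capture_downmix_method =
      AudioProcessing::Config::Pipeline::DownmixMethod::kUseFirstChannel;
  apm->ApplyConfig(cfg);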
MutexLock lock_render(&mutex_render_); MutexLock lock_capture(&mutex_capture_); + const auto adjusted_config = + AdjustConfig(config, gain_controller2_experiment_params_); + RTC_LOG(LS_INFO) << "AudioProcessing::ApplyConfig: " + << adjusted_config.ToString(); + const bool pipeline_config_changed = config_.pipeline.multi_channel_render != - config.pipeline.multi_channel_render || + adjusted_config.pipeline.multi_channel_render || config_.pipeline.multi_channel_capture != - config.pipeline.multi_channel_capture || + adjusted_config.pipeline.multi_channel_capture || config_.pipeline.maximum_internal_processing_rate != - config.pipeline.maximum_internal_processing_rate; + adjusted_config.pipeline.maximum_internal_processing_rate || + config_.pipeline.capture_downmix_method != + adjusted_config.pipeline.capture_downmix_method; const bool aec_config_changed = - config_.echo_canceller.enabled != config.echo_canceller.enabled || - config_.echo_canceller.mobile_mode != config.echo_canceller.mobile_mode; + config_.echo_canceller.enabled != + adjusted_config.echo_canceller.enabled || + config_.echo_canceller.mobile_mode != + adjusted_config.echo_canceller.mobile_mode; const bool agc1_config_changed = - config_.gain_controller1 != config.gain_controller1; + config_.gain_controller1 != adjusted_config.gain_controller1; const bool agc2_config_changed = - config_.gain_controller2 != config.gain_controller2; + config_.gain_controller2 != adjusted_config.gain_controller2; const bool ns_config_changed = - config_.noise_suppression.enabled != config.noise_suppression.enabled || - config_.noise_suppression.level != config.noise_suppression.level; + config_.noise_suppression.enabled != + adjusted_config.noise_suppression.enabled || + config_.noise_suppression.level != + adjusted_config.noise_suppression.level; const bool ts_config_changed = config_.transient_suppression.enabled != - config.transient_suppression.enabled; + adjusted_config.transient_suppression.enabled; const bool pre_amplifier_config_changed = - config_.pre_amplifier.enabled != config.pre_amplifier.enabled || + config_.pre_amplifier.enabled != adjusted_config.pre_amplifier.enabled || config_.pre_amplifier.fixed_gain_factor != - config.pre_amplifier.fixed_gain_factor; + adjusted_config.pre_amplifier.fixed_gain_factor; const bool gain_adjustment_config_changed = - config_.capture_level_adjustment != config.capture_level_adjustment; + config_.capture_level_adjustment != + adjusted_config.capture_level_adjustment; - config_ = config; + config_ = adjusted_config; if (aec_config_changed) { InitializeEchoController(); @@ -661,6 +1063,10 @@ void AudioProcessingImpl::HandleCaptureOutputUsedSetting( submodules_.noise_suppressor->SetCaptureOutputUsage( capture_.capture_output_used); } + if (submodules_.gain_controller2) { + submodules_.gain_controller2->SetCaptureOutputUsed( + capture_.capture_output_used); + } } void AudioProcessingImpl::SetRuntimeSetting(RuntimeSetting setting) { @@ -711,13 +1117,12 @@ bool AudioProcessingImpl::RuntimeSettingEnqueuer::Enqueue( const bool successful_insert = runtime_settings_.Insert(&setting); if (!successful_insert) { - RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.ApmRuntimeSettingCannotEnqueue", 1); RTC_LOG(LS_ERROR) << "Cannot enqueue a new runtime setting."; } return successful_insert; } -int AudioProcessingImpl::MaybeInitializeCapture( +void AudioProcessingImpl::MaybeInitializeCapture( const StreamConfig& input_config, const StreamConfig& output_config) { ProcessingConfig processing_config; @@ -746,9 +1151,8 @@ int 
AudioProcessingImpl::MaybeInitializeCapture( processing_config = formats_.api_format; processing_config.input_stream() = input_config; processing_config.output_stream() = output_config; - RETURN_ON_ERR(InitializeLocked(processing_config)); + InitializeLocked(processing_config); } - return kNoError; } int AudioProcessingImpl::ProcessStream(const float* const* src, @@ -756,14 +1160,12 @@ int AudioProcessingImpl::ProcessStream(const float* const* src, const StreamConfig& output_config, float* const* dest) { TRACE_EVENT0("webrtc", "AudioProcessing::ProcessStream_StreamConfig"); - if (!src || !dest) { - return kNullPointerError; - } - - RETURN_ON_ERR(MaybeInitializeCapture(input_config, output_config)); + DenormalDisabler denormal_disabler(use_denormal_disabler_); + RETURN_ON_ERR( + HandleUnsupportedAudioFormats(src, input_config, output_config, dest)); + MaybeInitializeCapture(input_config, output_config); MutexLock lock_capture(&mutex_capture_); - DenormalDisabler denormal_disabler(use_denormal_disabler_); if (aec_dump_) { RecordUnprocessedCaptureStream(src); @@ -836,7 +1238,9 @@ void AudioProcessingImpl::HandleCaptureRuntimeSettings() { // TODO(bugs.chromium.org/9138): Log setting handling by Aec Dump. break; case RuntimeSetting::Type::kCaptureCompressionGain: { - if (!submodules_.agc_manager) { + if (!submodules_.agc_manager && + !(submodules_.gain_controller2 && + config_.gain_controller2.input_volume_controller.enabled)) { float value; setting.GetFloat(&value); int int_value = static_cast(value + .5f); @@ -1055,7 +1459,10 @@ int AudioProcessingImpl::ProcessStream(const int16_t* const src, const StreamConfig& output_config, int16_t* const dest) { TRACE_EVENT0("webrtc", "AudioProcessing::ProcessStream_AudioFrame"); - RETURN_ON_ERR(MaybeInitializeCapture(input_config, output_config)); + + RETURN_ON_ERR( + HandleUnsupportedAudioFormats(src, input_config, output_config, dest)); + MaybeInitializeCapture(input_config, output_config); MutexLock lock_capture(&mutex_capture_); DenormalDisabler denormal_disabler(use_denormal_disabler_); @@ -1169,6 +1576,16 @@ int AudioProcessingImpl::ProcessCaptureStreamLocked() { submodules_.agc_manager->AnalyzePreProcess(*capture_buffer); } + if (submodules_.gain_controller2 && + config_.gain_controller2.input_volume_controller.enabled) { + // Expect the volume to be available if the input controller is enabled. + RTC_DCHECK(capture_.applied_input_volume.has_value()); + if (capture_.applied_input_volume.has_value()) { + submodules_.gain_controller2->Analyze(*capture_.applied_input_volume, + *capture_buffer); + } + } + if (submodule_states_.CaptureMultiBandSubModulesActive() && SampleRateSupportsMultiBand( capture_nonlocked_.capture_processing_format.sample_rate_hz())) { @@ -1322,6 +1739,8 @@ int AudioProcessingImpl::ProcessCaptureStreamLocked() { } if (submodules_.gain_controller2) { + // TODO(bugs.webrtc.org/7494): Let AGC2 detect applied input volume + // changes. 
submodules_.gain_controller2->Process( voice_probability, capture_.applied_input_volume_changed, capture_buffer); @@ -1412,6 +1831,15 @@ int AudioProcessingImpl::AnalyzeReverseStream( const StreamConfig& reverse_config) { TRACE_EVENT0("webrtc", "AudioProcessing::AnalyzeReverseStream_StreamConfig"); MutexLock lock(&mutex_render_); + DenormalDisabler denormal_disabler(use_denormal_disabler_); + RTC_DCHECK(data); + for (size_t i = 0; i < reverse_config.num_channels(); ++i) { + RTC_DCHECK(data[i]); + } + RETURN_ON_ERR( + AudioFormatValidityToErrorCode(ValidateAudioFormat(reverse_config))); + + MaybeInitializeRender(reverse_config, reverse_config); return AnalyzeReverseStreamLocked(data, reverse_config, reverse_config); } @@ -1422,8 +1850,13 @@ int AudioProcessingImpl::ProcessReverseStream(const float* const* src, TRACE_EVENT0("webrtc", "AudioProcessing::ProcessReverseStream_StreamConfig"); MutexLock lock(&mutex_render_); DenormalDisabler denormal_disabler(use_denormal_disabler_); + RETURN_ON_ERR( + HandleUnsupportedAudioFormats(src, input_config, output_config, dest)); + + MaybeInitializeRender(input_config, output_config); RETURN_ON_ERR(AnalyzeReverseStreamLocked(src, input_config, output_config)); + if (submodule_states_.RenderMultiBandProcessingActive() || submodule_states_.RenderFullBandProcessingActive()) { render_.render_audio->CopyTo(formats_.api_format.reverse_output_stream(), @@ -1444,24 +1877,6 @@ int AudioProcessingImpl::AnalyzeReverseStreamLocked( const float* const* src, const StreamConfig& input_config, const StreamConfig& output_config) { - if (src == nullptr) { - return kNullPointerError; - } - - if (input_config.num_channels() == 0) { - return kBadNumberChannelsError; - } - - ProcessingConfig processing_config = formats_.api_format; - processing_config.reverse_input_stream() = input_config; - processing_config.reverse_output_stream() = output_config; - - RETURN_ON_ERR(MaybeInitializeRender(processing_config)); - RTC_DCHECK_EQ(input_config.num_frames(), - formats_.api_format.reverse_input_stream().num_frames()); - - DenormalDisabler denormal_disabler(use_denormal_disabler_); - if (aec_dump_) { const size_t channel_size = formats_.api_format.reverse_input_stream().num_frames(); @@ -1481,28 +1896,12 @@ int AudioProcessingImpl::ProcessReverseStream(const int16_t* const src, int16_t* const dest) { TRACE_EVENT0("webrtc", "AudioProcessing::ProcessReverseStream_AudioFrame"); - if (input_config.num_channels() <= 0) { - return AudioProcessing::Error::kBadNumberChannelsError; - } - MutexLock lock(&mutex_render_); DenormalDisabler denormal_disabler(use_denormal_disabler_); - ProcessingConfig processing_config = formats_.api_format; - processing_config.reverse_input_stream().set_sample_rate_hz( - input_config.sample_rate_hz()); - processing_config.reverse_input_stream().set_num_channels( - input_config.num_channels()); - processing_config.reverse_output_stream().set_sample_rate_hz( - output_config.sample_rate_hz()); - processing_config.reverse_output_stream().set_num_channels( - output_config.num_channels()); - - RETURN_ON_ERR(MaybeInitializeRender(processing_config)); - if (input_config.num_frames() != - formats_.api_format.reverse_input_stream().num_frames()) { - return kBadDataLengthError; - } + RETURN_ON_ERR( + HandleUnsupportedAudioFormats(src, input_config, output_config, dest)); + MaybeInitializeRender(input_config, output_config); if (aec_dump_) { aec_dump_->WriteRenderStreamMessage(src, input_config.num_frames(), @@ -1610,12 +2009,6 @@ void 
AudioProcessingImpl::set_stream_key_pressed(bool key_pressed) { } void AudioProcessingImpl::set_stream_analog_level(int level) { - // Check that input volume emulation is disabled since, when enabled, there is - // no externally applied input volume to notify to APM. - RTC_DCHECK( - !submodules_.capture_levels_adjuster || - !config_.capture_level_adjustment.analog_mic_gain_emulation.enabled); - MutexLock lock_capture(&mutex_capture_); set_stream_analog_level_locked(level); } @@ -1677,6 +2070,13 @@ void AudioProcessingImpl::UpdateRecommendedInputVolumeLocked() { return; } + if (submodules_.gain_controller2 && + config_.gain_controller2.input_volume_controller.enabled) { + capture_.recommended_input_volume = + submodules_.gain_controller2->recommended_input_volume(); + return; + } + capture_.recommended_input_volume = capture_.applied_input_volume; } @@ -1875,6 +2275,16 @@ void AudioProcessingImpl::InitializeEchoController() { } void AudioProcessingImpl::InitializeGainController1() { + if (config_.gain_controller2.enabled && + config_.gain_controller2.input_volume_controller.enabled && + config_.gain_controller1.enabled && + (config_.gain_controller1.mode == + AudioProcessing::Config::GainController1::kAdaptiveAnalog || + config_.gain_controller1.analog_gain_controller.enabled)) { + RTC_LOG(LS_ERROR) << "APM configuration not valid: " + << "Multiple input volume controllers enabled."; + } + if (!config_.gain_controller1.enabled) { submodules_.agc_manager.reset(); submodules_.gain_control.reset(); @@ -1945,9 +2355,19 @@ void AudioProcessingImpl::InitializeGainController2(bool config_has_changed) { if (!submodules_.gain_controller2 || config_has_changed) { const bool use_internal_vad = transient_suppressor_vad_mode_ != TransientSuppressor::VadMode::kRnnVad; + const bool input_volume_controller_config_overridden = + gain_controller2_experiment_params_.has_value() && + gain_controller2_experiment_params_->agc2_config.has_value(); + const InputVolumeController::Config input_volume_controller_config = + input_volume_controller_config_overridden + ? gain_controller2_experiment_params_->agc2_config + ->input_volume_controller + : InputVolumeController::Config{}; submodules_.gain_controller2 = std::make_unique( - config_.gain_controller2, proc_fullband_sample_rate_hz(), - num_input_channels(), use_internal_vad); + config_.gain_controller2, input_volume_controller_config, + proc_fullband_sample_rate_hz(), num_proc_channels(), use_internal_vad); + submodules_.gain_controller2->SetCaptureOutputUsed( + capture_.capture_output_used); } } @@ -1959,7 +2379,8 @@ void AudioProcessingImpl::InitializeVoiceActivityDetector( const bool use_vad = transient_suppressor_vad_mode_ == TransientSuppressor::VadMode::kRnnVad && config_.gain_controller2.enabled && - config_.gain_controller2.adaptive_digital.enabled; + (config_.gain_controller2.adaptive_digital.enabled || + config_.gain_controller2.input_volume_controller.enabled); if (!use_vad) { submodules_.voice_activity_detector.reset(); return; @@ -1969,7 +2390,6 @@ void AudioProcessingImpl::InitializeVoiceActivityDetector( // TODO(bugs.webrtc.org/13663): Cache CPU features in APM and use here. 
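The recommended_input_volume plumbing above feeds the usual caller-side volume loop. As a sketch, this is the loop the new tests run (buffer and StreamConfig setup omitted; see ProcessInputVolume() and the Agc2FieldTrialParametrizedTest bodies below):

  int volume = 100;
  for (int i = 0; i < kFramesToProcess; ++i) {
    apm->set_stream_analog_level(volume);             // applied input volume
    apm->ProcessStream(channel_pointers, stream_config, stream_config,
                       channel_pointers);
    volume = apm->recommended_stream_analog_level();  // AGC2 recommendation
  }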
submodules_.voice_activity_detector = std::make_unique( - config_.gain_controller2.adaptive_digital.vad_reset_period_ms, submodules_.gain_controller2->GetCpuFeatures(), proc_fullband_sample_rate_hz()); } diff --git a/third_party/libwebrtc/modules/audio_processing/audio_processing_impl.h b/third_party/libwebrtc/modules/audio_processing/audio_processing_impl.h index 5daea9088af2..8ee07edbe2a9 100644 --- a/third_party/libwebrtc/modules/audio_processing/audio_processing_impl.h +++ b/third_party/libwebrtc/modules/audio_processing/audio_processing_impl.h @@ -160,6 +160,9 @@ class AudioProcessingImpl : public AudioProcessing { ReinitializeTransientSuppressor); FRIEND_TEST_ALL_PREFIXES(ApmWithSubmodulesExcludedTest, BitexactWithDisabledModules); + FRIEND_TEST_ALL_PREFIXES( + AudioProcessingImplGainController2FieldTrialParametrizedTest, + ConfigAdjustedWhenExperimentEnabled); void set_stream_analog_level_locked(int level) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_); @@ -188,6 +191,45 @@ class AudioProcessingImpl : public AudioProcessing { static std::atomic instance_count_; const bool use_setup_specific_default_aec3_config_; + // Parameters for the "GainController2" experiment which determines whether + // the following APM sub-modules are created and, if so, their configurations: + // AGC2 (`gain_controller2`), AGC1 (`gain_control`, `agc_manager`) and TS + // (`transient_suppressor`). + // TODO(bugs.webrtc.org/7494): Remove when the "WebRTC-Audio-GainController2" + // field trial is removed. + struct GainController2ExperimentParams { + struct Agc2Config { + InputVolumeController::Config input_volume_controller; + AudioProcessing::Config::GainController2::AdaptiveDigital + adaptive_digital_controller; + }; + // When `agc2_config` is specified, all gain control switches to AGC2 and + // the configuration is overridden. + absl::optional agc2_config; + // When true, the transient suppressor submodule is never created regardless + // of the APM configuration. + bool disallow_transient_suppressor_usage; + }; + // Specified when the "WebRTC-Audio-GainController2" field trial is specified. + // TODO(bugs.webrtc.org/7494): Remove when the "WebRTC-Audio-GainController2" + // field trial is removed. + const absl::optional + gain_controller2_experiment_params_; + + // Parses the "WebRTC-Audio-GainController2" field trial. If disabled, returns + // an unspecified value. + static absl::optional + GetGainController2ExperimentParams(); + + // When `experiment_params` is specified, returns an APM configuration + // modified according to the experiment parameters. Otherwise returns + // `config`. + static AudioProcessing::Config AdjustConfig( + const AudioProcessing::Config& config, + const absl::optional& experiment_params); + static TransientSuppressor::VadMode GetTransientSuppressorVadMode( + const absl::optional& experiment_params); + const bool use_denormal_disabler_; const TransientSuppressor::VadMode transient_suppressor_vad_mode_; @@ -248,12 +290,13 @@ class AudioProcessingImpl : public AudioProcessing { // capture thread blocks the render thread. // Called by render: Holds the render lock when reading the format struct and // acquires both locks if reinitialization is required. 
- int MaybeInitializeRender(const ProcessingConfig& processing_config) + void MaybeInitializeRender(const StreamConfig& input_config, + const StreamConfig& output_config) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_render_); - // Called by capture: Holds the capture lock when reading the format struct - // and acquires both locks if reinitialization is needed. - int MaybeInitializeCapture(const StreamConfig& input_config, - const StreamConfig& output_config); + // Called by capture: Acquires and releases the capture lock to read the + // format struct and acquires both locks if reinitialization is needed. + void MaybeInitializeCapture(const StreamConfig& input_config, + const StreamConfig& output_config); // Method for updating the state keeping track of the active submodules. // Returns a bool indicating whether the state has changed. @@ -262,7 +305,7 @@ class AudioProcessingImpl : public AudioProcessing { // Methods requiring APM running in a single-threaded manner, requiring both // the render and capture lock to be acquired. - int InitializeLocked(const ProcessingConfig& config) + void InitializeLocked(const ProcessingConfig& config) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_render_, mutex_capture_); void InitializeResidualEchoDetector() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_render_, mutex_capture_); @@ -321,7 +364,6 @@ class AudioProcessingImpl : public AudioProcessing { // Render-side exclusive methods possibly running APM in a multi-threaded // manner that are called with the render lock already acquired. - // TODO(ekm): Remove once all clients updated to new interface. int AnalyzeReverseStreamLocked(const float* const* src, const StreamConfig& input_config, const StreamConfig& output_config) diff --git a/third_party/libwebrtc/modules/audio_processing/audio_processing_impl_unittest.cc b/third_party/libwebrtc/modules/audio_processing/audio_processing_impl_unittest.cc index 729da345bc76..e48a5d88837f 100644 --- a/third_party/libwebrtc/modules/audio_processing/audio_processing_impl_unittest.cc +++ b/third_party/libwebrtc/modules/audio_processing/audio_processing_impl_unittest.cc @@ -10,6 +10,7 @@ #include "modules/audio_processing/audio_processing_impl.h" +#include #include #include #include @@ -131,24 +132,6 @@ class TestRenderPreProcessor : public CustomProcessing { static constexpr float ProcessSample(float x) { return 2.f * x; } }; -// Creates a simple `AudioProcessing` instance for APM input volume testing -// with analog and digital AGC enabled. -rtc::scoped_refptr CreateApmForInputVolumeTest() { - webrtc::AudioProcessing::Config config; - // Enable AGC1 analog. - config.gain_controller1.enabled = true; - config.gain_controller1.analog_gain_controller.enabled = true; - // Enable AGC2 adaptive digital. - config.gain_controller1.analog_gain_controller.enable_digital_adaptive = - false; - config.gain_controller2.enabled = true; - config.gain_controller2.adaptive_digital.enabled = true; - - auto apm(AudioProcessingBuilder().Create()); - apm->ApplyConfig(config); - return apm; -} - // Runs `apm` input processing for volume adjustments for `num_frames` random // frames starting from the volume `initial_volume`. 
This includes three steps: // 1) Set the input volume 2) Process the stream 3) Set the new recommended @@ -175,75 +158,6 @@ int ProcessInputVolume(AudioProcessing& apm, return recommended_input_volume; } -constexpr char kMinMicLevelFieldTrial[] = - "WebRTC-Audio-2ndAgcMinMicLevelExperiment"; -constexpr int kMinInputVolume = 12; - -std::string GetMinMicLevelExperimentFieldTrial(absl::optional value) { - char field_trial_buffer[64]; - rtc::SimpleStringBuilder builder(field_trial_buffer); - if (value.has_value()) { - RTC_DCHECK_GE(*value, 0); - RTC_DCHECK_LE(*value, 255); - builder << kMinMicLevelFieldTrial << "/Enabled-" << *value << "/"; - } else { - builder << kMinMicLevelFieldTrial << "/Disabled/"; - } - return builder.str(); -} - -// TODO(webrtc:7494): Remove the fieldtrial from the input volume tests when -// "WebRTC-Audio-2ndAgcMinMicLevelExperiment" is removed. -class InputVolumeStartupParameterizedTest - : public ::testing::TestWithParam>> { - protected: - InputVolumeStartupParameterizedTest() - : field_trials_( - GetMinMicLevelExperimentFieldTrial(std::get<1>(GetParam()))) {} - int GetStartupVolume() const { return std::get<0>(GetParam()); } - int GetMinVolume() const { - return std::get<1>(GetParam()).value_or(kMinInputVolume); - } - - private: - test::ScopedFieldTrials field_trials_; -}; - -class InputVolumeNotZeroParameterizedTest - : public ::testing::TestWithParam< - std::tuple>> { - protected: - InputVolumeNotZeroParameterizedTest() - : field_trials_( - GetMinMicLevelExperimentFieldTrial(std::get<2>(GetParam()))) {} - int GetStartupVolume() const { return std::get<0>(GetParam()); } - int GetVolume() const { return std::get<1>(GetParam()); } - int GetMinVolume() const { - return std::get<2>(GetParam()).value_or(kMinInputVolume); - } - bool GetMinMicLevelExperimentEnabled() { - return std::get<2>(GetParam()).has_value(); - } - - private: - test::ScopedFieldTrials field_trials_; -}; - -class InputVolumeZeroParameterizedTest - : public ::testing::TestWithParam>> { - protected: - InputVolumeZeroParameterizedTest() - : field_trials_( - GetMinMicLevelExperimentFieldTrial(std::get<1>(GetParam()))) {} - int GetStartupVolume() const { return std::get<0>(GetParam()); } - int GetMinVolume() const { - return std::get<1>(GetParam()).value_or(kMinInputVolume); - } - - private: - test::ScopedFieldTrials field_trials_; -}; - } // namespace TEST(AudioProcessingImplTest, AudioParameterChangeTriggersInit) { @@ -271,11 +185,9 @@ TEST(AudioProcessingImplTest, AudioParameterChangeTriggersInit) { EXPECT_NOERR(mock.ProcessStream(frame.data(), config, config, frame.data())); // New number of channels. - // TODO(peah): Investigate why this causes 2 inits. config = StreamConfig(32000, 2); EXPECT_CALL(mock, InitializeLocked).Times(2); EXPECT_NOERR(mock.ProcessStream(frame.data(), config, config, frame.data())); - // ProcessStream sets num_channels_ == num_output_channels. 
EXPECT_NOERR( mock.ProcessReverseStream(frame.data(), config, config, frame.data())); @@ -614,15 +526,16 @@ TEST(AudioProcessingImplTest, TEST(AudioProcessingImplTest, ProcessWithAgc2AndTransientSuppressorVadModeDefault) { webrtc::test::ScopedFieldTrials field_trials( - "WebRTC-Audio-TransientSuppressorVadMode/Enabled-Default/"); - rtc::scoped_refptr apm = AudioProcessingBuilder().Create(); + "WebRTC-Audio-GainController2/Disabled/"); + auto apm = AudioProcessingBuilder() + .SetConfig({.gain_controller1{.enabled = false}}) + .Create(); ASSERT_EQ(apm->Initialize(), AudioProcessing::kNoError); webrtc::AudioProcessing::Config apm_config; - // Disable AGC1 analog. apm_config.gain_controller1.enabled = false; - // Enable AGC2 digital. apm_config.gain_controller2.enabled = true; apm_config.gain_controller2.adaptive_digital.enabled = true; + apm_config.transient_suppression.enabled = true; apm->ApplyConfig(apm_config); constexpr int kSampleRateHz = 48000; constexpr int kNumChannels = 1; @@ -646,15 +559,14 @@ TEST(AudioProcessingImplTest, TEST(AudioProcessingImplTest, ProcessWithAgc2AndTransientSuppressorVadModeRnnVad) { webrtc::test::ScopedFieldTrials field_trials( - "WebRTC-Audio-TransientSuppressorVadMode/Enabled-RnnVad/"); + "WebRTC-Audio-GainController2/Enabled,switch_to_agc2:true/"); rtc::scoped_refptr apm = AudioProcessingBuilder().Create(); ASSERT_EQ(apm->Initialize(), AudioProcessing::kNoError); webrtc::AudioProcessing::Config apm_config; - // Disable AGC1 analog. apm_config.gain_controller1.enabled = false; - // Enable AGC2 digital. apm_config.gain_controller2.enabled = true; apm_config.gain_controller2.adaptive_digital.enabled = true; + apm_config.transient_suppression.enabled = true; apm->ApplyConfig(apm_config); constexpr int kSampleRateHz = 48000; constexpr int kNumChannels = 1; @@ -930,111 +842,650 @@ TEST(ApmWithSubmodulesExcludedTest, ToggleTransientSuppressor) { } } -// Tests that the minimum startup volume is applied at the startup. 
-TEST_P(InputVolumeStartupParameterizedTest, - VerifyStartupMinVolumeAppliedAtStartup) { - const int applied_startup_input_volume = GetStartupVolume(); - const int expected_volume = - std::max(applied_startup_input_volume, GetMinVolume()); - auto apm = CreateApmForInputVolumeTest(); +class ApmInputVolumeControllerParametrizedTest + : public ::testing::TestWithParam< + std::tuple> { + protected: + ApmInputVolumeControllerParametrizedTest() + : sample_rate_hz_(std::get<0>(GetParam())), + num_channels_(std::get<1>(GetParam())), + channels_(num_channels_), + channel_pointers_(num_channels_) { + const int frame_size = sample_rate_hz_ / 100; + for (int c = 0; c < num_channels_; ++c) { + channels_[c].resize(frame_size); + channel_pointers_[c] = channels_[c].data(); + std::fill(channels_[c].begin(), channels_[c].end(), 0.0f); + } + } - const int recommended_input_volume = - ProcessInputVolume(*apm, /*num_frames=*/1, applied_startup_input_volume); + int sample_rate_hz() const { return sample_rate_hz_; } + int num_channels() const { return num_channels_; } + AudioProcessing::Config GetConfig() const { return std::get<2>(GetParam()); } - ASSERT_EQ(recommended_input_volume, expected_volume); + float* const* channel_pointers() { return channel_pointers_.data(); } + + private: + const int sample_rate_hz_; + const int num_channels_; + std::vector> channels_; + std::vector channel_pointers_; +}; + +TEST_P(ApmInputVolumeControllerParametrizedTest, + EnforceMinInputVolumeAtStartupWithZeroVolume) { + const StreamConfig stream_config(sample_rate_hz(), num_channels()); + auto apm = AudioProcessingBuilder().SetConfig(GetConfig()).Create(); + + apm->set_stream_analog_level(0); + apm->ProcessStream(channel_pointers(), stream_config, stream_config, + channel_pointers()); + EXPECT_GT(apm->recommended_stream_analog_level(), 0); } -// Tests that the minimum input volume is applied if the volume is manually -// adjusted to a non-zero value only if -// "WebRTC-Audio-2ndAgcMinMicLevelExperiment" is enabled. -TEST_P(InputVolumeNotZeroParameterizedTest, - VerifyMinVolumeMaybeAppliedAfterManualVolumeAdjustments) { - const int applied_startup_input_volume = GetStartupVolume(); - const int applied_input_volume = GetVolume(); - const int expected_volume = std::max(applied_input_volume, GetMinVolume()); - auto apm = CreateApmForInputVolumeTest(); +TEST_P(ApmInputVolumeControllerParametrizedTest, + EnforceMinInputVolumeAtStartupWithNonZeroVolume) { + const StreamConfig stream_config(sample_rate_hz(), num_channels()); + auto apm = AudioProcessingBuilder().SetConfig(GetConfig()).Create(); - ProcessInputVolume(*apm, /*num_frames=*/1, applied_startup_input_volume); - const int recommended_input_volume = - ProcessInputVolume(*apm, /*num_frames=*/1, applied_input_volume); + constexpr int kStartupVolume = 3; + apm->set_stream_analog_level(kStartupVolume); + apm->ProcessStream(channel_pointers(), stream_config, stream_config, + channel_pointers()); + EXPECT_GT(apm->recommended_stream_analog_level(), kStartupVolume); +} - ASSERT_NE(applied_input_volume, 0); - if (GetMinMicLevelExperimentEnabled()) { - ASSERT_EQ(recommended_input_volume, expected_volume); - } else { - ASSERT_EQ(recommended_input_volume, applied_input_volume); +TEST_P(ApmInputVolumeControllerParametrizedTest, + EnforceMinInputVolumeAfterManualVolumeAdjustment) { + const auto config = GetConfig(); + if (config.gain_controller1.enabled) { + // After a downward manual adjustment, AGC1 slowly converges to the minimum + // input volume. 
+ GTEST_SKIP() << "Does not apply to AGC1"; + } + const StreamConfig stream_config(sample_rate_hz(), num_channels()); + auto apm = AudioProcessingBuilder().SetConfig(GetConfig()).Create(); + + apm->set_stream_analog_level(20); + apm->ProcessStream(channel_pointers(), stream_config, stream_config, + channel_pointers()); + constexpr int kManuallyAdjustedVolume = 3; + apm->set_stream_analog_level(kManuallyAdjustedVolume); + apm->ProcessStream(channel_pointers(), stream_config, stream_config, + channel_pointers()); + EXPECT_GT(apm->recommended_stream_analog_level(), kManuallyAdjustedVolume); +} + +TEST_P(ApmInputVolumeControllerParametrizedTest, + DoNotEnforceMinInputVolumeAfterManualVolumeAdjustmentToZero) { + const StreamConfig stream_config(sample_rate_hz(), num_channels()); + auto apm = AudioProcessingBuilder().SetConfig(GetConfig()).Create(); + + apm->set_stream_analog_level(100); + apm->ProcessStream(channel_pointers(), stream_config, stream_config, + channel_pointers()); + apm->set_stream_analog_level(0); + apm->ProcessStream(channel_pointers(), stream_config, stream_config, + channel_pointers()); + EXPECT_EQ(apm->recommended_stream_analog_level(), 0); +} + +INSTANTIATE_TEST_SUITE_P( + AudioProcessingImplTest, + ApmInputVolumeControllerParametrizedTest, + ::testing::Combine( + ::testing::Values(8000, 16000, 32000, 48000), // Sample rates. + ::testing::Values(1, 2), // Number of channels. + ::testing::Values( + // Full AGC1. + AudioProcessing::Config{ + .gain_controller1 = {.enabled = true, + .analog_gain_controller = + {.enabled = true, + .enable_digital_adaptive = true}}, + .gain_controller2 = {.enabled = false}}, + // Hybrid AGC. + AudioProcessing::Config{ + .gain_controller1 = {.enabled = true, + .analog_gain_controller = + {.enabled = true, + .enable_digital_adaptive = false}}, + .gain_controller2 = {.enabled = true, + .adaptive_digital = {.enabled = true}}}))); + +// When the input volume is not emulated and no input volume controller is +// active, the recommended volume must always be the applied volume. +TEST(AudioProcessingImplTest, + RecommendAppliedInputVolumeWithNoAgcWithNoEmulation) { + auto apm = AudioProcessingBuilder() + .SetConfig({.capture_level_adjustment = {.enabled = false}, + .gain_controller1 = {.enabled = false}}) + .Create(); + + constexpr int kOneFrame = 1; + EXPECT_EQ(ProcessInputVolume(*apm, kOneFrame, /*initial_volume=*/123), 123); + EXPECT_EQ(ProcessInputVolume(*apm, kOneFrame, /*initial_volume=*/59), 59); + EXPECT_EQ(ProcessInputVolume(*apm, kOneFrame, /*initial_volume=*/135), 135); +} + +// When the input volume is emulated, the recommended volume must always be the +// applied volume and at any time it must not be that set in the input volume +// emulator. +// TODO(bugs.webrtc.org/14581): Enable when APM fixed to let this test pass. 
+TEST(AudioProcessingImplTest, + DISABLED_RecommendAppliedInputVolumeWithNoAgcWithEmulation) { + auto apm = + AudioProcessingBuilder() + .SetConfig({.capture_level_adjustment = {.enabled = true, + .analog_mic_gain_emulation{ + .enabled = true, + .initial_level = 255}}, + .gain_controller1 = {.enabled = false}}) + .Create(); + + constexpr int kOneFrame = 1; + EXPECT_EQ(ProcessInputVolume(*apm, kOneFrame, /*initial_volume=*/123), 123); + EXPECT_EQ(ProcessInputVolume(*apm, kOneFrame, /*initial_volume=*/59), 59); + EXPECT_EQ(ProcessInputVolume(*apm, kOneFrame, /*initial_volume=*/135), 135); +} + +// Even if there is an enabled input volume controller, when the input volume is +// emulated, the recommended volume is always the applied volume because the +// active controller must only adjust the internally emulated volume and leave +// the externally applied volume unchanged. +// TODO(bugs.webrtc.org/14581): Enable when APM fixed to let this test pass. +TEST(AudioProcessingImplTest, + DISABLED_RecommendAppliedInputVolumeWithAgcWithEmulation) { + auto apm = + AudioProcessingBuilder() + .SetConfig({.capture_level_adjustment = {.enabled = true, + .analog_mic_gain_emulation{ + .enabled = true}}, + .gain_controller1 = {.enabled = true, + .analog_gain_controller{ + .enabled = true, + }}}) + .Create(); + + constexpr int kOneFrame = 1; + EXPECT_EQ(ProcessInputVolume(*apm, kOneFrame, /*initial_volume=*/123), 123); + EXPECT_EQ(ProcessInputVolume(*apm, kOneFrame, /*initial_volume=*/59), 59); + EXPECT_EQ(ProcessInputVolume(*apm, kOneFrame, /*initial_volume=*/135), 135); +} + +TEST(AudioProcessingImplTest, + Agc2FieldTrialDoNotSwitchToFullAgc2WhenNoAgcIsActive) { + constexpr AudioProcessing::Config kOriginal{ + .gain_controller1{.enabled = false}, + .gain_controller2{.enabled = false}, + }; + webrtc::test::ScopedFieldTrials field_trials( + "WebRTC-Audio-GainController2/Enabled,switch_to_agc2:true/"); + + // Test config application via `AudioProcessing` ctor. + auto adjusted = + AudioProcessingBuilder().SetConfig(kOriginal).Create()->GetConfig(); + EXPECT_EQ(adjusted.gain_controller1, kOriginal.gain_controller1); + EXPECT_EQ(adjusted.gain_controller2, kOriginal.gain_controller2); + + // Test config application via `AudioProcessing::ApplyConfig()`. + auto apm = AudioProcessingBuilder().Create(); + apm->ApplyConfig(kOriginal); + adjusted = apm->GetConfig(); + EXPECT_EQ(adjusted.gain_controller1, kOriginal.gain_controller1); + EXPECT_EQ(adjusted.gain_controller2, kOriginal.gain_controller2); +} + +TEST(AudioProcessingImplTest, + Agc2FieldTrialDoNotSwitchToFullAgc2WithAgc1Agc2InputVolumeControllers) { + constexpr AudioProcessing::Config kOriginal{ + .gain_controller1{.enabled = true, + .analog_gain_controller{.enabled = true}}, + .gain_controller2{.enabled = true, + .input_volume_controller{.enabled = true}}, + }; + webrtc::test::ScopedFieldTrials field_trials( + "WebRTC-Audio-GainController2/Enabled,switch_to_agc2:true/"); + + // Test config application via `AudioProcessing` ctor. + auto adjusted = + AudioProcessingBuilder().SetConfig(kOriginal).Create()->GetConfig(); + EXPECT_EQ(adjusted.gain_controller1, kOriginal.gain_controller1); + EXPECT_EQ(adjusted.gain_controller2, kOriginal.gain_controller2); + + // Test config application via `AudioProcessing::ApplyConfig()`. 
+ auto apm = AudioProcessingBuilder().Create(); + apm->ApplyConfig(kOriginal); + adjusted = apm->GetConfig(); + EXPECT_EQ(adjusted.gain_controller1, kOriginal.gain_controller1); + EXPECT_EQ(adjusted.gain_controller2, kOriginal.gain_controller2); +} + +class Agc2FieldTrialParametrizedTest + : public ::testing::TestWithParam {}; + +TEST_P(Agc2FieldTrialParametrizedTest, DoNotChangeConfigIfDisabled) { + const AudioProcessing::Config original = GetParam(); + webrtc::test::ScopedFieldTrials field_trials( + "WebRTC-Audio-GainController2/Disabled/"); + + // Test config application via `AudioProcessing` ctor. + auto adjusted = + AudioProcessingBuilder().SetConfig(original).Create()->GetConfig(); + EXPECT_EQ(adjusted.gain_controller1, original.gain_controller1); + EXPECT_EQ(adjusted.gain_controller2, original.gain_controller2); + + // Test config application via `AudioProcessing::ApplyConfig()`. + auto apm = AudioProcessingBuilder().Create(); + apm->ApplyConfig(original); + adjusted = apm->GetConfig(); + EXPECT_EQ(adjusted.gain_controller1, original.gain_controller1); + EXPECT_EQ(adjusted.gain_controller2, original.gain_controller2); +} + +TEST_P(Agc2FieldTrialParametrizedTest, DoNotChangeConfigIfNoOverride) { + const AudioProcessing::Config original = GetParam(); + webrtc::test::ScopedFieldTrials field_trials( + "WebRTC-Audio-GainController2/Enabled," + "switch_to_agc2:false," + "disallow_transient_suppressor_usage:false/"); + + // Test config application via `AudioProcessing` ctor. + auto adjusted = + AudioProcessingBuilder().SetConfig(original).Create()->GetConfig(); + EXPECT_EQ(adjusted.gain_controller1, original.gain_controller1); + EXPECT_EQ(adjusted.gain_controller2, original.gain_controller2); + + // Test config application via `AudioProcessing::ApplyConfig()`. + auto apm = AudioProcessingBuilder().Create(); + apm->ApplyConfig(original); + adjusted = apm->GetConfig(); + EXPECT_EQ(adjusted.gain_controller1, original.gain_controller1); + EXPECT_EQ(adjusted.gain_controller2, original.gain_controller2); +} + +TEST_P(Agc2FieldTrialParametrizedTest, DoNotSwitchToFullAgc2) { + const AudioProcessing::Config original = GetParam(); + webrtc::test::ScopedFieldTrials field_trials( + "WebRTC-Audio-GainController2/Enabled,switch_to_agc2:false/"); + + // Test config application via `AudioProcessing` ctor. + auto adjusted = + AudioProcessingBuilder().SetConfig(original).Create()->GetConfig(); + EXPECT_EQ(adjusted.gain_controller1, original.gain_controller1); + EXPECT_EQ(adjusted.gain_controller2, original.gain_controller2); + + // Test config application via `AudioProcessing::ApplyConfig()`. + auto apm = AudioProcessingBuilder().Create(); + apm->ApplyConfig(original); + adjusted = apm->GetConfig(); + EXPECT_EQ(adjusted.gain_controller1, original.gain_controller1); + EXPECT_EQ(adjusted.gain_controller2, original.gain_controller2); +} + +TEST_P(Agc2FieldTrialParametrizedTest, SwitchToFullAgc2) { + const AudioProcessing::Config original = GetParam(); + webrtc::test::ScopedFieldTrials field_trials( + "WebRTC-Audio-GainController2/Enabled,switch_to_agc2:true/"); + + // Test config application via `AudioProcessing` ctor. + auto adjusted = + AudioProcessingBuilder().SetConfig(original).Create()->GetConfig(); + EXPECT_FALSE(adjusted.gain_controller1.enabled); + EXPECT_TRUE(adjusted.gain_controller2.enabled); + EXPECT_TRUE(adjusted.gain_controller2.input_volume_controller.enabled); + EXPECT_TRUE(adjusted.gain_controller2.adaptive_digital.enabled); + + // Test config application via `AudioProcessing::ApplyConfig()`. 
+ auto apm = AudioProcessingBuilder().Create(); + apm->ApplyConfig(original); + adjusted = apm->GetConfig(); + EXPECT_FALSE(adjusted.gain_controller1.enabled); + EXPECT_TRUE(adjusted.gain_controller2.enabled); + EXPECT_TRUE(adjusted.gain_controller2.input_volume_controller.enabled); + EXPECT_TRUE(adjusted.gain_controller2.adaptive_digital.enabled); +} + +TEST_P(Agc2FieldTrialParametrizedTest, + SwitchToFullAgc2AndOverrideInputVolumeControllerParameters) { + const AudioProcessing::Config original = GetParam(); + webrtc::test::ScopedFieldTrials field_trials( + "WebRTC-Audio-GainController2/Enabled,switch_to_agc2:true," + "min_input_volume:123," + "clipped_level_min:20," + "clipped_level_step:30," + "clipped_ratio_threshold:0.4," + "clipped_wait_frames:50," + "enable_clipping_predictor:true," + "target_range_max_dbfs:-6," + "target_range_min_dbfs:-70," + "update_input_volume_wait_frames:80," + "speech_probability_threshold:0.9," + "speech_ratio_threshold:1.0/"); + + // Test config application via `AudioProcessing` ctor. + auto adjusted = + AudioProcessingBuilder().SetConfig(original).Create()->GetConfig(); + EXPECT_FALSE(adjusted.gain_controller1.enabled); + EXPECT_TRUE(adjusted.gain_controller2.enabled); + EXPECT_TRUE(adjusted.gain_controller2.input_volume_controller.enabled); + EXPECT_TRUE(adjusted.gain_controller2.adaptive_digital.enabled); + + // Test config application via `AudioProcessing::ApplyConfig()`. + auto apm = AudioProcessingBuilder().Create(); + apm->ApplyConfig(original); + adjusted = apm->GetConfig(); + EXPECT_FALSE(adjusted.gain_controller1.enabled); + EXPECT_TRUE(adjusted.gain_controller2.enabled); + EXPECT_TRUE(adjusted.gain_controller2.input_volume_controller.enabled); + EXPECT_TRUE(adjusted.gain_controller2.adaptive_digital.enabled); +} + +TEST_P(Agc2FieldTrialParametrizedTest, + SwitchToFullAgc2AndOverrideAdaptiveDigitalControllerParameters) { + const AudioProcessing::Config original = GetParam(); + webrtc::test::ScopedFieldTrials field_trials( + "WebRTC-Audio-GainController2/Enabled,switch_to_agc2:true," + "headroom_db:10," + "max_gain_db:20," + "initial_gain_db:7," + "max_gain_change_db_per_second:5," + "max_output_noise_level_dbfs:-40/"); + + // Test config application via `AudioProcessing` ctor. + auto adjusted = + AudioProcessingBuilder().SetConfig(original).Create()->GetConfig(); + EXPECT_FALSE(adjusted.gain_controller1.enabled); + EXPECT_TRUE(adjusted.gain_controller2.enabled); + EXPECT_TRUE(adjusted.gain_controller2.input_volume_controller.enabled); + EXPECT_TRUE(adjusted.gain_controller2.adaptive_digital.enabled); + ASSERT_NE(adjusted.gain_controller2.adaptive_digital, + original.gain_controller2.adaptive_digital); + EXPECT_EQ(adjusted.gain_controller2.adaptive_digital.headroom_db, 10); + EXPECT_EQ(adjusted.gain_controller2.adaptive_digital.max_gain_db, 20); + EXPECT_EQ(adjusted.gain_controller2.adaptive_digital.initial_gain_db, 7); + EXPECT_EQ( + adjusted.gain_controller2.adaptive_digital.max_gain_change_db_per_second, + 5); + EXPECT_EQ( + adjusted.gain_controller2.adaptive_digital.max_output_noise_level_dbfs, + -40); + + // Test config application via `AudioProcessing::ApplyConfig()`. 
+ auto apm = AudioProcessingBuilder().Create(); + apm->ApplyConfig(original); + adjusted = apm->GetConfig(); + EXPECT_FALSE(adjusted.gain_controller1.enabled); + EXPECT_TRUE(adjusted.gain_controller2.enabled); + EXPECT_TRUE(adjusted.gain_controller2.input_volume_controller.enabled); + EXPECT_TRUE(adjusted.gain_controller2.adaptive_digital.enabled); + ASSERT_NE(adjusted.gain_controller2.adaptive_digital, + original.gain_controller2.adaptive_digital); + EXPECT_EQ(adjusted.gain_controller2.adaptive_digital.headroom_db, 10); + EXPECT_EQ(adjusted.gain_controller2.adaptive_digital.max_gain_db, 20); + EXPECT_EQ(adjusted.gain_controller2.adaptive_digital.initial_gain_db, 7); + EXPECT_EQ( + adjusted.gain_controller2.adaptive_digital.max_gain_change_db_per_second, + 5); + EXPECT_EQ( + adjusted.gain_controller2.adaptive_digital.max_output_noise_level_dbfs, + -40); +} + +TEST_P(Agc2FieldTrialParametrizedTest, ProcessSucceedsWithTs) { + AudioProcessing::Config config = GetParam(); + config.transient_suppression.enabled = true; + webrtc::test::ScopedFieldTrials field_trials( + "WebRTC-Audio-GainController2/Disabled/"); + auto apm = AudioProcessingBuilder().SetConfig(config).Create(); + + constexpr int kSampleRateHz = 48000; + constexpr int kNumChannels = 1; + std::array buffer; + float* channel_pointers[] = {buffer.data()}; + StreamConfig stream_config(kSampleRateHz, kNumChannels); + Random random_generator(2341U); + constexpr int kFramesToProcess = 10; + int volume = 100; + for (int i = 0; i < kFramesToProcess; ++i) { + SCOPED_TRACE(i); + RandomizeSampleVector(&random_generator, buffer); + apm->set_stream_analog_level(volume); + ASSERT_EQ(apm->ProcessStream(channel_pointers, stream_config, stream_config, + channel_pointers), + kNoErr); + volume = apm->recommended_stream_analog_level(); } } -// Tests that the minimum input volume is not applied if the volume is manually -// adjusted to zero. -TEST_P(InputVolumeZeroParameterizedTest, - VerifyMinVolumeNotAppliedAfterManualVolumeAdjustments) { - constexpr int kZeroVolume = 0; - const int applied_startup_input_volume = GetStartupVolume(); - auto apm = CreateApmForInputVolumeTest(); +TEST_P(Agc2FieldTrialParametrizedTest, ProcessSucceedsWithoutTs) { + webrtc::test::ScopedFieldTrials field_trials( + "WebRTC-Audio-GainController2/Enabled," + "switch_to_agc2:false," + "disallow_transient_suppressor_usage:true/"); + auto apm = AudioProcessingBuilder().SetConfig(GetParam()).Create(); - const int recommended_input_volume_after_startup = - ProcessInputVolume(*apm, /*num_frames=*/1, applied_startup_input_volume); - const int recommended_input_volume = - ProcessInputVolume(*apm, /*num_frames=*/1, kZeroVolume); - - ASSERT_NE(recommended_input_volume, recommended_input_volume_after_startup); - ASSERT_EQ(recommended_input_volume, kZeroVolume); -} - -// Tests that the minimum input volume is applied if the volume is not zero -// before it is automatically adjusted. 
-TEST_P(InputVolumeNotZeroParameterizedTest, - VerifyMinVolumeAppliedAfterAutomaticVolumeAdjustments) { - const int applied_startup_input_volume = GetStartupVolume(); - const int applied_input_volume = GetVolume(); - auto apm = CreateApmForInputVolumeTest(); - - ProcessInputVolume(*apm, /*num_frames=*/1, applied_startup_input_volume); - const int recommended_input_volume = - ProcessInputVolume(*apm, /*num_frames=*/400, applied_input_volume); - - ASSERT_NE(applied_input_volume, 0); - if (recommended_input_volume != applied_input_volume) { - ASSERT_GE(recommended_input_volume, GetMinVolume()); + constexpr int kSampleRateHz = 48000; + constexpr int kNumChannels = 1; + std::array buffer; + float* channel_pointers[] = {buffer.data()}; + StreamConfig stream_config(kSampleRateHz, kNumChannels); + Random random_generator(2341U); + constexpr int kFramesToProcess = 10; + int volume = 100; + for (int i = 0; i < kFramesToProcess; ++i) { + SCOPED_TRACE(i); + RandomizeSampleVector(&random_generator, buffer); + apm->set_stream_analog_level(volume); + ASSERT_EQ(apm->ProcessStream(channel_pointers, stream_config, stream_config, + channel_pointers), + kNoErr); + volume = apm->recommended_stream_analog_level(); } } -// Tests that the minimum input volume is not applied if the volume is zero -// before it is automatically adjusted. -TEST_P(InputVolumeZeroParameterizedTest, - VerifyMinVolumeNotAppliedAfterAutomaticVolumeAdjustments) { - constexpr int kZeroVolume = 0; - const int applied_startup_input_volume = GetStartupVolume(); - auto apm = CreateApmForInputVolumeTest(); +TEST_P(Agc2FieldTrialParametrizedTest, + ProcessSucceedsWhenSwitchToFullAgc2WithTs) { + AudioProcessing::Config config = GetParam(); + config.transient_suppression.enabled = true; + webrtc::test::ScopedFieldTrials field_trials( + "WebRTC-Audio-GainController2/Enabled," + "switch_to_agc2:true," + "disallow_transient_suppressor_usage:false/"); + auto apm = AudioProcessingBuilder().SetConfig(config).Create(); - const int recommended_input_volume_after_startup = - ProcessInputVolume(*apm, /*num_frames=*/1, applied_startup_input_volume); - const int recommended_input_volume = - ProcessInputVolume(*apm, /*num_frames=*/400, kZeroVolume); - - ASSERT_NE(recommended_input_volume, recommended_input_volume_after_startup); - ASSERT_EQ(recommended_input_volume, kZeroVolume); + constexpr int kSampleRateHz = 48000; + constexpr int kNumChannels = 1; + std::array buffer; + float* channel_pointers[] = {buffer.data()}; + StreamConfig stream_config(kSampleRateHz, kNumChannels); + Random random_generator(2341U); + constexpr int kFramesToProcess = 10; + int volume = 100; + for (int i = 0; i < kFramesToProcess; ++i) { + SCOPED_TRACE(i); + RandomizeSampleVector(&random_generator, buffer); + apm->set_stream_analog_level(volume); + ASSERT_EQ(apm->ProcessStream(channel_pointers, stream_config, stream_config, + channel_pointers), + kNoErr); + volume = apm->recommended_stream_analog_level(); + } } -INSTANTIATE_TEST_SUITE_P(AudioProcessingImplTest, - InputVolumeStartupParameterizedTest, - ::testing::Combine(::testing::Values(0, 5, 30), - ::testing::Values(absl::nullopt, - 20))); +TEST_P(Agc2FieldTrialParametrizedTest, + ProcessSucceedsWhenSwitchToFullAgc2WithoutTs) { + webrtc::test::ScopedFieldTrials field_trials( + "WebRTC-Audio-GainController2/Enabled," + "switch_to_agc2:true," + "disallow_transient_suppressor_usage:true/"); + auto apm = AudioProcessingBuilder().SetConfig(GetParam()).Create(); -INSTANTIATE_TEST_SUITE_P(AudioProcessingImplTest, - 
InputVolumeNotZeroParameterizedTest, - ::testing::Combine(::testing::Values(0, 5, 15), - ::testing::Values(1, 5, 30), - ::testing::Values(absl::nullopt, - 20))); + constexpr int kSampleRateHz = 48000; + constexpr int kNumChannels = 1; + std::array buffer; + float* channel_pointers[] = {buffer.data()}; + StreamConfig stream_config(kSampleRateHz, kNumChannels); + Random random_generator(2341U); + constexpr int kFramesToProcess = 10; + int volume = 100; + for (int i = 0; i < kFramesToProcess; ++i) { + SCOPED_TRACE(i); + RandomizeSampleVector(&random_generator, buffer); + apm->set_stream_analog_level(volume); + ASSERT_EQ(apm->ProcessStream(channel_pointers, stream_config, stream_config, + channel_pointers), + kNoErr); + volume = apm->recommended_stream_analog_level(); + } +} -INSTANTIATE_TEST_SUITE_P(AudioProcessingImplTest, - InputVolumeZeroParameterizedTest, - ::testing::Combine(::testing::Values(0, 5, 15), - ::testing::Values(absl::nullopt, - 20))); +INSTANTIATE_TEST_SUITE_P( + AudioProcessingImplTest, + Agc2FieldTrialParametrizedTest, + ::testing::Values( + // Full AGC1. + AudioProcessing::Config{ + .gain_controller1 = + {.enabled = true, + .analog_gain_controller = {.enabled = true, + .enable_digital_adaptive = true}}, + .gain_controller2 = {.enabled = false}}, + // Hybrid AGC. + AudioProcessing::Config{ + .gain_controller1 = + {.enabled = true, + .analog_gain_controller = {.enabled = true, + .enable_digital_adaptive = false}}, + .gain_controller2 = {.enabled = true, + .adaptive_digital = {.enabled = true}}})); + +TEST(AudioProcessingImplTest, CanDisableTransientSuppressor) { + constexpr AudioProcessing::Config kOriginal = { + .transient_suppression = {.enabled = false}}; + + // Test config application via `AudioProcessing` ctor. + auto adjusted = + AudioProcessingBuilder().SetConfig(kOriginal).Create()->GetConfig(); + EXPECT_FALSE(adjusted.transient_suppression.enabled); + + // Test config application via `AudioProcessing::ApplyConfig()`. + auto apm = AudioProcessingBuilder().Create(); + apm->ApplyConfig(kOriginal); + adjusted = apm->GetConfig(); + EXPECT_FALSE(apm->GetConfig().transient_suppression.enabled); +} + +TEST(AudioProcessingImplTest, CanEnableTs) { + constexpr AudioProcessing::Config kOriginal = { + .transient_suppression = {.enabled = true}}; + + // Test config application via `AudioProcessing` ctor. + auto adjusted = + AudioProcessingBuilder().SetConfig(kOriginal).Create()->GetConfig(); + EXPECT_TRUE(adjusted.transient_suppression.enabled); + + // Test config application via `AudioProcessing::ApplyConfig()`. + auto apm = AudioProcessingBuilder().Create(); + apm->ApplyConfig(kOriginal); + adjusted = apm->GetConfig(); + EXPECT_TRUE(adjusted.transient_suppression.enabled); +} + +TEST(AudioProcessingImplTest, CanDisableTsWithAgc2FieldTrialDisabled) { + constexpr AudioProcessing::Config kOriginal = { + .transient_suppression = {.enabled = false}}; + webrtc::test::ScopedFieldTrials field_trials( + "WebRTC-Audio-GainController2/Disabled/"); + + // Test config application via `AudioProcessing` ctor. + auto adjusted = + AudioProcessingBuilder().SetConfig(kOriginal).Create()->GetConfig(); + EXPECT_FALSE(adjusted.transient_suppression.enabled); + + // Test config application via `AudioProcessing::ApplyConfig()`. 
+ auto apm = AudioProcessingBuilder().Create(); + apm->ApplyConfig(kOriginal); + adjusted = apm->GetConfig(); + EXPECT_FALSE(apm->GetConfig().transient_suppression.enabled); +} + +TEST(AudioProcessingImplTest, CanEnableTsWithAgc2FieldTrialDisabled) { + constexpr AudioProcessing::Config kOriginal = { + .transient_suppression = {.enabled = true}}; + webrtc::test::ScopedFieldTrials field_trials( + "WebRTC-Audio-GainController2/Disabled/"); + + // Test config application via `AudioProcessing` ctor. + auto adjusted = + AudioProcessingBuilder().SetConfig(kOriginal).Create()->GetConfig(); + EXPECT_TRUE(adjusted.transient_suppression.enabled); + + // Test config application via `AudioProcessing::ApplyConfig()`. + auto apm = AudioProcessingBuilder().Create(); + apm->ApplyConfig(kOriginal); + adjusted = apm->GetConfig(); + EXPECT_TRUE(adjusted.transient_suppression.enabled); +} + +TEST(AudioProcessingImplTest, + CanDisableTsWithAgc2FieldTrialEnabledAndUsageAllowed) { + constexpr AudioProcessing::Config kOriginal = { + .transient_suppression = {.enabled = false}}; + webrtc::test::ScopedFieldTrials field_trials( + "WebRTC-Audio-GainController2/Enabled," + "disallow_transient_suppressor_usage:false/"); + + // Test config application via `AudioProcessing` ctor. + auto adjusted = + AudioProcessingBuilder().SetConfig(kOriginal).Create()->GetConfig(); + EXPECT_FALSE(adjusted.transient_suppression.enabled); + + // Test config application via `AudioProcessing::ApplyConfig()`. + auto apm = AudioProcessingBuilder().Create(); + apm->ApplyConfig(kOriginal); + adjusted = apm->GetConfig(); + EXPECT_FALSE(adjusted.transient_suppression.enabled); +} + +TEST(AudioProcessingImplTest, + CanEnableTsWithAgc2FieldTrialEnabledAndUsageAllowed) { + constexpr AudioProcessing::Config kOriginal = { + .transient_suppression = {.enabled = true}}; + webrtc::test::ScopedFieldTrials field_trials( + "WebRTC-Audio-GainController2/Enabled," + "disallow_transient_suppressor_usage:false/"); + + // Test config application via `AudioProcessing` ctor. + auto adjusted = + AudioProcessingBuilder().SetConfig(kOriginal).Create()->GetConfig(); + EXPECT_TRUE(adjusted.transient_suppression.enabled); + + // Test config application via `AudioProcessing::ApplyConfig()`. + auto apm = AudioProcessingBuilder().Create(); + apm->ApplyConfig(kOriginal); + adjusted = apm->GetConfig(); + EXPECT_TRUE(adjusted.transient_suppression.enabled); +} + +TEST(AudioProcessingImplTest, + CannotEnableTsWithAgc2FieldTrialEnabledAndUsageDisallowed) { + constexpr AudioProcessing::Config kOriginal = { + .transient_suppression = {.enabled = true}}; + webrtc::test::ScopedFieldTrials field_trials( + "WebRTC-Audio-GainController2/Enabled," + "disallow_transient_suppressor_usage:true/"); + + // Test config application via `AudioProcessing` ctor. + auto adjusted = + AudioProcessingBuilder().SetConfig(kOriginal).Create()->GetConfig(); + EXPECT_FALSE(adjusted.transient_suppression.enabled); + + // Test config application via `AudioProcessing::ApplyConfig()`. 
+ auto apm = AudioProcessingBuilder().Create(); + apm->ApplyConfig(kOriginal); + adjusted = apm->GetConfig(); + EXPECT_FALSE(apm->GetConfig().transient_suppression.enabled); +} } // namespace webrtc diff --git a/third_party/libwebrtc/modules/audio_processing/audio_processing_unittest.cc b/third_party/libwebrtc/modules/audio_processing/audio_processing_unittest.cc index 326ae4871ef6..e320e71405b3 100644 --- a/third_party/libwebrtc/modules/audio_processing/audio_processing_unittest.cc +++ b/third_party/libwebrtc/modules/audio_processing/audio_processing_unittest.cc @@ -3062,10 +3062,6 @@ TEST(AudioProcessing, GainController2ConfigEqual) { b_adaptive.enabled = a_adaptive.enabled; EXPECT_EQ(a, b); - Toggle(a_adaptive.dry_run); - b_adaptive.dry_run = a_adaptive.dry_run; - EXPECT_EQ(a, b); - a_adaptive.headroom_db += 1.0f; b_adaptive.headroom_db = a_adaptive.headroom_db; EXPECT_EQ(a, b); @@ -3078,15 +3074,6 @@ TEST(AudioProcessing, GainController2ConfigEqual) { b_adaptive.initial_gain_db = a_adaptive.initial_gain_db; EXPECT_EQ(a, b); - a_adaptive.vad_reset_period_ms++; - b_adaptive.vad_reset_period_ms = a_adaptive.vad_reset_period_ms; - EXPECT_EQ(a, b); - - a_adaptive.adjacent_speech_frames_threshold++; - b_adaptive.adjacent_speech_frames_threshold = - a_adaptive.adjacent_speech_frames_threshold; - EXPECT_EQ(a, b); - a_adaptive.max_gain_change_db_per_second += 1.0f; b_adaptive.max_gain_change_db_per_second = a_adaptive.max_gain_change_db_per_second; @@ -3119,10 +3106,6 @@ TEST(AudioProcessing, GainController2ConfigNotEqual) { EXPECT_NE(a, b); a_adaptive = b_adaptive; - Toggle(a_adaptive.dry_run); - EXPECT_NE(a, b); - a_adaptive = b_adaptive; - a_adaptive.headroom_db += 1.0f; EXPECT_NE(a, b); a_adaptive = b_adaptive; @@ -3135,14 +3118,6 @@ TEST(AudioProcessing, GainController2ConfigNotEqual) { EXPECT_NE(a, b); a_adaptive = b_adaptive; - a_adaptive.vad_reset_period_ms++; - EXPECT_NE(a, b); - a_adaptive = b_adaptive; - - a_adaptive.adjacent_speech_frames_threshold++; - EXPECT_NE(a, b); - a_adaptive = b_adaptive; - a_adaptive.max_gain_change_db_per_second += 1.0f; EXPECT_NE(a, b); a_adaptive = b_adaptive; @@ -3152,4 +3127,315 @@ TEST(AudioProcessing, GainController2ConfigNotEqual) { a_adaptive = b_adaptive; } +struct ApmFormatHandlingTestParams { + enum class ExpectedOutput { + kErrorAndUnmodified, + kErrorAndSilence, + kErrorAndCopyOfFirstChannel, + kErrorAndExactCopy, + kNoError + }; + + StreamConfig input_config; + StreamConfig output_config; + ExpectedOutput expected_output; +}; + +class ApmFormatHandlingTest + : public ::testing::TestWithParam< + std::tuple> { + public: + ApmFormatHandlingTest() + : stream_direction_(std::get<0>(GetParam())), + test_params_(std::get<1>(GetParam())) {} + + protected: + ::testing::Message ProduceDebugMessage() { + return ::testing::Message() + << "input sample_rate_hz=" + << test_params_.input_config.sample_rate_hz() + << " num_channels=" << test_params_.input_config.num_channels() + << ", output sample_rate_hz=" + << test_params_.output_config.sample_rate_hz() + << " num_channels=" << test_params_.output_config.num_channels() + << ", stream_direction=" << stream_direction_ << ", expected_output=" + << static_cast(test_params_.expected_output); + } + + StreamDirection stream_direction_; + ApmFormatHandlingTestParams test_params_; +}; + +INSTANTIATE_TEST_SUITE_P( + FormatValidation, + ApmFormatHandlingTest, + testing::Combine( + ::testing::Values(kForward, kReverse), + ::testing::Values( + // Test cases with values on the boundary of legal ranges. 
+ ApmFormatHandlingTestParams{ + StreamConfig(16000, 1), StreamConfig(8000, 1), + ApmFormatHandlingTestParams::ExpectedOutput::kNoError}, + ApmFormatHandlingTestParams{ + StreamConfig(8000, 1), StreamConfig(16000, 1), + ApmFormatHandlingTestParams::ExpectedOutput::kNoError}, + ApmFormatHandlingTestParams{ + StreamConfig(384000, 1), StreamConfig(16000, 1), + ApmFormatHandlingTestParams::ExpectedOutput::kNoError}, + ApmFormatHandlingTestParams{ + StreamConfig(16000, 1), StreamConfig(384000, 1), + ApmFormatHandlingTestParams::ExpectedOutput::kNoError}, + ApmFormatHandlingTestParams{ + StreamConfig(16000, 2), StreamConfig(16000, 1), + ApmFormatHandlingTestParams::ExpectedOutput::kNoError}, + ApmFormatHandlingTestParams{ + StreamConfig(16000, 3), StreamConfig(16000, 3), + ApmFormatHandlingTestParams::ExpectedOutput::kNoError}, + + // Supported but incompatible formats. + ApmFormatHandlingTestParams{ + StreamConfig(16000, 3), StreamConfig(16000, 2), + ApmFormatHandlingTestParams::ExpectedOutput:: + kErrorAndCopyOfFirstChannel}, + ApmFormatHandlingTestParams{ + StreamConfig(16000, 3), StreamConfig(16000, 4), + ApmFormatHandlingTestParams::ExpectedOutput:: + kErrorAndCopyOfFirstChannel}, + + // Unsupported format and input / output mismatch. + ApmFormatHandlingTestParams{ + StreamConfig(7900, 1), StreamConfig(16000, 1), + ApmFormatHandlingTestParams::ExpectedOutput::kErrorAndSilence}, + ApmFormatHandlingTestParams{ + StreamConfig(16000, 1), StreamConfig(7900, 1), + ApmFormatHandlingTestParams::ExpectedOutput::kErrorAndSilence}, + ApmFormatHandlingTestParams{ + StreamConfig(390000, 1), StreamConfig(16000, 1), + ApmFormatHandlingTestParams::ExpectedOutput::kErrorAndSilence}, + ApmFormatHandlingTestParams{ + StreamConfig(16000, 1), StreamConfig(390000, 1), + ApmFormatHandlingTestParams::ExpectedOutput::kErrorAndSilence}, + ApmFormatHandlingTestParams{ + StreamConfig(-16000, 1), StreamConfig(16000, 1), + ApmFormatHandlingTestParams::ExpectedOutput::kErrorAndSilence}, + + // Unsupported format but input / output formats match. + ApmFormatHandlingTestParams{StreamConfig(7900, 1), + StreamConfig(7900, 1), + ApmFormatHandlingTestParams:: + ExpectedOutput::kErrorAndExactCopy}, + ApmFormatHandlingTestParams{StreamConfig(390000, 1), + StreamConfig(390000, 1), + ApmFormatHandlingTestParams:: + ExpectedOutput::kErrorAndExactCopy}, + + // Unsupported but identical sample rate, channel mismatch. + ApmFormatHandlingTestParams{ + StreamConfig(7900, 1), StreamConfig(7900, 2), + ApmFormatHandlingTestParams::ExpectedOutput:: + kErrorAndCopyOfFirstChannel}, + ApmFormatHandlingTestParams{ + StreamConfig(7900, 2), StreamConfig(7900, 1), + ApmFormatHandlingTestParams::ExpectedOutput:: + kErrorAndCopyOfFirstChannel}, + + // Test cases with meaningless output format. + ApmFormatHandlingTestParams{ + StreamConfig(16000, 1), StreamConfig(-16000, 1), + ApmFormatHandlingTestParams::ExpectedOutput:: + kErrorAndUnmodified}, + ApmFormatHandlingTestParams{ + StreamConfig(-16000, 1), StreamConfig(-16000, 1), + ApmFormatHandlingTestParams::ExpectedOutput:: + kErrorAndUnmodified}))); + +TEST_P(ApmFormatHandlingTest, IntApi) { + SCOPED_TRACE(ProduceDebugMessage()); + + // Set up input and output data. 
+ const size_t num_input_samples = + test_params_.input_config.num_channels() * + std::abs(test_params_.input_config.sample_rate_hz() / 100); + const size_t num_output_samples = + test_params_.output_config.num_channels() * + std::abs(test_params_.output_config.sample_rate_hz() / 100); + std::vector input_block(num_input_samples); + for (int i = 0; i < static_cast(input_block.size()); ++i) { + input_block[i] = i; + } + std::vector output_block(num_output_samples); + constexpr int kUnlikelyOffset = 37; + for (int i = 0; i < static_cast(output_block.size()); ++i) { + output_block[i] = i - kUnlikelyOffset; + } + + // Call APM. + rtc::scoped_refptr ap = + AudioProcessingBuilderForTesting().Create(); + int error; + if (stream_direction_ == kForward) { + error = ap->ProcessStream(input_block.data(), test_params_.input_config, + test_params_.output_config, output_block.data()); + } else { + error = ap->ProcessReverseStream( + input_block.data(), test_params_.input_config, + test_params_.output_config, output_block.data()); + } + + // Check output. + switch (test_params_.expected_output) { + case ApmFormatHandlingTestParams::ExpectedOutput::kNoError: + EXPECT_EQ(error, AudioProcessing::kNoError); + break; + case ApmFormatHandlingTestParams::ExpectedOutput::kErrorAndUnmodified: + EXPECT_NE(error, AudioProcessing::kNoError); + for (int i = 0; i < static_cast(output_block.size()); ++i) { + EXPECT_EQ(output_block[i], i - kUnlikelyOffset); + } + break; + case ApmFormatHandlingTestParams::ExpectedOutput::kErrorAndSilence: + EXPECT_NE(error, AudioProcessing::kNoError); + for (int i = 0; i < static_cast(output_block.size()); ++i) { + EXPECT_EQ(output_block[i], 0); + } + break; + case ApmFormatHandlingTestParams::ExpectedOutput:: + kErrorAndCopyOfFirstChannel: + EXPECT_NE(error, AudioProcessing::kNoError); + for (size_t ch = 0; ch < test_params_.output_config.num_channels(); + ++ch) { + for (size_t i = 0; i < test_params_.output_config.num_frames(); ++i) { + EXPECT_EQ( + output_block[ch + i * test_params_.output_config.num_channels()], + static_cast(i * + test_params_.input_config.num_channels())); + } + } + break; + case ApmFormatHandlingTestParams::ExpectedOutput::kErrorAndExactCopy: + EXPECT_NE(error, AudioProcessing::kNoError); + for (int i = 0; i < static_cast(output_block.size()); ++i) { + EXPECT_EQ(output_block[i], i); + } + break; + } +} + +TEST_P(ApmFormatHandlingTest, FloatApi) { + SCOPED_TRACE(ProduceDebugMessage()); + + // Set up input and output data. + const size_t input_samples_per_channel = + std::abs(test_params_.input_config.sample_rate_hz()) / 100; + const size_t output_samples_per_channel = + std::abs(test_params_.output_config.sample_rate_hz()) / 100; + const size_t input_num_channels = test_params_.input_config.num_channels(); + const size_t output_num_channels = test_params_.output_config.num_channels(); + ChannelBuffer input_block(input_samples_per_channel, + input_num_channels); + ChannelBuffer output_block(output_samples_per_channel, + output_num_channels); + for (size_t ch = 0; ch < input_num_channels; ++ch) { + for (size_t i = 0; i < input_samples_per_channel; ++i) { + input_block.channels()[ch][i] = ch + i * input_num_channels; + } + } + constexpr int kUnlikelyOffset = 37; + for (size_t ch = 0; ch < output_num_channels; ++ch) { + for (size_t i = 0; i < output_samples_per_channel; ++i) { + output_block.channels()[ch][i] = + ch + i * output_num_channels - kUnlikelyOffset; + } + } + + // Call APM. 
+ rtc::scoped_refptr ap = + AudioProcessingBuilderForTesting().Create(); + int error; + if (stream_direction_ == kForward) { + error = + ap->ProcessStream(input_block.channels(), test_params_.input_config, + test_params_.output_config, output_block.channels()); + } else { + error = ap->ProcessReverseStream( + input_block.channels(), test_params_.input_config, + test_params_.output_config, output_block.channels()); + } + + // Check output. + switch (test_params_.expected_output) { + case ApmFormatHandlingTestParams::ExpectedOutput::kNoError: + EXPECT_EQ(error, AudioProcessing::kNoError); + break; + case ApmFormatHandlingTestParams::ExpectedOutput::kErrorAndUnmodified: + EXPECT_NE(error, AudioProcessing::kNoError); + for (size_t ch = 0; ch < output_num_channels; ++ch) { + for (size_t i = 0; i < output_samples_per_channel; ++i) { + EXPECT_EQ(output_block.channels()[ch][i], + ch + i * output_num_channels - kUnlikelyOffset); + } + } + break; + case ApmFormatHandlingTestParams::ExpectedOutput::kErrorAndSilence: + EXPECT_NE(error, AudioProcessing::kNoError); + for (size_t ch = 0; ch < output_num_channels; ++ch) { + for (size_t i = 0; i < output_samples_per_channel; ++i) { + EXPECT_EQ(output_block.channels()[ch][i], 0); + } + } + break; + case ApmFormatHandlingTestParams::ExpectedOutput:: + kErrorAndCopyOfFirstChannel: + EXPECT_NE(error, AudioProcessing::kNoError); + for (size_t ch = 0; ch < output_num_channels; ++ch) { + for (size_t i = 0; i < output_samples_per_channel; ++i) { + EXPECT_EQ(output_block.channels()[ch][i], + input_block.channels()[0][i]); + } + } + break; + case ApmFormatHandlingTestParams::ExpectedOutput::kErrorAndExactCopy: + EXPECT_NE(error, AudioProcessing::kNoError); + for (size_t ch = 0; ch < output_num_channels; ++ch) { + for (size_t i = 0; i < output_samples_per_channel; ++i) { + EXPECT_EQ(output_block.channels()[ch][i], + input_block.channels()[ch][i]); + } + } + break; + } +} + +TEST(ApmAnalyzeReverseStreamFormatTest, AnalyzeReverseStream) { + for (auto&& [input_config, expect_error] : + {std::tuple(StreamConfig(16000, 2), /*expect_error=*/false), + std::tuple(StreamConfig(8000, 1), /*expect_error=*/false), + std::tuple(StreamConfig(384000, 1), /*expect_error=*/false), + std::tuple(StreamConfig(7900, 1), /*expect_error=*/true), + std::tuple(StreamConfig(390000, 1), /*expect_error=*/true), + std::tuple(StreamConfig(16000, 0), /*expect_error=*/true), + std::tuple(StreamConfig(-16000, 0), /*expect_error=*/true)}) { + SCOPED_TRACE(::testing::Message() + << "sample_rate_hz=" << input_config.sample_rate_hz() + << " num_channels=" << input_config.num_channels()); + + // Set up input data. + ChannelBuffer input_block( + std::abs(input_config.sample_rate_hz()) / 100, + input_config.num_channels()); + + // Call APM. + rtc::scoped_refptr ap = + AudioProcessingBuilderForTesting().Create(); + int error = ap->AnalyzeReverseStream(input_block.channels(), input_config); + + // Check output. 
+ if (expect_error) { + EXPECT_NE(error, AudioProcessing::kNoError); + } else { + EXPECT_EQ(error, AudioProcessing::kNoError); + } + } +} + } // namespace webrtc diff --git a/third_party/libwebrtc/modules/audio_processing/gain_control_impl.cc b/third_party/libwebrtc/modules/audio_processing/gain_control_impl.cc index 3fac1f7f56e0..edc49d14011e 100644 --- a/third_party/libwebrtc/modules/audio_processing/gain_control_impl.cc +++ b/third_party/libwebrtc/modules/audio_processing/gain_control_impl.cc @@ -39,12 +39,8 @@ int16_t MapSetting(GainControl::Mode mode) { return -1; } -// Checks whether the legacy digital gain application should be used. -bool UseLegacyDigitalGainApplier() { - return field_trial::IsEnabled("WebRTC-UseLegacyDigitalGainApplier"); -} - -// Floating point variant of WebRtcAgc_Process. +// Applies the sub-frame `gains` to all the bands in `out` and clamps the output +// in the signed 16 bit range. void ApplyDigitalGain(const int32_t gains[11], size_t num_bands, float* const* out) { @@ -97,7 +93,6 @@ int GainControlImpl::instance_counter_ = 0; GainControlImpl::GainControlImpl() : data_dumper_(new ApmDataDumper(instance_counter_)), - use_legacy_gain_applier_(UseLegacyDigitalGainApplier()), mode_(kAdaptiveAnalog), minimum_capture_level_(0), maximum_capture_level_(255), @@ -236,26 +231,9 @@ int GainControlImpl::ProcessCaptureAudio(AudioBuffer* audio, } } - if (use_legacy_gain_applier_) { - for (size_t ch = 0; ch < mono_agcs_.size(); ++ch) { - int16_t split_band_data[AudioBuffer::kMaxNumBands] - [AudioBuffer::kMaxSplitFrameLength]; - int16_t* split_bands[AudioBuffer::kMaxNumBands] = { - split_band_data[0], split_band_data[1], split_band_data[2]}; - audio->ExportSplitChannelData(ch, split_bands); - - int err_process = WebRtcAgc_Process( - mono_agcs_[ch]->state, mono_agcs_[index_to_apply]->gains, split_bands, - audio->num_bands(), split_bands); - RTC_DCHECK_EQ(err_process, 0); - - audio->ImportSplitChannelData(ch, split_bands); - } - } else { - for (size_t ch = 0; ch < mono_agcs_.size(); ++ch) { - ApplyDigitalGain(mono_agcs_[index_to_apply]->gains, audio->num_bands(), - audio->split_bands(ch)); - } + for (size_t ch = 0; ch < mono_agcs_.size(); ++ch) { + ApplyDigitalGain(mono_agcs_[index_to_apply]->gains, audio->num_bands(), + audio->split_bands(ch)); } RTC_DCHECK_LT(0ul, *num_proc_channels_); diff --git a/third_party/libwebrtc/modules/audio_processing/gain_control_impl.h b/third_party/libwebrtc/modules/audio_processing/gain_control_impl.h index b65d697945ce..8aea8f2e9575 100644 --- a/third_party/libwebrtc/modules/audio_processing/gain_control_impl.h +++ b/third_party/libwebrtc/modules/audio_processing/gain_control_impl.h @@ -68,7 +68,6 @@ class GainControlImpl : public GainControl { std::unique_ptr data_dumper_; - const bool use_legacy_gain_applier_; Mode mode_; int minimum_capture_level_; int maximum_capture_level_; diff --git a/third_party/libwebrtc/modules/audio_processing/gain_controller2.cc b/third_party/libwebrtc/modules/audio_processing/gain_controller2.cc index 8b8231e59c3b..9beaf008239c 100644 --- a/third_party/libwebrtc/modules/audio_processing/gain_controller2.cc +++ b/third_party/libwebrtc/modules/audio_processing/gain_controller2.cc @@ -14,6 +14,7 @@ #include #include "common_audio/include/audio_util.h" +#include "modules/audio_processing/agc2/agc2_common.h" #include "modules/audio_processing/agc2/cpu_features.h" #include "modules/audio_processing/audio_buffer.h" #include "modules/audio_processing/include/audio_frame_view.h" @@ -27,6 +28,7 @@ namespace webrtc { 
namespace { using Agc2Config = AudioProcessing::Config::GainController2; +using InputVolumeControllerConfig = InputVolumeController::Config; constexpr int kLogLimiterStatsPeriodMs = 30'000; constexpr int kFrameLengthMs = 10; @@ -48,66 +50,85 @@ AvailableCpuFeatures GetAllowedCpuFeatures() { return features; } -// Creates an adaptive digital gain controller if enabled. -std::unique_ptr CreateAdaptiveDigitalController( - const Agc2Config::AdaptiveDigital& config, - int sample_rate_hz, - int num_channels, - ApmDataDumper* data_dumper) { - if (config.enabled) { - return std::make_unique( - data_dumper, config, sample_rate_hz, num_channels); - } - return nullptr; -} +// Peak and RMS audio levels in dBFS. +struct AudioLevels { + float peak_dbfs; + float rms_dbfs; +}; -// Creates an input volume controller if `enabled` is true. -std::unique_ptr CreateInputVolumeController( - bool enabled, - int num_channels) { - if (enabled) { - return std::make_unique( - num_channels, InputVolumeController::Config{.enabled = enabled}); +// Speech level info. +struct SpeechLevel { + bool is_confident; + float rms_dbfs; +}; + +// Computes the audio levels for the first channel in `frame`. +AudioLevels ComputeAudioLevels(AudioFrameView frame, + ApmDataDumper& data_dumper) { + float peak = 0.0f; + float rms = 0.0f; + for (const auto& x : frame.channel(0)) { + peak = std::max(std::fabs(x), peak); + rms += x * x; } - return nullptr; + AudioLevels levels{ + FloatS16ToDbfs(peak), + FloatS16ToDbfs(std::sqrt(rms / frame.samples_per_channel()))}; + data_dumper.DumpRaw("agc2_input_rms_dbfs", levels.rms_dbfs); + data_dumper.DumpRaw("agc2_input_peak_dbfs", levels.peak_dbfs); + return levels; } } // namespace std::atomic GainController2::instance_count_(0); -GainController2::GainController2(const Agc2Config& config, - int sample_rate_hz, - int num_channels, - bool use_internal_vad) +GainController2::GainController2( + const Agc2Config& config, + const InputVolumeControllerConfig& input_volume_controller_config, + int sample_rate_hz, + int num_channels, + bool use_internal_vad) : cpu_features_(GetAllowedCpuFeatures()), data_dumper_(instance_count_.fetch_add(1) + 1), fixed_gain_applier_( /*hard_clip_samples=*/false, /*initial_gain_factor=*/DbToRatio(config.fixed_digital.gain_db)), - adaptive_digital_controller_( - CreateAdaptiveDigitalController(config.adaptive_digital, - sample_rate_hz, - num_channels, - &data_dumper_)), - input_volume_controller_( - CreateInputVolumeController(config.input_volume_controller.enabled, - num_channels)), limiter_(sample_rate_hz, &data_dumper_, /*histogram_name_prefix=*/"Agc2"), calls_since_last_limiter_log_(0) { RTC_DCHECK(Validate(config)); data_dumper_.InitiateNewSetOfRecordings(); - const bool use_vad = config.adaptive_digital.enabled; - if (use_vad && use_internal_vad) { - // TODO(bugs.webrtc.org/7494): Move `vad_reset_period_ms` from adaptive - // digital to gain controller 2 config. - vad_ = std::make_unique( - config.adaptive_digital.vad_reset_period_ms, cpu_features_, - sample_rate_hz); + + if (config.input_volume_controller.enabled || + config.adaptive_digital.enabled) { + // Create dependencies. + speech_level_estimator_ = std::make_unique( + &data_dumper_, config.adaptive_digital, kAdjacentSpeechFramesThreshold); + if (use_internal_vad) + vad_ = std::make_unique( + kVadResetPeriodMs, cpu_features_, sample_rate_hz); } - if (input_volume_controller_) { + + if (config.input_volume_controller.enabled) { + // Create controller. 
+ input_volume_controller_ = std::make_unique( + num_channels, input_volume_controller_config); + // TODO(bugs.webrtc.org/7494): Call `Initialize` in ctor and remove method. input_volume_controller_->Initialize(); } + + if (config.adaptive_digital.enabled) { + // Create dependencies. + noise_level_estimator_ = CreateNoiseFloorEstimator(&data_dumper_); + saturation_protector_ = CreateSaturationProtector( + kSaturationProtectorInitialHeadroomDb, kAdjacentSpeechFramesThreshold, + &data_dumper_); + // Create controller. + adaptive_digital_controller_ = + std::make_unique( + &data_dumper_, config.adaptive_digital, + kAdjacentSpeechFramesThreshold); + } } GainController2::~GainController2() = default; @@ -132,65 +153,110 @@ void GainController2::SetFixedGainDb(float gain_db) { void GainController2::Analyze(int applied_input_volume, const AudioBuffer& audio_buffer) { + recommended_input_volume_ = absl::nullopt; + RTC_DCHECK_GE(applied_input_volume, 0); RTC_DCHECK_LE(applied_input_volume, 255); if (input_volume_controller_) { - input_volume_controller_->set_stream_analog_level(applied_input_volume); - input_volume_controller_->AnalyzePreProcess(audio_buffer); + input_volume_controller_->AnalyzeInputAudio(applied_input_volume, + audio_buffer); } } -absl::optional GainController2::GetRecommendedInputVolume() const { - return input_volume_controller_ - ? absl::optional( - input_volume_controller_->recommended_analog_level()) - : absl::nullopt; -} - void GainController2::Process(absl::optional speech_probability, bool input_volume_changed, AudioBuffer* audio) { + recommended_input_volume_ = absl::nullopt; + data_dumper_.DumpRaw("agc2_applied_input_volume_changed", input_volume_changed); - if (input_volume_changed && !!adaptive_digital_controller_) { - adaptive_digital_controller_->HandleInputGainChange(); + if (input_volume_changed) { + // Handle input volume changes. + if (speech_level_estimator_) + speech_level_estimator_->Reset(); + if (saturation_protector_) + saturation_protector_->Reset(); } AudioFrameView float_frame(audio->channels(), audio->num_channels(), audio->num_frames()); + // Compute speech probability. if (vad_) { speech_probability = vad_->Analyze(float_frame); } else if (speech_probability.has_value()) { - RTC_DCHECK_GE(speech_probability.value(), 0.0f); - RTC_DCHECK_LE(speech_probability.value(), 1.0f); - } - if (speech_probability.has_value()) { - data_dumper_.DumpRaw("agc2_speech_probability", speech_probability.value()); + RTC_DCHECK_GE(*speech_probability, 0.0f); + RTC_DCHECK_LE(*speech_probability, 1.0f); } + // The speech probability may not be defined at this step (e.g., when the + // fixed digital controller alone is enabled). + if (speech_probability.has_value()) + data_dumper_.DumpRaw("agc2_speech_probability", *speech_probability); - if (input_volume_controller_) { - absl::optional speech_level; - if (adaptive_digital_controller_) { - speech_level = - adaptive_digital_controller_->GetSpeechLevelDbfsIfConfident(); - } - input_volume_controller_->Process(speech_probability, speech_level); + // Compute audio, noise and speech levels. + AudioLevels audio_levels = ComputeAudioLevels(float_frame, data_dumper_); + absl::optional noise_rms_dbfs; + if (noise_level_estimator_) { + // TODO(bugs.webrtc.org/7494): Pass `audio_levels` to remove duplicated + // computation in `noise_level_estimator_`. 
+ noise_rms_dbfs = noise_level_estimator_->Analyze(float_frame); } - - fixed_gain_applier_.ApplyGain(float_frame); - if (adaptive_digital_controller_) { + absl::optional speech_level; + if (speech_level_estimator_) { RTC_DCHECK(speech_probability.has_value()); - adaptive_digital_controller_->Process( - float_frame, speech_probability.value(), limiter_.LastAudioLevel()); + speech_level_estimator_->Update( + audio_levels.rms_dbfs, audio_levels.peak_dbfs, *speech_probability); + speech_level = + SpeechLevel{.is_confident = speech_level_estimator_->is_confident(), + .rms_dbfs = speech_level_estimator_->level_dbfs()}; } + + // Update the recommended input volume. + if (input_volume_controller_) { + RTC_DCHECK(speech_level.has_value()); + RTC_DCHECK(speech_probability.has_value()); + if (speech_probability.has_value()) { + recommended_input_volume_ = + input_volume_controller_->RecommendInputVolume( + *speech_probability, + speech_level->is_confident + ? absl::optional(speech_level->rms_dbfs) + : absl::nullopt); + } + } + + if (adaptive_digital_controller_) { + RTC_DCHECK(saturation_protector_); + RTC_DCHECK(speech_probability.has_value()); + RTC_DCHECK(speech_level.has_value()); + saturation_protector_->Analyze(*speech_probability, audio_levels.peak_dbfs, + speech_level->rms_dbfs); + float headroom_db = saturation_protector_->HeadroomDb(); + data_dumper_.DumpRaw("agc2_headroom_db", headroom_db); + float limiter_envelope_dbfs = FloatS16ToDbfs(limiter_.LastAudioLevel()); + data_dumper_.DumpRaw("agc2_limiter_envelope_dbfs", limiter_envelope_dbfs); + RTC_DCHECK(noise_rms_dbfs.has_value()); + adaptive_digital_controller_->Process( + /*info=*/{.speech_probability = *speech_probability, + .speech_level_dbfs = speech_level->rms_dbfs, + .speech_level_reliable = speech_level->is_confident, + .noise_rms_dbfs = *noise_rms_dbfs, + .headroom_db = headroom_db, + .limiter_envelope_dbfs = limiter_envelope_dbfs}, + float_frame); + } + + // TODO(bugs.webrtc.org/7494): Pass `audio_levels` to remove duplicated + // computation in `limiter_`. + fixed_gain_applier_.ApplyGain(float_frame); + limiter_.Process(float_frame); // Periodically log limiter stats. 
if (++calls_since_last_limiter_log_ == kLogLimiterStatsPeriodNumFrames) { calls_since_last_limiter_log_ = 0; InterpolatedGainCurve::Stats stats = limiter_.GetGainCurveStats(); - RTC_LOG(LS_INFO) << "AGC2 limiter stats" + RTC_LOG(LS_INFO) << "[AGC2] limiter stats" << " | identity: " << stats.look_ups_identity_region << " | knee: " << stats.look_ups_knee_region << " | limiter: " << stats.look_ups_limiter_region @@ -202,7 +268,7 @@ bool GainController2::Validate( const AudioProcessing::Config::GainController2& config) { const auto& fixed = config.fixed_digital; const auto& adaptive = config.adaptive_digital; - return fixed.gain_db >= 0.0f && fixed.gain_db < 50.f && + return fixed.gain_db >= 0.0f && fixed.gain_db < 50.0f && adaptive.headroom_db >= 0.0f && adaptive.max_gain_db > 0.0f && adaptive.initial_gain_db >= 0.0f && adaptive.max_gain_change_db_per_second > 0.0f && diff --git a/third_party/libwebrtc/modules/audio_processing/gain_controller2.h b/third_party/libwebrtc/modules/audio_processing/gain_controller2.h index 3341cd22d002..43b5828d3538 100644 --- a/third_party/libwebrtc/modules/audio_processing/gain_controller2.h +++ b/third_party/libwebrtc/modules/audio_processing/gain_controller2.h @@ -20,6 +20,9 @@ #include "modules/audio_processing/agc2/gain_applier.h" #include "modules/audio_processing/agc2/input_volume_controller.h" #include "modules/audio_processing/agc2/limiter.h" +#include "modules/audio_processing/agc2/noise_level_estimator.h" +#include "modules/audio_processing/agc2/saturation_protector.h" +#include "modules/audio_processing/agc2/speech_level_estimator.h" #include "modules/audio_processing/agc2/vad_wrapper.h" #include "modules/audio_processing/include/audio_processing.h" #include "modules/audio_processing/logging/apm_data_dumper.h" @@ -34,10 +37,12 @@ class GainController2 { public: // Ctor. If `use_internal_vad` is true, an internal voice activity // detector is used for digital adaptive gain. - GainController2(const AudioProcessing::Config::GainController2& config, - int sample_rate_hz, - int num_channels, - bool use_internal_vad); + GainController2( + const AudioProcessing::Config::GainController2& config, + const InputVolumeController::Config& input_volume_controller_config, + int sample_rate_hz, + int num_channels, + bool use_internal_vad); GainController2(const GainController2&) = delete; GainController2& operator=(const GainController2&) = delete; ~GainController2(); @@ -56,12 +61,13 @@ class GainController2 { // [0, 255]. void Analyze(int applied_input_volume, const AudioBuffer& audio_buffer); - // Applies fixed and adaptive digital gains to `audio` and runs a limiter. - // If the internal VAD is used, `speech_probability` is ignored. Otherwise - // `speech_probability` is used for digital adaptive gain if it's available - // (limited to values [0.0, 1.0]). Handles input volume changes; if the caller - // cannot determine whether an input volume change occurred, set - // `input_volume_changed` to false. + // Updates the recommended input volume, applies the adaptive digital and the + // fixed digital gains and runs a limiter on `audio`. + // When the internal VAD is not used, `speech_probability` should be specified + // and in the [0, 1] range. Otherwise ignores `speech_probability` and + // computes the speech probability via `vad_`. + // Handles input volume changes; if the caller cannot determine whether an + // input volume change occurred, set `input_volume_changed` to false. 
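Editorial note: a minimal caller-side sketch of the updated GainController2 interface documented above (Analyze with the applied input volume, then Process, then read recommended_input_volume()). The constants and the AudioBuffer set-up are illustrative and follow the unit tests rather than production code; they are not part of the patch.

```cpp
// Sketch only: driving the new GainController2 API for one 10 ms capture frame.
AudioProcessing::Config::GainController2 config;
config.adaptive_digital.enabled = true;
config.input_volume_controller.enabled = true;

GainController2 agc2(config, InputVolumeController::Config{},
                     /*sample_rate_hz=*/48000, /*num_channels=*/1,
                     /*use_internal_vad=*/true);

// Illustrative buffer set-up; rates/channels chosen to match the config above.
AudioBuffer audio(/*input_rate=*/48000, /*input_num_channels=*/1,
                  /*buffer_rate=*/48000, /*buffer_num_channels=*/1,
                  /*output_rate=*/48000, /*output_num_channels=*/1);

int applied_input_volume = 100;
agc2.Analyze(applied_input_volume, audio);
agc2.Process(/*speech_probability=*/absl::nullopt,
             /*input_volume_changed=*/false, &audio);

// Non-empty only if the input volume controller is enabled and has produced a
// recommendation for this frame.
if (agc2.recommended_input_volume().has_value()) {
  applied_input_volume = *agc2.recommended_input_volume();
}
```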
void Process(absl::optional speech_probability, bool input_volume_changed, AudioBuffer* audio); @@ -70,20 +76,33 @@ class GainController2 { AvailableCpuFeatures GetCpuFeatures() const { return cpu_features_; } - // Returns the recommended input volume if input volume controller is enabled - // and if a volume recommendation is available. - absl::optional GetRecommendedInputVolume() const; + absl::optional recommended_input_volume() const { + return recommended_input_volume_; + } private: static std::atomic instance_count_; const AvailableCpuFeatures cpu_features_; ApmDataDumper data_dumper_; + GainApplier fixed_gain_applier_; + std::unique_ptr noise_level_estimator_; std::unique_ptr vad_; - std::unique_ptr adaptive_digital_controller_; + std::unique_ptr speech_level_estimator_; std::unique_ptr input_volume_controller_; + // TODO(bugs.webrtc.org/7494): Rename to `CrestFactorEstimator`. + std::unique_ptr saturation_protector_; + std::unique_ptr adaptive_digital_controller_; Limiter limiter_; + int calls_since_last_limiter_log_; + + // TODO(bugs.webrtc.org/7494): Remove intermediate storing at this level once + // APM refactoring is completed. + // Recommended input volume from `InputVolumecontroller`. Non-empty after + // `Process()` if input volume controller is enabled and + // `InputVolumeController::Process()` has returned a non-empty value. + absl::optional recommended_input_volume_; }; } // namespace webrtc diff --git a/third_party/libwebrtc/modules/audio_processing/gain_controller2_unittest.cc b/third_party/libwebrtc/modules/audio_processing/gain_controller2_unittest.cc index eaf0859d3a40..c3d0e5947a2c 100644 --- a/third_party/libwebrtc/modules/audio_processing/gain_controller2_unittest.cc +++ b/third_party/libwebrtc/modules/audio_processing/gain_controller2_unittest.cc @@ -33,6 +33,7 @@ using ::testing::Eq; using ::testing::Optional; using Agc2Config = AudioProcessing::Config::GainController2; +using InputVolumeControllerConfig = InputVolumeController::Config; // Sets all the samples in `ab` to `value`. void SetAudioBufferSamples(float value, AudioBuffer& ab) { @@ -54,10 +55,9 @@ float RunAgc2WithConstantInput(GainController2& agc2, // Give time to the level estimator to converge. for (int i = 0; i < num_frames + 1; ++i) { SetAudioBufferSamples(input_level, ab); - const auto applied_volume = agc2.GetRecommendedInputVolume(); - agc2.Analyze(i > 0 && applied_volume.has_value() ? 
*applied_volume - : applied_initial_volume, - ab); + const auto applied_volume = agc2.recommended_input_volume(); + agc2.Analyze(applied_volume.value_or(applied_initial_volume), ab); + agc2.Process(/*speech_probability=*/absl::nullopt, /*input_volume_changed=*/false, &ab); } @@ -73,11 +73,25 @@ std::unique_ptr CreateAgc2FixedDigitalMode( config.adaptive_digital.enabled = false; config.fixed_digital.gain_db = fixed_gain_db; EXPECT_TRUE(GainController2::Validate(config)); - return std::make_unique(config, sample_rate_hz, - /*num_channels=*/1, - /*use_internal_vad=*/true); + return std::make_unique( + config, InputVolumeControllerConfig{}, sample_rate_hz, + /*num_channels=*/1, + /*use_internal_vad=*/true); } +constexpr InputVolumeControllerConfig kTestInputVolumeControllerConfig{ + .clipped_level_min = 20, + .clipped_level_step = 30, + .clipped_ratio_threshold = 0.4, + .clipped_wait_frames = 50, + .enable_clipping_predictor = true, + .target_range_max_dbfs = -6, + .target_range_min_dbfs = -70, + .update_input_volume_wait_frames = 100, + .speech_probability_threshold = 0.9, + .speech_ratio_threshold = 1, +}; + } // namespace TEST(GainController2, CheckDefaultConfig) { @@ -159,23 +173,56 @@ TEST(GainController2, Agc2Config config; config.input_volume_controller.enabled = false; - auto gain_controller = - std::make_unique(config, kSampleRateHz, kNumChannels, - /*use_internal_vad=*/true); - EXPECT_FALSE(gain_controller->GetRecommendedInputVolume().has_value()); + auto gain_controller = std::make_unique( + config, InputVolumeControllerConfig{}, kSampleRateHz, kNumChannels, + /*use_internal_vad=*/true); + + EXPECT_FALSE(gain_controller->recommended_input_volume().has_value()); // Run AGC for a signal with no clipping or detected speech. RunAgc2WithConstantInput(*gain_controller, kLowInputLevel, kNumFrames, kSampleRateHz, kNumChannels, kInitialInputVolume); - EXPECT_FALSE(gain_controller->GetRecommendedInputVolume().has_value()); + EXPECT_FALSE(gain_controller->recommended_input_volume().has_value()); // Run AGC for a signal with clipping. RunAgc2WithConstantInput(*gain_controller, kHighInputLevel, kNumFrames, kSampleRateHz, kNumChannels, kInitialInputVolume); - EXPECT_FALSE(gain_controller->GetRecommendedInputVolume().has_value()); + EXPECT_FALSE(gain_controller->recommended_input_volume().has_value()); +} + +TEST( + GainController2, + CheckGetRecommendedInputVolumeWhenInputVolumeControllerNotEnabledAndSpecificConfigUsed) { + constexpr float kHighInputLevel = 32767.0f; + constexpr float kLowInputLevel = 1000.0f; + constexpr int kInitialInputVolume = 100; + constexpr int kNumChannels = 2; + constexpr int kNumFrames = 5; + constexpr int kSampleRateHz = 16000; + + Agc2Config config; + config.input_volume_controller.enabled = false; + + auto gain_controller = std::make_unique( + config, kTestInputVolumeControllerConfig, kSampleRateHz, kNumChannels, + /*use_internal_vad=*/true); + + EXPECT_FALSE(gain_controller->recommended_input_volume().has_value()); + + // Run AGC for a signal with no clipping or detected speech. + RunAgc2WithConstantInput(*gain_controller, kLowInputLevel, kNumFrames, + kSampleRateHz, kNumChannels, kInitialInputVolume); + + EXPECT_FALSE(gain_controller->recommended_input_volume().has_value()); + + // Run AGC for a signal with clipping. 
+ RunAgc2WithConstantInput(*gain_controller, kHighInputLevel, kNumFrames, + kSampleRateHz, kNumChannels, kInitialInputVolume); + + EXPECT_FALSE(gain_controller->recommended_input_volume().has_value()); } TEST(GainController2, @@ -189,29 +236,65 @@ TEST(GainController2, Agc2Config config; config.input_volume_controller.enabled = true; - auto gain_controller = - std::make_unique(config, kSampleRateHz, kNumChannels, - /*use_internal_vad=*/true); + config.adaptive_digital.enabled = true; - EXPECT_TRUE(gain_controller->GetRecommendedInputVolume().has_value()); + auto gain_controller = std::make_unique( + config, InputVolumeControllerConfig{}, kSampleRateHz, kNumChannels, + /*use_internal_vad=*/true); + + EXPECT_FALSE(gain_controller->recommended_input_volume().has_value()); // Run AGC for a signal with no clipping or detected speech. RunAgc2WithConstantInput(*gain_controller, kLowInputLevel, kNumFrames, kSampleRateHz, kNumChannels, kInitialInputVolume); - EXPECT_TRUE(gain_controller->GetRecommendedInputVolume().has_value()); + EXPECT_TRUE(gain_controller->recommended_input_volume().has_value()); // Run AGC for a signal with clipping. RunAgc2WithConstantInput(*gain_controller, kHighInputLevel, kNumFrames, kSampleRateHz, kNumChannels, kInitialInputVolume); - EXPECT_TRUE(gain_controller->GetRecommendedInputVolume().has_value()); + EXPECT_TRUE(gain_controller->recommended_input_volume().has_value()); +} + +TEST( + GainController2, + CheckGetRecommendedInputVolumeWhenInputVolumeControllerEnabledAndSpecificConfigUsed) { + constexpr float kHighInputLevel = 32767.0f; + constexpr float kLowInputLevel = 1000.0f; + constexpr int kInitialInputVolume = 100; + constexpr int kNumChannels = 2; + constexpr int kNumFrames = 5; + constexpr int kSampleRateHz = 16000; + + Agc2Config config; + config.input_volume_controller.enabled = true; + config.adaptive_digital.enabled = true; + + auto gain_controller = std::make_unique( + config, kTestInputVolumeControllerConfig, kSampleRateHz, kNumChannels, + /*use_internal_vad=*/true); + + EXPECT_FALSE(gain_controller->recommended_input_volume().has_value()); + + // Run AGC for a signal with no clipping or detected speech. + RunAgc2WithConstantInput(*gain_controller, kLowInputLevel, kNumFrames, + kSampleRateHz, kNumChannels, kInitialInputVolume); + + EXPECT_TRUE(gain_controller->recommended_input_volume().has_value()); + + // Run AGC for a signal with clipping. + RunAgc2WithConstantInput(*gain_controller, kHighInputLevel, kNumFrames, + kSampleRateHz, kNumChannels, kInitialInputVolume); + + EXPECT_TRUE(gain_controller->recommended_input_volume().has_value()); } // Checks that the default config is applied. 
TEST(GainController2, ApplyDefaultConfig) { auto gain_controller2 = std::make_unique( - Agc2Config{}, /*sample_rate_hz=*/16000, /*num_channels=*/2, + Agc2Config{}, InputVolumeControllerConfig{}, + /*sample_rate_hz=*/16000, /*num_channels=*/2, /*use_internal_vad=*/true); EXPECT_TRUE(gain_controller2.get()); } @@ -327,7 +410,8 @@ TEST(GainController2, CheckFinalGainWithAdaptiveDigitalController) { Agc2Config config; config.fixed_digital.gain_db = 0.0f; config.adaptive_digital.enabled = true; - GainController2 agc2(config, kSampleRateHz, kStereo, + GainController2 agc2(config, /*input_volume_controller_config=*/{}, + kSampleRateHz, kStereo, /*use_internal_vad=*/true); test::InputAudioFile input_file( @@ -382,9 +466,11 @@ TEST(GainController2, Agc2Config config; config.fixed_digital.gain_db = 0.0f; config.adaptive_digital.enabled = true; - GainController2 agc2(config, kSampleRateHz, kStereo, + GainController2 agc2(config, /*input_volume_controller_config=*/{}, + kSampleRateHz, kStereo, /*use_internal_vad=*/true); - GainController2 agc2_reference(config, kSampleRateHz, kStereo, + GainController2 agc2_reference(config, /*input_volume_controller_config=*/{}, + kSampleRateHz, kStereo, /*use_internal_vad=*/true); test::InputAudioFile input_file( @@ -449,9 +535,11 @@ TEST(GainController2, Agc2Config config; config.fixed_digital.gain_db = 0.0f; config.adaptive_digital.enabled = true; - GainController2 agc2(config, kSampleRateHz, kStereo, + GainController2 agc2(config, /*input_volume_controller_config=*/{}, + kSampleRateHz, kStereo, /*use_internal_vad=*/false); - GainController2 agc2_reference(config, kSampleRateHz, kStereo, + GainController2 agc2_reference(config, /*input_volume_controller_config=*/{}, + kSampleRateHz, kStereo, /*use_internal_vad=*/true); test::InputAudioFile input_file( @@ -518,12 +606,13 @@ TEST(GainController2, Agc2Config config; config.fixed_digital.gain_db = 0.0f; config.adaptive_digital.enabled = true; - GainController2 agc2(config, kSampleRateHz, kStereo, + GainController2 agc2(config, /*input_volume_controller_config=*/{}, + kSampleRateHz, kStereo, /*use_internal_vad=*/false); - GainController2 agc2_reference(config, kSampleRateHz, kStereo, + GainController2 agc2_reference(config, /*input_volume_controller_config=*/{}, + kSampleRateHz, kStereo, /*use_internal_vad=*/true); - VoiceActivityDetectorWrapper vad(config.adaptive_digital.vad_reset_period_ms, - GetAvailableCpuFeatures(), kSampleRateHz); + VoiceActivityDetectorWrapper vad(GetAvailableCpuFeatures(), kSampleRateHz); test::InputAudioFile input_file( test::GetApmCaptureTestVectorFileName(kSampleRateHz), /*loop_at_end=*/true); diff --git a/third_party/libwebrtc/modules/audio_processing/include/audio_processing.cc b/third_party/libwebrtc/modules/audio_processing/include/audio_processing.cc index 83917c25c55f..13ddcc588ae4 100644 --- a/third_party/libwebrtc/modules/audio_processing/include/audio_processing.cc +++ b/third_party/libwebrtc/modules/audio_processing/include/audio_processing.cc @@ -87,12 +87,9 @@ bool Agc1Config::operator==(const Agc1Config& rhs) const { bool Agc2Config::AdaptiveDigital::operator==( const Agc2Config::AdaptiveDigital& rhs) const { - return enabled == rhs.enabled && dry_run == rhs.dry_run && - headroom_db == rhs.headroom_db && max_gain_db == rhs.max_gain_db && + return enabled == rhs.enabled && headroom_db == rhs.headroom_db && + max_gain_db == rhs.max_gain_db && initial_gain_db == rhs.initial_gain_db && - vad_reset_period_ms == rhs.vad_reset_period_ms && - adjacent_speech_frames_threshold == - 
rhs.adjacent_speech_frames_threshold && max_gain_change_db_per_second == rhs.max_gain_change_db_per_second && max_output_noise_level_dbfs == rhs.max_output_noise_level_dbfs; } @@ -197,15 +194,10 @@ std::string AudioProcessing::Config::ToString() const { << gain_controller2.fixed_digital.gain_db << " }, adaptive_digital: { enabled: " << gain_controller2.adaptive_digital.enabled - << ", dry_run: " << gain_controller2.adaptive_digital.dry_run << ", headroom_db: " << gain_controller2.adaptive_digital.headroom_db << ", max_gain_db: " << gain_controller2.adaptive_digital.max_gain_db << ", initial_gain_db: " << gain_controller2.adaptive_digital.initial_gain_db - << ", vad_reset_period_ms: " - << gain_controller2.adaptive_digital.vad_reset_period_ms - << ", adjacent_speech_frames_threshold: " - << gain_controller2.adaptive_digital.adjacent_speech_frames_threshold << ", max_gain_change_db_per_second: " << gain_controller2.adaptive_digital.max_gain_change_db_per_second << ", max_output_noise_level_dbfs: " diff --git a/third_party/libwebrtc/modules/audio_processing/include/audio_processing.h b/third_party/libwebrtc/modules/audio_processing/include/audio_processing.h index ae20d9cd6228..d9f631ee1c46 100644 --- a/third_party/libwebrtc/modules/audio_processing/include/audio_processing.h +++ b/third_party/libwebrtc/modules/audio_processing/include/audio_processing.h @@ -81,11 +81,12 @@ class CustomProcessing; // setter. // // APM accepts only linear PCM audio data in chunks of ~10 ms (see -// AudioProcessing::GetFrameSize() for details). The int16 interfaces use -// interleaved data, while the float interfaces use deinterleaved data. +// AudioProcessing::GetFrameSize() for details) and sample rates ranging from +// 8000 Hz to 384000 Hz. The int16 interfaces use interleaved data, while the +// float interfaces use deinterleaved data. // // Usage example, omitting error checking: -// AudioProcessing* apm = AudioProcessingBuilder().Create(); +// rtc::scoped_refptr apm = AudioProcessingBuilder().Create(); // // AudioProcessing::Config config; // config.echo_canceller.enabled = true; @@ -103,9 +104,6 @@ class CustomProcessing; // // apm->ApplyConfig(config) // -// apm->noise_reduction()->set_level(kHighSuppression); -// apm->noise_reduction()->Enable(true); -// // // Start a voice call... // // // ... Render frame arrives bound for the audio HAL ... @@ -127,7 +125,7 @@ class CustomProcessing; // apm->Initialize(); // // // Close the application... -// delete apm; +// apm.reset(); // class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface { public: @@ -148,6 +146,12 @@ class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface { struct RTC_EXPORT Config { // Sets the properties of the audio processing pipeline. struct RTC_EXPORT Pipeline { + // Ways to downmix a multi-channel track to mono. + enum class DownmixMethod { + kAverageChannels, // Average across channels. + kUseFirstChannel // Use the first channel. + }; + // Maximum allowed processing rate used internally. May only be set to // 32000 or 48000 and any differing values will be treated as 48000. int maximum_internal_processing_rate = 48000; @@ -156,6 +160,9 @@ class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface { // Allow multi-channel processing of capture audio when AEC3 is active // or a custom AEC is injected.. bool multi_channel_capture = false; + // Indicates how to downmix multi-channel capture audio to mono (when + // needed). 
+ DownmixMethod capture_downmix_method = DownmixMethod::kAverageChannels; } pipeline; // Enabled the pre-amplifier. It amplifies the capture signal @@ -321,44 +328,24 @@ class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface { } analog_gain_controller; } gain_controller1; - // Enables the next generation AGC functionality. This feature replaces the - // standard methods of gain control in the previous AGC. Enabling this - // submodule enables an adaptive digital AGC followed by a limiter. By - // setting `fixed_gain_db`, the limiter can be turned into a compressor that - // first applies a fixed gain. The adaptive digital AGC can be turned off by - // setting |adaptive_digital_mode=false|. + // Parameters for AGC2, an Automatic Gain Control (AGC) sub-module which + // replaces the AGC sub-module parametrized by `gain_controller1`. + // AGC2 brings the captured audio signal to the desired level by combining + // three different controllers (namely, input volume controller, adapative + // digital controller and fixed digital controller) and a limiter. + // TODO(bugs.webrtc.org:7494): Name `GainController` when AGC1 removed. struct RTC_EXPORT GainController2 { bool operator==(const GainController2& rhs) const; bool operator!=(const GainController2& rhs) const { return !(*this == rhs); } + // AGC2 must be created if and only if `enabled` is true. bool enabled = false; - struct FixedDigital { - float gain_db = 0.0f; - } fixed_digital; - struct RTC_EXPORT AdaptiveDigital { - bool operator==(const AdaptiveDigital& rhs) const; - bool operator!=(const AdaptiveDigital& rhs) const { - return !(*this == rhs); - } - bool enabled = false; - // When true, the adaptive digital controller runs but the signal is not - // modified. - bool dry_run = false; - float headroom_db = 6.0f; - // TODO(bugs.webrtc.org/7494): Consider removing and inferring from - // `max_output_noise_level_dbfs`. - float max_gain_db = 30.0f; - float initial_gain_db = 8.0f; - int vad_reset_period_ms = 1500; - int adjacent_speech_frames_threshold = 12; - float max_gain_change_db_per_second = 3.0f; - float max_output_noise_level_dbfs = -50.0f; - } adaptive_digital; - - // Enables input volume control in AGC2. + // Parameters for the input volume controller, which adjusts the input + // volume applied when the audio is captured (e.g., microphone volume on + // a soundcard, input volume on HAL). struct InputVolumeController { bool operator==(const InputVolumeController& rhs) const; bool operator!=(const InputVolumeController& rhs) const { @@ -366,6 +353,31 @@ class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface { } bool enabled = false; } input_volume_controller; + + // Parameters for the adaptive digital controller, which adjusts and + // applies a digital gain after echo cancellation and after noise + // suppression. + struct RTC_EXPORT AdaptiveDigital { + bool operator==(const AdaptiveDigital& rhs) const; + bool operator!=(const AdaptiveDigital& rhs) const { + return !(*this == rhs); + } + bool enabled = false; + float headroom_db = 6.0f; + float max_gain_db = 30.0f; + float initial_gain_db = 8.0f; + float max_gain_change_db_per_second = 3.0f; + float max_output_noise_level_dbfs = -50.0f; + } adaptive_digital; + + // Parameters for the fixed digital controller, which applies a fixed + // digital gain after the adaptive digital controller and before the + // limiter. 
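Editorial note: as a usage illustration only (not part of the patch), the restructured gain_controller2 configuration above can be populated from application code in the same style as the usage example earlier in this header. Field names follow the new struct layout; the fixed digital gain is left at its documented default.

```cpp
// Minimal sketch: enable AGC2 with both the input volume controller and the
// adaptive digital controller.
rtc::scoped_refptr<AudioProcessing> apm = AudioProcessingBuilder().Create();

AudioProcessing::Config config = apm->GetConfig();
config.gain_controller2.enabled = true;
config.gain_controller2.input_volume_controller.enabled = true;
config.gain_controller2.adaptive_digital.enabled = true;
config.gain_controller2.fixed_digital.gain_db = 0.0f;  // Documented default.
apm->ApplyConfig(config);
```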
+ struct FixedDigital { + // By setting `gain_db` to a value greater than zero, the limiter can be + // turned into a compressor that first applies a fixed gain. + float gain_db = 0.0f; + } fixed_digital; } gain_controller2; std::string ToString() const; diff --git a/third_party/libwebrtc/modules/congestion_controller/BUILD.gn b/third_party/libwebrtc/modules/congestion_controller/BUILD.gn index 774fc84d67d5..13b0d539b846 100644 --- a/third_party/libwebrtc/modules/congestion_controller/BUILD.gn +++ b/third_party/libwebrtc/modules/congestion_controller/BUILD.gn @@ -27,7 +27,6 @@ rtc_library("congestion_controller") { ] deps = [ - "../../api/transport:field_trial_based_config", "../../api/transport:network_control", "../../api/units:data_rate", "../../api/units:time_delta", diff --git a/third_party/libwebrtc/modules/congestion_controller/goog_cc/delay_based_bwe.h b/third_party/libwebrtc/modules/congestion_controller/goog_cc/delay_based_bwe.h index 21dff35735fc..e91a1dff54a9 100644 --- a/third_party/libwebrtc/modules/congestion_controller/goog_cc/delay_based_bwe.h +++ b/third_party/libwebrtc/modules/congestion_controller/goog_cc/delay_based_bwe.h @@ -82,6 +82,7 @@ class DelayBasedBwe { DataRate TriggerOveruse(Timestamp at_time, absl::optional link_capacity); DataRate last_estimate() const { return prev_bitrate_; } + BandwidthUsage last_state() const { return prev_state_; } private: friend class GoogCcStatePrinter; diff --git a/third_party/libwebrtc/modules/congestion_controller/goog_cc/goog_cc_network_control.cc b/third_party/libwebrtc/modules/congestion_controller/goog_cc/goog_cc_network_control.cc index 7cfa9d912660..3a9de8c4dccf 100644 --- a/third_party/libwebrtc/modules/congestion_controller/goog_cc/goog_cc_network_control.cc +++ b/third_party/libwebrtc/modules/congestion_controller/goog_cc/goog_cc_network_control.cc @@ -22,6 +22,7 @@ #include #include "absl/strings/match.h" +#include "api/network_state_predictor.h" #include "api/units/data_rate.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" @@ -60,6 +61,26 @@ bool IsEnabled(const FieldTrialsView* config, absl::string_view key) { bool IsNotDisabled(const FieldTrialsView* config, absl::string_view key) { return !absl::StartsWith(config->Lookup(key), "Disabled"); } + +BandwidthLimitedCause GetBandwidthLimitedCause( + LossBasedState loss_based_state, + BandwidthUsage bandwidth_usage, + bool not_probe_if_delay_increased) { + if (not_probe_if_delay_increased && + (bandwidth_usage == BandwidthUsage::kBwOverusing || + bandwidth_usage == BandwidthUsage::kBwUnderusing)) { + return BandwidthLimitedCause::kDelayBasedLimitedDelayIncreased; + } + switch (loss_based_state) { + case LossBasedState::kDecreasing: + return BandwidthLimitedCause::kLossLimitedBweDecreasing; + case LossBasedState::kIncreasing: + return BandwidthLimitedCause::kLossLimitedBweIncreasing; + default: + return BandwidthLimitedCause::kDelayBasedLimited; + } +} + } // namespace GoogCcNetworkController::GoogCcNetworkController(NetworkControllerConfig config, @@ -80,13 +101,9 @@ GoogCcNetworkController::GoogCcNetworkController(NetworkControllerConfig config, "WebRTC-Bwe-LimitProbesLowerThanThroughputEstimate")), rate_control_settings_( RateControlSettings::ParseFromKeyValueConfig(key_value_config_)), - loss_based_stable_rate_( - IsEnabled(key_value_config_, "WebRTC-Bwe-LossBasedStableRate")), pace_at_max_of_bwe_and_lower_link_capacity_( IsEnabled(key_value_config_, "WebRTC-Bwe-PaceAtMaxOfBweAndLowerLinkCapacity")), - pace_at_loss_based_bwe_when_loss_( - 
IsEnabled(key_value_config_, "WebRTC-Bwe-PaceAtLossBaseBweWhenLoss")), probe_controller_( new ProbeController(key_value_config_, config.event_log)), congestion_window_pushback_controller_( @@ -543,7 +560,8 @@ NetworkControlUpdate GoogCcNetworkController::OnTransportPacketsFeedback( result.target_bitrate); } bandwidth_estimation_->UpdateLossBasedEstimator( - report, result.delay_detector_state, probe_bitrate); + report, result.delay_detector_state, probe_bitrate, + estimate_ ? estimate_->link_capacity_upper : DataRate::PlusInfinity()); if (result.updated) { // Update the estimate in the ProbeController, in case we want to probe. MaybeTriggerOnNetworkChanged(&update, report.feedback_time); @@ -631,11 +649,7 @@ void GoogCcNetworkController::MaybeTriggerOnNetworkChanged( } DataRate stable_target_rate = bandwidth_estimation_->GetEstimatedLinkCapacity(); - if (loss_based_stable_rate_) { - stable_target_rate = std::min(stable_target_rate, loss_based_target_rate); - } else { - stable_target_rate = std::min(stable_target_rate, pushback_target_rate); - } + stable_target_rate = std::min(stable_target_rate, pushback_target_rate); if ((loss_based_target_rate != last_loss_based_target_rate_) || (fraction_loss != last_estimated_fraction_loss_) || @@ -670,9 +684,10 @@ void GoogCcNetworkController::MaybeTriggerOnNetworkChanged( auto probes = probe_controller_->SetEstimatedBitrate( loss_based_target_rate, - /*bwe_limited_due_to_packet_loss=*/ - bandwidth_estimation_->loss_based_state() != - LossBasedState::kDelayBasedEstimate, + GetBandwidthLimitedCause( + bandwidth_estimation_->loss_based_state(), + delay_based_bwe_->last_state(), + probe_controller_->DontProbeIfDelayIncreased()), at_time); update->probe_cluster_configs.insert(update->probe_cluster_configs.end(), probes.begin(), probes.end()); @@ -687,10 +702,7 @@ PacerConfig GoogCcNetworkController::GetPacingRates(Timestamp at_time) const { // Pacing rate is based on target rate before congestion window pushback, // because we don't want to build queues in the pacer when pushback occurs. 
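// Illustrative only: with this change the probe controller is told *why* the
// estimate is limited, via BandwidthLimitedCause, instead of a bare
// bwe_limited_due_to_packet_loss flag. The mapping added above gives the delay
// detector precedence over the loss-based state:
//   - delay detector over-/underusing and NotProbeIfDelayIncreased enabled
//       -> kDelayBasedLimitedDelayIncreased
//   - otherwise LossBasedState::kDecreasing -> kLossLimitedBweDecreasing
//   - otherwise LossBasedState::kIncreasing -> kLossLimitedBweIncreasing
//   - otherwise                             -> kDelayBasedLimited
// For example (a sketch using the enums from this file):
//   GetBandwidthLimitedCause(LossBasedState::kIncreasing,
//                            BandwidthUsage::kBwOverusing,
//                            /*not_probe_if_delay_increased=*/true)
//       == BandwidthLimitedCause::kDelayBasedLimitedDelayIncreased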
DataRate pacing_rate = DataRate::Zero(); - if ((pace_at_max_of_bwe_and_lower_link_capacity_ || - (pace_at_loss_based_bwe_when_loss_ && - last_loss_based_target_rate_ >= delay_based_bwe_->last_estimate())) && - estimate_) { + if (pace_at_max_of_bwe_and_lower_link_capacity_ && estimate_) { pacing_rate = std::max({min_total_allocated_bitrate_, estimate_->link_capacity_lower, last_loss_based_target_rate_}) * diff --git a/third_party/libwebrtc/modules/congestion_controller/goog_cc/goog_cc_network_control.h b/third_party/libwebrtc/modules/congestion_controller/goog_cc/goog_cc_network_control.h index 1b9f96273284..37a064e37cef 100644 --- a/third_party/libwebrtc/modules/congestion_controller/goog_cc/goog_cc_network_control.h +++ b/third_party/libwebrtc/modules/congestion_controller/goog_cc/goog_cc_network_control.h @@ -93,9 +93,7 @@ class GoogCcNetworkController : public NetworkControllerInterface { const bool ignore_probes_lower_than_network_estimate_; const bool limit_probes_lower_than_throughput_estimate_; const RateControlSettings rate_control_settings_; - const bool loss_based_stable_rate_; const bool pace_at_max_of_bwe_and_lower_link_capacity_; - const bool pace_at_loss_based_bwe_when_loss_; const std::unique_ptr probe_controller_; const std::unique_ptr diff --git a/third_party/libwebrtc/modules/congestion_controller/goog_cc/goog_cc_network_control_unittest.cc b/third_party/libwebrtc/modules/congestion_controller/goog_cc/goog_cc_network_control_unittest.cc index 44054f10db2a..7e051f505b76 100644 --- a/third_party/libwebrtc/modules/congestion_controller/goog_cc/goog_cc_network_control_unittest.cc +++ b/third_party/libwebrtc/modules/congestion_controller/goog_cc/goog_cc_network_control_unittest.cc @@ -140,7 +140,6 @@ absl::optional PacketTransmissionAndFeedbackBlock( // Scenarios: void UpdatesTargetRateBasedOnLinkCapacity(absl::string_view test_name = "") { - ScopedFieldTrials trial("WebRTC-SendSideBwe-WithOverhead/Enabled/"); auto factory = CreateFeedbackOnlyFactory(); Scenario s("googcc_unit/target_capacity" + std::string(test_name), false); CallClientConfig config; @@ -665,55 +664,6 @@ DataRate AverageBitrateAfterCrossInducedLoss(absl::string_view name) { s.TimeSinceStart(); } -TEST(GoogCcScenario, LossBasedRecoversFasterAfterCrossInducedLoss) { - // This test acts as a reference for the test below, showing that without the - // trial, we have worse behavior. - DataRate average_bitrate_without_loss_based = - AverageBitrateAfterCrossInducedLoss("googcc_unit/no_cross_loss_based"); - - // We recover bitrate better when subject to loss spikes from cross traffic - // when loss based controller is used. 
- ScopedFieldTrials trial("WebRTC-Bwe-LossBasedControl/Enabled/"); - DataRate average_bitrate_with_loss_based = - AverageBitrateAfterCrossInducedLoss("googcc_unit/cross_loss_based"); - - EXPECT_GT(average_bitrate_with_loss_based, - average_bitrate_without_loss_based); -} - -TEST(GoogCcScenario, LossBasedEstimatorCapsRateAtModerateLoss) { - ScopedFieldTrials trial("WebRTC-Bwe-LossBasedControl/Enabled/"); - Scenario s("googcc_unit/moderate_loss_channel", false); - CallClientConfig config; - config.transport.rates.min_rate = DataRate::KilobitsPerSec(10); - config.transport.rates.max_rate = DataRate::KilobitsPerSec(5000); - config.transport.rates.start_rate = DataRate::KilobitsPerSec(1000); - - NetworkSimulationConfig network; - network.bandwidth = DataRate::KilobitsPerSec(2000); - network.delay = TimeDelta::Millis(100); - // 3% loss rate is in the moderate loss rate region at 2000 kbps, limiting the - // bitrate increase. - network.loss_rate = 0.03; - auto send_net = s.CreateMutableSimulationNode(network); - auto* client = s.CreateClient("send", std::move(config)); - auto* route = s.CreateRoutes(client, {send_net->node()}, - s.CreateClient("return", CallClientConfig()), - {s.CreateSimulationNode(network)}); - s.CreateVideoStream(route->forward(), VideoStreamConfig()); - // Allow the controller to stabilize at the lower bitrate. - s.RunFor(TimeDelta::Seconds(1)); - // This increase in capacity would cause the target bitrate to increase to - // over 4000 kbps without LossBasedControl. - send_net->UpdateConfig([](NetworkSimulationConfig* c) { - c->bandwidth = DataRate::KilobitsPerSec(5000); - }); - s.RunFor(TimeDelta::Seconds(20)); - // Using LossBasedControl, the bitrate will not increase over 2500 kbps since - // we have detected moderate loss. - EXPECT_LT(client->target_rate().kbps(), 2500); -} - TEST(GoogCcScenario, MaintainsLowRateInSafeResetTrial) { const DataRate kLinkCapacity = DataRate::KilobitsPerSec(200); const DataRate kStartRate = DataRate::KilobitsPerSec(300); @@ -769,9 +719,7 @@ TEST(GoogCcScenario, CutsHighRateInSafeResetTrial) { } TEST(GoogCcScenario, DetectsHighRateInSafeResetTrial) { - ScopedFieldTrials trial( - "WebRTC-Bwe-SafeResetOnRouteChange/Enabled,ack/" - "WebRTC-SendSideBwe-WithOverhead/Enabled/"); + ScopedFieldTrials trial("WebRTC-Bwe-SafeResetOnRouteChange/Enabled,ack/"); const DataRate kInitialLinkCapacity = DataRate::KilobitsPerSec(200); const DataRate kNewLinkCapacity = DataRate::KilobitsPerSec(800); const DataRate kStartRate = DataRate::KilobitsPerSec(300); @@ -947,11 +895,39 @@ TEST(GoogCcScenario, FastRampupOnRembCapLifted) { EXPECT_GT(final_estimate.kbps(), 1500); } -TEST(GoogCcScenario, SlowRampupOnRembCapLiftedWithFieldTrial) { - ScopedFieldTrials trial("WebRTC-Bwe-ReceiverLimitCapsOnly/Disabled/"); - DataRate final_estimate = - RunRembDipScenario("googcc_unit/legacy_slow_rampup_on_remb_cap_lifted"); - EXPECT_LT(final_estimate.kbps(), 1000); +TEST(GoogCcScenario, FallbackToLossBasedBweWithoutPacketFeedback) { + const DataRate kLinkCapacity = DataRate::KilobitsPerSec(1000); + const DataRate kStartRate = DataRate::KilobitsPerSec(1000); + + Scenario s("googcc_unit/high_loss_channel", false); + auto* net = s.CreateMutableSimulationNode([&](NetworkSimulationConfig* c) { + c->bandwidth = kLinkCapacity; + c->delay = TimeDelta::Millis(100); + }); + auto* client = s.CreateClient("send", [&](CallClientConfig* c) { + c->transport.rates.start_rate = kStartRate; + }); + auto* route = s.CreateRoutes( + client, {net->node()}, s.CreateClient("return", CallClientConfig()), + 
{s.CreateSimulationNode(NetworkSimulationConfig())}); + + // Create a config without packet feedback. + VideoStreamConfig video_config; + video_config.stream.packet_feedback = false; + s.CreateVideoStream(route->forward(), video_config); + + s.RunFor(TimeDelta::Seconds(20)); + // Bandwith does not backoff because network is normal. + EXPECT_GE(client->target_rate().kbps(), 500); + + // Update the network to create high loss ratio + net->UpdateConfig([](NetworkSimulationConfig* c) { + c->loss_rate = 0.15; + }); + s.RunFor(TimeDelta::Seconds(20)); + + // Bandwidth decreases thanks to loss based bwe v0. + EXPECT_LE(client->target_rate().kbps(), 300); } } // namespace test diff --git a/third_party/libwebrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v2.cc b/third_party/libwebrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v2.cc index 80292190d40c..b6efdeee9e1d 100644 --- a/third_party/libwebrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v2.cc +++ b/third_party/libwebrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v2.cc @@ -219,8 +219,10 @@ void LossBasedBweV2::UpdateBandwidthEstimate( rtc::ArrayView packet_results, DataRate delay_based_estimate, BandwidthUsage delay_detector_state, - absl::optional probe_bitrate) { + absl::optional probe_bitrate, + DataRate upper_link_capacity) { delay_based_estimate_ = delay_based_estimate; + upper_link_capacity_ = upper_link_capacity; if (!IsEnabled()) { RTC_LOG(LS_WARNING) << "The estimator must be enabled before it can be used."; @@ -307,13 +309,12 @@ void LossBasedBweV2::UpdateBandwidthEstimate( } } - if (IsEstimateIncreasingWhenLossLimited(best_candidate)) { + if (IsEstimateIncreasingWhenLossLimited(best_candidate) && + best_candidate.loss_limited_bandwidth < delay_based_estimate) { current_state_ = LossBasedState::kIncreasing; - } else if (IsValid(delay_based_estimate_) && - best_candidate.loss_limited_bandwidth < delay_based_estimate_) { + } else if (best_candidate.loss_limited_bandwidth < delay_based_estimate_) { current_state_ = LossBasedState::kDecreasing; - } else if (IsValid(delay_based_estimate_) && - best_candidate.loss_limited_bandwidth == delay_based_estimate_) { + } else if (best_candidate.loss_limited_bandwidth >= delay_based_estimate_) { current_state_ = LossBasedState::kDelayBasedEstimate; } current_estimate_ = best_candidate; @@ -344,64 +345,64 @@ bool LossBasedBweV2::IsEstimateIncreasingWhenLossLimited( // configuration for the `LossBasedBweV2` which is explicitly enabled. 
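// Illustrative only: the state transition rewritten in
// LossBasedBweV2::UpdateBandwidthEstimate() above no longer guards on
// IsValid(delay_based_estimate_); it reduces to comparing the best candidate
// against the delay-based estimate. A minimal sketch (hypothetical names,
// plain doubles in place of webrtc::DataRate):
enum class SketchLossBasedState { kIncreasing, kDecreasing, kDelayBasedEstimate };

SketchLossBasedState SketchNextState(bool increasing_when_loss_limited,
                                     double candidate_bw, double delay_based_bw) {
  if (increasing_when_loss_limited && candidate_bw < delay_based_bw)
    return SketchLossBasedState::kIncreasing;
  if (candidate_bw < delay_based_bw)
    return SketchLossBasedState::kDecreasing;
  // candidate_bw >= delay_based_bw. With an infinite delay-based estimate this
  // branch is unreachable, which the new
  // LossBasedStateIsNotDelayBasedEstimateIfDelayBasedEsimtateInfinite test
  // below exercises.
  return SketchLossBasedState::kDelayBasedEstimate;
}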
absl::optional LossBasedBweV2::CreateConfig( const FieldTrialsView* key_value_config) { - FieldTrialParameter enabled("Enabled", false); + FieldTrialParameter enabled("Enabled", true); FieldTrialParameter bandwidth_rampup_upper_bound_factor( - "BwRampupUpperBoundFactor", 1.1); + "BwRampupUpperBoundFactor", 1000000.0); FieldTrialParameter rampup_acceleration_max_factor( "BwRampupAccelMaxFactor", 0.0); FieldTrialParameter rampup_acceleration_maxout_time( "BwRampupAccelMaxoutTime", TimeDelta::Seconds(60)); FieldTrialList candidate_factors("CandidateFactors", - {1.05, 1.0, 0.95}); + {1.02, 1.0, 0.95}); FieldTrialParameter higher_bandwidth_bias_factor("HigherBwBiasFactor", - 0.00001); + 0.0002); FieldTrialParameter higher_log_bandwidth_bias_factor( - "HigherLogBwBiasFactor", 0.001); + "HigherLogBwBiasFactor", 0.02); FieldTrialParameter inherent_loss_lower_bound( "InherentLossLowerBound", 1.0e-3); FieldTrialParameter loss_threshold_of_high_bandwidth_preference( - "LossThresholdOfHighBandwidthPreference", 0.99); + "LossThresholdOfHighBandwidthPreference", 0.15); FieldTrialParameter bandwidth_preference_smoothing_factor( "BandwidthPreferenceSmoothingFactor", 0.002); FieldTrialParameter inherent_loss_upper_bound_bandwidth_balance( - "InherentLossUpperBoundBwBalance", DataRate::KilobitsPerSec(15.0)); + "InherentLossUpperBoundBwBalance", DataRate::KilobitsPerSec(75.0)); FieldTrialParameter inherent_loss_upper_bound_offset( "InherentLossUpperBoundOffset", 0.05); FieldTrialParameter initial_inherent_loss_estimate( "InitialInherentLossEstimate", 0.01); FieldTrialParameter newton_iterations("NewtonIterations", 1); - FieldTrialParameter newton_step_size("NewtonStepSize", 0.5); + FieldTrialParameter newton_step_size("NewtonStepSize", 0.75); FieldTrialParameter append_acknowledged_rate_candidate( "AckedRateCandidate", true); FieldTrialParameter append_delay_based_estimate_candidate( - "DelayBasedCandidate", false); + "DelayBasedCandidate", true); FieldTrialParameter observation_duration_lower_bound( - "ObservationDurationLowerBound", TimeDelta::Seconds(1)); + "ObservationDurationLowerBound", TimeDelta::Millis(250)); FieldTrialParameter observation_window_size("ObservationWindowSize", 20); FieldTrialParameter sending_rate_smoothing_factor( "SendingRateSmoothingFactor", 0.0); FieldTrialParameter instant_upper_bound_temporal_weight_factor( - "InstantUpperBoundTemporalWeightFactor", 0.99); + "InstantUpperBoundTemporalWeightFactor", 0.9); FieldTrialParameter instant_upper_bound_bandwidth_balance( - "InstantUpperBoundBwBalance", DataRate::KilobitsPerSec(15.0)); + "InstantUpperBoundBwBalance", DataRate::KilobitsPerSec(75.0)); FieldTrialParameter instant_upper_bound_loss_offset( "InstantUpperBoundLossOffset", 0.05); FieldTrialParameter temporal_weight_factor("TemporalWeightFactor", - 0.99); + 0.9); FieldTrialParameter bandwidth_backoff_lower_bound_factor( "BwBackoffLowerBoundFactor", 1.0); FieldTrialParameter trendline_integration_enabled( "TrendlineIntegrationEnabled", false); FieldTrialParameter trendline_observations_window_size( "TrendlineObservationsWindowSize", 20); - FieldTrialParameter max_increase_factor("MaxIncreaseFactor", 1000.0); + FieldTrialParameter max_increase_factor("MaxIncreaseFactor", 1.3); FieldTrialParameter delayed_increase_window( "DelayedIncreaseWindow", TimeDelta::Millis(300)); FieldTrialParameter use_acked_bitrate_only_when_overusing( "UseAckedBitrateOnlyWhenOverusing", false); FieldTrialParameter not_increase_if_inherent_loss_less_than_average_loss( - 
"NotIncreaseIfInherentLossLessThanAverageLoss", false); + "NotIncreaseIfInherentLossLessThanAverageLoss", true); FieldTrialParameter high_loss_rate_threshold("HighLossRateThreshold", 1.0); FieldTrialParameter bandwidth_cap_at_high_loss_rate( @@ -410,6 +411,8 @@ absl::optional LossBasedBweV2::CreateConfig( "SlopeOfBweHighLossFunc", 1000); FieldTrialParameter probe_integration_enabled("ProbeIntegrationEnabled", false); + FieldTrialParameter bound_by_upper_link_capacity_when_loss_limited( + "BoundByUpperLinkCapacityWhenLossLimited", true); if (key_value_config) { ParseFieldTrial({&enabled, &bandwidth_rampup_upper_bound_factor, @@ -445,7 +448,8 @@ absl::optional LossBasedBweV2::CreateConfig( &probe_integration_enabled, &high_loss_rate_threshold, &bandwidth_cap_at_high_loss_rate, - &slope_of_bwe_high_loss_func}, + &slope_of_bwe_high_loss_func, + &bound_by_upper_link_capacity_when_loss_limited}, key_value_config->Lookup("WebRTC-Bwe-LossBasedBweV2")); } @@ -506,6 +510,8 @@ absl::optional LossBasedBweV2::CreateConfig( bandwidth_cap_at_high_loss_rate.Get(); config->slope_of_bwe_high_loss_func = slope_of_bwe_high_loss_func.Get(); config->probe_integration_enabled = probe_integration_enabled.Get(); + config->bound_by_upper_link_capacity_when_loss_limited = + bound_by_upper_link_capacity_when_loss_limited.Get(); return config; } @@ -946,6 +952,12 @@ void LossBasedBweV2::CalculateInstantUpperBound() { } } + if (IsBandwidthLimitedDueToLoss()) { + if (IsValid(upper_link_capacity_) && + config_->bound_by_upper_link_capacity_when_loss_limited) { + instant_limit = std::min(instant_limit, upper_link_capacity_); + } + } cached_instant_upper_bound_ = instant_limit; } diff --git a/third_party/libwebrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v2.h b/third_party/libwebrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v2.h index 2318fbc77262..9ff9cb74c640 100644 --- a/third_party/libwebrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v2.h +++ b/third_party/libwebrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v2.h @@ -66,7 +66,8 @@ class LossBasedBweV2 { rtc::ArrayView packet_results, DataRate delay_based_estimate, BandwidthUsage delay_detector_state, - absl::optional probe_bitrate); + absl::optional probe_bitrate, + DataRate upper_link_capacity); private: struct ChannelParameters { @@ -110,6 +111,7 @@ class LossBasedBweV2 { DataRate bandwidth_cap_at_high_loss_rate = DataRate::MinusInfinity(); double slope_of_bwe_high_loss_func = 1000.0; bool probe_integration_enabled = false; + bool bound_by_upper_link_capacity_when_loss_limited = false; }; struct Derivatives { @@ -193,6 +195,7 @@ class LossBasedBweV2 { LossBasedState current_state_ = LossBasedState::kDelayBasedEstimate; DataRate probe_bitrate_ = DataRate::PlusInfinity(); DataRate delay_based_estimate_ = DataRate::PlusInfinity(); + DataRate upper_link_capacity_ = DataRate::PlusInfinity(); }; } // namespace webrtc diff --git a/third_party/libwebrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v2_test.cc b/third_party/libwebrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v2_test.cc index e90a50765a47..c303c29d682a 100644 --- a/third_party/libwebrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v2_test.cc +++ b/third_party/libwebrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v2_test.cc @@ -205,7 +205,8 @@ TEST_P(LossBasedBweV2Test, ReturnsDelayBasedEstimateWhenDisabled) { /*packet_results=*/{}, /*delay_based_estimate=*/DataRate::KilobitsPerSec(100), BandwidthUsage::kBwNormal, - 
/*probe_estimate=*/absl::nullopt); + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); EXPECT_EQ( loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, DataRate::KilobitsPerSec(100)); @@ -221,7 +222,8 @@ TEST_P(LossBasedBweV2Test, /*packet_results=*/{}, /*delay_based_estimate=*/DataRate::KilobitsPerSec(100), BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); EXPECT_EQ( loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, DataRate::KilobitsPerSec(100)); @@ -241,8 +243,10 @@ TEST_P(LossBasedBweV2Test, loss_based_bandwidth_estimator.SetBandwidthEstimate( DataRate::KilobitsPerSec(600)); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback, DataRate::PlusInfinity(), BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + enough_feedback, /*delay_based_estimate=*/DataRate::PlusInfinity(), + BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); EXPECT_TRUE(loss_based_bandwidth_estimator.IsReady()); EXPECT_TRUE(loss_based_bandwidth_estimator.GetLossBasedResult() @@ -259,8 +263,10 @@ TEST_P(LossBasedBweV2Test, NoBandwidthEstimateGivenNoInitialization) { LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback, DataRate::PlusInfinity(), BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + enough_feedback, /*delay_based_estimate=*/DataRate::PlusInfinity(), + BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); EXPECT_FALSE(loss_based_bandwidth_estimator.IsReady()); EXPECT_TRUE(loss_based_bandwidth_estimator.GetLossBasedResult() @@ -294,8 +300,10 @@ TEST_P(LossBasedBweV2Test, NoBandwidthEstimateGivenNotEnoughFeedback) { .bandwidth_estimate.IsPlusInfinity()); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - not_enough_feedback, DataRate::PlusInfinity(), BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + not_enough_feedback, /*delay_based_estimate=*/DataRate::PlusInfinity(), + BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); EXPECT_FALSE(loss_based_bandwidth_estimator.IsReady()); EXPECT_TRUE(loss_based_bandwidth_estimator.GetLossBasedResult() @@ -320,8 +328,10 @@ TEST_P(LossBasedBweV2Test, loss_based_bandwidth_estimator.SetBandwidthEstimate( DataRate::KilobitsPerSec(600)); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_1, DataRate::PlusInfinity(), BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + enough_feedback_1, /*delay_based_estimate=*/DataRate::PlusInfinity(), + BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); EXPECT_NE( loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, @@ -335,8 +345,10 @@ TEST_P(LossBasedBweV2Test, DataRate::KilobitsPerSec(600)); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_2, DataRate::PlusInfinity(), BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + enough_feedback_2, /*delay_based_estimate=*/DataRate::PlusInfinity(), + BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); EXPECT_NE( 
loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, @@ -364,11 +376,15 @@ TEST_P(LossBasedBweV2Test, loss_based_bandwidth_estimator_2.SetBandwidthEstimate( DataRate::KilobitsPerSec(600)); loss_based_bandwidth_estimator_1.UpdateBandwidthEstimate( - enough_feedback_1, DataRate::PlusInfinity(), BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + enough_feedback_1, /*delay_based_estimate=*/DataRate::PlusInfinity(), + BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); loss_based_bandwidth_estimator_2.UpdateBandwidthEstimate( - enough_feedback_1, DataRate::PlusInfinity(), BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + enough_feedback_1, /*delay_based_estimate=*/DataRate::PlusInfinity(), + BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); EXPECT_EQ( loss_based_bandwidth_estimator_1.GetLossBasedResult().bandwidth_estimate, @@ -382,11 +398,15 @@ TEST_P(LossBasedBweV2Test, DataRate::KilobitsPerSec(660)); loss_based_bandwidth_estimator_1.UpdateBandwidthEstimate( - enough_feedback_2, DataRate::PlusInfinity(), BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + enough_feedback_2, /*delay_based_estimate=*/DataRate::PlusInfinity(), + BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); loss_based_bandwidth_estimator_2.UpdateBandwidthEstimate( - enough_feedback_2, DataRate::PlusInfinity(), BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + enough_feedback_2, /*delay_based_estimate=*/DataRate::PlusInfinity(), + BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); EXPECT_NE( loss_based_bandwidth_estimator_1.GetLossBasedResult().bandwidth_estimate, @@ -407,8 +427,10 @@ TEST_P(LossBasedBweV2Test, loss_based_bandwidth_estimator.SetBandwidthEstimate( DataRate::KilobitsPerSec(600)); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_no_received_packets, DataRate::PlusInfinity(), - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt); + enough_feedback_no_received_packets, + /*delay_based_estimate=*/DataRate::PlusInfinity(), + BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); EXPECT_EQ( loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, @@ -436,14 +458,17 @@ TEST_P(LossBasedBweV2Test, BandwidthEstimateNotIncreaseWhenNetworkUnderusing) { loss_based_bandwidth_estimator.SetBandwidthEstimate( DataRate::KilobitsPerSec(600)); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_1, DataRate::PlusInfinity(), - BandwidthUsage::kBwUnderusing, /*probe_estimate=*/absl::nullopt); + enough_feedback_1, /*delay_based_estimate=*/DataRate::PlusInfinity(), + BandwidthUsage::kBwUnderusing, /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); EXPECT_LE( loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, DataRate::KilobitsPerSec(600)); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_2, DataRate::PlusInfinity(), BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + enough_feedback_2, /*delay_based_estimate=*/DataRate::PlusInfinity(), + BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); EXPECT_LE( 
loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, DataRate::KilobitsPerSec(600)); @@ -470,8 +495,10 @@ TEST_P(LossBasedBweV2Test, loss_based_bandwidth_estimator.SetBandwidthEstimate( DataRate::KilobitsPerSec(600)); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_1, DataRate::PlusInfinity(), BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + enough_feedback_1, /*delay_based_estimate=*/DataRate::PlusInfinity(), + BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); // If the delay based estimate is infinity, then loss based estimate increases // and not bounded by delay based estimate. EXPECT_GT( @@ -480,7 +507,8 @@ TEST_P(LossBasedBweV2Test, loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_2, /*delay_based_estimate=*/DataRate::KilobitsPerSec(500), BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); // If the delay based estimate is not infinity, then loss based estimate is // bounded by delay based estimate. EXPECT_EQ( @@ -512,13 +540,17 @@ TEST_P(LossBasedBweV2Test, UseAckedBitrateForEmegencyBackOff) { loss_based_bandwidth_estimator.SetAcknowledgedBitrate(acked_bitrate); // Update estimate when network is overusing, and 50% loss rate. loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_1, DataRate::PlusInfinity(), BandwidthUsage::kBwOverusing, - /*probe_estimate=*/absl::nullopt); + enough_feedback_1, /*delay_based_estimate=*/DataRate::PlusInfinity(), + BandwidthUsage::kBwOverusing, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); // Update estimate again when network is continuously overusing, and 100% // loss rate. loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_2, DataRate::PlusInfinity(), BandwidthUsage::kBwOverusing, - /*probe_estimate=*/absl::nullopt); + enough_feedback_2, /*delay_based_estimate=*/DataRate::PlusInfinity(), + BandwidthUsage::kBwOverusing, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); // The estimate bitrate now is backed off based on acked bitrate. EXPECT_LE( loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, @@ -541,15 +573,19 @@ TEST_P(LossBasedBweV2Test, NoBweChangeIfObservationDurationUnchanged) { DataRate::KilobitsPerSec(300)); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_1, DataRate::PlusInfinity(), BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + enough_feedback_1, /*delay_based_estimate=*/DataRate::PlusInfinity(), + BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); DataRate estimate_1 = loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate; // Use the same feedback and check if the estimate is unchanged. 
loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_1, DataRate::PlusInfinity(), BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + enough_feedback_1, /*delay_based_estimate=*/DataRate::PlusInfinity(), + BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); DataRate estimate_2 = loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate; EXPECT_EQ(estimate_2, estimate_1); @@ -574,14 +610,18 @@ TEST_P(LossBasedBweV2Test, loss_based_bandwidth_estimator.SetBandwidthEstimate( DataRate::KilobitsPerSec(600)); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_1, DataRate::PlusInfinity(), BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + enough_feedback_1, /*delay_based_estimate=*/DataRate::PlusInfinity(), + BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); DataRate estimate_1 = loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate; loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_2, DataRate::PlusInfinity(), BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + enough_feedback_2, /*delay_based_estimate=*/DataRate::PlusInfinity(), + BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); DataRate estimate_2 = loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate; EXPECT_EQ(estimate_2, estimate_1); @@ -606,14 +646,17 @@ TEST_P(LossBasedBweV2Test, loss_based_bandwidth_estimator.SetBandwidthEstimate( DataRate::KilobitsPerSec(600)); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_1, DataRate::PlusInfinity(), BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + enough_feedback_1, /*delay_based_estimate=*/DataRate::PlusInfinity(), + BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); DataRate estimate_1 = loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate; loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_2, DataRate::PlusInfinity(), - BandwidthUsage::kBwUnderusing, /*probe_estimate=*/absl::nullopt); + enough_feedback_2, /*delay_based_estimate=*/DataRate::PlusInfinity(), + BandwidthUsage::kBwUnderusing, /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); DataRate estimate_2 = loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate; EXPECT_LE(estimate_2, estimate_1); @@ -645,14 +688,18 @@ TEST_P(LossBasedBweV2Test, loss_based_bandwidth_estimator.SetAcknowledgedBitrate( DataRate::KilobitsPerSec(300)); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_1, DataRate::PlusInfinity(), BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + enough_feedback_1, /*delay_based_estimate=*/DataRate::PlusInfinity(), + BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); DataRate estimate_1 = loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate; loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_2, DataRate::PlusInfinity(), BandwidthUsage::kBwOverusing, - /*probe_estimate=*/absl::nullopt); + enough_feedback_2, /*delay_based_estimate=*/DataRate::PlusInfinity(), + BandwidthUsage::kBwOverusing, + /*probe_estimate=*/absl::nullopt, + 
/*upper_link_capacity=*/DataRate::PlusInfinity()); DataRate estimate_2 = loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate; EXPECT_LT(estimate_2, estimate_1); @@ -677,13 +724,15 @@ TEST_P(LossBasedBweV2Test, loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_1, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); EXPECT_EQ( loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, delay_based_estimate); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_2, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); EXPECT_EQ( loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, delay_based_estimate); @@ -697,7 +746,7 @@ TEST_P(LossBasedBweV2Test, "ObservationWindowSize:2,ObservationDurationLowerBound:200ms," "InstantUpperBoundBwBalance:10000kbps," "DelayBasedCandidate:true,MaxIncreaseFactor:1.5,BwRampupUpperBoundFactor:" - "2.0/"); + "2.0,NotIncreaseIfInherentLossLessThanAverageLoss:false/"); LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); DataRate delay_based_estimate = DataRate::KilobitsPerSec(5000); DataRate acked_rate = DataRate::KilobitsPerSec(300); @@ -711,7 +760,8 @@ TEST_P(LossBasedBweV2Test, /*first_packet_timestamp=*/Timestamp::Zero()); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_1, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); LossBasedBweV2::Result result_at_loss = loss_based_bandwidth_estimator.GetLossBasedResult(); @@ -724,7 +774,8 @@ TEST_P(LossBasedBweV2Test, DataRate::KilobitsPerSec(600)); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_2, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); LossBasedBweV2::Result result_after_recovery = loss_based_bandwidth_estimator.GetLossBasedResult(); @@ -732,10 +783,115 @@ TEST_P(LossBasedBweV2Test, result_at_loss.bandwidth_estimate * 1.5); } +TEST_P(LossBasedBweV2Test, + LossBasedStateIsDelayBasedEstimateAfterNetworkRecovering) { + ExplicitKeyValueConfig key_value_config( + "WebRTC-Bwe-LossBasedBweV2/" + "Enabled:true,CandidateFactors:100|1|0.5,AckedRateCandidate:true," + "ObservationWindowSize:2,ObservationDurationLowerBound:200ms," + "InstantUpperBoundBwBalance:10000kbps," + "DelayBasedCandidate:true,MaxIncreaseFactor:100," + "BwRampupUpperBoundFactor:" + "2.0,NotIncreaseIfInherentLossLessThanAverageLoss:false/"); + LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); + DataRate delay_based_estimate = DataRate::KilobitsPerSec(600); + DataRate acked_rate = DataRate::KilobitsPerSec(300); + loss_based_bandwidth_estimator.SetBandwidthEstimate( + DataRate::KilobitsPerSec(600)); + loss_based_bandwidth_estimator.SetAcknowledgedBitrate(acked_rate); + + // Create some loss to create the loss limited scenario. 
+ std::vector enough_feedback_1 = + CreatePacketResultsWith100pLossRate( + /*first_packet_timestamp=*/Timestamp::Zero()); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + enough_feedback_1, delay_based_estimate, BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); + ASSERT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state, + LossBasedState::kDecreasing); + + // Network recovers after loss. + std::vector enough_feedback_2 = + CreatePacketResultsWithReceivedPackets( + /*first_packet_timestamp=*/Timestamp::Zero() + + kObservationDurationLowerBound); + loss_based_bandwidth_estimator.SetAcknowledgedBitrate( + DataRate::KilobitsPerSec(600)); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + enough_feedback_2, delay_based_estimate, BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); + EXPECT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state, + LossBasedState::kDelayBasedEstimate); + + // Network recovers continuing. + std::vector enough_feedback_3 = + CreatePacketResultsWithReceivedPackets( + /*first_packet_timestamp=*/Timestamp::Zero() + + kObservationDurationLowerBound * 2); + loss_based_bandwidth_estimator.SetAcknowledgedBitrate( + DataRate::KilobitsPerSec(600)); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + enough_feedback_3, delay_based_estimate, BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); + EXPECT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state, + LossBasedState::kDelayBasedEstimate); +} + +TEST_P(LossBasedBweV2Test, + LossBasedStateIsNotDelayBasedEstimateIfDelayBasedEsimtateInfinite) { + ExplicitKeyValueConfig key_value_config( + "WebRTC-Bwe-LossBasedBweV2/" + "Enabled:true,CandidateFactors:100|1|0.5,AckedRateCandidate:true," + "ObservationWindowSize:2,ObservationDurationLowerBound:200ms," + "InstantUpperBoundBwBalance:10000kbps," + "DelayBasedCandidate:true,MaxIncreaseFactor:100," + "BwRampupUpperBoundFactor:" + "2.0/"); + LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); + DataRate delay_based_estimate = DataRate::PlusInfinity(); + DataRate acked_rate = DataRate::KilobitsPerSec(300); + loss_based_bandwidth_estimator.SetBandwidthEstimate( + DataRate::KilobitsPerSec(600)); + loss_based_bandwidth_estimator.SetAcknowledgedBitrate(acked_rate); + + // Create some loss to create the loss limited scenario. + std::vector enough_feedback_1 = + CreatePacketResultsWith100pLossRate( + /*first_packet_timestamp=*/Timestamp::Zero()); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + enough_feedback_1, delay_based_estimate, BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); + ASSERT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state, + LossBasedState::kDecreasing); + + // Network recovers after loss. 
+ std::vector enough_feedback_2 = + CreatePacketResultsWithReceivedPackets( + /*first_packet_timestamp=*/Timestamp::Zero() + + kObservationDurationLowerBound); + loss_based_bandwidth_estimator.SetAcknowledgedBitrate( + DataRate::KilobitsPerSec(600)); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + enough_feedback_2, delay_based_estimate, BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); + EXPECT_NE(loss_based_bandwidth_estimator.GetLossBasedResult().state, + LossBasedState::kDelayBasedEstimate); +} + // After loss based bwe backs off, the next estimate is capped by // a factor of acked bitrate. TEST_P(LossBasedBweV2Test, IncreaseByFactorOfAckedBitrateAfterLossBasedBweBacksOff) { + ExplicitKeyValueConfig key_value_config( + "WebRTC-Bwe-LossBasedBweV2/" + "Enabled:true,LossThresholdOfHighBandwidthPreference:0.99," + "BwRampupUpperBoundFactor:1.2," + "InherentLossUpperBoundOffset:0.9,ObservationDurationLowerBound:200ms/"); std::vector enough_feedback_1 = CreatePacketResultsWith100pLossRate( /*first_packet_timestamp=*/Timestamp::Zero()); @@ -743,9 +899,6 @@ TEST_P(LossBasedBweV2Test, CreatePacketResultsWith10pLossRate( /*first_packet_timestamp=*/Timestamp::Zero() + kObservationDurationLowerBound); - ExplicitKeyValueConfig key_value_config( - Config(/*enabled=*/true, /*valid=*/true, - /*trendline_integration_enabled=*/GetParam())); LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); DataRate delay_based_estimate = DataRate::KilobitsPerSec(5000); @@ -755,7 +908,8 @@ TEST_P(LossBasedBweV2Test, DataRate::KilobitsPerSec(300)); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_1, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); // Change the acked bitrate to make sure that the estimate is bounded by a // factor of acked bitrate. @@ -763,7 +917,8 @@ TEST_P(LossBasedBweV2Test, loss_based_bandwidth_estimator.SetAcknowledgedBitrate(acked_bitrate); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_2, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); // The estimate is capped by acked_bitrate * BwRampupUpperBoundFactor. DataRate estimate_2 = @@ -798,14 +953,16 @@ TEST_P(LossBasedBweV2Test, DataRate::KilobitsPerSec(300)); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_1, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); // Increase the acknowledged bitrate to make sure that the estimate is not // capped too low. loss_based_bandwidth_estimator.SetAcknowledgedBitrate( DataRate::KilobitsPerSec(5000)); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_2, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); // The estimate is capped by current_estimate * kMaxIncreaseFactor because // it recently backed off. 
@@ -814,7 +971,8 @@ TEST_P(LossBasedBweV2Test, loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_3, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); // The latest estimate is the same as the previous estimate since the sent // packets were sent within the DelayedIncreaseWindow. EXPECT_EQ( @@ -847,14 +1005,16 @@ TEST_P(LossBasedBweV2Test, KeepIncreasingEstimateAfterDelayedIncreaseWindow) { DataRate::KilobitsPerSec(300)); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_1, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); // Increase the acknowledged bitrate to make sure that the estimate is not // capped too low. loss_based_bandwidth_estimator.SetAcknowledgedBitrate( DataRate::KilobitsPerSec(5000)); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_2, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); // The estimate is capped by current_estimate * kMaxIncreaseFactor because it // recently backed off. @@ -863,7 +1023,8 @@ TEST_P(LossBasedBweV2Test, KeepIncreasingEstimateAfterDelayedIncreaseWindow) { loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_3, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); // The estimate can continue increasing after the DelayedIncreaseWindow. EXPECT_GE( loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, @@ -889,7 +1050,8 @@ TEST_P(LossBasedBweV2Test, NotIncreaseIfInherentLossLessThanAverageLoss) { /*first_packet_timestamp=*/Timestamp::Zero()); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_10p_loss_1, delay_based_estimate, - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt); + BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); std::vector enough_feedback_10p_loss_2 = CreatePacketResultsWith10pLossRate( @@ -897,7 +1059,8 @@ TEST_P(LossBasedBweV2Test, NotIncreaseIfInherentLossLessThanAverageLoss) { kObservationDurationLowerBound); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_10p_loss_2, delay_based_estimate, - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt); + BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); // Do not increase the bitrate because inherent loss is less than average loss EXPECT_EQ( @@ -914,7 +1077,7 @@ TEST_P(LossBasedBweV2Test, "DelayBasedCandidate:true,InstantUpperBoundBwBalance:100kbps," "ObservationDurationLowerBound:200ms,HigherBwBiasFactor:1000," "HigherLogBwBiasFactor:1000,LossThresholdOfHighBandwidthPreference:0." 
- "20/"); + "20,NotIncreaseIfInherentLossLessThanAverageLoss:false/"); LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); DataRate delay_based_estimate = DataRate::KilobitsPerSec(5000); @@ -926,7 +1089,8 @@ TEST_P(LossBasedBweV2Test, /*first_packet_timestamp=*/Timestamp::Zero()); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_10p_loss_1, delay_based_estimate, - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt); + BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); std::vector enough_feedback_10p_loss_2 = CreatePacketResultsWith10pLossRate( @@ -934,7 +1098,8 @@ TEST_P(LossBasedBweV2Test, kObservationDurationLowerBound); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_10p_loss_2, delay_based_estimate, - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt); + BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); // Because LossThresholdOfHighBandwidthPreference is 20%, the average loss is // 10%, bandwidth estimate should increase. @@ -964,7 +1129,8 @@ TEST_P(LossBasedBweV2Test, /*first_packet_timestamp=*/Timestamp::Zero()); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_10p_loss_1, delay_based_estimate, - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt); + BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); std::vector enough_feedback_10p_loss_2 = CreatePacketResultsWith10pLossRate( @@ -972,7 +1138,8 @@ TEST_P(LossBasedBweV2Test, kObservationDurationLowerBound); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_10p_loss_2, delay_based_estimate, - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt); + BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); // Because LossThresholdOfHighBandwidthPreference is 5%, the average loss is // 10%, bandwidth estimate should decrease. @@ -1002,7 +1169,8 @@ TEST_P(LossBasedBweV2Test, UseProbeResultWhenRecoveringFromLoss) { /*first_packet_timestamp=*/Timestamp::Zero()); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_1, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); // Network recovers after loss. DataRate probe_estimate = DataRate::KilobitsPerSec(300); @@ -1012,13 +1180,139 @@ TEST_P(LossBasedBweV2Test, UseProbeResultWhenRecoveringFromLoss) { kObservationDurationLowerBound); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_2, delay_based_estimate, BandwidthUsage::kBwNormal, - probe_estimate); + probe_estimate, /*upper_link_capacity=*/DataRate::PlusInfinity()); LossBasedBweV2::Result result_after_recovery = loss_based_bandwidth_estimator.GetLossBasedResult(); EXPECT_EQ(result_after_recovery.bandwidth_estimate, probe_estimate); } +// If BoundByUpperLinkCapacityWhenLossLimited is enabled, the estimate is +// bounded by the upper link capacity when bandwidth is loss limited. 
+TEST_P(LossBasedBweV2Test, BoundEstimateByUpperLinkCapacityWhenLossLimited) { + ExplicitKeyValueConfig key_value_config( + "WebRTC-Bwe-LossBasedBweV2/" + "Enabled:true,CandidateFactors:1.2|1|0.5,AckedRateCandidate:true," + "ObservationWindowSize:2,ObservationDurationLowerBound:200ms," + "InstantUpperBoundBwBalance:10000kbps," + "DelayBasedCandidate:true,MaxIncreaseFactor:1000," + "BwRampupUpperBoundFactor:2.0,BoundByUpperLinkCapacityWhenLossLimited:" + "true/"); + LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); + DataRate delay_based_estimate = DataRate::KilobitsPerSec(5000); + DataRate acked_rate = DataRate::KilobitsPerSec(300); + loss_based_bandwidth_estimator.SetBandwidthEstimate( + DataRate::KilobitsPerSec(600)); + loss_based_bandwidth_estimator.SetAcknowledgedBitrate(acked_rate); + + // Create some loss to create the loss limited scenario. + std::vector enough_feedback_1 = + CreatePacketResultsWith100pLossRate( + /*first_packet_timestamp=*/Timestamp::Zero()); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + enough_feedback_1, delay_based_estimate, BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); + + // Network recovers after loss. + DataRate upper_link_capacity = DataRate::KilobitsPerSec(10); + std::vector enough_feedback_2 = + CreatePacketResultsWithReceivedPackets( + /*first_packet_timestamp=*/Timestamp::Zero() + + kObservationDurationLowerBound); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + enough_feedback_2, delay_based_estimate, BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, upper_link_capacity); + + LossBasedBweV2::Result result_after_recovery = + loss_based_bandwidth_estimator.GetLossBasedResult(); + EXPECT_EQ(result_after_recovery.bandwidth_estimate, upper_link_capacity); +} + +// If BoundByUpperLinkCapacityWhenLossLimited is enabled, the estimate is not +// bounded by the upper link capacity when bandwidth is not loss limited. 
+TEST_P(LossBasedBweV2Test, + NotBoundEstimateByUpperLinkCapacityWhenNotLossLimited) { + ExplicitKeyValueConfig key_value_config( + "WebRTC-Bwe-LossBasedBweV2/" + "Enabled:true,CandidateFactors:1.2|1|0.5,AckedRateCandidate:true," + "ObservationWindowSize:2,ObservationDurationLowerBound:200ms," + "InstantUpperBoundBwBalance:10000kbps," + "DelayBasedCandidate:true,MaxIncreaseFactor:1000," + "BwRampupUpperBoundFactor:2.0,BoundByUpperLinkCapacityWhenLossLimited:" + "true/"); + LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); + DataRate delay_based_estimate = DataRate::KilobitsPerSec(5000); + DataRate acked_rate = DataRate::KilobitsPerSec(300); + loss_based_bandwidth_estimator.SetBandwidthEstimate( + DataRate::KilobitsPerSec(600)); + loss_based_bandwidth_estimator.SetAcknowledgedBitrate(acked_rate); + + // Create a normal network without loss + std::vector enough_feedback_1 = + CreatePacketResultsWithReceivedPackets( + /*first_packet_timestamp=*/Timestamp::Zero()); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + enough_feedback_1, delay_based_estimate, BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); + + DataRate upper_link_capacity = DataRate::KilobitsPerSec(10); + std::vector enough_feedback_2 = + CreatePacketResultsWithReceivedPackets( + /*first_packet_timestamp=*/Timestamp::Zero() + + kObservationDurationLowerBound); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + enough_feedback_2, delay_based_estimate, BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, upper_link_capacity); + + LossBasedBweV2::Result loss_based_result = + loss_based_bandwidth_estimator.GetLossBasedResult(); + EXPECT_GT(loss_based_result.bandwidth_estimate, upper_link_capacity); +} + +// If BoundByUpperLinkCapacityWhenLossLimited is disabled, the estimate is not +// bounded by the upper link capacity. +TEST_P(LossBasedBweV2Test, NotBoundEstimateByUpperLinkCapacity) { + ExplicitKeyValueConfig key_value_config( + "WebRTC-Bwe-LossBasedBweV2/" + "Enabled:true,CandidateFactors:1.2|1|0.5,AckedRateCandidate:true," + "ObservationWindowSize:2,ObservationDurationLowerBound:200ms," + "InstantUpperBoundBwBalance:10000kbps," + "DelayBasedCandidate:true,MaxIncreaseFactor:1000," + "BwRampupUpperBoundFactor:2.0,BoundByUpperLinkCapacityWhenLossLimited:" + "false/"); + LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); + DataRate delay_based_estimate = DataRate::KilobitsPerSec(5000); + DataRate acked_rate = DataRate::KilobitsPerSec(300); + loss_based_bandwidth_estimator.SetBandwidthEstimate( + DataRate::KilobitsPerSec(600)); + loss_based_bandwidth_estimator.SetAcknowledgedBitrate(acked_rate); + + // Create some loss to create the loss limited scenario. + std::vector enough_feedback_1 = + CreatePacketResultsWith100pLossRate( + /*first_packet_timestamp=*/Timestamp::Zero()); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + enough_feedback_1, delay_based_estimate, BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); + + // Network recovers after loss. 
+ DataRate upper_link_capacity = DataRate::KilobitsPerSec(10); + std::vector enough_feedback_2 = + CreatePacketResultsWithReceivedPackets( + /*first_packet_timestamp=*/Timestamp::Zero() + + kObservationDurationLowerBound); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + enough_feedback_2, delay_based_estimate, BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, upper_link_capacity); + + LossBasedBweV2::Result result_after_recovery = + loss_based_bandwidth_estimator.GetLossBasedResult(); + EXPECT_GT(result_after_recovery.bandwidth_estimate, upper_link_capacity); +} + TEST_P(LossBasedBweV2Test, StricterBoundUsingHighLossRateThresholdAt10pLossRate) { ExplicitKeyValueConfig key_value_config( @@ -1042,7 +1336,8 @@ TEST_P(LossBasedBweV2Test, /*first_packet_timestamp=*/Timestamp::Zero()); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_10p_loss_1, delay_based_estimate, - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt); + BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); std::vector enough_feedback_10p_loss_2 = CreatePacketResultsWith10pLossRate( @@ -1050,7 +1345,8 @@ TEST_P(LossBasedBweV2Test, kObservationDurationLowerBound); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_10p_loss_2, delay_based_estimate, - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt); + BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); // At 10% loss rate and high loss rate threshold to be 10%, cap the estimate // to be 500 * 1000-0.1 = 400kbps. @@ -1082,7 +1378,8 @@ TEST_P(LossBasedBweV2Test, /*first_packet_timestamp=*/Timestamp::Zero()); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_50p_loss_1, delay_based_estimate, - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt); + BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); std::vector enough_feedback_50p_loss_2 = CreatePacketResultsWith50pLossRate( @@ -1090,7 +1387,8 @@ TEST_P(LossBasedBweV2Test, kObservationDurationLowerBound); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_50p_loss_2, delay_based_estimate, - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt); + BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); // At 50% loss rate and high loss rate threshold to be 30%, cap the estimate // to be the min bitrate. @@ -1122,7 +1420,8 @@ TEST_P(LossBasedBweV2Test, /*first_packet_timestamp=*/Timestamp::Zero()); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_100p_loss_1, delay_based_estimate, - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt); + BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); std::vector enough_feedback_100p_loss_2 = CreatePacketResultsWith100pLossRate( @@ -1130,7 +1429,8 @@ TEST_P(LossBasedBweV2Test, kObservationDurationLowerBound); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_100p_loss_2, delay_based_estimate, - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt); + BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); // At 100% loss rate and high loss rate threshold to be 30%, cap the estimate // to be the min bitrate. 
@@ -1161,7 +1461,8 @@ TEST_P(LossBasedBweV2Test, EstimateRecoversAfterHighLoss) { /*first_packet_timestamp=*/Timestamp::Zero()); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_100p_loss_1, delay_based_estimate, - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt); + BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); // Make sure that the estimate is set to min bitrate because of 100% loss // rate. @@ -1176,7 +1477,8 @@ TEST_P(LossBasedBweV2Test, EstimateRecoversAfterHighLoss) { kObservationDurationLowerBound); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_0p_loss_1, delay_based_estimate, - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt); + BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); std::vector enough_feedback_0p_loss_2 = CreatePacketResultsWithReceivedPackets( @@ -1184,7 +1486,8 @@ TEST_P(LossBasedBweV2Test, EstimateRecoversAfterHighLoss) { kObservationDurationLowerBound * 2); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_0p_loss_2, delay_based_estimate, - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt); + BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); // The estimate increases as network recovers. EXPECT_GT( @@ -1207,7 +1510,8 @@ TEST_P(LossBasedBweV2Test, EstimateIsNotHigherThanMaxBitrate) { /*first_packet_timestamp=*/Timestamp::Zero()); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback, /*delay_based_estimate=*/DataRate::PlusInfinity(), - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt); + BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); EXPECT_LE( loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, diff --git a/third_party/libwebrtc/modules/congestion_controller/goog_cc/probe_controller.cc b/third_party/libwebrtc/modules/congestion_controller/goog_cc/probe_controller.cc index ac5a9154a424..1af943c4cb0c 100644 --- a/third_party/libwebrtc/modules/congestion_controller/goog_cc/probe_controller.cc +++ b/third_party/libwebrtc/modules/congestion_controller/goog_cc/probe_controller.cc @@ -106,22 +106,33 @@ ProbeControllerConfig::ProbeControllerConfig( min_probe_duration("min_probe_duration", TimeDelta::Millis(15)), limit_probe_target_rate_to_loss_bwe("limit_probe_target_rate_to_loss_bwe", false), + loss_limited_probe_scale("loss_limited_scale", 1.5), skip_if_estimate_larger_than_fraction_of_max( "skip_if_est_larger_than_fraction_of_max", - 0.0) { - ParseFieldTrial( - {&first_exponential_probe_scale, &second_exponential_probe_scale, - &further_exponential_probe_scale, &further_probe_threshold, - &alr_probing_interval, &alr_probe_scale, - &probe_on_max_allocated_bitrate_change, &first_allocation_probe_scale, - &second_allocation_probe_scale, &allocation_allow_further_probing, - &min_probe_duration, &network_state_estimate_probing_interval, - &probe_if_estimate_lower_than_network_state_estimate_ratio, - &estimate_lower_than_network_state_estimate_probing_interval, - &network_state_probe_scale, &network_state_probe_duration, - &min_probe_packets_sent, &limit_probe_target_rate_to_loss_bwe, - &skip_if_estimate_larger_than_fraction_of_max}, - key_value_config->Lookup("WebRTC-Bwe-ProbingConfiguration")); + 0.0), + not_probe_if_delay_increased("not_probe_if_delay_increased", 
false) { + ParseFieldTrial({&first_exponential_probe_scale, + &second_exponential_probe_scale, + &further_exponential_probe_scale, + &further_probe_threshold, + &alr_probing_interval, + &alr_probe_scale, + &probe_on_max_allocated_bitrate_change, + &first_allocation_probe_scale, + &second_allocation_probe_scale, + &allocation_allow_further_probing, + &min_probe_duration, + &network_state_estimate_probing_interval, + &probe_if_estimate_lower_than_network_state_estimate_ratio, + &estimate_lower_than_network_state_estimate_probing_interval, + &network_state_probe_scale, + &network_state_probe_duration, + &min_probe_packets_sent, + &limit_probe_target_rate_to_loss_bwe, + &loss_limited_probe_scale, + &skip_if_estimate_larger_than_fraction_of_max, + ¬_probe_if_delay_increased}, + key_value_config->Lookup("WebRTC-Bwe-ProbingConfiguration")); // Specialized keys overriding subsets of WebRTC-Bwe-ProbingConfiguration ParseFieldTrial( @@ -129,8 +140,9 @@ ProbeControllerConfig::ProbeControllerConfig( key_value_config->Lookup("WebRTC-Bwe-InitialProbing")); ParseFieldTrial({&further_exponential_probe_scale, &further_probe_threshold}, key_value_config->Lookup("WebRTC-Bwe-ExponentialProbing")); - ParseFieldTrial({&alr_probing_interval, &alr_probe_scale}, - key_value_config->Lookup("WebRTC-Bwe-AlrProbing")); + ParseFieldTrial( + {&alr_probing_interval, &alr_probe_scale, &loss_limited_probe_scale}, + key_value_config->Lookup("WebRTC-Bwe-AlrProbing")); ParseFieldTrial( {&first_allocation_probe_scale, &second_allocation_probe_scale, &allocation_allow_further_probing, &allocation_probe_max}, @@ -188,17 +200,6 @@ std::vector ProbeController::SetBitrates( // estimate then initiate probing. if (!estimated_bitrate_.IsZero() && old_max_bitrate < max_bitrate_ && estimated_bitrate_ < max_bitrate_) { - // The assumption is that if we jump more than 20% in the bandwidth - // estimate or if the bandwidth estimate is within 90% of the new - // max bitrate then the probing attempt was successful. 
- mid_call_probing_succcess_threshold_ = - std::min(estimated_bitrate_ * 1.2, max_bitrate_ * 0.9); - mid_call_probing_waiting_for_result_ = true; - mid_call_probing_bitrate_ = max_bitrate_; - - RTC_HISTOGRAM_COUNTS_10000("WebRTC.BWE.MidCallProbing.Initiated", - max_bitrate_.kbps()); - return InitiateProbing(at_time, {max_bitrate_}, false); } break; @@ -277,42 +278,36 @@ std::vector ProbeController::InitiateExponentialProbing( std::vector ProbeController::SetEstimatedBitrate( DataRate bitrate, - bool bwe_limited_due_to_packet_loss, + BandwidthLimitedCause bandwidth_limited_cause, Timestamp at_time) { - if (bwe_limited_due_to_packet_loss != bwe_limited_due_to_packet_loss_ && - config_.limit_probe_target_rate_to_loss_bwe) { - state_ = State::kProbingComplete; - } - bwe_limited_due_to_packet_loss_ = bwe_limited_due_to_packet_loss; + bandwidth_limited_cause_ = bandwidth_limited_cause; if (bitrate < kBitrateDropThreshold * estimated_bitrate_) { time_of_last_large_drop_ = at_time; bitrate_before_last_large_drop_ = estimated_bitrate_; } estimated_bitrate_ = bitrate; - if (mid_call_probing_waiting_for_result_ && - bitrate >= mid_call_probing_succcess_threshold_) { - RTC_HISTOGRAM_COUNTS_10000("WebRTC.BWE.MidCallProbing.Success", - mid_call_probing_bitrate_.kbps()); - RTC_HISTOGRAM_COUNTS_10000("WebRTC.BWE.MidCallProbing.ProbedKbps", - bitrate.kbps()); - mid_call_probing_waiting_for_result_ = false; - } - std::vector pending_probes; if (state_ == State::kWaitingForProbingResult) { // Continue probing if probing results indicate channel has greater // capacity. + DataRate network_state_estimate_probe_further_limit = + config_.network_state_estimate_probing_interval->IsFinite() && + network_estimate_ + ? network_estimate_->link_capacity_upper * + config_.further_probe_threshold + : DataRate::PlusInfinity(); RTC_LOG(LS_INFO) << "Measured bitrate: " << bitrate << " Minimum to probe further: " - << min_bitrate_to_probe_further_; + << min_bitrate_to_probe_further_ << " upper limit: " + << network_state_estimate_probe_further_limit; - if (bitrate > min_bitrate_to_probe_further_) { - pending_probes = InitiateProbing( + if (bitrate > min_bitrate_to_probe_further_ && + bitrate <= network_state_estimate_probe_further_limit) { + return InitiateProbing( at_time, {config_.further_exponential_probe_scale * bitrate}, true); } } - - return pending_probes; + return {}; } void ProbeController::EnablePeriodicAlrProbing(bool enable) { @@ -374,7 +369,7 @@ void ProbeController::SetNetworkStateEstimate( void ProbeController::Reset(Timestamp at_time) { network_available_ = true; - bwe_limited_due_to_packet_loss_ = false; + bandwidth_limited_cause_ = BandwidthLimitedCause::kDelayBasedLimited; state_ = State::kInit; min_bitrate_to_probe_further_ = DataRate::PlusInfinity(); time_last_probing_initiated_ = Timestamp::Zero(); @@ -385,7 +380,6 @@ void ProbeController::Reset(Timestamp at_time) { Timestamp now = at_time; last_bwe_drop_probing_time_ = now; alr_end_time_.reset(); - mid_call_probing_waiting_for_result_ = false; time_of_last_large_drop_ = now; bitrate_before_last_large_drop_ = DataRate::Zero(); max_total_allocated_bitrate_ = DataRate::Zero(); @@ -408,7 +402,7 @@ bool ProbeController::TimeForNetworkStateProbe(Timestamp at_time) const { } bool probe_due_to_low_estimate = - !bwe_limited_due_to_packet_loss_ && + bandwidth_limited_cause_ == BandwidthLimitedCause::kDelayBasedLimited && estimated_bitrate_ < config_.probe_if_estimate_lower_than_network_state_estimate_ratio * network_estimate_->link_capacity_upper; @@ -436,8 
+430,6 @@ bool ProbeController::TimeForNetworkStateProbe(Timestamp at_time) const { std::vector ProbeController::Process(Timestamp at_time) { if (at_time - time_last_probing_initiated_ > kMaxWaitingTimeForProbingResult) { - mid_call_probing_waiting_for_result_ = false; - if (state_ == State::kWaitingForProbingResult) { RTC_LOG(LS_INFO) << "kWaitingForProbingResult: timeout"; state_ = State::kProbingComplete; @@ -487,10 +479,29 @@ std::vector ProbeController::InitiateProbing( } DataRate estimate_capped_bitrate = DataRate::PlusInfinity(); - if (bwe_limited_due_to_packet_loss_ && - config_.limit_probe_target_rate_to_loss_bwe) { - estimate_capped_bitrate = std::min(estimated_bitrate_, max_probe_bitrate); + if (config_.limit_probe_target_rate_to_loss_bwe) { + switch (bandwidth_limited_cause_) { + case BandwidthLimitedCause::kLossLimitedBweDecreasing: + // If bandwidth estimate is decreasing because of packet loss, do not + // send probes. + return {}; + case BandwidthLimitedCause::kLossLimitedBweIncreasing: + estimate_capped_bitrate = + std::min(max_probe_bitrate, + estimated_bitrate_ * config_.loss_limited_probe_scale); + break; + case BandwidthLimitedCause::kDelayBasedLimited: + break; + default: + break; + } } + if (config_.not_probe_if_delay_increased && + bandwidth_limited_cause_ == + BandwidthLimitedCause::kDelayBasedLimitedDelayIncreased) { + return {}; + } + if (config_.network_state_estimate_probing_interval->IsFinite() && network_estimate_ && network_estimate_->link_capacity_upper.IsFinite()) { if (network_estimate_->link_capacity_upper.IsZero()) { @@ -532,6 +543,8 @@ std::vector ProbeController::InitiateProbing( time_last_probing_initiated_ = now; if (probe_further) { state_ = State::kWaitingForProbingResult; + // Dont expect probe results to be larger than a fraction of the actual + // probe rate. min_bitrate_to_probe_further_ = std::min(estimate_capped_bitrate, (*(bitrates_to_probe.end() - 1))) * config_.further_probe_threshold; diff --git a/third_party/libwebrtc/modules/congestion_controller/goog_cc/probe_controller.h b/third_party/libwebrtc/modules/congestion_controller/goog_cc/probe_controller.h index 6538b0eecdc0..aa8b526ab020 100644 --- a/third_party/libwebrtc/modules/congestion_controller/goog_cc/probe_controller.h +++ b/third_party/libwebrtc/modules/congestion_controller/goog_cc/probe_controller.h @@ -72,12 +72,24 @@ struct ProbeControllerConfig { FieldTrialParameter min_probe_packets_sent; // The minimum probing duration. FieldTrialParameter min_probe_duration; - // Max limit the target rate of a probe to current estimate if BWE is loss - // limited. + // Periodically probe when bandwidth estimate is loss limited. FieldTrialParameter limit_probe_target_rate_to_loss_bwe; + FieldTrialParameter loss_limited_probe_scale; // Dont send a probe if min(estimate, network state estimate) is larger than // this fraction of the set max bitrate. FieldTrialParameter skip_if_estimate_larger_than_fraction_of_max; + // Do not send probes if network is either overusing or underusing. + FieldTrialParameter not_probe_if_delay_increased; +}; + +// Reason that bandwidth estimate is limited. Bandwidth estimate can be limited +// by either delay based bwe, or loss based bwe when it increases/decreases the +// estimate. 
+enum class BandwidthLimitedCause { + kLossLimitedBweIncreasing = 0, + kLossLimitedBweDecreasing = 1, + kDelayBasedLimited = 2, + kDelayBasedLimitedDelayIncreased = 3, }; // This class controls initiation of probing to estimate initial channel @@ -109,7 +121,7 @@ class ProbeController { ABSL_MUST_USE_RESULT std::vector SetEstimatedBitrate( DataRate bitrate, - bool bwe_limited_due_to_packet_loss, + BandwidthLimitedCause bandwidth_limited_cause, Timestamp at_time); void EnablePeriodicAlrProbing(bool enable); @@ -129,6 +141,11 @@ class ProbeController { ABSL_MUST_USE_RESULT std::vector Process( Timestamp at_time); + // Gets the value of field trial not_probe_if_delay_increased. + bool DontProbeIfDelayIncreased() { + return config_.not_probe_if_delay_increased; + } + private: enum class State { // Initial state where no probing has been triggered yet. @@ -149,7 +166,8 @@ class ProbeController { bool TimeForNetworkStateProbe(Timestamp at_time) const; bool network_available_; - bool bwe_limited_due_to_packet_loss_; + BandwidthLimitedCause bandwidth_limited_cause_ = + BandwidthLimitedCause::kDelayBasedLimited; State state_; DataRate min_bitrate_to_probe_further_ = DataRate::PlusInfinity(); Timestamp time_last_probing_initiated_ = Timestamp::MinusInfinity(); @@ -166,10 +184,6 @@ class ProbeController { DataRate max_total_allocated_bitrate_ = DataRate::Zero(); const bool in_rapid_recovery_experiment_; - // For WebRTC.BWE.MidCallProbing.* metric. - bool mid_call_probing_waiting_for_result_; - DataRate mid_call_probing_bitrate_ = DataRate::Zero(); - DataRate mid_call_probing_succcess_threshold_ = DataRate::Zero(); RtcEventLog* event_log_; int32_t next_probe_cluster_id_ = 1; diff --git a/third_party/libwebrtc/modules/congestion_controller/goog_cc/probe_controller_unittest.cc b/third_party/libwebrtc/modules/congestion_controller/goog_cc/probe_controller_unittest.cc index 5b1d79bd4944..e6a5c8ceef1b 100644 --- a/third_party/libwebrtc/modules/congestion_controller/goog_cc/probe_controller_unittest.cc +++ b/third_party/libwebrtc/modules/congestion_controller/goog_cc/probe_controller_unittest.cc @@ -102,7 +102,7 @@ TEST(ProbeControllerTest, ProbeOnlyWhenNetworkIsUp) { {.at_time = fixture.CurrentTime(), .network_available = false}); probes = probe_controller->SetBitrates(kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); - EXPECT_EQ(probes.size(), 0u); + EXPECT_TRUE(probes.empty()); probes = probe_controller->OnNetworkAvailability( {.at_time = fixture.CurrentTime(), .network_available = true}); EXPECT_GE(probes.size(), 2u); @@ -138,7 +138,7 @@ TEST(ProbeControllerTest, InitiatesProbingOnMaxBitrateIncrease) { // Long enough to time out exponential probing. fixture.AdvanceTime(kExponentialProbingTimeout); probes = probe_controller->SetEstimatedBitrate( - kStartBitrate, /*bwe_limited_due_to_packet_loss=*/false, + kStartBitrate, BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); probes = probe_controller->Process(fixture.CurrentTime()); probes = probe_controller->SetBitrates( @@ -156,18 +156,19 @@ TEST(ProbeControllerTest, ProbesOnMaxAllocatedBitrateIncreaseOnlyWhenInAlr) { kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); probes = probe_controller->SetEstimatedBitrate( kMaxBitrate - DataRate::BitsPerSec(1), - /*bwe_limited_due_to_packet_loss=*/false, fixture.CurrentTime()); + BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); // Wait long enough to time out exponential probing. 
fixture.AdvanceTime(kExponentialProbingTimeout); probes = probe_controller->Process(fixture.CurrentTime()); - EXPECT_EQ(probes.size(), 0u); + EXPECT_TRUE(probes.empty()); // Probe when in alr. probe_controller->SetAlrStartTimeMs(fixture.CurrentTime().ms()); probes = probe_controller->OnMaxTotalAllocatedBitrate( kMaxBitrate + DataRate::BitsPerSec(1), fixture.CurrentTime()); EXPECT_EQ(probes.size(), 2u); + EXPECT_EQ(probes.at(0).target_data_rate, kMaxBitrate); // Do not probe when not in alr. probe_controller->SetAlrStartTimeMs(absl::nullopt); @@ -187,7 +188,7 @@ TEST(ProbeControllerTest, CanDisableProbingOnMaxTotalAllocatedBitrateIncrease) { kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); probes = probe_controller->SetEstimatedBitrate( kMaxBitrate - DataRate::BitsPerSec(1), - /*bwe_limited_due_to_packet_loss=*/false, fixture.CurrentTime()); + BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); fixture.AdvanceTime(kExponentialProbingTimeout); probes = probe_controller->Process(fixture.CurrentTime()); ASSERT_TRUE(probes.empty()); @@ -209,11 +210,11 @@ TEST(ProbeControllerTest, InitiatesProbingOnMaxBitrateIncreaseAtMaxBitrate) { // Long enough to time out exponential probing. fixture.AdvanceTime(kExponentialProbingTimeout); probes = probe_controller->SetEstimatedBitrate( - kStartBitrate, /*bwe_limited_due_to_packet_loss=*/false, + kStartBitrate, BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); probes = probe_controller->Process(fixture.CurrentTime()); probes = probe_controller->SetEstimatedBitrate( - kMaxBitrate, /*bwe_limited_due_to_packet_loss=*/false, + kMaxBitrate, BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate + DataRate::BitsPerSec(100), @@ -233,13 +234,13 @@ TEST(ProbeControllerTest, TestExponentialProbing) { // Repeated probe should only be sent when estimated bitrate climbs above // 0.7 * 6 * kStartBitrate = 1260. 
probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(1000), - /*bwe_limited_due_to_packet_loss=*/false, fixture.CurrentTime()); - EXPECT_EQ(probes.size(), 0u); + DataRate::BitsPerSec(1000), BandwidthLimitedCause::kDelayBasedLimited, + fixture.CurrentTime()); + EXPECT_TRUE(probes.empty()); probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(1800), - /*bwe_limited_due_to_packet_loss=*/false, fixture.CurrentTime()); + DataRate::BitsPerSec(1800), BandwidthLimitedCause::kDelayBasedLimited, + fixture.CurrentTime()); EXPECT_EQ(probes.size(), 1u); EXPECT_EQ(probes[0].target_data_rate.bps(), 2 * 1800); } @@ -255,9 +256,9 @@ TEST(ProbeControllerTest, TestExponentialProbingTimeout) { probes = probe_controller->Process(fixture.CurrentTime()); probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(1800), - /*bwe_limited_due_to_packet_loss=*/false, fixture.CurrentTime()); - EXPECT_EQ(probes.size(), 0u); + DataRate::BitsPerSec(1800), BandwidthLimitedCause::kDelayBasedLimited, + fixture.CurrentTime()); + EXPECT_TRUE(probes.empty()); } TEST(ProbeControllerTest, RequestProbeInAlr) { @@ -268,15 +269,15 @@ TEST(ProbeControllerTest, RequestProbeInAlr) { kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); EXPECT_GE(probes.size(), 2u); probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(500), - /*bwe_limited_due_to_packet_loss=*/false, fixture.CurrentTime()); + DataRate::BitsPerSec(500), BandwidthLimitedCause::kDelayBasedLimited, + fixture.CurrentTime()); probe_controller->SetAlrStartTimeMs(fixture.CurrentTime().ms()); fixture.AdvanceTime(kAlrProbeInterval + TimeDelta::Millis(1)); probes = probe_controller->Process(fixture.CurrentTime()); probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(250), - /*bwe_limited_due_to_packet_loss=*/false, fixture.CurrentTime()); + DataRate::BitsPerSec(250), BandwidthLimitedCause::kDelayBasedLimited, + fixture.CurrentTime()); probes = probe_controller->RequestProbe(fixture.CurrentTime()); EXPECT_EQ(probes.size(), 1u); @@ -291,15 +292,15 @@ TEST(ProbeControllerTest, RequestProbeWhenAlrEndedRecently) { kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); EXPECT_EQ(probes.size(), 2u); probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(500), - /*bwe_limited_due_to_packet_loss=*/false, fixture.CurrentTime()); + DataRate::BitsPerSec(500), BandwidthLimitedCause::kDelayBasedLimited, + fixture.CurrentTime()); probe_controller->SetAlrStartTimeMs(absl::nullopt); fixture.AdvanceTime(kAlrProbeInterval + TimeDelta::Millis(1)); probes = probe_controller->Process(fixture.CurrentTime()); probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(250), - /*bwe_limited_due_to_packet_loss=*/false, fixture.CurrentTime()); + DataRate::BitsPerSec(250), BandwidthLimitedCause::kDelayBasedLimited, + fixture.CurrentTime()); probe_controller->SetAlrEndedTimeMs(fixture.CurrentTime().ms()); fixture.AdvanceTime(kAlrEndedTimeout - TimeDelta::Millis(1)); probes = probe_controller->RequestProbe(fixture.CurrentTime()); @@ -316,19 +317,19 @@ TEST(ProbeControllerTest, RequestProbeWhenAlrNotEndedRecently) { kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); EXPECT_EQ(probes.size(), 2u); probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(500), - /*bwe_limited_due_to_packet_loss=*/false, fixture.CurrentTime()); + DataRate::BitsPerSec(500), BandwidthLimitedCause::kDelayBasedLimited, + fixture.CurrentTime()); 
probe_controller->SetAlrStartTimeMs(absl::nullopt); fixture.AdvanceTime(kAlrProbeInterval + TimeDelta::Millis(1)); probes = probe_controller->Process(fixture.CurrentTime()); probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(250), - /*bwe_limited_due_to_packet_loss=*/false, fixture.CurrentTime()); + DataRate::BitsPerSec(250), BandwidthLimitedCause::kDelayBasedLimited, + fixture.CurrentTime()); probe_controller->SetAlrEndedTimeMs(fixture.CurrentTime().ms()); fixture.AdvanceTime(kAlrEndedTimeout + TimeDelta::Millis(1)); probes = probe_controller->RequestProbe(fixture.CurrentTime()); - EXPECT_EQ(probes.size(), 0u); + EXPECT_TRUE(probes.empty()); } TEST(ProbeControllerTest, RequestProbeWhenBweDropNotRecent) { @@ -339,18 +340,18 @@ TEST(ProbeControllerTest, RequestProbeWhenBweDropNotRecent) { kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); EXPECT_EQ(probes.size(), 2u); probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(500), - /*bwe_limited_due_to_packet_loss=*/false, fixture.CurrentTime()); + DataRate::BitsPerSec(500), BandwidthLimitedCause::kDelayBasedLimited, + fixture.CurrentTime()); probe_controller->SetAlrStartTimeMs(fixture.CurrentTime().ms()); fixture.AdvanceTime(kAlrProbeInterval + TimeDelta::Millis(1)); probes = probe_controller->Process(fixture.CurrentTime()); probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(250), - /*bwe_limited_due_to_packet_loss=*/false, fixture.CurrentTime()); + DataRate::BitsPerSec(250), BandwidthLimitedCause::kDelayBasedLimited, + fixture.CurrentTime()); fixture.AdvanceTime(kBitrateDropTimeout + TimeDelta::Millis(1)); probes = probe_controller->RequestProbe(fixture.CurrentTime()); - EXPECT_EQ(probes.size(), 0u); + EXPECT_TRUE(probes.empty()); } TEST(ProbeControllerTest, PeriodicProbing) { @@ -362,8 +363,8 @@ TEST(ProbeControllerTest, PeriodicProbing) { kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); EXPECT_EQ(probes.size(), 2u); probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(500), - /*bwe_limited_due_to_packet_loss=*/false, fixture.CurrentTime()); + DataRate::BitsPerSec(500), BandwidthLimitedCause::kDelayBasedLimited, + fixture.CurrentTime()); Timestamp start_time = fixture.CurrentTime(); @@ -375,26 +376,26 @@ TEST(ProbeControllerTest, PeriodicProbing) { EXPECT_EQ(probes[0].target_data_rate.bps(), 1000); probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(500), - /*bwe_limited_due_to_packet_loss=*/false, fixture.CurrentTime()); + DataRate::BitsPerSec(500), BandwidthLimitedCause::kDelayBasedLimited, + fixture.CurrentTime()); // The following probe should be sent at 10s into ALR. 
probe_controller->SetAlrStartTimeMs(start_time.ms()); fixture.AdvanceTime(TimeDelta::Seconds(4)); probes = probe_controller->Process(fixture.CurrentTime()); probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(500), - /*bwe_limited_due_to_packet_loss=*/false, fixture.CurrentTime()); - EXPECT_EQ(probes.size(), 0u); + DataRate::BitsPerSec(500), BandwidthLimitedCause::kDelayBasedLimited, + fixture.CurrentTime()); + EXPECT_TRUE(probes.empty()); probe_controller->SetAlrStartTimeMs(start_time.ms()); fixture.AdvanceTime(TimeDelta::Seconds(1)); probes = probe_controller->Process(fixture.CurrentTime()); EXPECT_EQ(probes.size(), 1u); probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(500), - /*bwe_limited_due_to_packet_loss=*/false, fixture.CurrentTime()); - EXPECT_EQ(probes.size(), 0u); + DataRate::BitsPerSec(500), BandwidthLimitedCause::kDelayBasedLimited, + fixture.CurrentTime()); + EXPECT_TRUE(probes.empty()); } TEST(ProbeControllerTest, PeriodicProbingAfterReset) { @@ -413,7 +414,7 @@ TEST(ProbeControllerTest, PeriodicProbingAfterReset) { probes = probe_controller->Process(fixture.CurrentTime()); // Since bitrates are not yet set, no probe is sent event though we are in ALR // mode. - EXPECT_EQ(probes.size(), 0u); + EXPECT_TRUE(probes.empty()); probes = probe_controller->SetBitrates(kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); @@ -437,15 +438,15 @@ TEST(ProbeControllerTest, TestExponentialProbingOverflow) { fixture.CurrentTime()); // Verify that probe bitrate is capped at the specified max bitrate. probes = probe_controller->SetEstimatedBitrate( - 60 * kMbpsMultiplier, /*bwe_limited_due_to_packet_loss=*/false, + 60 * kMbpsMultiplier, BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); EXPECT_EQ(probes.size(), 1u); EXPECT_EQ(probes[0].target_data_rate, 100 * kMbpsMultiplier); // Verify that repeated probes aren't sent. probes = probe_controller->SetEstimatedBitrate( - 100 * kMbpsMultiplier, /*bwe_limited_due_to_packet_loss=*/false, + 100 * kMbpsMultiplier, BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); - EXPECT_EQ(probes.size(), 0u); + EXPECT_TRUE(probes.empty()); } TEST(ProbeControllerTest, TestAllocatedBitrateCap) { @@ -464,7 +465,7 @@ TEST(ProbeControllerTest, TestAllocatedBitrateCap) { DataRate estimated_bitrate = kMaxBitrate / 10; probes = probe_controller->SetEstimatedBitrate( - estimated_bitrate, /*bwe_limited_due_to_packet_loss=*/false, + estimated_bitrate, BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); // Set a max allocated bitrate below the current estimate. @@ -510,13 +511,13 @@ TEST(ProbeControllerTest, ConfigurableProbingFieldTrial) { // Repeated probe should only be sent when estimated bitrate climbs above // 0.8 * 5 * kStartBitrateBps = 1200. 
probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(1100), - /*bwe_limited_due_to_packet_loss=*/false, fixture.CurrentTime()); + DataRate::BitsPerSec(1100), BandwidthLimitedCause::kDelayBasedLimited, + fixture.CurrentTime()); EXPECT_EQ(probes.size(), 0u); probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(1250), - /*bwe_limited_due_to_packet_loss=*/false, fixture.CurrentTime()); + DataRate::BitsPerSec(1250), BandwidthLimitedCause::kDelayBasedLimited, + fixture.CurrentTime()); EXPECT_EQ(probes.size(), 1u); EXPECT_EQ(probes[0].target_data_rate.bps(), 3 * 1250); @@ -540,7 +541,7 @@ TEST(ProbeControllerTest, LimitAlrProbeWhenLossBasedBweLimited) { auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(500), /*bwe_limited_due_to_packet_loss=*/false, + DataRate::BitsPerSec(500), BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); // Expect the controller to send a new probe after 5s has passed. probe_controller->SetAlrStartTimeMs(fixture.CurrentTime().ms()); @@ -549,20 +550,20 @@ TEST(ProbeControllerTest, LimitAlrProbeWhenLossBasedBweLimited) { ASSERT_EQ(probes.size(), 1u); probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(500), /*bwe_limited_due_to_packet_loss*/ true, - fixture.CurrentTime()); + DataRate::BitsPerSec(500), + BandwidthLimitedCause::kLossLimitedBweIncreasing, fixture.CurrentTime()); fixture.AdvanceTime(TimeDelta::Seconds(6)); probes = probe_controller->Process(fixture.CurrentTime()); ASSERT_EQ(probes.size(), 1u); - EXPECT_EQ(probes[0].target_data_rate, DataRate::BitsPerSec(500)); + EXPECT_EQ(probes[0].target_data_rate, 1.5 * DataRate::BitsPerSec(500)); probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(500), /*bwe_limited_due_to_packet_loss=*/false, - fixture.CurrentTime()); + 1.5 * DataRate::BitsPerSec(500), + BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); fixture.AdvanceTime(TimeDelta::Seconds(6)); probes = probe_controller->Process(fixture.CurrentTime()); - ASSERT_TRUE(!probes.empty()); - EXPECT_GT(probes[0].target_data_rate, DataRate::BitsPerSec(500)); + ASSERT_FALSE(probes.empty()); + EXPECT_GT(probes[0].target_data_rate, 1.5 * 1.5 * DataRate::BitsPerSec(500)); } TEST(ProbeControllerTest, PeriodicProbeAtUpperNetworkStateEstimate) { @@ -574,7 +575,7 @@ TEST(ProbeControllerTest, PeriodicProbeAtUpperNetworkStateEstimate) { auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(5000), /*bwe_limited_due_to_packet_loss=*/false, + DataRate::BitsPerSec(5000), BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); // Expect the controller to send a new probe after 5s has passed. NetworkStateEstimate state_estimate; @@ -602,32 +603,24 @@ TEST(ProbeControllerTest, auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(500), /*bwe_limited_due_to_packet_loss=*/false, + DataRate::BitsPerSec(500), BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); // Expect the controller to send a new probe after 5s has passed. 
NetworkStateEstimate state_estimate; - state_estimate.link_capacity_upper = DataRate::KilobitsPerSec(600); + state_estimate.link_capacity_upper = DataRate::BitsPerSec(700); probe_controller->SetNetworkStateEstimate(state_estimate); fixture.AdvanceTime(TimeDelta::Seconds(5)); probes = probe_controller->Process(fixture.CurrentTime()); ASSERT_EQ(probes.size(), 1u); probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(500), /*bwe_limited_due_to_packet_loss=*/true, - fixture.CurrentTime()); + DataRate::BitsPerSec(500), + BandwidthLimitedCause::kLossLimitedBweIncreasing, fixture.CurrentTime()); // Expect the controller to send a new probe after 5s has passed. fixture.AdvanceTime(TimeDelta::Seconds(5)); probes = probe_controller->Process(fixture.CurrentTime()); - ASSERT_TRUE(!probes.empty()); - EXPECT_EQ(probes[0].target_data_rate, DataRate::BitsPerSec(500)); - - probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(500), /*bwe_limited_due_to_packet_loss=*/false, - fixture.CurrentTime()); - fixture.AdvanceTime(TimeDelta::Seconds(5)); - probes = probe_controller->Process(fixture.CurrentTime()); - ASSERT_TRUE(!probes.empty()); - EXPECT_GT(probes[0].target_data_rate, DataRate::BitsPerSec(500)); + ASSERT_FALSE(probes.empty()); + EXPECT_EQ(probes[0].target_data_rate, DataRate::BitsPerSec(700)); } TEST(ProbeControllerTest, AlrProbesLimitedByNetworkStateEstimate) { @@ -639,7 +632,7 @@ TEST(ProbeControllerTest, AlrProbesLimitedByNetworkStateEstimate) { auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); probes = probe_controller->SetEstimatedBitrate( - DataRate::KilobitsPerSec(6), /*bwe_limited_due_to_packet_loss=*/false, + DataRate::KilobitsPerSec(6), BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); probe_controller->SetAlrStartTimeMs(fixture.CurrentTime().ms()); @@ -667,7 +660,7 @@ TEST(ProbeControllerTest, CanSetLongerProbeDurationAfterNetworkStateEstimate) { auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); probes = probe_controller->SetEstimatedBitrate( - DataRate::KilobitsPerSec(5), /*bwe_limited_due_to_packet_loss=*/false, + DataRate::KilobitsPerSec(5), BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); ASSERT_FALSE(probes.empty()); EXPECT_LT(probes[0].target_duration, TimeDelta::Millis(100)); @@ -681,6 +674,147 @@ TEST(ProbeControllerTest, CanSetLongerProbeDurationAfterNetworkStateEstimate) { EXPECT_EQ(probes[0].target_duration, TimeDelta::Millis(100)); } +TEST(ProbeControllerTest, ProbeInAlrIfLossBasedIncreasing) { + ProbeControllerFixture fixture( + "WebRTC-Bwe-ProbingConfiguration/" + "limit_probe_target_rate_to_loss_bwe:true/"); + std::unique_ptr probe_controller = + fixture.CreateController(); + auto probes = probe_controller->SetBitrates( + kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); + probe_controller->EnablePeriodicAlrProbing(true); + probes = probe_controller->SetEstimatedBitrate( + kStartBitrate, BandwidthLimitedCause::kLossLimitedBweIncreasing, + fixture.CurrentTime()); + + // Wait long enough to time out exponential probing. + fixture.AdvanceTime(kExponentialProbingTimeout); + probes = probe_controller->Process(fixture.CurrentTime()); + ASSERT_TRUE(probes.empty()); + + // Probe when in alr. 
+ probe_controller->SetAlrStartTimeMs(fixture.CurrentTime().ms()); + fixture.AdvanceTime(kAlrProbeInterval + TimeDelta::Millis(1)); + probes = probe_controller->Process(fixture.CurrentTime()); + ASSERT_EQ(probes.size(), 1u); + EXPECT_EQ(probes.at(0).target_data_rate, 1.5 * kStartBitrate); +} + +TEST(ProbeControllerTest, ProbeFurtherInAlrIfLossBasedIncreasing) { + ProbeControllerFixture fixture( + "WebRTC-Bwe-ProbingConfiguration/" + "limit_probe_target_rate_to_loss_bwe:true/"); + std::unique_ptr probe_controller = + fixture.CreateController(); + auto probes = probe_controller->SetBitrates( + kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); + probe_controller->EnablePeriodicAlrProbing(true); + probes = probe_controller->SetEstimatedBitrate( + kStartBitrate, BandwidthLimitedCause::kLossLimitedBweIncreasing, + fixture.CurrentTime()); + + // Wait long enough to time out exponential probing. + fixture.AdvanceTime(kExponentialProbingTimeout); + probes = probe_controller->Process(fixture.CurrentTime()); + ASSERT_TRUE(probes.empty()); + + // Probe when in alr. + probe_controller->SetAlrStartTimeMs(fixture.CurrentTime().ms()); + fixture.AdvanceTime(kAlrProbeInterval + TimeDelta::Millis(1)); + probes = probe_controller->Process(fixture.CurrentTime()); + ASSERT_EQ(probes.size(), 1u); + ASSERT_EQ(probes.at(0).target_data_rate, 1.5 * kStartBitrate); + + probes = probe_controller->SetEstimatedBitrate( + 1.5 * kStartBitrate, BandwidthLimitedCause::kLossLimitedBweIncreasing, + fixture.CurrentTime()); + ASSERT_EQ(probes.size(), 1u); + EXPECT_EQ(probes[0].target_data_rate, 1.5 * 1.5 * kStartBitrate); +} + +TEST(ProbeControllerTest, NotProbeWhenInAlrIfLossBasedDecreases) { + ProbeControllerFixture fixture( + "WebRTC-Bwe-ProbingConfiguration/" + "network_state_interval:5s,limit_probe_target_rate_to_loss_bwe:true/"); + std::unique_ptr probe_controller = + fixture.CreateController(); + auto probes = probe_controller->SetBitrates( + kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); + probe_controller->EnablePeriodicAlrProbing(true); + probes = probe_controller->SetEstimatedBitrate( + kStartBitrate, BandwidthLimitedCause::kLossLimitedBweDecreasing, + fixture.CurrentTime()); + + // Wait long enough to time out exponential probing. + fixture.AdvanceTime(kExponentialProbingTimeout); + probes = probe_controller->Process(fixture.CurrentTime()); + ASSERT_TRUE(probes.empty()); + + // Not probe in alr when loss based estimate decreases. + probe_controller->SetAlrStartTimeMs(fixture.CurrentTime().ms()); + fixture.AdvanceTime(kAlrProbeInterval + TimeDelta::Millis(1)); + probes = probe_controller->Process(fixture.CurrentTime()); + EXPECT_TRUE(probes.empty()); +} + +TEST(ProbeControllerTest, NotProbeIfLossBasedIncreasingOutsideAlr) { + ProbeControllerFixture fixture( + "WebRTC-Bwe-ProbingConfiguration/" + "limit_probe_target_rate_to_loss_bwe:true/"); + std::unique_ptr probe_controller = + fixture.CreateController(); + auto probes = probe_controller->SetBitrates( + kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); + probe_controller->EnablePeriodicAlrProbing(true); + probes = probe_controller->SetEstimatedBitrate( + kStartBitrate, BandwidthLimitedCause::kLossLimitedBweIncreasing, + fixture.CurrentTime()); + + // Wait long enough to time out exponential probing. 
+ fixture.AdvanceTime(kExponentialProbingTimeout); + probes = probe_controller->Process(fixture.CurrentTime()); + ASSERT_TRUE(probes.empty()); + + probe_controller->SetAlrStartTimeMs(absl::nullopt); + fixture.AdvanceTime(kAlrProbeInterval + TimeDelta::Millis(1)); + probes = probe_controller->Process(fixture.CurrentTime()); + EXPECT_TRUE(probes.empty()); +} + +TEST(ProbeControllerTest, ProbeFurtherWhenLossBasedIsSameAsDelayBasedEstimate) { + ProbeControllerFixture fixture( + "WebRTC-Bwe-ProbingConfiguration/" + "network_state_interval:5s,limit_probe_target_rate_to_loss_bwe:true/"); + std::unique_ptr probe_controller = + fixture.CreateController(); + + auto probes = probe_controller->SetBitrates( + kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); + ASSERT_FALSE(probes.empty()); + + // Need to wait at least one second before process can trigger a new probe. + fixture.AdvanceTime(TimeDelta::Millis(1100)); + probes = probe_controller->Process(fixture.CurrentTime()); + ASSERT_TRUE(probes.empty()); + + NetworkStateEstimate state_estimate; + state_estimate.link_capacity_upper = 5 * kStartBitrate; + probe_controller->SetNetworkStateEstimate(state_estimate); + fixture.AdvanceTime(TimeDelta::Seconds(5)); + probes = probe_controller->Process(fixture.CurrentTime()); + ASSERT_FALSE(probes.empty()); + + DataRate probe_target_rate = probes[0].target_data_rate; + EXPECT_LT(probe_target_rate, state_estimate.link_capacity_upper); + // Expect that more probes are sent if BWE is the same as delay based + // estimate. + probes = probe_controller->SetEstimatedBitrate( + probe_target_rate, BandwidthLimitedCause::kDelayBasedLimited, + fixture.CurrentTime()); + ASSERT_FALSE(probes.empty()); + EXPECT_EQ(probes[0].target_data_rate, 2 * probe_target_rate); +} + TEST(ProbeControllerTest, ProbeIfEstimateLowerThanNetworkStateEstimate) { // Periodic probe every 1 second if estimate is lower than 50% of the // NetworkStateEstimate. @@ -694,7 +828,7 @@ TEST(ProbeControllerTest, ProbeIfEstimateLowerThanNetworkStateEstimate) { auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); probes = probe_controller->SetEstimatedBitrate( - kStartBitrate, /*bwe_limited_due_to_packet_loss=*/false, + kStartBitrate, BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); // Need to wait at least one second before process can trigger a new probe. fixture.AdvanceTime(TimeDelta::Millis(1100)); @@ -720,8 +854,8 @@ TEST(ProbeControllerTest, ProbeIfEstimateLowerThanNetworkStateEstimate) { // Stop probing if estimate increase. We might probe further here though. probes = probe_controller->SetEstimatedBitrate( - 2 * kStartBitrate, - /*bwe_limited_due_to_packet_loss=*/false, fixture.CurrentTime()); + 2 * kStartBitrate, BandwidthLimitedCause::kDelayBasedLimited, + fixture.CurrentTime()); // No more periodic probes. fixture.AdvanceTime(TimeDelta::Millis(1100)); probes = probe_controller->Process(fixture.CurrentTime()); @@ -753,8 +887,8 @@ TEST(ProbeControllerTest, DontProbeFurtherWhenLossLimited) { EXPECT_LT(probes[0].target_data_rate, state_estimate.link_capacity_upper); // Expect that no more probes are sent immediately if BWE is loss limited. 
probes = probe_controller->SetEstimatedBitrate( - probes[0].target_data_rate, /*bwe_limited_due_to_packet_loss=*/true, - fixture.CurrentTime()); + probes[0].target_data_rate, + BandwidthLimitedCause::kLossLimitedBweDecreasing, fixture.CurrentTime()); EXPECT_TRUE(probes.empty()); } @@ -783,7 +917,7 @@ TEST(ProbeControllerTest, ProbeFurtherWhenDelayBasedLimited) { EXPECT_LT(probes[0].target_data_rate, state_estimate.link_capacity_upper); // Since the probe was successfull, expect to continue probing. probes = probe_controller->SetEstimatedBitrate( - probes[0].target_data_rate, /*bwe_limited_due_to_packet_loss=*/false, + probes[0].target_data_rate, BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); EXPECT_FALSE(probes.empty()); EXPECT_EQ(probes[0].target_data_rate, state_estimate.link_capacity_upper); @@ -796,18 +930,15 @@ TEST(ProbeControllerTest, "network_state_interval:5s,limit_probe_target_rate_to_loss_bwe:true/"); std::unique_ptr probe_controller = fixture.CreateController(); - auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); ASSERT_FALSE(probes.empty()); - NetworkStateEstimate state_estimate; state_estimate.link_capacity_upper = 1.2 * probes[0].target_data_rate / 2; probe_controller->SetNetworkStateEstimate(state_estimate); - // No immediate further probing since probe result is low. probes = probe_controller->SetEstimatedBitrate( - probes[0].target_data_rate / 2, /*bwe_limited_due_to_packet_loss=*/false, + probes[0].target_data_rate / 2, BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); ASSERT_TRUE(probes.empty()); @@ -820,9 +951,15 @@ TEST(ProbeControllerTest, state_estimate.link_capacity_upper = 3 * kStartBitrate; probe_controller->SetNetworkStateEstimate(state_estimate); probes = probe_controller->SetEstimatedBitrate( - probes[0].target_data_rate, /*bwe_limited_due_to_packet_loss=*/false, + probes[0].target_data_rate, BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); EXPECT_FALSE(probes.empty()); + + // But no more probes if estimate is close to the link capacity. 
+ probes = probe_controller->SetEstimatedBitrate( + state_estimate.link_capacity_upper * 0.9, + BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); + EXPECT_TRUE(probes.empty()); } TEST(ProbeControllerTest, SkipAlrProbeIfEstimateLargerThanMaxProbe) { @@ -837,7 +974,7 @@ TEST(ProbeControllerTest, SkipAlrProbeIfEstimateLargerThanMaxProbe) { ASSERT_FALSE(probes.empty()); probes = probe_controller->SetEstimatedBitrate( - kMaxBitrate, /*bwe_limited_due_to_packet_loss=*/false, + kMaxBitrate, BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); EXPECT_TRUE(probes.empty()); @@ -864,7 +1001,7 @@ TEST(ProbeControllerTest, kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); ASSERT_FALSE(probes.empty()); probes = probe_controller->SetEstimatedBitrate( - kMaxBitrate / 2, /*bwe_limited_due_to_packet_loss=*/false, + kMaxBitrate / 2, BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); fixture.AdvanceTime(TimeDelta::Seconds(10)); @@ -896,7 +1033,7 @@ TEST(ProbeControllerTest, SkipNetworkStateProbeIfEstimateLargerThanMaxProbe) { probe_controller->SetNetworkStateEstimate( {.link_capacity_upper = 2 * kMaxBitrate}); probes = probe_controller->SetEstimatedBitrate( - kMaxBitrate, /*bwe_limited_due_to_packet_loss=*/false, + kMaxBitrate, BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); EXPECT_TRUE(probes.empty()); @@ -918,7 +1055,7 @@ TEST(ProbeControllerTest, SendsProbeIfNetworkStateEstimateLowerThanMaxProbe) { probe_controller->SetNetworkStateEstimate( {.link_capacity_upper = 2 * kMaxBitrate}); probes = probe_controller->SetEstimatedBitrate( - kMaxBitrate, /*bwe_limited_due_to_packet_loss=*/false, + kMaxBitrate, BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); EXPECT_TRUE(probes.empty()); @@ -926,7 +1063,7 @@ TEST(ProbeControllerTest, SendsProbeIfNetworkStateEstimateLowerThanMaxProbe) { fixture.AdvanceTime(TimeDelta::Millis(2100)); probes = probe_controller->SetEstimatedBitrate( - kStartBitrate, /*bwe_limited_due_to_packet_loss=*/false, + kStartBitrate, BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); EXPECT_TRUE(probes.empty()); probe_controller->SetNetworkStateEstimate( @@ -938,14 +1075,13 @@ TEST(ProbeControllerTest, SendsProbeIfNetworkStateEstimateLowerThanMaxProbe) { TEST(ProbeControllerTest, DontSendProbeIfNetworkStateEstimateIsZero) { ProbeControllerFixture fixture( "WebRTC-Bwe-ProbingConfiguration/" - "network_state_interval:5s,limit_probe_" - "target_rate_to_loss_bwe:true/"); + "network_state_interval:5s,limit_probe_target_rate_to_loss_bwe:true/"); std::unique_ptr probe_controller = fixture.CreateController(); auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); probes = probe_controller->SetEstimatedBitrate( - kStartBitrate, /*bwe_limited_due_to_packet_loss=*/false, + kStartBitrate, BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); probe_controller->SetNetworkStateEstimate( {.link_capacity_upper = kStartBitrate}); @@ -962,5 +1098,34 @@ TEST(ProbeControllerTest, DontSendProbeIfNetworkStateEstimateIsZero) { probes = probe_controller->Process(fixture.CurrentTime()); EXPECT_TRUE(probes.empty()); } + +TEST(ProbeControllerTest, DontProbeIfDelayIncreased) { + ProbeControllerFixture fixture( + "WebRTC-Bwe-ProbingConfiguration/" + "network_state_interval:5s,not_probe_if_delay_increased:true/"); + std::unique_ptr probe_controller = + fixture.CreateController(); + + auto probes = probe_controller->SetBitrates( + kMinBitrate, 
kStartBitrate, kMaxBitrate, fixture.CurrentTime()); + ASSERT_FALSE(probes.empty()); + + // Need to wait at least one second before process can trigger a new probe. + fixture.AdvanceTime(TimeDelta::Millis(1100)); + probes = probe_controller->Process(fixture.CurrentTime()); + ASSERT_TRUE(probes.empty()); + + NetworkStateEstimate state_estimate; + state_estimate.link_capacity_upper = 3 * kStartBitrate; + probe_controller->SetNetworkStateEstimate(state_estimate); + probes = probe_controller->SetEstimatedBitrate( + kStartBitrate, BandwidthLimitedCause::kDelayBasedLimitedDelayIncreased, + fixture.CurrentTime()); + ASSERT_TRUE(probes.empty()); + + fixture.AdvanceTime(TimeDelta::Seconds(5)); + probes = probe_controller->Process(fixture.CurrentTime()); + EXPECT_TRUE(probes.empty()); +} } // namespace test } // namespace webrtc diff --git a/third_party/libwebrtc/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc b/third_party/libwebrtc/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc index 7e5b2e3788a7..1e4db1ffaf7b 100644 --- a/third_party/libwebrtc/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc +++ b/third_party/libwebrtc/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc @@ -369,7 +369,8 @@ void SendSideBandwidthEstimation::SetAcknowledgedRate( void SendSideBandwidthEstimation::UpdateLossBasedEstimator( const TransportPacketsFeedback& report, BandwidthUsage delay_detector_state, - absl::optional probe_bitrate) { + absl::optional probe_bitrate, + DataRate upper_link_capacity) { if (LossBasedBandwidthEstimatorV1Enabled()) { loss_based_bandwidth_estimator_v1_.UpdateLossStatistics( report.packet_feedbacks, report.feedback_time); @@ -377,7 +378,7 @@ void SendSideBandwidthEstimation::UpdateLossBasedEstimator( if (LossBasedBandwidthEstimatorV2Enabled()) { loss_based_bandwidth_estimator_v2_.UpdateBandwidthEstimate( report.packet_feedbacks, delay_based_limit_, delay_detector_state, - probe_bitrate); + probe_bitrate, upper_link_capacity); UpdateEstimate(report.feedback_time); } } diff --git a/third_party/libwebrtc/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h b/third_party/libwebrtc/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h index b033da1e87c5..77510236d396 100644 --- a/third_party/libwebrtc/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h +++ b/third_party/libwebrtc/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h @@ -120,7 +120,8 @@ class SendSideBandwidthEstimation { Timestamp at_time); void UpdateLossBasedEstimator(const TransportPacketsFeedback& report, BandwidthUsage delay_detector_state, - absl::optional probe_bitrate); + absl::optional probe_bitrate, + DataRate upper_link_capacity); private: friend class GoogCcStatePrinter; diff --git a/third_party/libwebrtc/modules/congestion_controller/include/receive_side_congestion_controller.h b/third_party/libwebrtc/modules/congestion_controller/include/receive_side_congestion_controller.h index 96ee8a6e3dcf..76963960161a 100644 --- a/third_party/libwebrtc/modules/congestion_controller/include/receive_side_congestion_controller.h +++ b/third_party/libwebrtc/modules/congestion_controller/include/receive_side_congestion_controller.h @@ -14,7 +14,6 @@ #include #include -#include "api/transport/field_trial_based_config.h" #include "api/transport/network_control.h" #include "api/units/data_rate.h" #include "api/units/time_delta.h" @@ -78,7 +77,6 @@ class 
ReceiveSideCongestionController : public CallStatsObserver { void PickEstimator() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); Clock& clock_; - const FieldTrialBasedConfig field_trial_config_; RembThrottler remb_throttler_; RemoteEstimatorProxy remote_estimator_proxy_; diff --git a/third_party/libwebrtc/modules/congestion_controller/receive_side_congestion_controller.cc b/third_party/libwebrtc/modules/congestion_controller/receive_side_congestion_controller.cc index 4f238835e499..e43b020f6e56 100644 --- a/third_party/libwebrtc/modules/congestion_controller/receive_side_congestion_controller.cc +++ b/third_party/libwebrtc/modules/congestion_controller/receive_side_congestion_controller.cc @@ -84,7 +84,6 @@ ReceiveSideCongestionController::ReceiveSideCongestionController( : clock_(*clock), remb_throttler_(std::move(remb_sender), clock), remote_estimator_proxy_(std::move(feedback_sender), - &field_trial_config_, network_state_estimator), rbe_(new RemoteBitrateEstimatorSingleStream(&remb_throttler_, clock)), using_absolute_send_time_(false), diff --git a/third_party/libwebrtc/modules/desktop_capture/BUILD.gn b/third_party/libwebrtc/modules/desktop_capture/BUILD.gn index 37e2355350a9..5db43e358993 100644 --- a/third_party/libwebrtc/modules/desktop_capture/BUILD.gn +++ b/third_party/libwebrtc/modules/desktop_capture/BUILD.gn @@ -7,9 +7,7 @@ # be found in the AUTHORS file in the root of the source tree. import("//build/config/linux/gtk/gtk.gni") -import("//build/config/linux/pkg_config.gni") import("//build/config/ui.gni") -import("//tools/generate_stubs/rules.gni") import("../../webrtc.gni") use_desktop_capture_differ_sse2 = target_cpu == "x86" || target_cpu == "x64" @@ -84,7 +82,7 @@ if (rtc_include_tests) { ] if ((is_linux || is_chromeos) && rtc_use_pipewire) { - configs += [ ":gio" ] + configs += [ "../portal:gio" ] } public_configs = [ ":x11_config" ] @@ -105,14 +103,7 @@ if (rtc_include_tests) { "linux/wayland/test/test_screencast_stream_provider.h", ] - configs += [ - ":gio", - ":pipewire", - ":gbm", - ":egl", - ":epoxy", - ":libdrm", - ] + configs += [ "../portal:pipewire_all" ] deps = [ ":desktop_capture", @@ -122,6 +113,7 @@ if (rtc_include_tests) { "../../rtc_base:logging", "../../rtc_base:random", "../../rtc_base:timeutils", + "../portal", # TODO(bugs.webrtc.org/9987): Remove this dep on rtc_base:rtc_base once # rtc_base:threading is fully defined. 
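The desktop_capture build changes in this and the following hunks drop the locally defined GIO/PipeWire pkg-config and stub rules and point every consumer at the shared "../portal" targets instead. For C++ code the visible effect is an include-path move, which later hunks in this patch apply to base_capturer_pipewire.cc and desktop_capture_metadata.h; sketched here only to illustrate the relocation:

    // Former location, removed from modules/desktop_capture/linux/wayland/:
    //   #include "modules/desktop_capture/linux/wayland/pipewire_utils.h"
    //   #include "modules/desktop_capture/linux/wayland/xdg_desktop_portal_utils.h"
    // New shared location under modules/portal (per the include changes below):
    #include "modules/portal/pipewire_utils.h"
    #include "modules/portal/xdg_desktop_portal_utils.h"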
@@ -135,12 +127,8 @@ if (rtc_include_tests) { "//rtc_base:rtc_event", ] - if (!rtc_link_pipewire) { - deps += [ ":pipewire_stubs" ] - } - data = [ "../../third_party/pipewire" ] - public_configs = [ ":pipewire_config" ] + public_configs = [ "../portal:pipewire_config" ] } } @@ -167,7 +155,7 @@ if (rtc_include_tests) { ] if ((is_linux || is_chromeos) && rtc_use_pipewire) { - configs += [ ":gio" ] + configs += [ "../portal:gio" ] } deps = [ @@ -277,7 +265,7 @@ if (rtc_include_tests) { ] if ((is_linux || is_chromeos) && rtc_use_pipewire) { - configs += [ ":gio" ] + configs += [ "../portal:gio" ] } deps = [ @@ -288,75 +276,12 @@ if (rtc_include_tests) { } } -if (!build_with_mozilla) { -if (is_linux || is_chromeos) { - if (rtc_use_pipewire) { - pkg_config("gio") { - packages = [ - "gio-2.0", - "gio-unix-2.0", - ] - } - - pkg_config("pipewire") { - packages = [ "libpipewire-0.3" ] - if (!rtc_link_pipewire) { - ignore_libs = true - } - } - - pkg_config("gbm") { - packages = [ "gbm" ] - } - pkg_config("egl") { - packages = [ "egl" ] - } - pkg_config("epoxy") { - packages = [ "epoxy" ] - ignore_libs = true - } - pkg_config("libdrm") { - packages = [ "libdrm" ] - } - - if (!rtc_link_pipewire) { - # When libpipewire is not directly linked, use stubs to allow for dlopening of - # the binary. - generate_stubs("pipewire_stubs") { - configs = [ - "../../:common_config", - ":pipewire", - ] - deps = [ "../../rtc_base" ] - extra_header = "linux/wayland/pipewire_stub_header.fragment" - logging_function = "RTC_LOG(LS_VERBOSE)" - logging_include = "rtc_base/logging.h" - output_name = "linux/wayland/pipewire_stubs" - path_from_source = "modules/desktop_capture/linux/wayland" - sigs = [ "linux/wayland/pipewire.sigs" ] - if (!build_with_chromium) { - macro_include = "rtc_base/system/no_cfi_icall.h" - macro_deps = [ "../../rtc_base/system:no_cfi_icall" ] - } - } - } - - config("pipewire_config") { - defines = [ "WEBRTC_USE_PIPEWIRE" ] - if (!rtc_link_pipewire) { - defines += [ "WEBRTC_DLOPEN_PIPEWIRE" ] - } - - # Chromecast build config overrides `WEBRTC_USE_PIPEWIRE` even when - # `rtc_use_pipewire` is not set, which causes pipewire_config to not be - # included in targets. 
More details in: webrtc:13898 - if (is_linux && !is_castos) { - defines += [ "WEBRTC_USE_GIO" ] - } - } +# TODO(bugs.webrtc.org/14187): remove when all users are gone +if ((is_linux || is_chromeos) && rtc_use_pipewire) { + config("pipewire_config") { + configs = [ "../portal:pipewire_config" ] } } -} # !build_with_mozilla rtc_library("desktop_capture") { visibility = [ "*" ] @@ -608,12 +533,9 @@ rtc_library("desktop_capture") { "linux/wayland/egl_dmabuf.h", "linux/wayland/mouse_cursor_monitor_pipewire.cc", "linux/wayland/mouse_cursor_monitor_pipewire.h", - "linux/wayland/pipewire_utils.cc", - "linux/wayland/pipewire_utils.h", "linux/wayland/portal_request_response.h", "linux/wayland/restore_token_manager.cc", "linux/wayland/restore_token_manager.h", - "linux/wayland/scoped_glib.cc", "linux/wayland/scoped_glib.h", "linux/wayland/screen_capture_portal_interface.cc", "linux/wayland/screen_capture_portal_interface.h", @@ -623,34 +545,18 @@ rtc_library("desktop_capture") { "linux/wayland/screencast_stream_utils.h", "linux/wayland/shared_screencast_stream.cc", "linux/wayland/shared_screencast_stream.h", - "linux/wayland/xdg_desktop_portal_utils.cc", "linux/wayland/xdg_desktop_portal_utils.h", "linux/wayland/xdg_session_details.h", ] - if (build_with_mozilla) { - deps += ["//third_party/pipewire", - "//third_party/drm", - "//third_party/gbm", - "//third_party/libepoxy"] - } else { # build_with_mozilla else !build_with_mozilla - configs += [ - ":gio", - ":pipewire", - ":gbm", - ":egl", - ":epoxy", - ":libdrm", + configs += [ "../portal:pipewire_all" ] + + public_configs += [ "../portal:pipewire_config" ] + + deps += [ + "../../rtc_base:sanitizer", + "../portal", ] - - if (!rtc_link_pipewire) { - deps += [ ":pipewire_stubs" ] - } - - public_configs += [ ":pipewire_config" ] - } # !build_with_mozilla - - deps += [ "../../rtc_base:sanitizer" ] } if (rtc_enable_win_wgc) { diff --git a/third_party/libwebrtc/modules/desktop_capture/desktop_capture_gn/moz.build b/third_party/libwebrtc/modules/desktop_capture/desktop_capture_gn/moz.build index 790be2a5472c..98561c261af8 100644 --- a/third_party/libwebrtc/modules/desktop_capture/desktop_capture_gn/moz.build +++ b/third_party/libwebrtc/modules/desktop_capture/desktop_capture_gn/moz.build @@ -265,14 +265,11 @@ if CONFIG["CPU_ARCH"] == "arm": UNIFIED_SOURCES += [ "/third_party/libwebrtc/modules/desktop_capture/linux/wayland/egl_dmabuf.cc", "/third_party/libwebrtc/modules/desktop_capture/linux/wayland/mouse_cursor_monitor_pipewire.cc", - "/third_party/libwebrtc/modules/desktop_capture/linux/wayland/pipewire_utils.cc", "/third_party/libwebrtc/modules/desktop_capture/linux/wayland/restore_token_manager.cc", - "/third_party/libwebrtc/modules/desktop_capture/linux/wayland/scoped_glib.cc", "/third_party/libwebrtc/modules/desktop_capture/linux/wayland/screen_capture_portal_interface.cc", "/third_party/libwebrtc/modules/desktop_capture/linux/wayland/screencast_portal.cc", "/third_party/libwebrtc/modules/desktop_capture/linux/wayland/screencast_stream_utils.cc", - "/third_party/libwebrtc/modules/desktop_capture/linux/wayland/shared_screencast_stream.cc", - "/third_party/libwebrtc/modules/desktop_capture/linux/wayland/xdg_desktop_portal_utils.cc" + "/third_party/libwebrtc/modules/desktop_capture/linux/wayland/shared_screencast_stream.cc" ] if CONFIG["CPU_ARCH"] == "ppc64": @@ -343,14 +340,11 @@ if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Linux": UNIFIED_SOURCES += [ "/third_party/libwebrtc/modules/desktop_capture/linux/wayland/egl_dmabuf.cc", 
"/third_party/libwebrtc/modules/desktop_capture/linux/wayland/mouse_cursor_monitor_pipewire.cc", - "/third_party/libwebrtc/modules/desktop_capture/linux/wayland/pipewire_utils.cc", "/third_party/libwebrtc/modules/desktop_capture/linux/wayland/restore_token_manager.cc", - "/third_party/libwebrtc/modules/desktop_capture/linux/wayland/scoped_glib.cc", "/third_party/libwebrtc/modules/desktop_capture/linux/wayland/screen_capture_portal_interface.cc", "/third_party/libwebrtc/modules/desktop_capture/linux/wayland/screencast_portal.cc", "/third_party/libwebrtc/modules/desktop_capture/linux/wayland/screencast_stream_utils.cc", - "/third_party/libwebrtc/modules/desktop_capture/linux/wayland/shared_screencast_stream.cc", - "/third_party/libwebrtc/modules/desktop_capture/linux/wayland/xdg_desktop_portal_utils.cc" + "/third_party/libwebrtc/modules/desktop_capture/linux/wayland/shared_screencast_stream.cc" ] if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux": @@ -380,14 +374,11 @@ if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux": UNIFIED_SOURCES += [ "/third_party/libwebrtc/modules/desktop_capture/linux/wayland/egl_dmabuf.cc", "/third_party/libwebrtc/modules/desktop_capture/linux/wayland/mouse_cursor_monitor_pipewire.cc", - "/third_party/libwebrtc/modules/desktop_capture/linux/wayland/pipewire_utils.cc", "/third_party/libwebrtc/modules/desktop_capture/linux/wayland/restore_token_manager.cc", - "/third_party/libwebrtc/modules/desktop_capture/linux/wayland/scoped_glib.cc", "/third_party/libwebrtc/modules/desktop_capture/linux/wayland/screen_capture_portal_interface.cc", "/third_party/libwebrtc/modules/desktop_capture/linux/wayland/screencast_portal.cc", "/third_party/libwebrtc/modules/desktop_capture/linux/wayland/screencast_stream_utils.cc", - "/third_party/libwebrtc/modules/desktop_capture/linux/wayland/shared_screencast_stream.cc", - "/third_party/libwebrtc/modules/desktop_capture/linux/wayland/xdg_desktop_portal_utils.cc" + "/third_party/libwebrtc/modules/desktop_capture/linux/wayland/shared_screencast_stream.cc" ] if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Linux": @@ -413,14 +404,11 @@ if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Linux": UNIFIED_SOURCES += [ "/third_party/libwebrtc/modules/desktop_capture/linux/wayland/egl_dmabuf.cc", "/third_party/libwebrtc/modules/desktop_capture/linux/wayland/mouse_cursor_monitor_pipewire.cc", - "/third_party/libwebrtc/modules/desktop_capture/linux/wayland/pipewire_utils.cc", "/third_party/libwebrtc/modules/desktop_capture/linux/wayland/restore_token_manager.cc", - "/third_party/libwebrtc/modules/desktop_capture/linux/wayland/scoped_glib.cc", "/third_party/libwebrtc/modules/desktop_capture/linux/wayland/screen_capture_portal_interface.cc", "/third_party/libwebrtc/modules/desktop_capture/linux/wayland/screencast_portal.cc", "/third_party/libwebrtc/modules/desktop_capture/linux/wayland/screencast_stream_utils.cc", - "/third_party/libwebrtc/modules/desktop_capture/linux/wayland/shared_screencast_stream.cc", - "/third_party/libwebrtc/modules/desktop_capture/linux/wayland/xdg_desktop_portal_utils.cc" + "/third_party/libwebrtc/modules/desktop_capture/linux/wayland/shared_screencast_stream.cc" ] if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux": diff --git a/third_party/libwebrtc/modules/desktop_capture/desktop_capture_metadata.h b/third_party/libwebrtc/modules/desktop_capture/desktop_capture_metadata.h index faca156e338b..49a20e729c42 100644 --- 
a/third_party/libwebrtc/modules/desktop_capture/desktop_capture_metadata.h +++ b/third_party/libwebrtc/modules/desktop_capture/desktop_capture_metadata.h @@ -12,7 +12,7 @@ #define MODULES_DESKTOP_CAPTURE_DESKTOP_CAPTURE_METADATA_H_ #if defined(WEBRTC_USE_GIO) -#include "modules/desktop_capture/linux/wayland/xdg_session_details.h" +#include "modules/portal/xdg_session_details.h" #endif // defined(WEBRTC_USE_GIO) namespace webrtc { diff --git a/third_party/libwebrtc/modules/desktop_capture/desktop_capture_options.h b/third_party/libwebrtc/modules/desktop_capture/desktop_capture_options.h index 0364517405ce..67dffee08a47 100644 --- a/third_party/libwebrtc/modules/desktop_capture/desktop_capture_options.h +++ b/third_party/libwebrtc/modules/desktop_capture/desktop_capture_options.h @@ -105,6 +105,17 @@ class RTC_EXPORT DesktopCaptureOptions { detect_updated_region_ = detect_updated_region; } + // Indicates that the capturer should try to include the cursor in the frame. + // If it is able to do so it will set `DesktopFrame::may_contain_cursor()`. + // Not all capturers will support including the cursor. If this value is false + // or the cursor otherwise cannot be included in the frame, then cursor + // metadata will be sent, though the capturer may choose to always send cursor + // metadata. + bool prefer_cursor_embedded() const { return prefer_cursor_embedded_; } + void set_prefer_cursor_embedded(bool prefer_cursor_embedded) { + prefer_cursor_embedded_ = prefer_cursor_embedded; + } + #if defined(WEBRTC_WIN) // Enumerating windows owned by the current process on Windows has some // complications due to |GetWindowText*()| APIs potentially causing a @@ -237,6 +248,7 @@ class RTC_EXPORT DesktopCaptureOptions { #endif bool disable_effects_ = true; bool detect_updated_region_ = false; + bool prefer_cursor_embedded_ = false; #if defined(WEBRTC_USE_PIPEWIRE) bool allow_pipewire_ = false; bool pipewire_use_damage_region_ = true; diff --git a/third_party/libwebrtc/modules/desktop_capture/desktop_capture_types.h b/third_party/libwebrtc/modules/desktop_capture/desktop_capture_types.h index e42d49dd3ffc..eb124960f494 100644 --- a/third_party/libwebrtc/modules/desktop_capture/desktop_capture_types.h +++ b/third_party/libwebrtc/modules/desktop_capture/desktop_capture_types.h @@ -32,6 +32,8 @@ typedef intptr_t WindowId; const WindowId kNullWindowId = 0; +const int64_t kInvalidDisplayId = -1; + // Type used to identify screens on the desktop. Values are platform-specific: // - On Windows: integer display device index. // - On OSX: CGDirectDisplayID cast to intptr_t. @@ -40,9 +42,9 @@ const WindowId kNullWindowId = 0; // On Windows, ScreenId is implementation dependent: sending a ScreenId from one // implementation to another usually won't work correctly. #if defined(CHROMEOS) - typedef int64_t ScreenId; +typedef int64_t ScreenId; #else - typedef intptr_t ScreenId; +typedef intptr_t ScreenId; #endif // The screen id corresponds to all screen combined together. diff --git a/third_party/libwebrtc/modules/desktop_capture/desktop_capturer.h b/third_party/libwebrtc/modules/desktop_capture/desktop_capturer.h index a78fdcdb6920..40dc1ddd4afa 100644 --- a/third_party/libwebrtc/modules/desktop_capture/desktop_capturer.h +++ b/third_party/libwebrtc/modules/desktop_capture/desktop_capturer.h @@ -87,6 +87,10 @@ class RTC_EXPORT DesktopCapturer { // TODO(https://crbug.com/1369162): Remove or refactor this value. WindowId in_process_id = kNullWindowId; #endif + + // The display's unique ID. 
If no ID is defined, it will hold the value + // kInvalidDisplayId. + int64_t display_id = kInvalidDisplayId; }; typedef std::vector SourceList; diff --git a/third_party/libwebrtc/modules/desktop_capture/linux/wayland/base_capturer_pipewire.cc b/third_party/libwebrtc/modules/desktop_capture/linux/wayland/base_capturer_pipewire.cc index a0af638d0066..fce023782fa4 100644 --- a/third_party/libwebrtc/modules/desktop_capture/linux/wayland/base_capturer_pipewire.cc +++ b/third_party/libwebrtc/modules/desktop_capture/linux/wayland/base_capturer_pipewire.cc @@ -12,9 +12,9 @@ #include "modules/desktop_capture/desktop_capture_options.h" #include "modules/desktop_capture/desktop_capturer.h" -#include "modules/desktop_capture/linux/wayland/pipewire_utils.h" #include "modules/desktop_capture/linux/wayland/restore_token_manager.h" -#include "modules/desktop_capture/linux/wayland/xdg_desktop_portal_utils.h" +#include "modules/portal/pipewire_utils.h" +#include "modules/portal/xdg_desktop_portal_utils.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -72,7 +72,8 @@ void BaseCapturerPipeWire::OnScreenCastRequestResult(RequestResponse result, capturer_failed_ = false; if (result != RequestResponse::kSuccess || !options_.screencast_stream()->StartScreenCastStream( - stream_node_id, fd, options_.get_width(), options_.get_height())) { + stream_node_id, fd, options_.get_width(), options_.get_height(), + options_.prefer_cursor_embedded())) { capturer_failed_ = true; RTC_LOG(LS_ERROR) << "ScreenCastPortal failed: " << static_cast(result); diff --git a/third_party/libwebrtc/modules/desktop_capture/linux/wayland/base_capturer_pipewire.h b/third_party/libwebrtc/modules/desktop_capture/linux/wayland/base_capturer_pipewire.h index d84718aea444..c5c122c14ca0 100644 --- a/third_party/libwebrtc/modules/desktop_capture/linux/wayland/base_capturer_pipewire.h +++ b/third_party/libwebrtc/modules/desktop_capture/linux/wayland/base_capturer_pipewire.h @@ -14,12 +14,12 @@ #include "modules/desktop_capture/delegated_source_list_controller.h" #include "modules/desktop_capture/desktop_capture_options.h" #include "modules/desktop_capture/desktop_capturer.h" -#include "modules/desktop_capture/linux/wayland/portal_request_response.h" #include "modules/desktop_capture/linux/wayland/screen_capture_portal_interface.h" #include "modules/desktop_capture/linux/wayland/screencast_portal.h" #include "modules/desktop_capture/linux/wayland/shared_screencast_stream.h" -#include "modules/desktop_capture/linux/wayland/xdg_desktop_portal_utils.h" -#include "modules/desktop_capture/linux/wayland/xdg_session_details.h" +#include "modules/portal/portal_request_response.h" +#include "modules/portal/xdg_desktop_portal_utils.h" +#include "modules/portal/xdg_session_details.h" namespace webrtc { diff --git a/third_party/libwebrtc/modules/desktop_capture/linux/wayland/portal_request_response.h b/third_party/libwebrtc/modules/desktop_capture/linux/wayland/portal_request_response.h index dde9ac5eff4c..2589479347a7 100644 --- a/third_party/libwebrtc/modules/desktop_capture/linux/wayland/portal_request_response.h +++ b/third_party/libwebrtc/modules/desktop_capture/linux/wayland/portal_request_response.h @@ -11,24 +11,7 @@ #ifndef MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_PORTAL_REQUEST_RESPONSE_H_ #define MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_PORTAL_REQUEST_RESPONSE_H_ -namespace webrtc { -namespace xdg_portal { +// TODO(bugs.webrtc.org/14187): remove when all users are gone +#include "modules/portal/portal_request_response.h" -// Contains type of 
responses that can be observed when making a request to -// a desktop portal interface. -enum class RequestResponse { - // Unknown, the initialized status. - kUnknown, - // Success, the request is carried out. - kSuccess, - // The user cancelled the interaction. - kUserCancelled, - // The user interaction was ended in some other way. - kError, - - kMaxValue = kError, -}; - -} // namespace xdg_portal -} // namespace webrtc #endif // MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_PORTAL_REQUEST_RESPONSE_H_ diff --git a/third_party/libwebrtc/modules/desktop_capture/linux/wayland/scoped_glib.h b/third_party/libwebrtc/modules/desktop_capture/linux/wayland/scoped_glib.h index 908bd6f77d62..1361f843284e 100644 --- a/third_party/libwebrtc/modules/desktop_capture/linux/wayland/scoped_glib.h +++ b/third_party/libwebrtc/modules/desktop_capture/linux/wayland/scoped_glib.h @@ -11,55 +11,7 @@ #ifndef MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_SCOPED_GLIB_H_ #define MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_SCOPED_GLIB_H_ -#include - -#include "rtc_base/checks.h" - -namespace webrtc { - -template -class Scoped { - public: - Scoped() {} - explicit Scoped(T* val) { ptr_ = val; } - ~Scoped() { RTC_DCHECK_NOTREACHED(); } - - T* operator->() const { return ptr_; } - - explicit operator bool() const { return ptr_ != nullptr; } - - bool operator!() const { return ptr_ == nullptr; } - - T* get() const { return ptr_; } - - T** receive() { - RTC_CHECK(!ptr_); - return &ptr_; - } - - Scoped& operator=(T* val) { - RTC_DCHECK(val); - ptr_ = val; - return *this; - } - - protected: - T* ptr_ = nullptr; -}; - -template <> -Scoped::~Scoped(); -template <> -Scoped::~Scoped(); -template <> -Scoped::~Scoped(); -template <> -Scoped::~Scoped(); -template <> -Scoped::~Scoped(); -template <> -Scoped::~Scoped(); - -} // namespace webrtc +// TODO(bugs.webrtc.org/14187): remove when all users are gone +#include "modules/portal/scoped_glib.h" #endif // MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_SCOPED_GLIB_H_ diff --git a/third_party/libwebrtc/modules/desktop_capture/linux/wayland/screen_capture_portal_interface.cc b/third_party/libwebrtc/modules/desktop_capture/linux/wayland/screen_capture_portal_interface.cc index 02d9d2e806a5..1c7cc379dfa7 100644 --- a/third_party/libwebrtc/modules/desktop_capture/linux/wayland/screen_capture_portal_interface.cc +++ b/third_party/libwebrtc/modules/desktop_capture/linux/wayland/screen_capture_portal_interface.cc @@ -8,10 +8,10 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ #include "modules/desktop_capture/linux/wayland/screen_capture_portal_interface.h" -#include "modules/desktop_capture/linux/wayland/xdg_desktop_portal_utils.h" #include +#include "modules/portal/xdg_desktop_portal_utils.h" #include "rtc_base/logging.h" namespace webrtc { diff --git a/third_party/libwebrtc/modules/desktop_capture/linux/wayland/screen_capture_portal_interface.h b/third_party/libwebrtc/modules/desktop_capture/linux/wayland/screen_capture_portal_interface.h index 59aaf134e7f4..deb57a470725 100644 --- a/third_party/libwebrtc/modules/desktop_capture/linux/wayland/screen_capture_portal_interface.h +++ b/third_party/libwebrtc/modules/desktop_capture/linux/wayland/screen_capture_portal_interface.h @@ -15,10 +15,10 @@ #include -#include "modules/desktop_capture/linux/wayland/portal_request_response.h" -#include "modules/desktop_capture/linux/wayland/scoped_glib.h" -#include "modules/desktop_capture/linux/wayland/xdg_desktop_portal_utils.h" -#include "modules/desktop_capture/linux/wayland/xdg_session_details.h" +#include "modules/portal/portal_request_response.h" +#include "modules/portal/scoped_glib.h" +#include "modules/portal/xdg_desktop_portal_utils.h" +#include "modules/portal/xdg_session_details.h" namespace webrtc { namespace xdg_portal { diff --git a/third_party/libwebrtc/modules/desktop_capture/linux/wayland/screencast_portal.cc b/third_party/libwebrtc/modules/desktop_capture/linux/wayland/screencast_portal.cc index 3eb0b3914cdc..e7aaee001b07 100644 --- a/third_party/libwebrtc/modules/desktop_capture/linux/wayland/screencast_portal.cc +++ b/third_party/libwebrtc/modules/desktop_capture/linux/wayland/screencast_portal.cc @@ -13,8 +13,8 @@ #include #include -#include "modules/desktop_capture/linux/wayland/scoped_glib.h" -#include "modules/desktop_capture/linux/wayland/xdg_desktop_portal_utils.h" +#include "modules/portal/scoped_glib.h" +#include "modules/portal/xdg_desktop_portal_utils.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -58,9 +58,12 @@ ScreenCastPortal::ScreenCastPortal( PortalNotifier* notifier, ProxyRequestResponseHandler proxy_request_response_handler, SourcesRequestResponseSignalHandler sources_request_response_signal_handler, - gpointer user_data) + gpointer user_data, + bool prefer_cursor_embedded) : notifier_(notifier), capture_source_type_(ToCaptureSourceType(type)), + cursor_mode_(prefer_cursor_embedded ? 
CursorMode::kEmbedded + : CursorMode::kMetadata), proxy_request_response_handler_(proxy_request_response_handler), sources_request_response_signal_handler_( sources_request_response_signal_handler), diff --git a/third_party/libwebrtc/modules/desktop_capture/linux/wayland/screencast_portal.h b/third_party/libwebrtc/modules/desktop_capture/linux/wayland/screencast_portal.h index 96e4ba3ac9e8..ffb198f387b9 100644 --- a/third_party/libwebrtc/modules/desktop_capture/linux/wayland/screencast_portal.h +++ b/third_party/libwebrtc/modules/desktop_capture/linux/wayland/screencast_portal.h @@ -16,10 +16,10 @@ #include #include "modules/desktop_capture/desktop_capture_types.h" -#include "modules/desktop_capture/linux/wayland/portal_request_response.h" #include "modules/desktop_capture/linux/wayland/screen_capture_portal_interface.h" -#include "modules/desktop_capture/linux/wayland/xdg_desktop_portal_utils.h" -#include "modules/desktop_capture/linux/wayland/xdg_session_details.h" +#include "modules/portal/portal_request_response.h" +#include "modules/portal/xdg_desktop_portal_utils.h" +#include "modules/portal/xdg_session_details.h" namespace webrtc { @@ -84,7 +84,10 @@ class ScreenCastPortal : public xdg_portal::ScreenCapturePortalInterface { ProxyRequestResponseHandler proxy_request_response_handler, SourcesRequestResponseSignalHandler sources_request_response_signal_handler, - gpointer user_data); + gpointer user_data, + // TODO(chromium:1291247): Remove the default option once + // downstream has been adjusted. + bool prefer_cursor_embedded = false); ~ScreenCastPortal(); @@ -140,7 +143,7 @@ class ScreenCastPortal : public xdg_portal::ScreenCapturePortalInterface { CaptureSourceType capture_source_type_ = ScreenCastPortal::CaptureSourceType::kScreen; - CursorMode cursor_mode_ = ScreenCastPortal::CursorMode::kMetadata; + CursorMode cursor_mode_ = CursorMode::kMetadata; PersistMode persist_mode_ = ScreenCastPortal::PersistMode::kDoNotPersist; diff --git a/third_party/libwebrtc/modules/desktop_capture/linux/wayland/screencast_stream_utils.cc b/third_party/libwebrtc/modules/desktop_capture/linux/wayland/screencast_stream_utils.cc index dc0784791dbd..0c4900d1cd0a 100644 --- a/third_party/libwebrtc/modules/desktop_capture/linux/wayland/screencast_stream_utils.cc +++ b/third_party/libwebrtc/modules/desktop_capture/linux/wayland/screencast_stream_utils.cc @@ -27,15 +27,6 @@ namespace webrtc { -PipeWireThreadLoopLock::PipeWireThreadLoopLock(pw_thread_loop* loop) - : loop_(loop) { - pw_thread_loop_lock(loop_); -} - -PipeWireThreadLoopLock::~PipeWireThreadLoopLock() { - pw_thread_loop_unlock(loop_); -} - PipeWireVersion PipeWireVersion::Parse(const absl::string_view& version) { std::vector parsed_version = rtc::split(version, '.'); diff --git a/third_party/libwebrtc/modules/desktop_capture/linux/wayland/screencast_stream_utils.h b/third_party/libwebrtc/modules/desktop_capture/linux/wayland/screencast_stream_utils.h index 70262c2e390a..e04d7db93107 100644 --- a/third_party/libwebrtc/modules/desktop_capture/linux/wayland/screencast_stream_utils.h +++ b/third_party/libwebrtc/modules/desktop_capture/linux/wayland/screencast_stream_utils.h @@ -18,23 +18,12 @@ #include "rtc_base/string_encode.h" -struct pw_thread_loop; struct spa_pod; struct spa_pod_builder; struct spa_rectangle; namespace webrtc { -// Locks pw_thread_loop in the current scope -class PipeWireThreadLoopLock { - public: - explicit PipeWireThreadLoopLock(pw_thread_loop* loop); - ~PipeWireThreadLoopLock(); - - private: - pw_thread_loop* const loop_; -}; 
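// Illustrative sketch (not part of this patch): the RAII locking pattern the
// removed PipeWireThreadLoopLock class provides, assuming the class now lives
// with the other PipeWire helpers in modules/portal/pipewire_utils.h, which
// the includes above were switched to.
#include <pipewire/pipewire.h>
#include "modules/portal/pipewire_utils.h"

void UpdateStreamParams(pw_thread_loop* loop, pw_stream* stream,
                        const spa_pod** params, uint32_t n_params) {
  // The constructor takes pw_thread_loop_lock(loop); the destructor unlocks,
  // so the stream call below cannot race with the PipeWire loop thread.
  webrtc::PipeWireThreadLoopLock lock(loop);
  pw_stream_update_params(stream, params, n_params);
}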
- struct PipeWireVersion { static PipeWireVersion Parse(const absl::string_view& version); diff --git a/third_party/libwebrtc/modules/desktop_capture/linux/wayland/shared_screencast_stream.cc b/third_party/libwebrtc/modules/desktop_capture/linux/wayland/shared_screencast_stream.cc index c9a6156692f9..bcd7e3a33cff 100644 --- a/third_party/libwebrtc/modules/desktop_capture/linux/wayland/shared_screencast_stream.cc +++ b/third_party/libwebrtc/modules/desktop_capture/linux/wayland/shared_screencast_stream.cc @@ -20,8 +20,8 @@ #include "absl/memory/memory.h" #include "modules/desktop_capture/linux/wayland/egl_dmabuf.h" -#include "modules/desktop_capture/linux/wayland/pipewire_utils.h" #include "modules/desktop_capture/linux/wayland/screencast_stream_utils.h" +#include "modules/portal/pipewire_utils.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/sanitizer.h" @@ -76,7 +76,8 @@ class SharedScreenCastStreamPrivate { bool StartScreenCastStream(uint32_t stream_node_id, int fd, uint32_t width = 0, - uint32_t height = 0); + uint32_t height = 0, + bool is_cursor_embedded = false); void UpdateScreenCastStreamResolution(uint32_t width, uint32_t height); void SetUseDamageRegion(bool use_damage_region) { use_damage_region_ = use_damage_region; @@ -142,6 +143,10 @@ class SharedScreenCastStreamPrivate { bool use_damage_region_ = true; + // Specifies whether the pipewire stream has been initialized with a request + // to embed cursor into the captured frames. + bool is_cursor_embedded_ = false; + // event handlers pw_core_events pw_core_events_ = {}; pw_stream_events pw_stream_events_ = {}; @@ -384,9 +389,11 @@ bool SharedScreenCastStreamPrivate::StartScreenCastStream( uint32_t stream_node_id, int fd, uint32_t width, - uint32_t height) { + uint32_t height, + bool is_cursor_embedded) { width_ = width; height_ = height; + is_cursor_embedded_ = is_cursor_embedded; if (!InitializePipeWire()) { RTC_LOG(LS_ERROR) << "Unable to open PipeWire library"; return false; @@ -799,6 +806,7 @@ void SharedScreenCastStreamPrivate::ProcessBuffer(pw_buffer* buffer) { queue_.current_frame()->mutable_updated_region()->SetRect( DesktopRect::MakeSize(queue_.current_frame()->size())); } + queue_.current_frame()->set_may_contain_cursor(is_cursor_embedded_); } RTC_NO_SANITIZE("cfi-icall") @@ -911,8 +919,10 @@ bool SharedScreenCastStream::StartScreenCastStream(uint32_t stream_node_id) { bool SharedScreenCastStream::StartScreenCastStream(uint32_t stream_node_id, int fd, uint32_t width, - uint32_t height) { - return private_->StartScreenCastStream(stream_node_id, fd, width, height); + uint32_t height, + bool is_cursor_embedded) { + return private_->StartScreenCastStream(stream_node_id, fd, width, height, + is_cursor_embedded); } void SharedScreenCastStream::UpdateScreenCastStreamResolution(uint32_t width, diff --git a/third_party/libwebrtc/modules/desktop_capture/linux/wayland/shared_screencast_stream.h b/third_party/libwebrtc/modules/desktop_capture/linux/wayland/shared_screencast_stream.h index ba29525224aa..9cdd3d89be70 100644 --- a/third_party/libwebrtc/modules/desktop_capture/linux/wayland/shared_screencast_stream.h +++ b/third_party/libwebrtc/modules/desktop_capture/linux/wayland/shared_screencast_stream.h @@ -47,7 +47,8 @@ class RTC_EXPORT SharedScreenCastStream bool StartScreenCastStream(uint32_t stream_node_id, int fd, uint32_t width = 0, - uint32_t height = 0); + uint32_t height = 0, + bool is_cursor_embedded = false); void UpdateScreenCastStreamResolution(uint32_t width, uint32_t height); void 
SetUseDamageRegion(bool use_damage_region); void SetObserver(SharedScreenCastStream::Observer* observer); diff --git a/third_party/libwebrtc/modules/desktop_capture/linux/wayland/test/test_screencast_stream_provider.cc b/third_party/libwebrtc/modules/desktop_capture/linux/wayland/test/test_screencast_stream_provider.cc index bf212d06504a..3b829959aca8 100644 --- a/third_party/libwebrtc/modules/desktop_capture/linux/wayland/test/test_screencast_stream_provider.cc +++ b/third_party/libwebrtc/modules/desktop_capture/linux/wayland/test/test_screencast_stream_provider.cc @@ -20,7 +20,7 @@ #include #include -#include "modules/desktop_capture/linux/wayland/pipewire_utils.h" +#include "modules/portal/pipewire_utils.h" #include "rtc_base/logging.h" namespace webrtc { diff --git a/third_party/libwebrtc/modules/desktop_capture/linux/wayland/xdg_desktop_portal_utils.h b/third_party/libwebrtc/modules/desktop_capture/linux/wayland/xdg_desktop_portal_utils.h index f6ac92b5d16b..b213e5030864 100644 --- a/third_party/libwebrtc/modules/desktop_capture/linux/wayland/xdg_desktop_portal_utils.h +++ b/third_party/libwebrtc/modules/desktop_capture/linux/wayland/xdg_desktop_portal_utils.h @@ -11,101 +11,7 @@ #ifndef MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_XDG_DESKTOP_PORTAL_UTILS_H_ #define MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_XDG_DESKTOP_PORTAL_UTILS_H_ -#include -#include - -#include -#include - -#include "absl/strings/string_view.h" -#include "modules/desktop_capture/linux/wayland/portal_request_response.h" -#include "modules/desktop_capture/linux/wayland/scoped_glib.h" -#include "modules/desktop_capture/linux/wayland/xdg_session_details.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" - -namespace webrtc { -namespace xdg_portal { - -constexpr char kDesktopBusName[] = "org.freedesktop.portal.Desktop"; -constexpr char kDesktopObjectPath[] = "/org/freedesktop/portal/desktop"; -constexpr char kDesktopRequestObjectPath[] = - "/org/freedesktop/portal/desktop/request"; -constexpr char kSessionInterfaceName[] = "org.freedesktop.portal.Session"; -constexpr char kRequestInterfaceName[] = "org.freedesktop.portal.Request"; -constexpr char kScreenCastInterfaceName[] = "org.freedesktop.portal.ScreenCast"; - -using ProxyRequestCallback = void (*)(GObject*, GAsyncResult*, gpointer); -using SessionRequestCallback = void (*)(GDBusProxy*, GAsyncResult*, gpointer); -using SessionRequestResponseSignalHandler = void (*)(GDBusConnection*, - const char*, - const char*, - const char*, - const char*, - GVariant*, - gpointer); -using StartRequestResponseSignalHandler = void (*)(GDBusConnection*, - const char*, - const char*, - const char*, - const char*, - GVariant*, - gpointer); -using SessionStartRequestedHandler = void (*)(GDBusProxy*, - GAsyncResult*, - gpointer); - -std::string RequestResponseToString(RequestResponse request); - -RequestResponse RequestResponseFromPortalResponse(uint32_t portal_response); - -// Returns a string path for signal handle based on the provided connection and -// token. -std::string PrepareSignalHandle(absl::string_view token, - GDBusConnection* connection); - -// Sets up the callback to execute when a response signal is received for the -// given object. 
-uint32_t SetupRequestResponseSignal(absl::string_view object_path, - const GDBusSignalCallback callback, - gpointer user_data, - GDBusConnection* connection); - -void RequestSessionProxy(absl::string_view interface_name, - const ProxyRequestCallback proxy_request_callback, - GCancellable* cancellable, - gpointer user_data); - -void SetupSessionRequestHandlers( - absl::string_view portal_prefix, - const SessionRequestCallback session_request_callback, - const SessionRequestResponseSignalHandler request_response_signale_handler, - GDBusConnection* connection, - GDBusProxy* proxy, - GCancellable* cancellable, - std::string& portal_handle, - guint& session_request_signal_id, - gpointer user_data); - -void StartSessionRequest( - absl::string_view prefix, - absl::string_view session_handle, - const StartRequestResponseSignalHandler signal_handler, - const SessionStartRequestedHandler session_started_handler, - GDBusProxy* proxy, - GDBusConnection* connection, - GCancellable* cancellable, - guint& start_request_signal_id, - std::string& start_handle, - gpointer user_data); - -// Tears down the portal session and cleans up related objects. -void TearDownSession(absl::string_view session_handle, - GDBusProxy* proxy, - GCancellable* cancellable, - GDBusConnection* connection); - -} // namespace xdg_portal -} // namespace webrtc +// TODO(bugs.webrtc.org/14187): remove when all users are gone +#include "modules/portal/xdg_desktop_portal_utils.h" #endif // MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_XDG_DESKTOP_PORTAL_UTILS_H_ diff --git a/third_party/libwebrtc/modules/desktop_capture/linux/wayland/xdg_session_details.h b/third_party/libwebrtc/modules/desktop_capture/linux/wayland/xdg_session_details.h index b70ac4aa59c8..9feff5bdf776 100644 --- a/third_party/libwebrtc/modules/desktop_capture/linux/wayland/xdg_session_details.h +++ b/third_party/libwebrtc/modules/desktop_capture/linux/wayland/xdg_session_details.h @@ -11,23 +11,7 @@ #ifndef MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_XDG_SESSION_DETAILS_H_ #define MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_XDG_SESSION_DETAILS_H_ -#include - -#include - -namespace webrtc { -namespace xdg_portal { - -// Details of the session associated with XDG desktop portal session. Portal API -// calls can be invoked by utilizing the information here. -struct SessionDetails { - GDBusProxy* proxy = nullptr; - GCancellable* cancellable = nullptr; - std::string session_handle; - uint32_t pipewire_stream_node_id = 0; -}; - -} // namespace xdg_portal -} // namespace webrtc +// TODO(bugs.webrtc.org/14187): remove when all users are gone +#include "modules/portal/xdg_session_details.h" #endif // MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_XDG_SESSION_DETAILS_H_ diff --git a/third_party/libwebrtc/modules/desktop_capture/screen_capturer_fuchsia.cc b/third_party/libwebrtc/modules/desktop_capture/screen_capturer_fuchsia.cc index c0ad841c058e..19c4c735ad8b 100644 --- a/third_party/libwebrtc/modules/desktop_capture/screen_capturer_fuchsia.cc +++ b/third_party/libwebrtc/modules/desktop_capture/screen_capturer_fuchsia.cc @@ -65,8 +65,7 @@ std::unique_ptr DesktopCapturer::CreateRawScreenCapturer( } ScreenCapturerFuchsia::ScreenCapturerFuchsia() - : component_context_( - sys::ComponentContext::CreateAndServeOutgoingDirectory()) { + : component_context_(sys::ComponentContext::Create()) { RTC_DCHECK(CheckRequirements()); } @@ -86,7 +85,7 @@ ScreenCapturerFuchsia::~ScreenCapturerFuchsia() { // TODO(fxbug.dev/100303): Remove this function when Flatland is the only API. 
bool ScreenCapturerFuchsia::CheckRequirements() { std::unique_ptr component_context = - sys::ComponentContext::CreateAndServeOutgoingDirectory(); + sys::ComponentContext::Create(); fuchsia::ui::scenic::ScenicSyncPtr scenic; zx_status_t status = component_context->svc()->Connect(scenic.NewRequest()); if (status != ZX_OK) { @@ -163,6 +162,9 @@ void ScreenCapturerFuchsia::CaptureFrame() { uint32_t stride = kFuchsiaBytesPerPixel * pixels_per_row; frame->CopyPixelsFrom(virtual_memory_mapped_addrs_[buffer_index], stride, DesktopRect::MakeWH(width_, height_)); + // Mark the whole screen as having been updated. + frame->mutable_updated_region()->SetRect( + DesktopRect::MakeWH(width_, height_)); fuchsia::ui::composition::ScreenCapture_ReleaseFrame_Result release_result; screen_capture_->ReleaseFrame(buffer_index, &release_result); diff --git a/third_party/libwebrtc/modules/desktop_capture/win/wgc_capture_session.cc b/third_party/libwebrtc/modules/desktop_capture/win/wgc_capture_session.cc index 831257b4d476..ea5565c89c88 100644 --- a/third_party/libwebrtc/modules/desktop_capture/win/wgc_capture_session.cc +++ b/third_party/libwebrtc/modules/desktop_capture/win/wgc_capture_session.cc @@ -105,7 +105,7 @@ WgcCaptureSession::~WgcCaptureSession() { RemoveEventHandlers(); } -HRESULT WgcCaptureSession::StartCapture() { +HRESULT WgcCaptureSession::StartCapture(const DesktopCaptureOptions& options) { RTC_DCHECK_RUN_ON(&sequence_checker_); RTC_DCHECK(!is_capture_started_); @@ -187,6 +187,15 @@ HRESULT WgcCaptureSession::StartCapture() { return hr; } + if (!options.prefer_cursor_embedded()) { + ComPtr session2; + if (SUCCEEDED(session_->QueryInterface( + ABI::Windows::Graphics::Capture::IID_IGraphicsCaptureSession2, + &session2))) { + session2->put_IsCursorCaptureEnabled(false); + } + } + hr = session_->StartCapture(); if (FAILED(hr)) { RTC_LOG(LS_ERROR) << "Failed to start CaptureSession: " << hr; @@ -388,17 +397,14 @@ HRESULT WgcCaptureSession::OnItemClosed(WGC::IGraphicsCaptureItem* sender, RTC_LOG(LS_INFO) << "Capture target has been closed."; item_closed_ = true; - is_capture_started_ = false; RemoveEventHandlers(); - mapped_texture_ = nullptr; - session_ = nullptr; - frame_pool_ = nullptr; - direct3d_device_ = nullptr; - item_ = nullptr; - d3d11_device_ = nullptr; - + // Do not attempt to free resources in the OnItemClosed handler, as this + // causes a race where we try to delete the item that is calling us. Removing + // the event handlers and setting `item_closed_` above is sufficient to ensure + // that the resources are no longer used, and the next time the capturer tries + // to get a frame, we will report a permanent failure and be destroyed. return S_OK; } diff --git a/third_party/libwebrtc/modules/desktop_capture/win/wgc_capture_session.h b/third_party/libwebrtc/modules/desktop_capture/win/wgc_capture_session.h index 27d412baf9fd..dfe1fa60bb71 100644 --- a/third_party/libwebrtc/modules/desktop_capture/win/wgc_capture_session.h +++ b/third_party/libwebrtc/modules/desktop_capture/win/wgc_capture_session.h @@ -39,7 +39,7 @@ class WgcCaptureSession final { ~WgcCaptureSession(); - HRESULT StartCapture(); + HRESULT StartCapture(const DesktopCaptureOptions& options); // Returns a frame from the frame pool, if any are present. 
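// Illustrative sketch (not part of this patch) of how a caller opts into the
// new cursor behaviour; CreateDefault() and CreateScreenCapturer() are the
// existing DesktopCapturer factory entry points. With the option left at its
// default (false), the WGC session above disables cursor capture and frames
// are marked as not containing the cursor.
#include <memory>
#include "modules/desktop_capture/desktop_capture_options.h"
#include "modules/desktop_capture/desktop_capturer.h"

std::unique_ptr<webrtc::DesktopCapturer> CreateCursorEmbeddingCapturer() {
  webrtc::DesktopCaptureOptions options =
      webrtc::DesktopCaptureOptions::CreateDefault();
  options.set_prefer_cursor_embedded(true);
  return webrtc::DesktopCapturer::CreateScreenCapturer(options);
}
// Delivered frames can then be checked with frame->may_contain_cursor() to
// decide whether a separate cursor composition step is still required.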
HRESULT GetFrame(std::unique_ptr* output_frame); diff --git a/third_party/libwebrtc/modules/desktop_capture/win/wgc_capturer_win.cc b/third_party/libwebrtc/modules/desktop_capture/win/wgc_capturer_win.cc index ce5eb6b31fdd..8ec6a29f2350 100644 --- a/third_party/libwebrtc/modules/desktop_capture/win/wgc_capturer_win.cc +++ b/third_party/libwebrtc/modules/desktop_capture/win/wgc_capturer_win.cc @@ -140,10 +140,12 @@ bool IsWgcSupported(CaptureType capture_type) { } WgcCapturerWin::WgcCapturerWin( + const DesktopCaptureOptions& options, std::unique_ptr source_factory, std::unique_ptr source_enumerator, bool allow_delayed_capturable_check) - : source_factory_(std::move(source_factory)), + : options_(options), + source_factory_(std::move(source_factory)), source_enumerator_(std::move(source_enumerator)), allow_delayed_capturable_check_(allow_delayed_capturable_check) { if (!core_messaging_library_) @@ -166,7 +168,7 @@ std::unique_ptr WgcCapturerWin::CreateRawWindowCapturer( const DesktopCaptureOptions& options, bool allow_delayed_capturable_check) { return std::make_unique( - std::make_unique(), + options, std::make_unique(), std::make_unique( options.enumerate_current_process_windows()), allow_delayed_capturable_check); @@ -176,7 +178,7 @@ std::unique_ptr WgcCapturerWin::CreateRawWindowCapturer( std::unique_ptr WgcCapturerWin::CreateRawScreenCapturer( const DesktopCaptureOptions& options) { return std::make_unique( - std::make_unique(), + options, std::make_unique(), std::make_unique(), false); } @@ -309,7 +311,7 @@ void WgcCapturerWin::CaptureFrame() { } if (!capture_session->IsCaptureStarted()) { - hr = capture_session->StartCapture(); + hr = capture_session->StartCapture(options_); if (FAILED(hr)) { RTC_LOG(LS_ERROR) << "Failed to start capture: " << hr; ongoing_captures_.erase(capture_source_->GetSourceId()); @@ -344,7 +346,7 @@ void WgcCapturerWin::CaptureFrame() { capture_time_ms); frame->set_capture_time_ms(capture_time_ms); frame->set_capturer_id(DesktopCapturerId::kWgcCapturerWin); - frame->set_may_contain_cursor(true); + frame->set_may_contain_cursor(options_.prefer_cursor_embedded()); frame->set_top_left(capture_source_->GetTopLeft()); RecordWgcCapturerResult(WgcCapturerResult::kSuccess); callback_->OnCaptureResult(DesktopCapturer::Result::SUCCESS, diff --git a/third_party/libwebrtc/modules/desktop_capture/win/wgc_capturer_win.h b/third_party/libwebrtc/modules/desktop_capture/win/wgc_capturer_win.h index d9ee9d3fc6b1..30253d9db62b 100644 --- a/third_party/libwebrtc/modules/desktop_capture/win/wgc_capturer_win.h +++ b/third_party/libwebrtc/modules/desktop_capture/win/wgc_capturer_win.h @@ -83,7 +83,8 @@ class ScreenEnumerator final : public SourceEnumerator { // capturer appropriate for the type of source they want to capture. class WgcCapturerWin : public DesktopCapturer { public: - WgcCapturerWin(std::unique_ptr source_factory, + WgcCapturerWin(const DesktopCaptureOptions& options, + std::unique_ptr source_factory, std::unique_ptr source_enumerator, bool allow_delayed_capturable_check); @@ -114,6 +115,8 @@ class WgcCapturerWin : public DesktopCapturer { DispatcherQueueOptions, ABI::Windows::System::IDispatcherQueueController**); + DesktopCaptureOptions options_; + // We need to either create or ensure that someone else created a // `DispatcherQueue` on the current thread so that events will be delivered // on the current thread rather than an arbitrary thread. 
A diff --git a/third_party/libwebrtc/modules/pacing/bitrate_prober.cc b/third_party/libwebrtc/modules/pacing/bitrate_prober.cc index e01c3ae5a972..e8ebf54f32c3 100644 --- a/third_party/libwebrtc/modules/pacing/bitrate_prober.cc +++ b/third_party/libwebrtc/modules/pacing/bitrate_prober.cc @@ -18,12 +18,12 @@ #include "logging/rtc_event_log/events/rtc_event_probe_cluster_created.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include "system_wrappers/include/metrics.h" namespace webrtc { namespace { constexpr TimeDelta kProbeClusterTimeout = TimeDelta::Seconds(5); +constexpr size_t kMaxPendingProbeClusters = 5; } // namespace @@ -36,18 +36,9 @@ BitrateProberConfig::BitrateProberConfig( key_value_config->Lookup("WebRTC-Bwe-ProbingBehavior")); } -BitrateProber::~BitrateProber() { - RTC_HISTOGRAM_COUNTS_1000("WebRTC.BWE.Probing.TotalProbeClustersRequested", - total_probe_count_); - RTC_HISTOGRAM_COUNTS_1000("WebRTC.BWE.Probing.TotalFailedProbeClusters", - total_failed_probe_count_); -} - BitrateProber::BitrateProber(const FieldTrialsView& field_trials) : probing_state_(ProbingState::kDisabled), next_probe_time_(Timestamp::PlusInfinity()), - total_probe_count_(0), - total_failed_probe_count_(0), config_(&field_trials) { SetEnabled(true); } @@ -82,12 +73,11 @@ void BitrateProber::CreateProbeCluster( const ProbeClusterConfig& cluster_config) { RTC_DCHECK(probing_state_ != ProbingState::kDisabled); - total_probe_count_++; while (!clusters_.empty() && - cluster_config.at_time - clusters_.front().requested_at > - kProbeClusterTimeout) { + (cluster_config.at_time - clusters_.front().requested_at > + kProbeClusterTimeout || + clusters_.size() > kMaxPendingProbeClusters)) { clusters_.pop(); - total_failed_probe_count_++; } ProbeCluster cluster; @@ -169,13 +159,6 @@ void BitrateProber::ProbeSent(Timestamp now, DataSize size) { next_probe_time_ = CalculateNextProbeTime(*cluster); if (cluster->sent_bytes >= cluster->pace_info.probe_cluster_min_bytes && cluster->sent_probes >= cluster->pace_info.probe_cluster_min_probes) { - RTC_HISTOGRAM_COUNTS_100000("WebRTC.BWE.Probing.ProbeClusterSizeInBytes", - cluster->sent_bytes); - RTC_HISTOGRAM_COUNTS_100("WebRTC.BWE.Probing.ProbesPerCluster", - cluster->sent_probes); - RTC_HISTOGRAM_COUNTS_10000("WebRTC.BWE.Probing.TimePerProbeCluster", - (now - cluster->started_at).ms()); - clusters_.pop(); } if (clusters_.empty()) { diff --git a/third_party/libwebrtc/modules/pacing/bitrate_prober.h b/third_party/libwebrtc/modules/pacing/bitrate_prober.h index d2f13942627b..4d8ec68c4f5c 100644 --- a/third_party/libwebrtc/modules/pacing/bitrate_prober.h +++ b/third_party/libwebrtc/modules/pacing/bitrate_prober.h @@ -45,7 +45,7 @@ struct BitrateProberConfig { class BitrateProber { public: explicit BitrateProber(const FieldTrialsView& field_trials); - ~BitrateProber(); + ~BitrateProber() = default; void SetEnabled(bool enable); @@ -118,9 +118,6 @@ class BitrateProber { // Time the next probe should be sent when in kActive state. 
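// Worked consequence of the new cap (cf. the LimitsNumberOfPendingProbeClusters
// test below): with kMaxPendingProbeClusters = 5, requesting clusters 0..10
// back to back prunes the oldest entries on each request, so once probing
// starts the cluster at the head of the queue has an id of at least 5.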
Timestamp next_probe_time_; - int total_probe_count_; - int total_failed_probe_count_; - BitrateProberConfig config_; }; diff --git a/third_party/libwebrtc/modules/pacing/bitrate_prober_unittest.cc b/third_party/libwebrtc/modules/pacing/bitrate_prober_unittest.cc index 00f84e69f1c0..3be7d2d99e1f 100644 --- a/third_party/libwebrtc/modules/pacing/bitrate_prober_unittest.cc +++ b/third_party/libwebrtc/modules/pacing/bitrate_prober_unittest.cc @@ -13,6 +13,8 @@ #include #include "api/units/data_rate.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "test/explicit_key_value_config.h" #include "test/gtest.h" @@ -143,6 +145,41 @@ TEST(BitrateProberTest, DiscardsDelayedProbes) { EXPECT_FALSE(prober.CurrentCluster(now).has_value()); } +TEST(BitrateProberTest, LimitsNumberOfPendingProbeClusters) { + const FieldTrialBasedConfig config; + BitrateProber prober(config); + const DataSize kProbeSize = DataSize::Bytes(1000); + Timestamp now = Timestamp::Zero(); + prober.CreateProbeCluster({.at_time = now, + .target_data_rate = DataRate::KilobitsPerSec(900), + .target_duration = TimeDelta::Millis(15), + .target_probe_count = 5, + .id = 0}); + prober.OnIncomingPacket(kProbeSize); + ASSERT_TRUE(prober.is_probing()); + ASSERT_EQ(prober.CurrentCluster(now)->probe_cluster_id, 0); + + for (int i = 1; i < 11; ++i) { + prober.CreateProbeCluster( + {.at_time = now, + .target_data_rate = DataRate::KilobitsPerSec(900), + .target_duration = TimeDelta::Millis(15), + .target_probe_count = 5, + .id = i}); + prober.OnIncomingPacket(kProbeSize); + } + // Expect some clusters has been dropped. + EXPECT_TRUE(prober.is_probing()); + EXPECT_GE(prober.CurrentCluster(now)->probe_cluster_id, 5); + + Timestamp max_expected_probe_time = now + TimeDelta::Seconds(1); + while (prober.is_probing() && now < max_expected_probe_time) { + now = std::max(now, prober.NextProbeTime(now)); + prober.ProbeSent(now, kProbeSize); + } + EXPECT_FALSE(prober.is_probing()); +} + TEST(BitrateProberTest, DoesntInitializeProbingForSmallPackets) { const FieldTrialBasedConfig config; BitrateProber prober(config); diff --git a/third_party/libwebrtc/modules/pacing/pacing_controller.cc b/third_party/libwebrtc/modules/pacing/pacing_controller.cc index a926e325453d..2251e5681497 100644 --- a/third_party/libwebrtc/modules/pacing/pacing_controller.cc +++ b/third_party/libwebrtc/modules/pacing/pacing_controller.cc @@ -33,7 +33,6 @@ constexpr TimeDelta kCongestedPacketInterval = TimeDelta::Millis(500); // The maximum debt level, in terms of time, capped when sending packets. 
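// For scale (1 Mbps is an illustrative rate, not a value from this patch):
// the 500 ms debt cap below bounds outstanding media or padding debt at about
//   500 ms * 125'000 bytes/s = 62'500 bytes.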
constexpr TimeDelta kMaxDebtInTime = TimeDelta::Millis(500); constexpr TimeDelta kMaxElapsedTime = TimeDelta::Seconds(2); -constexpr TimeDelta kTargetPaddingDuration = TimeDelta::Millis(5); bool IsDisabled(const FieldTrialsView& field_trials, absl::string_view key) { return absl::StartsWith(field_trials.Lookup(key), "Disabled"); @@ -50,6 +49,9 @@ const TimeDelta PacingController::kMaxExpectedQueueLength = const TimeDelta PacingController::kPausedProcessInterval = kCongestedPacketInterval; const TimeDelta PacingController::kMinSleepTime = TimeDelta::Millis(1); +const TimeDelta PacingController::kTargetPaddingDuration = TimeDelta::Millis(5); +const TimeDelta PacingController::kMaxPaddingReplayDuration = + TimeDelta::Millis(50); const TimeDelta PacingController::kMaxEarlyProbeProcessing = TimeDelta::Millis(1); @@ -87,7 +89,8 @@ PacingController::PacingController(Clock* clock, congested_(false), queue_time_limit_(kMaxExpectedQueueLength), account_for_audio_(false), - include_overhead_(false) { + include_overhead_(false), + circuit_breaker_threshold_(1 << 16) { if (!drain_large_queues_) { RTC_LOG(LS_WARNING) << "Pacer queues will not be drained," "pushback experiment must be enabled."; @@ -141,6 +144,14 @@ void PacingController::SetCongested(bool congested) { congested_ = congested; } +void PacingController::SetCircuitBreakerThreshold(int num_iterations) { + circuit_breaker_threshold_ = num_iterations; +} + +void PacingController::RemovePacketsForSsrc(uint32_t ssrc) { + packet_queue_.RemovePacketsForSsrc(ssrc); +} + bool PacingController::IsProbing() const { return prober_.is_probing(); } @@ -423,18 +434,24 @@ void PacingController::ProcessPackets() { } DataSize data_sent = DataSize::Zero(); - // Circuit breaker, making sure main loop isn't forever. - static constexpr int kMaxIterations = 1 << 16; int iteration = 0; int packets_sent = 0; int padding_packets_generated = 0; - for (; iteration < kMaxIterations; ++iteration) { + for (; iteration < circuit_breaker_threshold_; ++iteration) { // Fetch packet, so long as queue is not empty or budget is not // exhausted. std::unique_ptr rtp_packet = GetPendingPacket(pacing_info, target_send_time, now); if (rtp_packet == nullptr) { // No packet available to send, check if we should send padding. + if (now - target_send_time > kMaxPaddingReplayDuration) { + // The target send time is more than `kMaxPaddingReplayDuration` behind + // the real-time clock. This can happen if the clock is adjusted forward + // without `ProcessPackets()` having been called at the expected times. + target_send_time = now - kMaxPaddingReplayDuration; + last_process_time_ = std::max(last_process_time_, target_send_time); + } + DataSize padding_to_add = PaddingToAdd(recommended_probe_size, data_sent); if (padding_to_add > DataSize::Zero()) { std::vector> padding_packets = @@ -499,14 +516,30 @@ void PacingController::ProcessPackets() { } } - if (iteration >= kMaxIterations) { + if (iteration >= circuit_breaker_threshold_) { // Circuit break activated. Log warning, adjust send time and return. // TODO(sprang): Consider completely clearing state. - RTC_LOG(LS_ERROR) << "PacingController exceeded max iterations in " - "send-loop: packets sent = " - << packets_sent << ", padding packets generated = " - << padding_packets_generated - << ", bytes sent = " << data_sent.bytes(); + RTC_LOG(LS_ERROR) + << "PacingController exceeded max iterations in " + "send-loop. 
Debug info: " + << " packets sent = " << packets_sent + << ", padding packets generated = " << padding_packets_generated + << ", bytes sent = " << data_sent.bytes() + << ", probing = " << (is_probing ? "true" : "false") + << ", recommended_probe_size = " << recommended_probe_size.bytes() + << ", now = " << now.us() + << ", target_send_time = " << target_send_time.us() + << ", last_process_time = " << last_process_time_.us() + << ", last_send_time = " << last_send_time_.us() + << ", paused = " << (paused_ ? "true" : "false") + << ", media_debt = " << media_debt_.bytes() + << ", padding_debt = " << padding_debt_.bytes() + << ", pacing_rate = " << pacing_rate_.bps() + << ", adjusted_media_rate = " << adjusted_media_rate_.bps() + << ", padding_rate = " << padding_rate_.bps() + << ", queue size (packets) = " << packet_queue_.SizeInPackets() + << ", queue size (payload bytes) = " + << packet_queue_.SizeInPayloadBytes(); last_send_time_ = now; last_process_time_ = now; return; diff --git a/third_party/libwebrtc/modules/pacing/pacing_controller.h b/third_party/libwebrtc/modules/pacing/pacing_controller.h index 94d2402de694..91c0548568c4 100644 --- a/third_party/libwebrtc/modules/pacing/pacing_controller.h +++ b/third_party/libwebrtc/modules/pacing/pacing_controller.h @@ -72,9 +72,14 @@ class PacingController { // order to send a keep-alive packet so we don't get stuck in a bad state due // to lack of feedback. static const TimeDelta kPausedProcessInterval; - + // The default minimum time that should elapse calls to `ProcessPackets()`. static const TimeDelta kMinSleepTime; - + // When padding should be generated, add packets to the buffer with a size + // corresponding to this duration times the current padding rate. + static const TimeDelta kTargetPaddingDuration; + // The maximum time that the pacer can use when "replaying" passed time where + // padding should have been generated. + static const TimeDelta kMaxPaddingReplayDuration; // Allow probes to be processed slightly ahead of inteded send time. Currently // set to 1ms as this is intended to allow times be rounded down to the // nearest millisecond. @@ -156,6 +161,14 @@ class PacingController { bool IsProbing() const; + // Note: Intended for debugging purposes only, will be removed. + // Sets the number of iterations of the main loop in `ProcessPackets()` that + // is considered erroneous to exceed. + void SetCircuitBreakerThreshold(int num_iterations); + + // Remove any pending packets matching this SSRC from the packet queue. 
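// Hedged usage sketch (hypothetical call site; this patch only adds the
// plumbing): when a send stream is torn down mid-call, its owner can flush
// the pacer so stale packets and retransmissions are not paced out later:
//   pacing_controller.RemovePacketsForSsrc(media_ssrc);
//   pacing_controller.RemovePacketsForSsrc(rtx_ssrc);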
+ void RemovePacketsForSsrc(uint32_t ssrc); + private: TimeDelta UpdateTimeAndGetElapsed(Timestamp now); bool ShouldSendKeepalive(Timestamp now) const; @@ -232,6 +245,8 @@ class PacingController { TimeDelta queue_time_limit_; bool account_for_audio_; bool include_overhead_; + + int circuit_breaker_threshold_; }; } // namespace webrtc diff --git a/third_party/libwebrtc/modules/pacing/pacing_controller_unittest.cc b/third_party/libwebrtc/modules/pacing/pacing_controller_unittest.cc index 37b8605e2ecb..3b3c3eb761d4 100644 --- a/third_party/libwebrtc/modules/pacing/pacing_controller_unittest.cc +++ b/third_party/libwebrtc/modules/pacing/pacing_controller_unittest.cc @@ -27,6 +27,7 @@ #include "test/gtest.h" using ::testing::_; +using ::testing::AnyNumber; using ::testing::Field; using ::testing::Pointee; using ::testing::Property; @@ -1520,7 +1521,7 @@ TEST_F(PacingControllerTest, SmallFirstProbePacket) { size_t packets_sent = 0; bool media_seen = false; EXPECT_CALL(callback, SendPacket) - .Times(::testing::AnyNumber()) + .Times(AnyNumber()) .WillRepeatedly([&](std::unique_ptr packet, const PacedPacketInfo& cluster_info) { if (packets_sent == 0) { @@ -1674,7 +1675,7 @@ TEST_F(PacingControllerTest, for (bool account_for_audio : {false, true}) { uint16_t sequence_number = 1234; MockPacketSender callback; - EXPECT_CALL(callback, SendPacket).Times(::testing::AnyNumber()); + EXPECT_CALL(callback, SendPacket).Times(AnyNumber()); auto pacer = std::make_unique(&clock_, &callback, trials_); pacer->SetAccountForAudioPackets(account_for_audio); @@ -2115,5 +2116,60 @@ TEST_F(PacingControllerTest, BudgetDoesNotAffectRetransmissionInsTrial) { pacer.ProcessPackets(); } +TEST_F(PacingControllerTest, AbortsAfterReachingCircuitBreakLimit) { + const DataSize kPacketSize = DataSize::Bytes(1000); + + EXPECT_CALL(callback_, SendPadding).Times(0); + PacingController pacer(&clock_, &callback_, trials_); + pacer.SetPacingRates(kTargetRate, /*padding_rate=*/DataRate::Zero()); + + // Set the circuit breaker to abort after one iteration of the main + // sending loop. + pacer.SetCircuitBreakerThreshold(1); + EXPECT_CALL(callback_, SendPacket).Times(1); + + // Send two packets. + pacer.EnqueuePacket(BuildPacket(RtpPacketMediaType::kVideo, kVideoSsrc, + /*sequence_number=*/1, + /*capture_time=*/1, kPacketSize.bytes())); + pacer.EnqueuePacket(BuildPacket(RtpPacketMediaType::kVideo, kVideoSsrc, + /*sequence_number=*/2, + /*capture_time=*/2, kPacketSize.bytes())); + + // Advance time to way past where both should be eligible for sending. + clock_.AdvanceTime(TimeDelta::Seconds(1)); + + pacer.ProcessPackets(); +} + +TEST_F(PacingControllerTest, DoesNotPadIfProcessThreadIsBorked) { + PacingControllerPadding callback; + PacingController pacer(&clock_, &callback, trials_); + + // Set both pacing and padding rate to be non-zero. + pacer.SetPacingRates(kTargetRate, /*padding_rate=*/kTargetRate); + + // Add one packet to the queue, but do not send it yet. + pacer.EnqueuePacket(BuildPacket(RtpPacketMediaType::kVideo, kVideoSsrc, + /*sequence_number=*/1, + /*capture_time=*/1, + /*size=*/1000)); + + // Advance time to waaay after the packet should have been sent. + clock_.AdvanceTime(TimeDelta::Seconds(42)); + + // `ProcessPackets()` should send the delayed packet, followed by a small + // amount of missed padding. + pacer.ProcessPackets(); + + // The max padding window is the max replay duration + the target padding + // duration. 
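// Worked example with an illustrative rate (1 Mbps, not the fixture's actual
// kTargetRate): the bound below evaluates to
//   (kMaxPaddingReplayDuration + kTargetPaddingDuration) * rate
//   = (50 ms + 5 ms) * 125'000 bytes/s = 6'875 bytes,
// so after the 42 s stall the pacer replays at most ~55 ms worth of padding
// rather than 42 s worth.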
+ const DataSize kMaxPadding = (PacingController::kMaxPaddingReplayDuration + + PacingController::kTargetPaddingDuration) * + kTargetRate; + + EXPECT_LE(callback.padding_sent(), kMaxPadding.bytes()); +} + } // namespace } // namespace webrtc diff --git a/third_party/libwebrtc/modules/pacing/prioritized_packet_queue.cc b/third_party/libwebrtc/modules/pacing/prioritized_packet_queue.cc index b3874a2324aa..0c285c463a78 100644 --- a/third_party/libwebrtc/modules/pacing/prioritized_packet_queue.cc +++ b/third_party/libwebrtc/modules/pacing/prioritized_packet_queue.cc @@ -60,7 +60,7 @@ bool PrioritizedPacketQueue::StreamQueue::EnqueuePacket(QueuedPacket packet, } PrioritizedPacketQueue::QueuedPacket -PrioritizedPacketQueue::StreamQueue::DequePacket(int priority_level) { +PrioritizedPacketQueue::StreamQueue::DequeuePacket(int priority_level) { RTC_DCHECK(!packets_[priority_level].empty()); QueuedPacket packet = std::move(packets_[priority_level].front()); packets_[priority_level].pop_front(); @@ -91,6 +91,16 @@ Timestamp PrioritizedPacketQueue::StreamQueue::LastEnqueueTime() const { return last_enqueue_time_; } +std::array, + PrioritizedPacketQueue::kNumPriorityLevels> +PrioritizedPacketQueue::StreamQueue::DequeueAll() { + std::array, kNumPriorityLevels> packets_by_prio; + for (int i = 0; i < kNumPriorityLevels; ++i) { + packets_by_prio[i].swap(packets_[i]); + } + return packets_by_prio; +} + PrioritizedPacketQueue::PrioritizedPacketQueue(Timestamp creation_time) : queue_time_sum_(TimeDelta::Zero()), pause_time_sum_(TimeDelta::Zero()), @@ -162,54 +172,16 @@ std::unique_ptr PrioritizedPacketQueue::Pop() { RTC_DCHECK_GE(top_active_prio_level_, 0); StreamQueue& stream_queue = *streams_by_prio_[top_active_prio_level_].front(); - QueuedPacket packet = stream_queue.DequePacket(top_active_prio_level_); - --size_packets_; - RTC_DCHECK(packet.packet->packet_type().has_value()); - RtpPacketMediaType packet_type = packet.packet->packet_type().value(); - --size_packets_per_media_type_[static_cast(packet_type)]; - RTC_DCHECK_GE(size_packets_per_media_type_[static_cast(packet_type)], - 0); - size_payload_ -= packet.PacketSize(); - - // Calculate the total amount of time spent by this packet in the queue - // while in a non-paused state. Note that the `pause_time_sum_ms_` was - // subtracted from `packet.enqueue_time_ms` when the packet was pushed, and - // by subtracting it now we effectively remove the time spent in in the - // queue while in a paused state. - TimeDelta time_in_non_paused_state = - last_update_time_ - packet.enqueue_time - pause_time_sum_; - queue_time_sum_ -= time_in_non_paused_state; - - // Set the time spent in the send queue, which is the per-packet equivalent of - // totalPacketSendDelay. The notion of being paused is an implementation - // detail that we do not want to expose, so it makes sense to report the - // metric excluding the pause time. This also avoids spikes in the metric. - // https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-totalpacketsenddelay - packet.packet->set_time_in_send_queue(time_in_non_paused_state); - - RTC_DCHECK(size_packets_ > 0 || queue_time_sum_ == TimeDelta::Zero()); - - RTC_CHECK(packet.enqueue_time_iterator != enqueue_times_.end()); - enqueue_times_.erase(packet.enqueue_time_iterator); + QueuedPacket packet = stream_queue.DequeuePacket(top_active_prio_level_); + DequeuePacketInternal(packet); // Remove StreamQueue from head of fifo-queue for this prio level, and // and add it to the end if it still has packets. 
streams_by_prio_[top_active_prio_level_].pop_front(); if (stream_queue.HasPacketsAtPrio(top_active_prio_level_)) { streams_by_prio_[top_active_prio_level_].push_back(&stream_queue); - } else if (streams_by_prio_[top_active_prio_level_].empty()) { - // No stream queues have packets at this prio level, find top priority - // that is not empty. - if (size_packets_ == 0) { - top_active_prio_level_ = -1; - } else { - for (int i = 0; i < kNumPriorityLevels; ++i) { - if (!streams_by_prio_[i].empty()) { - top_active_prio_level_ = i; - break; - } - } - } + } else { + MaybeUpdateTopPrioLevel(); } return std::move(packet.packet); @@ -276,4 +248,96 @@ void PrioritizedPacketQueue::SetPauseState(bool paused, Timestamp now) { paused_ = paused; } +void PrioritizedPacketQueue::RemovePacketsForSsrc(uint32_t ssrc) { + auto kv = streams_.find(ssrc); + if (kv != streams_.end()) { + // Dequeue all packets from the queue for this SSRC. + StreamQueue& queue = *kv->second; + std::array, kNumPriorityLevels> packets_by_prio = + queue.DequeueAll(); + for (int i = 0; i < kNumPriorityLevels; ++i) { + std::deque& packet_queue = packets_by_prio[i]; + if (packet_queue.empty()) { + continue; + } + + // First erase all packets at this prio level. + while (!packet_queue.empty()) { + QueuedPacket packet = std::move(packet_queue.front()); + packet_queue.pop_front(); + DequeuePacketInternal(packet); + } + + // Next, deregister this `StreamQueue` from the round-robin tables. + RTC_DCHECK(!streams_by_prio_[i].empty()); + if (streams_by_prio_[i].size() == 1) { + // This is the last and only queue that had packets for this prio level. + // Update the global top prio level if neccessary. + RTC_DCHECK(streams_by_prio_[i].front() == &queue); + streams_by_prio_[i].pop_front(); + if (i == top_active_prio_level_) { + MaybeUpdateTopPrioLevel(); + } + } else { + // More than stream had packets at this prio level, filter this one out. + std::deque filtered_queue; + for (StreamQueue* queue_ptr : streams_by_prio_[i]) { + if (queue_ptr != &queue) { + filtered_queue.push_back(queue_ptr); + } + } + streams_by_prio_[i].swap(filtered_queue); + } + } + } +} + +void PrioritizedPacketQueue::DequeuePacketInternal(QueuedPacket& packet) { + --size_packets_; + RTC_DCHECK(packet.packet->packet_type().has_value()); + RtpPacketMediaType packet_type = packet.packet->packet_type().value(); + --size_packets_per_media_type_[static_cast(packet_type)]; + RTC_DCHECK_GE(size_packets_per_media_type_[static_cast(packet_type)], + 0); + size_payload_ -= packet.PacketSize(); + + // Calculate the total amount of time spent by this packet in the queue + // while in a non-paused state. Note that the `pause_time_sum_ms_` was + // subtracted from `packet.enqueue_time_ms` when the packet was pushed, and + // by subtracting it now we effectively remove the time spent in in the + // queue while in a paused state. + TimeDelta time_in_non_paused_state = + last_update_time_ - packet.enqueue_time - pause_time_sum_; + queue_time_sum_ -= time_in_non_paused_state; + + // Set the time spent in the send queue, which is the per-packet equivalent of + // totalPacketSendDelay. The notion of being paused is an implementation + // detail that we do not want to expose, so it makes sense to report the + // metric excluding the pause time. This also avoids spikes in the metric. 
+ // https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-totalpacketsenddelay + packet.packet->set_time_in_send_queue(time_in_non_paused_state); + + RTC_DCHECK(size_packets_ > 0 || queue_time_sum_ == TimeDelta::Zero()); + + RTC_CHECK(packet.enqueue_time_iterator != enqueue_times_.end()); + enqueue_times_.erase(packet.enqueue_time_iterator); +} + +void PrioritizedPacketQueue::MaybeUpdateTopPrioLevel() { + if (streams_by_prio_[top_active_prio_level_].empty()) { + // No stream queues have packets at this prio level, find top priority + // that is not empty. + if (size_packets_ == 0) { + top_active_prio_level_ = -1; + } else { + for (int i = 0; i < kNumPriorityLevels; ++i) { + if (!streams_by_prio_[i].empty()) { + top_active_prio_level_ = i; + break; + } + } + } + } +} + } // namespace webrtc diff --git a/third_party/libwebrtc/modules/pacing/prioritized_packet_queue.h b/third_party/libwebrtc/modules/pacing/prioritized_packet_queue.h index 3b5748f12afa..364b53af11ce 100644 --- a/third_party/libwebrtc/modules/pacing/prioritized_packet_queue.h +++ b/third_party/libwebrtc/modules/pacing/prioritized_packet_queue.h @@ -13,10 +13,12 @@ #include +#include #include #include #include #include +#include #include "api/units/data_size.h" #include "api/units/time_delta.h" @@ -80,6 +82,9 @@ class PrioritizedPacketQueue { // Set the pause state, while `paused` is true queuing time is not counted. void SetPauseState(bool paused, Timestamp now); + // Remove any packets matching the given SSRC. + void RemovePacketsForSsrc(uint32_t ssrc); + private: static constexpr int kNumPriorityLevels = 4; @@ -107,18 +112,27 @@ class PrioritizedPacketQueue { // count for that priority level went from zero to non-zero. bool EnqueuePacket(QueuedPacket packet, int priority_level); - QueuedPacket DequePacket(int priority_level); + QueuedPacket DequeuePacket(int priority_level); bool HasPacketsAtPrio(int priority_level) const; bool IsEmpty() const; Timestamp LeadingPacketEnqueueTime(int priority_level) const; Timestamp LastEnqueueTime() const; + std::array, kNumPriorityLevels> DequeueAll(); + private: std::deque packets_[kNumPriorityLevels]; Timestamp last_enqueue_time_; }; + // Remove the packet from the internal state, e.g. queue time / size etc. + void DequeuePacketInternal(QueuedPacket& packet); + + // Check if the queue pointed to by `top_active_prio_level_` is empty and + // if so move it to the lowest non-empty index. + void MaybeUpdateTopPrioLevel(); + // Cumulative sum, over all packets, of time spent in the queue. TimeDelta queue_time_sum_; // Cumulative sum of time the queue has spent in a paused state. diff --git a/third_party/libwebrtc/modules/pacing/prioritized_packet_queue_unittest.cc b/third_party/libwebrtc/modules/pacing/prioritized_packet_queue_unittest.cc index 5e79e7b68e35..964051c0c701 100644 --- a/third_party/libwebrtc/modules/pacing/prioritized_packet_queue_unittest.cc +++ b/third_party/libwebrtc/modules/pacing/prioritized_packet_queue_unittest.cc @@ -306,4 +306,58 @@ TEST(PrioritizedPacketQueue, } } +TEST(PrioritizedPacketQueue, ClearsPackets) { + Timestamp now = Timestamp::Zero(); + PrioritizedPacketQueue queue(now); + const uint32_t kSsrc = 1; + + // Add two packets of each type, all using the same SSRC. 
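// RtpPacketMediaType has five values (audio, video, retransmission, forward
// error correction, padding), so the loop below enqueues 2 * 5 = 10 packets
// before RemovePacketsForSsrc() is expected to clear them all.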
+ int sequence_number = 0; + for (size_t i = 0; i < kNumMediaTypes; ++i) { + queue.Push(now, CreatePacket(static_cast(i), + sequence_number++, kSsrc)); + queue.Push(now, CreatePacket(static_cast(i), + sequence_number++, kSsrc)); + } + EXPECT_EQ(queue.SizeInPackets(), 2 * int{kNumMediaTypes}); + + // Remove all of them. + queue.RemovePacketsForSsrc(kSsrc); + EXPECT_TRUE(queue.Empty()); +} + +TEST(PrioritizedPacketQueue, ClearPacketsAffectsOnlySpecifiedSsrc) { + Timestamp now = Timestamp::Zero(); + PrioritizedPacketQueue queue(now); + const uint32_t kRemovingSsrc = 1; + const uint32_t kStayingSsrc = 2; + + // Add an audio packet and a retransmission for the SSRC we will remove, + // ensuring they are first in line. + queue.Push( + now, CreatePacket(RtpPacketMediaType::kAudio, /*seq=*/1, kRemovingSsrc)); + queue.Push(now, CreatePacket(RtpPacketMediaType::kRetransmission, /*seq=*/2, + kRemovingSsrc)); + + // Add a video packet and a retransmission for the SSRC that will remain. + // The retransmission packets now both have pointers to their respective qeues + // from the same prio level. + queue.Push(now, + CreatePacket(RtpPacketMediaType::kVideo, /*seq=*/3, kStayingSsrc)); + queue.Push(now, CreatePacket(RtpPacketMediaType::kRetransmission, /*seq=*/4, + kStayingSsrc)); + + EXPECT_EQ(queue.SizeInPackets(), 4); + + // Clear the first two packets. + queue.RemovePacketsForSsrc(kRemovingSsrc); + EXPECT_EQ(queue.SizeInPackets(), 2); + + // We should get the single remaining retransmission first, then the video + // packet. + EXPECT_EQ(queue.Pop()->SequenceNumber(), 4); + EXPECT_EQ(queue.Pop()->SequenceNumber(), 3); + EXPECT_TRUE(queue.Empty()); +} + } // namespace webrtc diff --git a/third_party/libwebrtc/modules/pacing/task_queue_paced_sender.cc b/third_party/libwebrtc/modules/pacing/task_queue_paced_sender.cc index a42220b8341f..4ba249582c99 100644 --- a/third_party/libwebrtc/modules/pacing/task_queue_paced_sender.cc +++ b/third_party/libwebrtc/modules/pacing/task_queue_paced_sender.cc @@ -57,7 +57,8 @@ TaskQueuePacedSender::TaskQueuePacedSender( const FieldTrialsView& field_trials, TaskQueueFactory* task_queue_factory, TimeDelta max_hold_back_window, - int max_hold_back_window_in_packets) + int max_hold_back_window_in_packets, + absl::optional burst_interval) : clock_(clock), bursty_pacer_flags_(field_trials), slacked_pacer_flags_(field_trials), @@ -85,6 +86,11 @@ TaskQueuePacedSender::TaskQueuePacedSender( burst = slacked_burst; } } + // If not overriden by an experiment, the burst is specified by the + // `burst_interval` argument. 
+ if (!burst.has_value()) { + burst = burst_interval; + } if (burst.has_value()) { pacing_controller_.SetSendBurstInterval(burst.value()); } @@ -175,6 +181,14 @@ void TaskQueuePacedSender::EnqueuePackets( })); } +void TaskQueuePacedSender::RemovePacketsForSsrc(uint32_t ssrc) { + task_queue_.RunOrPost([this, ssrc]() { + RTC_DCHECK_RUN_ON(&task_queue_); + pacing_controller_.RemovePacketsForSsrc(ssrc); + MaybeProcessPackets(Timestamp::MinusInfinity()); + }); +} + void TaskQueuePacedSender::SetAccountForAudioPackets(bool account_for_audio) { task_queue_.RunOrPost([this, account_for_audio]() { RTC_DCHECK_RUN_ON(&task_queue_); diff --git a/third_party/libwebrtc/modules/pacing/task_queue_paced_sender.h b/third_party/libwebrtc/modules/pacing/task_queue_paced_sender.h index 18be6acef0a9..ea335fd8e372 100644 --- a/third_party/libwebrtc/modules/pacing/task_queue_paced_sender.h +++ b/third_party/libwebrtc/modules/pacing/task_queue_paced_sender.h @@ -39,16 +39,25 @@ class TaskQueuePacedSender : public RtpPacketPacer, public RtpPacketSender { public: static const int kNoPacketHoldback; + // The pacer can be configured using `field_trials` or specified parameters. + // // The `hold_back_window` parameter sets a lower bound on time to sleep if // there is currently a pacer queue and packets can't immediately be // processed. Increasing this reduces thread wakeups at the expense of higher // latency. - TaskQueuePacedSender(Clock* clock, - PacingController::PacketSender* packet_sender, - const FieldTrialsView& field_trials, - TaskQueueFactory* task_queue_factory, - TimeDelta max_hold_back_window, - int max_hold_back_window_in_packets); + // + // If the `burst_interval` parameter is set, the pacer is allowed to build up + // a packet "debt" that correspond to approximately the send rate during the + // specified interval. This greatly reduced wake ups by not pacing packets + // within the allowed burst budget. + TaskQueuePacedSender( + Clock* clock, + PacingController::PacketSender* packet_sender, + const FieldTrialsView& field_trials, + TaskQueueFactory* task_queue_factory, + TimeDelta max_hold_back_window, + int max_hold_back_window_in_packets, + absl::optional burst_interval = absl::nullopt); ~TaskQueuePacedSender() override; @@ -61,6 +70,8 @@ class TaskQueuePacedSender : public RtpPacketPacer, public RtpPacketSender { // PacingController::PacketSender::SendPacket() when it's time to send. void EnqueuePackets( std::vector> packets) override; + // Remove any pending packets matching this SSRC from the packet queue. + void RemovePacketsForSsrc(uint32_t ssrc) override; // Methods implementing RtpPacketPacer. diff --git a/third_party/libwebrtc/modules/pacing/task_queue_paced_sender_unittest.cc b/third_party/libwebrtc/modules/pacing/task_queue_paced_sender_unittest.cc index 59790d00ddef..69c7b9b7efdd 100644 --- a/third_party/libwebrtc/modules/pacing/task_queue_paced_sender_unittest.cc +++ b/third_party/libwebrtc/modules/pacing/task_queue_paced_sender_unittest.cc @@ -253,6 +253,53 @@ TEST_P(TaskQueuePacedSenderTest, PacesPackets) { EXPECT_NEAR((end_time - start_time).ms(), 1000.0, 50.0); } +// Same test as above, but with 0.5s of burst applied. 
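Editorial aside: the `burst_interval` documented above effectively converts the configured pacing rate into a byte budget that may go out back to back before the pacer sleeps again. A rough illustration, with assumed numbers that are not taken from the patch:

  #include <cstdint>

  // Approximate burst budget implied by a pacing rate and a burst interval.
  constexpr int64_t BurstBudgetBytes(int64_t pacing_rate_bps,
                                     int64_t burst_interval_ms) {
    return pacing_rate_bps * burst_interval_ms / 8 / 1000;
  }
  // 1 Mbps with a 500 ms burst interval allows roughly 62'500 bytes to be
  // sent immediately before normal pacing resumes.
  static_assert(BurstBudgetBytes(1'000'000, 500) == 62'500, "burst budget");

The PacesPacketsWithBurst test below relies on exactly this: with half a second of allowed burst, packets that would normally be paced over about one second drain in roughly half that time.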
+TEST_P(TaskQueuePacedSenderTest, PacesPacketsWithBurst) { + GlobalSimulatedTimeController time_controller(Timestamp::Millis(1234)); + MockPacketRouter packet_router; + ScopedKeyValueConfig trials(GetParam()); + TaskQueuePacedSender pacer(time_controller.GetClock(), &packet_router, trials, + time_controller.GetTaskQueueFactory(), + PacingController::kMinSleepTime, + TaskQueuePacedSender::kNoPacketHoldback, + // Half a second of bursting. + TimeDelta::Seconds(0.5)); + + // Insert a number of packets, covering one second. + static constexpr size_t kPacketsToSend = 42; + SequenceChecker sequence_checker; + pacer.SetPacingRates( + DataRate::BitsPerSec(kDefaultPacketSize * 8 * kPacketsToSend), + DataRate::Zero()); + pacer.EnsureStarted(); + pacer.EnqueuePackets( + GeneratePackets(RtpPacketMediaType::kVideo, kPacketsToSend)); + + // Expect all of them to be sent. + size_t packets_sent = 0; + Timestamp end_time = Timestamp::PlusInfinity(); + EXPECT_CALL(packet_router, SendPacket) + .WillRepeatedly([&](std::unique_ptr packet, + const PacedPacketInfo& cluster_info) { + ++packets_sent; + if (packets_sent == kPacketsToSend) { + end_time = time_controller.GetClock()->CurrentTime(); + } + EXPECT_EQ(sequence_checker.IsCurrent(), UsingWorkerThread(GetParam())); + }); + + const Timestamp start_time = time_controller.GetClock()->CurrentTime(); + + // Packets should be sent over a period of close to 1s. Expect a little + // lower than this since initial probing is a bit quicker. + time_controller.AdvanceTime(TimeDelta::Seconds(1)); + EXPECT_EQ(packets_sent, kPacketsToSend); + ASSERT_TRUE(end_time.IsFinite()); + // Because of half a second of burst, what would normally have been paced over + // ~1 second now takes ~0.5 seconds. + EXPECT_NEAR((end_time - start_time).ms(), 500.0, 50.0); +} + TEST_P(TaskQueuePacedSenderTest, ReschedulesProcessOnRateChange) { GlobalSimulatedTimeController time_controller(Timestamp::Millis(1234)); MockPacketRouter packet_router; diff --git a/third_party/libwebrtc/modules/portal/BUILD.gn b/third_party/libwebrtc/modules/portal/BUILD.gn new file mode 100644 index 000000000000..34884c3270ec --- /dev/null +++ b/third_party/libwebrtc/modules/portal/BUILD.gn @@ -0,0 +1,147 @@ +# Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. + +import("//build/config/linux/pkg_config.gni") +import("//tools/generate_stubs/rules.gni") +import("../../webrtc.gni") + +if ((is_linux || is_chromeos) && rtc_use_pipewire) { +if (!build_with_mozilla) { + pkg_config("gio") { + packages = [ + "gio-2.0", + "gio-unix-2.0", + ] + } + + pkg_config("pipewire") { + packages = [ "libpipewire-0.3" ] + if (!rtc_link_pipewire) { + ignore_libs = true + } + } + + pkg_config("gbm") { + packages = [ "gbm" ] + } + pkg_config("egl") { + packages = [ "egl" ] + } + pkg_config("epoxy") { + packages = [ "epoxy" ] + ignore_libs = true + } + pkg_config("libdrm") { + packages = [ "libdrm" ] + } + + if (!rtc_link_pipewire) { + # When libpipewire is not directly linked, use stubs to allow for dlopening of + # the binary. 
+ generate_stubs("pipewire_stubs") { + configs = [ + "../../:common_config", + ":pipewire", + ] + deps = [ "../../rtc_base" ] + extra_header = "pipewire_stub_header.fragment" + logging_function = "RTC_LOG(LS_VERBOSE)" + logging_include = "rtc_base/logging.h" + output_name = "pipewire_stubs" + path_from_source = "modules/portal" + sigs = [ "pipewire.sigs" ] + if (!build_with_chromium) { + macro_include = "rtc_base/system/no_cfi_icall.h" + macro_deps = [ "../../rtc_base/system:no_cfi_icall" ] + } + } + } + + config("pipewire_base") { + configs = [ + ":gio", + ":pipewire", + ] + } + + config("pipewire_all") { + configs = [ + ":pipewire_base", + ":gbm", + ":egl", + ":epoxy", + ":libdrm", + ] + } + + config("pipewire_config") { + defines = [ "WEBRTC_USE_PIPEWIRE" ] + + # Chromecast build config overrides `WEBRTC_USE_PIPEWIRE` even when + # `rtc_use_pipewire` is not set, which causes pipewire_config to not be + # included in targets. More details in: webrtc:13898 + if (is_linux && !is_castos) { + defines += [ "WEBRTC_USE_GIO" ] + } + } +} else { + config("pipewire_all") { + } + config("pipewire_config") { + } +} + + rtc_library("portal") { + sources = [ + "pipewire_utils.cc", + "pipewire_utils.h", + "portal_request_response.h", + "scoped_glib.cc", + "scoped_glib.h", + "xdg_desktop_portal_utils.cc", + "xdg_desktop_portal_utils.h", + "xdg_session_details.h", + ] + + configs += [ + ":gio", + ":pipewire", + ":pipewire_config", + ] + + deps = [ + "../../rtc_base:checks", + "../../rtc_base:logging", + "../../rtc_base:sanitizer", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] + + if (!rtc_link_pipewire) { + defines = [ "WEBRTC_DLOPEN_PIPEWIRE" ] + + deps += [ ":pipewire_stubs" ] + } + + if (build_with_mozilla) { + configs -= [ + ":gio", + ":pipewire", + ":pipewire_config", + ] + deps -= [ ":pipewire_stubs" ] + defines -= [ "WEBRTC_DLOPEN_PIPEWIRE" ] + public_deps = [ + "//third_party/pipewire", + "//third_party/drm", + "//third_party/gbm", + "//third_party/libepoxy" + ] + } + } +} + diff --git a/third_party/libwebrtc/modules/portal/OWNERS b/third_party/libwebrtc/modules/portal/OWNERS new file mode 100644 index 000000000000..e3bc32ee5c19 --- /dev/null +++ b/third_party/libwebrtc/modules/portal/OWNERS @@ -0,0 +1,2 @@ +alcooper@chromium.org +mfoltz@chromium.org diff --git a/third_party/libwebrtc/modules/desktop_capture/linux/wayland/pipewire.sigs b/third_party/libwebrtc/modules/portal/pipewire.sigs similarity index 100% rename from third_party/libwebrtc/modules/desktop_capture/linux/wayland/pipewire.sigs rename to third_party/libwebrtc/modules/portal/pipewire.sigs diff --git a/third_party/libwebrtc/modules/desktop_capture/linux/wayland/pipewire_stub_header.fragment b/third_party/libwebrtc/modules/portal/pipewire_stub_header.fragment similarity index 100% rename from third_party/libwebrtc/modules/desktop_capture/linux/wayland/pipewire_stub_header.fragment rename to third_party/libwebrtc/modules/portal/pipewire_stub_header.fragment diff --git a/third_party/libwebrtc/modules/desktop_capture/linux/wayland/pipewire_utils.cc b/third_party/libwebrtc/modules/portal/pipewire_utils.cc similarity index 68% rename from third_party/libwebrtc/modules/desktop_capture/linux/wayland/pipewire_utils.cc rename to third_party/libwebrtc/modules/portal/pipewire_utils.cc index 878e459681e1..fd96b4a4db2d 100644 --- a/third_party/libwebrtc/modules/desktop_capture/linux/wayland/pipewire_utils.cc +++ b/third_party/libwebrtc/modules/portal/pipewire_utils.cc @@ -8,12 +8,14 @@ * be found in the AUTHORS file in the root 
of the source tree. */ -#include "modules/desktop_capture/linux/wayland/pipewire_utils.h" +#include "modules/portal/pipewire_utils.h" + +#include #include "rtc_base/sanitizer.h" #if defined(WEBRTC_DLOPEN_PIPEWIRE) -#include "modules/desktop_capture/linux/wayland/pipewire_stubs.h" +#include "modules/portal/pipewire_stubs.h" #endif // defined(WEBRTC_DLOPEN_PIPEWIRE) namespace webrtc { @@ -23,10 +25,10 @@ bool InitializePipeWire() { #if defined(WEBRTC_DLOPEN_PIPEWIRE) static constexpr char kPipeWireLib[] = "libpipewire-0.3.so.0"; - using modules_desktop_capture_linux_wayland::InitializeStubs; - using modules_desktop_capture_linux_wayland::kModulePipewire; + using modules_portal::InitializeStubs; + using modules_portal::kModulePipewire; - modules_desktop_capture_linux_wayland::StubPathMap paths; + modules_portal::StubPathMap paths; // Check if the PipeWire library is available. paths[kModulePipewire].push_back(kPipeWireLib); @@ -39,4 +41,13 @@ bool InitializePipeWire() { #endif // defined(WEBRTC_DLOPEN_PIPEWIRE) } +PipeWireThreadLoopLock::PipeWireThreadLoopLock(pw_thread_loop* loop) + : loop_(loop) { + pw_thread_loop_lock(loop_); +} + +PipeWireThreadLoopLock::~PipeWireThreadLoopLock() { + pw_thread_loop_unlock(loop_); +} + } // namespace webrtc diff --git a/third_party/libwebrtc/modules/desktop_capture/linux/wayland/pipewire_utils.h b/third_party/libwebrtc/modules/portal/pipewire_utils.h similarity index 65% rename from third_party/libwebrtc/modules/desktop_capture/linux/wayland/pipewire_utils.h rename to third_party/libwebrtc/modules/portal/pipewire_utils.h index b785d395bd8a..0f5ccf3292b5 100644 --- a/third_party/libwebrtc/modules/desktop_capture/linux/wayland/pipewire_utils.h +++ b/third_party/libwebrtc/modules/portal/pipewire_utils.h @@ -8,8 +8,10 @@ * be found in the AUTHORS file in the root of the source tree. */ -#ifndef MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_PIPEWIRE_UTILS_H_ -#define MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_PIPEWIRE_UTILS_H_ +#ifndef MODULES_PORTAL_PIPEWIRE_UTILS_H_ +#define MODULES_PORTAL_PIPEWIRE_UTILS_H_ + +struct pw_thread_loop; namespace webrtc { @@ -18,6 +20,16 @@ namespace webrtc { // running nor does it establish a connection to one. bool InitializePipeWire(); +// Locks pw_thread_loop in the current scope +class PipeWireThreadLoopLock { + public: + explicit PipeWireThreadLoopLock(pw_thread_loop* loop); + ~PipeWireThreadLoopLock(); + + private: + pw_thread_loop* const loop_; +}; + } // namespace webrtc -#endif // MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_PIPEWIRE_UTILS_H_ +#endif // MODULES_PORTAL_PIPEWIRE_UTILS_H_ diff --git a/third_party/libwebrtc/modules/audio_coding/isac_fix_gn/moz.build b/third_party/libwebrtc/modules/portal/portal_gn/moz.build similarity index 57% rename from third_party/libwebrtc/modules/audio_coding/isac_fix_gn/moz.build rename to third_party/libwebrtc/modules/portal/portal_gn/moz.build index 0baaccb21063..92ca36986be3 100644 --- a/third_party/libwebrtc/modules/audio_coding/isac_fix_gn/moz.build +++ b/third_party/libwebrtc/modules/portal/portal_gn/moz.build @@ -5,29 +5,33 @@ ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ### ### DO NOT edit it by hand. 
### +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "gtk": + CXXFLAGS += CONFIG["MOZ_GTK3_CFLAGS"] COMPILE_FLAGS["OS_INCLUDES"] = [] AllowCompilerWarnings() -CXXFLAGS += [ - "-mfpu=neon" -] - DEFINES["ABSL_ALLOCATOR_NOTHROW"] = "1" DEFINES["RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY"] = True DEFINES["RTC_ENABLE_VP9"] = True -DEFINES["WEBRTC_ARCH_ARM"] = True -DEFINES["WEBRTC_ARCH_ARM_V7"] = True +DEFINES["USE_AURA"] = "1" +DEFINES["USE_GLIB"] = "1" +DEFINES["USE_NSS_CERTS"] = "1" +DEFINES["USE_OZONE"] = "1" +DEFINES["USE_UDEV"] = True DEFINES["WEBRTC_ENABLE_AVX2"] = True DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0" -DEFINES["WEBRTC_HAS_NEON"] = True DEFINES["WEBRTC_LIBRARY_IMPL"] = True DEFINES["WEBRTC_LINUX"] = True DEFINES["WEBRTC_MOZILLA_BUILD"] = True DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0" DEFINES["WEBRTC_POSIX"] = True DEFINES["WEBRTC_STRICT_FIELD_TRIALS"] = "0" +DEFINES["WEBRTC_USE_PIPEWIRE"] = True +DEFINES["_FILE_OFFSET_BITS"] = "64" DEFINES["_GNU_SOURCE"] = True +DEFINES["_LARGEFILE64_SOURCE"] = True +DEFINES["_LARGEFILE_SOURCE"] = True DEFINES["__STDC_CONSTANT_MACROS"] = True DEFINES["__STDC_FORMAT_MACROS"] = True @@ -37,15 +41,23 @@ FINAL_LIBRARY = "webrtc" LOCAL_INCLUDES += [ "!/ipc/ipdl/_ipdlheaders", "!/third_party/libwebrtc/gen", + "/gfx/angle/checkout/include/", "/ipc/chromium/src", + "/third_party/drm/drm/", + "/third_party/drm/drm/include/", + "/third_party/drm/drm/include/libdrm/", + "/third_party/gbm/gbm/", + "/third_party/libepoxy/libepoxy/include/", "/third_party/libwebrtc/", "/third_party/libwebrtc/third_party/abseil-cpp/", + "/third_party/pipewire/", "/tools/profiler/public" ] UNIFIED_SOURCES += [ - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/audio_decoder_isacfix.cc", - "/third_party/libwebrtc/modules/audio_coding/codecs/isac/fix/source/audio_encoder_isacfix.cc" + "/third_party/libwebrtc/modules/portal/pipewire_utils.cc", + "/third_party/libwebrtc/modules/portal/scoped_glib.cc", + "/third_party/libwebrtc/modules/portal/xdg_desktop_portal_utils.cc" ] if not CONFIG["MOZ_DEBUG"]: @@ -59,37 +71,29 @@ if CONFIG["MOZ_DEBUG"] == "1": DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1" DEFINES["_DEBUG"] = True -if CONFIG["OS_TARGET"] == "Android": +if CONFIG["CPU_ARCH"] == "aarch64": - DEFINES["ANDROID"] = True - DEFINES["ANDROID_NDK_VERSION_ROLL"] = "r22_1" - DEFINES["HAVE_SYS_UIO_H"] = True - DEFINES["WEBRTC_ANDROID"] = True - DEFINES["WEBRTC_ANDROID_OPENSLES"] = True + DEFINES["WEBRTC_ARCH_ARM64"] = True + DEFINES["WEBRTC_HAS_NEON"] = True - OS_LIBS += [ - "android_support", - "log", - "unwind" +if CONFIG["CPU_ARCH"] == "arm": + + CXXFLAGS += [ + "-mfpu=neon" ] -if CONFIG["OS_TARGET"] == "Linux": + DEFINES["WEBRTC_ARCH_ARM"] = True + DEFINES["WEBRTC_ARCH_ARM_V7"] = True + DEFINES["WEBRTC_HAS_NEON"] = True - DEFINES["USE_AURA"] = "1" - DEFINES["USE_GLIB"] = "1" - DEFINES["USE_NSS_CERTS"] = "1" - DEFINES["USE_OZONE"] = "1" - DEFINES["USE_UDEV"] = True - DEFINES["_FILE_OFFSET_BITS"] = "64" - DEFINES["_LARGEFILE64_SOURCE"] = True - DEFINES["_LARGEFILE_SOURCE"] = True +if CONFIG["CPU_ARCH"] == "x86": - OS_LIBS += [ - "rt" + CXXFLAGS += [ + "-msse2" ] if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux": DEFINES["USE_X11"] = "1" -Library("isac_fix_gn") +Library("portal_gn") diff --git a/third_party/libwebrtc/modules/portal/portal_request_response.h b/third_party/libwebrtc/modules/portal/portal_request_response.h new file mode 100644 index 000000000000..5fac4eb13779 --- /dev/null +++ b/third_party/libwebrtc/modules/portal/portal_request_response.h @@ -0,0 
+1,34 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_PORTAL_PORTAL_REQUEST_RESPONSE_H_ +#define MODULES_PORTAL_PORTAL_REQUEST_RESPONSE_H_ + +namespace webrtc { +namespace xdg_portal { + +// Contains type of responses that can be observed when making a request to +// a desktop portal interface. +enum class RequestResponse { + // Unknown, the initialized status. + kUnknown, + // Success, the request is carried out. + kSuccess, + // The user cancelled the interaction. + kUserCancelled, + // The user interaction was ended in some other way. + kError, + + kMaxValue = kError, +}; + +} // namespace xdg_portal +} // namespace webrtc +#endif // MODULES_PORTAL_PORTAL_REQUEST_RESPONSE_H_ diff --git a/third_party/libwebrtc/modules/desktop_capture/linux/wayland/scoped_glib.cc b/third_party/libwebrtc/modules/portal/scoped_glib.cc similarity index 94% rename from third_party/libwebrtc/modules/desktop_capture/linux/wayland/scoped_glib.cc rename to third_party/libwebrtc/modules/portal/scoped_glib.cc index 0d9a87d7fd46..cb4c80526ef8 100644 --- a/third_party/libwebrtc/modules/desktop_capture/linux/wayland/scoped_glib.cc +++ b/third_party/libwebrtc/modules/portal/scoped_glib.cc @@ -8,7 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "modules/desktop_capture/linux/wayland/scoped_glib.h" +#include "modules/portal/scoped_glib.h" namespace webrtc { diff --git a/third_party/libwebrtc/modules/portal/scoped_glib.h b/third_party/libwebrtc/modules/portal/scoped_glib.h new file mode 100644 index 000000000000..b2aaa2eb3a21 --- /dev/null +++ b/third_party/libwebrtc/modules/portal/scoped_glib.h @@ -0,0 +1,65 @@ +/* + * Copyright 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef MODULES_PORTAL_SCOPED_GLIB_H_ +#define MODULES_PORTAL_SCOPED_GLIB_H_ + +#include + +#include "rtc_base/checks.h" + +namespace webrtc { + +template +class Scoped { + public: + Scoped() {} + explicit Scoped(T* val) { ptr_ = val; } + ~Scoped() { RTC_DCHECK_NOTREACHED(); } + + T* operator->() const { return ptr_; } + + explicit operator bool() const { return ptr_ != nullptr; } + + bool operator!() const { return ptr_ == nullptr; } + + T* get() const { return ptr_; } + + T** receive() { + RTC_CHECK(!ptr_); + return &ptr_; + } + + Scoped& operator=(T* val) { + RTC_DCHECK(val); + ptr_ = val; + return *this; + } + + protected: + T* ptr_ = nullptr; +}; + +template <> +Scoped::~Scoped(); +template <> +Scoped::~Scoped(); +template <> +Scoped::~Scoped(); +template <> +Scoped::~Scoped(); +template <> +Scoped::~Scoped(); +template <> +Scoped::~Scoped(); + +} // namespace webrtc + +#endif // MODULES_PORTAL_SCOPED_GLIB_H_ diff --git a/third_party/libwebrtc/modules/desktop_capture/linux/wayland/xdg_desktop_portal_utils.cc b/third_party/libwebrtc/modules/portal/xdg_desktop_portal_utils.cc similarity index 98% rename from third_party/libwebrtc/modules/desktop_capture/linux/wayland/xdg_desktop_portal_utils.cc rename to third_party/libwebrtc/modules/portal/xdg_desktop_portal_utils.cc index 75dbf2bdf31d..271e0844637f 100644 --- a/third_party/libwebrtc/modules/desktop_capture/linux/wayland/xdg_desktop_portal_utils.cc +++ b/third_party/libwebrtc/modules/portal/xdg_desktop_portal_utils.cc @@ -7,12 +7,12 @@ * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. */ -#include "modules/desktop_capture/linux/wayland/xdg_desktop_portal_utils.h" +#include "modules/portal/xdg_desktop_portal_utils.h" #include #include "absl/strings/string_view.h" -#include "modules/desktop_capture/linux/wayland/scoped_glib.h" +#include "modules/portal/scoped_glib.h" #include "rtc_base/logging.h" namespace webrtc { diff --git a/third_party/libwebrtc/modules/portal/xdg_desktop_portal_utils.h b/third_party/libwebrtc/modules/portal/xdg_desktop_portal_utils.h new file mode 100644 index 000000000000..8571c64a2890 --- /dev/null +++ b/third_party/libwebrtc/modules/portal/xdg_desktop_portal_utils.h @@ -0,0 +1,111 @@ +/* + * Copyright 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef MODULES_PORTAL_XDG_DESKTOP_PORTAL_UTILS_H_ +#define MODULES_PORTAL_XDG_DESKTOP_PORTAL_UTILS_H_ + +#include +#include + +#include +#include + +#include "absl/strings/string_view.h" +#include "modules/portal/portal_request_response.h" +#include "modules/portal/scoped_glib.h" +#include "modules/portal/xdg_session_details.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" + +namespace webrtc { +namespace xdg_portal { + +constexpr char kDesktopBusName[] = "org.freedesktop.portal.Desktop"; +constexpr char kDesktopObjectPath[] = "/org/freedesktop/portal/desktop"; +constexpr char kDesktopRequestObjectPath[] = + "/org/freedesktop/portal/desktop/request"; +constexpr char kSessionInterfaceName[] = "org.freedesktop.portal.Session"; +constexpr char kRequestInterfaceName[] = "org.freedesktop.portal.Request"; +constexpr char kScreenCastInterfaceName[] = "org.freedesktop.portal.ScreenCast"; + +using ProxyRequestCallback = void (*)(GObject*, GAsyncResult*, gpointer); +using SessionRequestCallback = void (*)(GDBusProxy*, GAsyncResult*, gpointer); +using SessionRequestResponseSignalHandler = void (*)(GDBusConnection*, + const char*, + const char*, + const char*, + const char*, + GVariant*, + gpointer); +using StartRequestResponseSignalHandler = void (*)(GDBusConnection*, + const char*, + const char*, + const char*, + const char*, + GVariant*, + gpointer); +using SessionStartRequestedHandler = void (*)(GDBusProxy*, + GAsyncResult*, + gpointer); + +std::string RequestResponseToString(RequestResponse request); + +RequestResponse RequestResponseFromPortalResponse(uint32_t portal_response); + +// Returns a string path for signal handle based on the provided connection and +// token. +std::string PrepareSignalHandle(absl::string_view token, + GDBusConnection* connection); + +// Sets up the callback to execute when a response signal is received for the +// given object. +uint32_t SetupRequestResponseSignal(absl::string_view object_path, + const GDBusSignalCallback callback, + gpointer user_data, + GDBusConnection* connection); + +void RequestSessionProxy(absl::string_view interface_name, + const ProxyRequestCallback proxy_request_callback, + GCancellable* cancellable, + gpointer user_data); + +void SetupSessionRequestHandlers( + absl::string_view portal_prefix, + const SessionRequestCallback session_request_callback, + const SessionRequestResponseSignalHandler request_response_signale_handler, + GDBusConnection* connection, + GDBusProxy* proxy, + GCancellable* cancellable, + std::string& portal_handle, + guint& session_request_signal_id, + gpointer user_data); + +void StartSessionRequest( + absl::string_view prefix, + absl::string_view session_handle, + const StartRequestResponseSignalHandler signal_handler, + const SessionStartRequestedHandler session_started_handler, + GDBusProxy* proxy, + GDBusConnection* connection, + GCancellable* cancellable, + guint& start_request_signal_id, + std::string& start_handle, + gpointer user_data); + +// Tears down the portal session and cleans up related objects. 
+void TearDownSession(absl::string_view session_handle, + GDBusProxy* proxy, + GCancellable* cancellable, + GDBusConnection* connection); + +} // namespace xdg_portal +} // namespace webrtc + +#endif // MODULES_PORTAL_XDG_DESKTOP_PORTAL_UTILS_H_ diff --git a/third_party/libwebrtc/modules/portal/xdg_session_details.h b/third_party/libwebrtc/modules/portal/xdg_session_details.h new file mode 100644 index 000000000000..ab52508c2f22 --- /dev/null +++ b/third_party/libwebrtc/modules/portal/xdg_session_details.h @@ -0,0 +1,33 @@ +/* + * Copyright 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_PORTAL_XDG_SESSION_DETAILS_H_ +#define MODULES_PORTAL_XDG_SESSION_DETAILS_H_ + +#include + +#include + +namespace webrtc { +namespace xdg_portal { + +// Details of the session associated with XDG desktop portal session. Portal API +// calls can be invoked by utilizing the information here. +struct SessionDetails { + GDBusProxy* proxy = nullptr; + GCancellable* cancellable = nullptr; + std::string session_handle; + uint32_t pipewire_stream_node_id = 0; +}; + +} // namespace xdg_portal +} // namespace webrtc + +#endif // MODULES_PORTAL_XDG_SESSION_DETAILS_H_ diff --git a/third_party/libwebrtc/modules/remote_bitrate_estimator/aimd_rate_control.cc b/third_party/libwebrtc/modules/remote_bitrate_estimator/aimd_rate_control.cc index b625a745dfd5..6c3638b59fd5 100644 --- a/third_party/libwebrtc/modules/remote_bitrate_estimator/aimd_rate_control.cc +++ b/third_party/libwebrtc/modules/remote_bitrate_estimator/aimd_rate_control.cc @@ -39,10 +39,6 @@ bool IsEnabled(const FieldTrialsView& field_trials, absl::string_view key) { return absl::StartsWith(field_trials.Lookup(key), "Enabled"); } -bool IsNotDisabled(const FieldTrialsView& field_trials, absl::string_view key) { - return !absl::StartsWith(field_trials.Lookup(key), "Disabled"); -} - double ReadBackoffFactor(const FieldTrialsView& key_value_config) { std::string experiment_string = key_value_config.Lookup(kBweBackOffFactorExperiment); @@ -86,13 +82,9 @@ AimdRateControl::AimdRateControl(const FieldTrialsView* key_value_config, in_alr_(false), rtt_(kDefaultRtt), send_side_(send_side), - in_experiment_(!AdaptiveThresholdExperimentIsDisabled(*key_value_config)), no_bitrate_increase_in_alr_( IsEnabled(*key_value_config, "WebRTC-DontIncreaseDelayBasedBweInAlr")), - estimate_bounded_backoff_( - IsNotDisabled(*key_value_config, - "WebRTC-Bwe-EstimateBoundedBackoff")), initial_backoff_interval_("initial_backoff_interval"), link_capacity_fix_("link_capacity_fix") { ParseFieldTrial( @@ -233,7 +225,7 @@ double AimdRateControl::GetNearMaxIncreaseRateBpsPerSecond() const { // Approximate the over-use estimator delay to 100 ms. 
TimeDelta response_time = rtt_ + TimeDelta::Millis(100); - if (in_experiment_) + response_time = response_time * 2; double increase_rate_bps_per_second = (avg_packet_size / response_time).bps(); @@ -382,8 +374,7 @@ DataRate AimdRateControl::ClampBitrate(DataRate new_bitrate) const { } new_bitrate = std::min(upper_bound, new_bitrate); } - if (estimate_bounded_backoff_ && network_estimate_ && - network_estimate_->link_capacity_lower.IsFinite() && + if (network_estimate_ && network_estimate_->link_capacity_lower.IsFinite() && new_bitrate < current_bitrate_) { new_bitrate = std::min( current_bitrate_, diff --git a/third_party/libwebrtc/modules/remote_bitrate_estimator/aimd_rate_control.h b/third_party/libwebrtc/modules/remote_bitrate_estimator/aimd_rate_control.h index 6c770cdc457b..8321fd523901 100644 --- a/third_party/libwebrtc/modules/remote_bitrate_estimator/aimd_rate_control.h +++ b/third_party/libwebrtc/modules/remote_bitrate_estimator/aimd_rate_control.h @@ -100,13 +100,9 @@ class AimdRateControl { bool in_alr_; TimeDelta rtt_; const bool send_side_; - const bool in_experiment_; // Allow the delay based estimate to only increase as long as application // limited region (alr) is not detected. const bool no_bitrate_increase_in_alr_; - // Use estimated link capacity lower bound if it is higher than the - // acknowledged rate when backing off due to overuse. - const bool estimate_bounded_backoff_; // If false, uses estimated link capacity upper bound * // `estimate_bounded_increase_ratio_` as upper limit for the estimate. FieldTrialFlag disable_estimate_bounded_increase_{"Disabled"}; diff --git a/third_party/libwebrtc/modules/remote_bitrate_estimator/overuse_detector.cc b/third_party/libwebrtc/modules/remote_bitrate_estimator/overuse_detector.cc index 672822bbcd2a..bd2d7568768f 100644 --- a/third_party/libwebrtc/modules/remote_bitrate_estimator/overuse_detector.cc +++ b/third_party/libwebrtc/modules/remote_bitrate_estimator/overuse_detector.cc @@ -22,57 +22,22 @@ namespace webrtc { -const char kAdaptiveThresholdExperiment[] = "WebRTC-AdaptiveBweThreshold"; -const char kEnabledPrefix[] = "Enabled"; -const size_t kEnabledPrefixLength = sizeof(kEnabledPrefix) - 1; -const char kDisabledPrefix[] = "Disabled"; -const size_t kDisabledPrefixLength = sizeof(kDisabledPrefix) - 1; - const double kMaxAdaptOffsetMs = 15.0; const double kOverUsingTimeThreshold = 10; const int kMaxNumDeltas = 60; -bool AdaptiveThresholdExperimentIsDisabled( - const FieldTrialsView& key_value_config) { - std::string experiment_string = - key_value_config.Lookup(kAdaptiveThresholdExperiment); - const size_t kMinExperimentLength = kDisabledPrefixLength; - if (experiment_string.length() < kMinExperimentLength) - return false; - return experiment_string.substr(0, kDisabledPrefixLength) == kDisabledPrefix; -} - -// Gets thresholds from the experiment name following the format -// "WebRTC-AdaptiveBweThreshold/Enabled-0.5,0.002/". 
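Editorial aside: with the experiment flag removed, the response-time doubling above is unconditional, so the near-max additive increase works out as in this back-of-the-envelope sketch. The 1200-byte average packet size is only an assumption for illustration; the actual avg_packet_size is computed earlier in that function, outside the shown context.

  // Rough shape of the near-max increase rate after this change:
  //   response_time = 2 * (rtt + 100 ms)
  //   increase_rate ~ avg_packet_size / response_time
  double NearMaxIncreaseBpsPerSecond(double avg_packet_size_bytes,
                                     double rtt_ms) {
    const double response_time_s = 2.0 * (rtt_ms + 100.0) / 1000.0;
    return avg_packet_size_bytes * 8.0 / response_time_s;
  }
  // With rtt = 200 ms and 1200-byte packets: 9600 bits / 0.6 s = 16000 bps
  // of additional bitrate per second near the maximum.

The removed ReadExperimentConstants helper, which parsed the field-trial format documented in the comment just above, continues below.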
-bool ReadExperimentConstants(const FieldTrialsView& key_value_config, - double* k_up, - double* k_down) { - std::string experiment_string = - key_value_config.Lookup(kAdaptiveThresholdExperiment); - const size_t kMinExperimentLength = kEnabledPrefixLength + 3; - if (experiment_string.length() < kMinExperimentLength || - experiment_string.substr(0, kEnabledPrefixLength) != kEnabledPrefix) - return false; - return sscanf(experiment_string.substr(kEnabledPrefixLength + 1).c_str(), - "%lf,%lf", k_up, k_down) == 2; -} - OveruseDetector::OveruseDetector(const FieldTrialsView* key_value_config) // Experiment is on by default, but can be disabled with finch by setting // the field trial string to "WebRTC-AdaptiveBweThreshold/Disabled/". - : in_experiment_(!AdaptiveThresholdExperimentIsDisabled(*key_value_config)), - k_up_(0.0087), + : k_up_(0.0087), k_down_(0.039), - overusing_time_threshold_(100), + overusing_time_threshold_(kOverUsingTimeThreshold), threshold_(12.5), last_update_ms_(-1), prev_offset_(0.0), time_over_using_(-1), overuse_counter_(0), - hypothesis_(BandwidthUsage::kBwNormal) { - if (!AdaptiveThresholdExperimentIsDisabled(*key_value_config)) - InitializeExperiment(*key_value_config); -} + hypothesis_(BandwidthUsage::kBwNormal) {} OveruseDetector::~OveruseDetector() {} @@ -125,9 +90,6 @@ BandwidthUsage OveruseDetector::Detect(double offset, } void OveruseDetector::UpdateThreshold(double modified_offset, int64_t now_ms) { - if (!in_experiment_) - return; - if (last_update_ms_ == -1) last_update_ms_ = now_ms; @@ -146,15 +108,4 @@ void OveruseDetector::UpdateThreshold(double modified_offset, int64_t now_ms) { last_update_ms_ = now_ms; } -void OveruseDetector::InitializeExperiment( - const FieldTrialsView& key_value_config) { - RTC_DCHECK(in_experiment_); - double k_up = 0.0; - double k_down = 0.0; - overusing_time_threshold_ = kOverUsingTimeThreshold; - if (ReadExperimentConstants(key_value_config, &k_up, &k_down)) { - k_up_ = k_up; - k_down_ = k_down; - } -} } // namespace webrtc diff --git a/third_party/libwebrtc/modules/remote_bitrate_estimator/overuse_detector.h b/third_party/libwebrtc/modules/remote_bitrate_estimator/overuse_detector.h index dfaea9187a54..07ae8734c485 100644 --- a/third_party/libwebrtc/modules/remote_bitrate_estimator/overuse_detector.h +++ b/third_party/libwebrtc/modules/remote_bitrate_estimator/overuse_detector.h @@ -17,9 +17,6 @@ namespace webrtc { -bool AdaptiveThresholdExperimentIsDisabled( - const FieldTrialsView& key_value_config); - class OveruseDetector { public: explicit OveruseDetector(const FieldTrialsView* key_value_config); @@ -46,10 +43,9 @@ class OveruseDetector { void UpdateThreshold(double modified_offset, int64_t now_ms); void InitializeExperiment(const FieldTrialsView& key_value_config); - bool in_experiment_; - double k_up_; - double k_down_; - double overusing_time_threshold_; + const double k_up_; + const double k_down_; + const double overusing_time_threshold_; double threshold_; int64_t last_update_ms_; double prev_offset_; diff --git a/third_party/libwebrtc/modules/remote_bitrate_estimator/overuse_detector_unittest.cc b/third_party/libwebrtc/modules/remote_bitrate_estimator/overuse_detector_unittest.cc index 8420af96a17c..e91d4f0d22dd 100644 --- a/third_party/libwebrtc/modules/remote_bitrate_estimator/overuse_detector_unittest.cc +++ b/third_party/libwebrtc/modules/remote_bitrate_estimator/overuse_detector_unittest.cc @@ -21,7 +21,6 @@ #include "modules/remote_bitrate_estimator/inter_arrival.h" #include 
"modules/remote_bitrate_estimator/overuse_estimator.h" #include "rtc_base/random.h" -#include "test/field_trial.h" #include "test/gtest.h" namespace webrtc { @@ -218,69 +217,6 @@ TEST_F(OveruseDetectorTest, SimpleOveruse100kbit10fps) { EXPECT_EQ(7, frames_until_overuse); } -TEST_F(OveruseDetectorTest, DISABLED_OveruseWithHighVariance100Kbit10fps) { - uint32_t frame_duration_ms = 100; - uint32_t drift_per_frame_ms = 10; - uint32_t rtp_timestamp = frame_duration_ms * 90; - size_t packet_size = 1200; - int offset = 10; - - // Run 1000 samples to reach steady state. - for (int i = 0; i < 1000; ++i) { - UpdateDetector(rtp_timestamp, now_ms_, packet_size); - rtp_timestamp += frame_duration_ms * 90; - if (i % 2) { - offset = random_.Rand(0, 49); - now_ms_ += frame_duration_ms - offset; - } else { - now_ms_ += frame_duration_ms + offset; - } - EXPECT_EQ(BandwidthUsage::kBwNormal, overuse_detector_->State()); - } - // Simulate a higher send pace, that is too high. - // Above noise generate a standard deviation of approximately 28 ms. - // Total build up of 150 ms. - for (int j = 0; j < 15; ++j) { - UpdateDetector(rtp_timestamp, now_ms_, packet_size); - now_ms_ += frame_duration_ms + drift_per_frame_ms; - rtp_timestamp += frame_duration_ms * 90; - EXPECT_EQ(BandwidthUsage::kBwNormal, overuse_detector_->State()); - } - UpdateDetector(rtp_timestamp, now_ms_, packet_size); - EXPECT_EQ(BandwidthUsage::kBwOverusing, overuse_detector_->State()); -} - -TEST_F(OveruseDetectorTest, DISABLED_OveruseWithLowVariance100Kbit10fps) { - uint32_t frame_duration_ms = 100; - uint32_t drift_per_frame_ms = 1; - uint32_t rtp_timestamp = frame_duration_ms * 90; - size_t packet_size = 1200; - int offset = 10; - - // Run 1000 samples to reach steady state. - for (int i = 0; i < 1000; ++i) { - UpdateDetector(rtp_timestamp, now_ms_, packet_size); - rtp_timestamp += frame_duration_ms * 90; - if (i % 2) { - offset = random_.Rand(0, 1); - now_ms_ += frame_duration_ms - offset; - } else { - now_ms_ += frame_duration_ms + offset; - } - EXPECT_EQ(BandwidthUsage::kBwNormal, overuse_detector_->State()); - } - // Simulate a higher send pace, that is too high. - // Total build up of 6 ms. 
- for (int j = 0; j < 6; ++j) { - UpdateDetector(rtp_timestamp, now_ms_, packet_size); - now_ms_ += frame_duration_ms + drift_per_frame_ms; - rtp_timestamp += frame_duration_ms * 90; - EXPECT_EQ(BandwidthUsage::kBwNormal, overuse_detector_->State()); - } - UpdateDetector(rtp_timestamp, now_ms_, packet_size); - EXPECT_EQ(BandwidthUsage::kBwOverusing, overuse_detector_->State()); -} - TEST_F(OveruseDetectorTest, OveruseWithLowVariance2000Kbit30fps) { uint32_t frame_duration_ms = 33; uint32_t drift_per_frame_ms = 1; @@ -322,13 +258,7 @@ TEST_F(OveruseDetectorTest, OveruseWithLowVariance2000Kbit30fps) { EXPECT_EQ(BandwidthUsage::kBwOverusing, overuse_detector_->State()); } -#if defined(WEBRTC_ANDROID) -#define MAYBE_LowGaussianVariance30Kbit3fps \ - DISABLED_LowGaussianVariance30Kbit3fps -#else -#define MAYBE_LowGaussianVariance30Kbit3fps LowGaussianVariance30Kbit3fps -#endif -TEST_F(OveruseDetectorTest, MAYBE_LowGaussianVariance30Kbit3fps) { +TEST_F(OveruseDetectorTest, LowGaussianVariance30Kbit3fps) { size_t packet_size = 1200; int packets_per_frame = 1; int frame_duration_ms = 333; @@ -388,13 +318,7 @@ TEST_F(OveruseDetectorTest, HighGaussianVarianceFastDrift30Kbit3fps) { EXPECT_EQ(4, frames_until_overuse); } -#if defined(WEBRTC_ANDROID) -#define MAYBE_LowGaussianVariance100Kbit5fps \ - DISABLED_LowGaussianVariance100Kbit5fps -#else -#define MAYBE_LowGaussianVariance100Kbit5fps LowGaussianVariance100Kbit5fps -#endif -TEST_F(OveruseDetectorTest, MAYBE_LowGaussianVariance100Kbit5fps) { +TEST_F(OveruseDetectorTest, LowGaussianVariance100Kbit5fps) { size_t packet_size = 1200; int packets_per_frame = 2; int frame_duration_ms = 200; @@ -409,13 +333,7 @@ TEST_F(OveruseDetectorTest, MAYBE_LowGaussianVariance100Kbit5fps) { EXPECT_EQ(20, frames_until_overuse); } -#if defined(WEBRTC_ANDROID) -#define MAYBE_HighGaussianVariance100Kbit5fps \ - DISABLED_HighGaussianVariance100Kbit5fps -#else -#define MAYBE_HighGaussianVariance100Kbit5fps HighGaussianVariance100Kbit5fps -#endif -TEST_F(OveruseDetectorTest, MAYBE_HighGaussianVariance100Kbit5fps) { +TEST_F(OveruseDetectorTest, HighGaussianVariance100Kbit5fps) { size_t packet_size = 1200; int packets_per_frame = 2; int frame_duration_ms = 200; @@ -430,13 +348,7 @@ TEST_F(OveruseDetectorTest, MAYBE_HighGaussianVariance100Kbit5fps) { EXPECT_EQ(44, frames_until_overuse); } -#if defined(WEBRTC_ANDROID) -#define MAYBE_LowGaussianVariance100Kbit10fps \ - DISABLED_LowGaussianVariance100Kbit10fps -#else -#define MAYBE_LowGaussianVariance100Kbit10fps LowGaussianVariance100Kbit10fps -#endif -TEST_F(OveruseDetectorTest, MAYBE_LowGaussianVariance100Kbit10fps) { +TEST_F(OveruseDetectorTest, LowGaussianVariance100Kbit10fps) { size_t packet_size = 1200; int packets_per_frame = 1; int frame_duration_ms = 100; @@ -451,13 +363,7 @@ TEST_F(OveruseDetectorTest, MAYBE_LowGaussianVariance100Kbit10fps) { EXPECT_EQ(20, frames_until_overuse); } -#if defined(WEBRTC_ANDROID) -#define MAYBE_HighGaussianVariance100Kbit10fps \ - DISABLED_HighGaussianVariance100Kbit10fps -#else -#define MAYBE_HighGaussianVariance100Kbit10fps HighGaussianVariance100Kbit10fps -#endif -TEST_F(OveruseDetectorTest, MAYBE_HighGaussianVariance100Kbit10fps) { +TEST_F(OveruseDetectorTest, HighGaussianVariance100Kbit10fps) { size_t packet_size = 1200; int packets_per_frame = 1; int frame_duration_ms = 100; @@ -472,13 +378,7 @@ TEST_F(OveruseDetectorTest, MAYBE_HighGaussianVariance100Kbit10fps) { EXPECT_EQ(44, frames_until_overuse); } -#if defined(WEBRTC_ANDROID) -#define MAYBE_LowGaussianVariance300Kbit30fps \ - 
DISABLED_LowGaussianVariance300Kbit30fps -#else -#define MAYBE_LowGaussianVariance300Kbit30fps LowGaussianVariance300Kbit30fps -#endif -TEST_F(OveruseDetectorTest, MAYBE_LowGaussianVariance300Kbit30fps) { +TEST_F(OveruseDetectorTest, LowGaussianVariance300Kbit30fps) { size_t packet_size = 1200; int packets_per_frame = 1; int frame_duration_ms = 33; @@ -538,13 +438,7 @@ TEST_F(OveruseDetectorTest, HighGaussianVarianceFastDrift300Kbit30fps) { EXPECT_EQ(10, frames_until_overuse); } -#if defined(WEBRTC_ANDROID) -#define MAYBE_LowGaussianVariance1000Kbit30fps \ - DISABLED_LowGaussianVariance1000Kbit30fps -#else -#define MAYBE_LowGaussianVariance1000Kbit30fps LowGaussianVariance1000Kbit30fps -#endif -TEST_F(OveruseDetectorTest, MAYBE_LowGaussianVariance1000Kbit30fps) { +TEST_F(OveruseDetectorTest, LowGaussianVariance1000Kbit30fps) { size_t packet_size = 1200; int packets_per_frame = 3; int frame_duration_ms = 33; @@ -604,13 +498,7 @@ TEST_F(OveruseDetectorTest, HighGaussianVarianceFastDrift1000Kbit30fps) { EXPECT_EQ(10, frames_until_overuse); } -#if defined(WEBRTC_ANDROID) -#define MAYBE_LowGaussianVariance2000Kbit30fps \ - DISABLED_LowGaussianVariance2000Kbit30fps -#else -#define MAYBE_LowGaussianVariance2000Kbit30fps LowGaussianVariance2000Kbit30fps -#endif -TEST_F(OveruseDetectorTest, MAYBE_LowGaussianVariance2000Kbit30fps) { +TEST_F(OveruseDetectorTest, LowGaussianVariance2000Kbit30fps) { size_t packet_size = 1200; int packets_per_frame = 6; int frame_duration_ms = 33; @@ -670,22 +558,7 @@ TEST_F(OveruseDetectorTest, HighGaussianVarianceFastDrift2000Kbit30fps) { EXPECT_EQ(10, frames_until_overuse); } -class OveruseDetectorExperimentTest : public OveruseDetectorTest { - public: - OveruseDetectorExperimentTest() - : override_field_trials_( - "WebRTC-AdaptiveBweThreshold/Enabled-0.01,0.00018/") {} - - protected: - void SetUp() override { - overuse_detector_.reset(new OveruseDetector(&field_trials_)); - } - - test::ScopedFieldTrials override_field_trials_; - const FieldTrialBasedConfig field_trials_; -}; - -TEST_F(OveruseDetectorExperimentTest, ThresholdAdapts) { +TEST_F(OveruseDetectorTest, ThresholdAdapts) { const double kOffset = 0.21; double kTsDelta = 3000.0; int64_t now_ms = 0; @@ -756,7 +629,7 @@ TEST_F(OveruseDetectorExperimentTest, ThresholdAdapts) { EXPECT_TRUE(overuse_detected); } -TEST_F(OveruseDetectorExperimentTest, DoesntAdaptToSpikes) { +TEST_F(OveruseDetectorTest, DoesntAdaptToSpikes) { const double kOffset = 1.0; const double kLargeOffset = 20.0; double kTsDelta = 3000.0; diff --git a/third_party/libwebrtc/modules/remote_bitrate_estimator/packet_arrival_map.cc b/third_party/libwebrtc/modules/remote_bitrate_estimator/packet_arrival_map.cc index 16d400e227dd..71888dfaf247 100644 --- a/third_party/libwebrtc/modules/remote_bitrate_estimator/packet_arrival_map.cc +++ b/third_party/libwebrtc/modules/remote_bitrate_estimator/packet_arrival_map.cc @@ -151,7 +151,6 @@ void PacketArrivalTimeMap::EraseTo(int64_t sequence_number) { } // Remove some. 
begin_sequence_number_ = sequence_number; - RTC_DCHECK(has_received(begin_sequence_number_)); AdjustToSize(end_sequence_number_ - begin_sequence_number_); } diff --git a/third_party/libwebrtc/modules/remote_bitrate_estimator/packet_arrival_map_test.cc b/third_party/libwebrtc/modules/remote_bitrate_estimator/packet_arrival_map_test.cc index 73c532d9b860..f11f5a147b45 100644 --- a/third_party/libwebrtc/modules/remote_bitrate_estimator/packet_arrival_map_test.cc +++ b/third_party/libwebrtc/modules/remote_bitrate_estimator/packet_arrival_map_test.cc @@ -264,5 +264,28 @@ TEST(PacketArrivalMapTest, EraseAllRemembersBeginningSeqNbr) { EXPECT_FALSE(map.has_received(51)); } +TEST(PacketArrivalMapTest, EraseToMissingSequenceNumber) { + PacketArrivalTimeMap map; + + map.AddPacket(37, Timestamp::Millis(10)); + map.AddPacket(39, Timestamp::Millis(11)); + map.AddPacket(40, Timestamp::Millis(12)); + map.AddPacket(41, Timestamp::Millis(13)); + + map.EraseTo(38); + + map.AddPacket(42, Timestamp::Millis(40)); + + EXPECT_EQ(map.begin_sequence_number(), 38); + EXPECT_EQ(map.end_sequence_number(), 43); + + EXPECT_FALSE(map.has_received(37)); + EXPECT_FALSE(map.has_received(38)); + EXPECT_TRUE(map.has_received(39)); + EXPECT_TRUE(map.has_received(40)); + EXPECT_TRUE(map.has_received(41)); + EXPECT_TRUE(map.has_received(42)); +} + } // namespace } // namespace webrtc diff --git a/third_party/libwebrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc b/third_party/libwebrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc index 6f442e5e2c1b..8f15912a4906 100644 --- a/third_party/libwebrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc +++ b/third_party/libwebrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc @@ -63,7 +63,7 @@ RemoteBitrateEstimatorSingleStream::RemoteBitrateEstimatorSingleStream( : clock_(clock), incoming_bitrate_(kBitrateWindowMs, 8000), last_valid_incoming_bitrate_(0), - remote_rate_(new AimdRateControl(&field_trials_)), + remote_rate_(&field_trials_), observer_(observer), last_process_time_(-1), process_interval_ms_(kProcessIntervalMs), @@ -144,7 +144,7 @@ void RemoteBitrateEstimatorSingleStream::IncomingPacket( incoming_bitrate_.Rate(now_ms); if (incoming_bitrate_bps && (prior_state != BandwidthUsage::kBwOverusing || - GetRemoteRate()->TimeToReduceFurther( + remote_rate_.TimeToReduceFurther( Timestamp::Millis(now_ms), DataRate::BitsPerSec(*incoming_bitrate_bps)))) { // The first overuse should immediately trigger a new estimate. 
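Editorial aside on the packet_arrival_map change a few hunks above: after EraseTo() is called with a sequence number that was never received, begin_sequence_number() legitimately points at a gap, which is why the RTC_DCHECK(has_received(begin_sequence_number_)) had to be dropped. A condensed restatement of the scenario the new EraseToMissingSequenceNumber test covers (abbreviated, assumes the includes and namespace of the test file, not a drop-in snippet):

  PacketArrivalTimeMap map;
  map.AddPacket(37, Timestamp::Millis(10));
  map.AddPacket(39, Timestamp::Millis(11));
  // 38 was never received; erasing up to it leaves the begin marker on a gap.
  map.EraseTo(38);
  // begin_sequence_number() == 38 and has_received(38) == false, so the
  // removed DCHECK would have fired here even though the state is valid.

The single-stream estimator diff then continues below, with UpdateEstimate and the remaining call sites switching to the by-value remote_rate_ member.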
@@ -193,14 +193,13 @@ void RemoteBitrateEstimatorSingleStream::UpdateEstimate(int64_t now_ms) { if (overuse_detectors_.empty()) { return; } - AimdRateControl* remote_rate = GetRemoteRate(); const RateControlInput input( bw_state, OptionalRateFromOptionalBps(incoming_bitrate_.Rate(now_ms))); uint32_t target_bitrate = - remote_rate->Update(&input, Timestamp::Millis(now_ms)).bps(); - if (remote_rate->ValidEstimate()) { - process_interval_ms_ = remote_rate->GetFeedbackInterval().ms(); + remote_rate_.Update(&input, Timestamp::Millis(now_ms)).bps(); + if (remote_rate_.ValidEstimate()) { + process_interval_ms_ = remote_rate_.GetFeedbackInterval().ms(); RTC_DCHECK_GT(process_interval_ms_, 0); std::vector ssrcs; GetSsrcs(&ssrcs); @@ -212,7 +211,7 @@ void RemoteBitrateEstimatorSingleStream::UpdateEstimate(int64_t now_ms) { void RemoteBitrateEstimatorSingleStream::OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) { MutexLock lock(&mutex_); - GetRemoteRate()->SetRtt(TimeDelta::Millis(avg_rtt_ms)); + remote_rate_.SetRtt(TimeDelta::Millis(avg_rtt_ms)); } void RemoteBitrateEstimatorSingleStream::RemoveStream(unsigned int ssrc) { @@ -226,10 +225,10 @@ void RemoteBitrateEstimatorSingleStream::RemoveStream(unsigned int ssrc) { DataRate RemoteBitrateEstimatorSingleStream::LatestEstimate() const { MutexLock lock(&mutex_); - if (!remote_rate_->ValidEstimate() || overuse_detectors_.empty()) { + if (!remote_rate_.ValidEstimate() || overuse_detectors_.empty()) { return DataRate::Zero(); } - return remote_rate_->LatestEstimate(); + return remote_rate_.LatestEstimate(); } void RemoteBitrateEstimatorSingleStream::GetSsrcs( @@ -243,10 +242,4 @@ void RemoteBitrateEstimatorSingleStream::GetSsrcs( } } -AimdRateControl* RemoteBitrateEstimatorSingleStream::GetRemoteRate() { - if (!remote_rate_) - remote_rate_.reset(new AimdRateControl(&field_trials_)); - return remote_rate_.get(); -} - } // namespace webrtc diff --git a/third_party/libwebrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h b/third_party/libwebrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h index d62f922e021b..699f259d48d3 100644 --- a/third_party/libwebrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h +++ b/third_party/libwebrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h @@ -15,7 +15,6 @@ #include #include -#include #include #include "api/transport/field_trial_based_config.h" @@ -65,16 +64,12 @@ class RemoteBitrateEstimatorSingleStream : public RemoteBitrateEstimator { void GetSsrcs(std::vector* ssrcs) const RTC_SHARED_LOCKS_REQUIRED(mutex_); - // Returns `remote_rate_` if the pointed to object exists, - // otherwise creates it. 
- AimdRateControl* GetRemoteRate() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - Clock* const clock_; const FieldTrialBasedConfig field_trials_; SsrcOveruseEstimatorMap overuse_detectors_ RTC_GUARDED_BY(mutex_); RateStatistics incoming_bitrate_ RTC_GUARDED_BY(mutex_); uint32_t last_valid_incoming_bitrate_ RTC_GUARDED_BY(mutex_); - std::unique_ptr remote_rate_ RTC_GUARDED_BY(mutex_); + AimdRateControl remote_rate_ RTC_GUARDED_BY(mutex_); RemoteBitrateObserver* const observer_ RTC_GUARDED_BY(mutex_); mutable Mutex mutex_; int64_t last_process_time_; diff --git a/third_party/libwebrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.cc b/third_party/libwebrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.cc index dd9fbbc94489..598279e0af2f 100644 --- a/third_party/libwebrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.cc +++ b/third_party/libwebrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.cc @@ -27,8 +27,11 @@ namespace webrtc { namespace { // The maximum allowed value for a timestamp in milliseconds. This is lower // than the numerical limit since we often convert to microseconds. -static constexpr int64_t kMaxTimeMs = - std::numeric_limits::max() / 1000; +constexpr int64_t kMaxTimeMs = std::numeric_limits::max() / 1000; +constexpr TimeDelta kBackWindow = TimeDelta::Millis(500); +constexpr TimeDelta kMinInterval = TimeDelta::Millis(50); +constexpr TimeDelta kMaxInterval = TimeDelta::Millis(250); +constexpr TimeDelta kDefaultInterval = TimeDelta::Millis(100); TimeDelta GetAbsoluteSendTimeDelta(uint32_t new_sendtime, uint32_t previous_sendtime) { @@ -48,22 +51,20 @@ TimeDelta GetAbsoluteSendTimeDelta(uint32_t new_sendtime, RemoteEstimatorProxy::RemoteEstimatorProxy( TransportFeedbackSender feedback_sender, - const FieldTrialsView* key_value_config, NetworkStateEstimator* network_state_estimator) : feedback_sender_(std::move(feedback_sender)), - send_config_(key_value_config), last_process_time_(Timestamp::MinusInfinity()), network_state_estimator_(network_state_estimator), media_ssrc_(0), feedback_packet_count_(0), packet_overhead_(DataSize::Zero()), - send_interval_(send_config_.default_interval.Get()), + send_interval_(kDefaultInterval), send_periodic_feedback_(true), previous_abs_send_time_(0), abs_send_timestamp_(Timestamp::Zero()) { RTC_LOG(LS_INFO) - << "Maximum interval between transport feedback RTCP messages (ms): " - << send_config_.max_interval->ms(); + << "Maximum interval between transport feedback RTCP messages: " + << kMaxInterval; } RemoteEstimatorProxy::~RemoteEstimatorProxy() {} @@ -72,10 +73,10 @@ void RemoteEstimatorProxy::MaybeCullOldPackets(int64_t sequence_number, Timestamp arrival_time) { if (periodic_window_start_seq_ >= packet_arrival_times_.end_sequence_number() && - arrival_time - Timestamp::Zero() >= send_config_.back_window.Get()) { + arrival_time - Timestamp::Zero() >= kBackWindow) { // Start new feedback packet, cull old packets. - packet_arrival_times_.RemoveOldPackets( - sequence_number, arrival_time - send_config_.back_window.Get()); + packet_arrival_times_.RemoveOldPackets(sequence_number, + arrival_time - kBackWindow); } } @@ -172,19 +173,17 @@ void RemoteEstimatorProxy::OnBitrateChanged(int bitrate_bps) { // TwccReport size at 250ms interval is 36 byte. 
// AverageTwccReport = (TwccReport(50ms) + TwccReport(250ms)) / 2 constexpr DataSize kTwccReportSize = DataSize::Bytes(20 + 8 + 10 + 30); - const DataRate kMinTwccRate = - kTwccReportSize / send_config_.max_interval.Get(); + constexpr DataRate kMinTwccRate = kTwccReportSize / kMaxInterval; // Let TWCC reports occupy 5% of total bandwidth. - DataRate twcc_bitrate = - DataRate::BitsPerSec(send_config_.bandwidth_fraction * bitrate_bps); + DataRate twcc_bitrate = DataRate::BitsPerSec(0.05 * bitrate_bps); // Check upper send_interval bound by checking bitrate to avoid overflow when // dividing by small bitrate, in particular avoid dividing by zero bitrate. - TimeDelta send_interval = twcc_bitrate <= kMinTwccRate - ? send_config_.max_interval.Get() - : std::max(kTwccReportSize / twcc_bitrate, - send_config_.min_interval.Get()); + TimeDelta send_interval = + twcc_bitrate <= kMinTwccRate + ? kMaxInterval + : std::max(kTwccReportSize / twcc_bitrate, kMinInterval); MutexLock lock(&lock_); send_interval_ = send_interval; diff --git a/third_party/libwebrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.h b/third_party/libwebrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.h index 509ad0ba02dd..7b0a8a6c49d5 100644 --- a/third_party/libwebrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.h +++ b/third_party/libwebrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.h @@ -26,7 +26,6 @@ #include "modules/remote_bitrate_estimator/packet_arrival_map.h" #include "modules/rtp_rtcp/source/rtcp_packet.h" #include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h" -#include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/numerics/sequence_number_util.h" #include "rtc_base/synchronization/mutex.h" @@ -42,7 +41,6 @@ class RemoteEstimatorProxy { using TransportFeedbackSender = std::function> packets)>; RemoteEstimatorProxy(TransportFeedbackSender feedback_sender, - const FieldTrialsView* key_value_config, NetworkStateEstimator* network_state_estimator); ~RemoteEstimatorProxy(); @@ -69,22 +67,6 @@ class RemoteEstimatorProxy { void SetTransportOverhead(DataSize overhead_per_packet); private: - struct TransportWideFeedbackConfig { - FieldTrialParameter back_window{"wind", TimeDelta::Millis(500)}; - FieldTrialParameter min_interval{"min", TimeDelta::Millis(50)}; - FieldTrialParameter max_interval{"max", TimeDelta::Millis(250)}; - FieldTrialParameter default_interval{"def", - TimeDelta::Millis(100)}; - FieldTrialParameter bandwidth_fraction{"frac", 0.05}; - explicit TransportWideFeedbackConfig( - const FieldTrialsView* key_value_config) { - ParseFieldTrial({&back_window, &min_interval, &max_interval, - &default_interval, &bandwidth_fraction}, - key_value_config->Lookup( - "WebRTC-Bwe-TransportWideFeedbackIntervals")); - } - }; - void MaybeCullOldPackets(int64_t sequence_number, Timestamp arrival_time) RTC_EXCLUSIVE_LOCKS_REQUIRED(&lock_); void SendPeriodicFeedbacks() RTC_EXCLUSIVE_LOCKS_REQUIRED(&lock_); @@ -111,7 +93,6 @@ class RemoteEstimatorProxy { bool is_periodic_update) RTC_EXCLUSIVE_LOCKS_REQUIRED(&lock_); const TransportFeedbackSender feedback_sender_; - const TransportWideFeedbackConfig send_config_; Timestamp last_process_time_; Mutex lock_; diff --git a/third_party/libwebrtc/modules/remote_bitrate_estimator/remote_estimator_proxy_unittest.cc b/third_party/libwebrtc/modules/remote_bitrate_estimator/remote_estimator_proxy_unittest.cc index 10bc1e80a02a..16455b44a4e6 100644 --- 
a/third_party/libwebrtc/modules/remote_bitrate_estimator/remote_estimator_proxy_unittest.cc +++ b/third_party/libwebrtc/modules/remote_bitrate_estimator/remote_estimator_proxy_unittest.cc @@ -13,7 +13,6 @@ #include #include -#include "api/transport/field_trial_based_config.h" #include "api/transport/network_types.h" #include "api/transport/test/mock_network_control.h" #include "api/units/data_size.h" @@ -79,9 +78,7 @@ class RemoteEstimatorProxyTest : public ::testing::Test { public: RemoteEstimatorProxyTest() : clock_(0), - proxy_(feedback_sender_.AsStdFunction(), - &field_trial_config_, - &network_state_estimator_) {} + proxy_(feedback_sender_.AsStdFunction(), &network_state_estimator_) {} protected: void IncomingPacket( @@ -100,7 +97,6 @@ class RemoteEstimatorProxyTest : public ::testing::Test { proxy_.Process(clock_.CurrentTime()); } - FieldTrialBasedConfig field_trial_config_; SimulatedClock clock_; MockFunction>)> feedback_sender_; diff --git a/third_party/libwebrtc/modules/rtp_rtcp/BUILD.gn b/third_party/libwebrtc/modules/rtp_rtcp/BUILD.gn index ead114bc915f..abcdb619f48e 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/BUILD.gn +++ b/third_party/libwebrtc/modules/rtp_rtcp/BUILD.gn @@ -425,6 +425,7 @@ rtc_library("rtp_video_header") { "../../api:rtp_headers", "../../api/transport/rtp:dependency_descriptor", "../../api/video:video_frame", + "../../api/video:video_frame_metadata", "../../api/video:video_frame_type", "../../api/video:video_rtp_headers", "../../modules/video_coding:codec_globals_headers", @@ -436,6 +437,17 @@ rtc_library("rtp_video_header") { ] } +rtc_source_set("rtp_video_header_unittest") { + testonly = true + sources = [ "source/rtp_video_header_unittest.cc" ] + deps = [ + ":rtp_video_header", + "../../api/video:video_frame_metadata", + "../../api/video:video_frame_type", + "../../test:test_support", + ] +} + rtc_library("fec_test_helper") { testonly = true sources = [ @@ -596,15 +608,18 @@ if (rtc_include_tests) { ] deps = [ ":fec_test_helper", + ":frame_transformer_factory_unittest", ":mock_rtp_rtcp", ":rtcp_transceiver", ":rtp_packetizer_av1_test_helper", ":rtp_rtcp", ":rtp_rtcp_format", ":rtp_rtcp_legacy", + ":rtp_video_header_unittest", "../../api:array_view", "../../api:create_time_controller", "../../api:field_trials_registry", + "../../api:frame_transformer_factory", "../../api:libjingle_peerconnection_api", "../../api:mock_frame_encryptor", "../../api:rtp_headers", @@ -650,6 +665,7 @@ if (rtc_include_tests) { "../../rtc_base:threading", "../../rtc_base:timeutils", "../../system_wrappers", + "../../test:explicit_key_value_config", "../../test:field_trial", "../../test:mock_frame_transformer", "../../test:mock_transport", @@ -669,3 +685,19 @@ if (rtc_include_tests) { ] } } + +rtc_source_set("frame_transformer_factory_unittest") { + testonly = true + sources = [ "source/frame_transformer_factory_unittest.cc" ] + deps = [ + "../../api:frame_transformer_factory", + "../../api:transport_api", + "../../call:video_stream_api", + "../../modules/rtp_rtcp", + "../../rtc_base:rtc_event", + "../../test:mock_frame_transformer", + "../../test:test_support", + "../../video", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/memory" ] +} diff --git a/third_party/libwebrtc/modules/rtp_rtcp/include/rtp_packet_sender.h b/third_party/libwebrtc/modules/rtp_rtcp/include/rtp_packet_sender.h index ae221b09d3ff..ebc65298a5d6 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/include/rtp_packet_sender.h +++ b/third_party/libwebrtc/modules/rtp_rtcp/include/rtp_packet_sender.h @@ 
-28,6 +28,11 @@ class RtpPacketSender { // packets and the current target send rate. virtual void EnqueuePackets( std::vector> packets) = 0; + + // Clear any pending packets with the given SSRC from the queue. + // TODO(crbug.com/1395081): Make pure virtual when downstream code has been + // updated. + virtual void RemovePacketsForSsrc(uint32_t ssrc) {} }; } // namespace webrtc diff --git a/third_party/libwebrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h b/third_party/libwebrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h index 5c93e0bfed2b..75c30742df81 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h +++ b/third_party/libwebrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h @@ -67,7 +67,6 @@ class MockRtpRtcpInterface : public RtpRtcpInterface { MOCK_METHOD(RtpState, GetRtxState, (), (const, override)); MOCK_METHOD(uint32_t, SSRC, (), (const, override)); MOCK_METHOD(void, SetMid, (absl::string_view mid), (override)); - MOCK_METHOD(void, SetCsrcs, (const std::vector& csrcs), (override)); MOCK_METHOD(void, SetRtxSendStatus, (int modes), (override)); MOCK_METHOD(int, RtxSendStatus, (), (const, override)); MOCK_METHOD(absl::optional, RtxSsrc, (), (const, override)); diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc index 3687669b2ff8..2e7e219f9422 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc @@ -28,11 +28,6 @@ constexpr int kSendSideDelayWindowMs = 1000; constexpr int kBitrateStatisticsWindowMs = 1000; constexpr size_t kRtpSequenceNumberMapMaxEntries = 1 << 13; -bool IsDisabled(absl::string_view name, const FieldTrialsView* field_trials) { - FieldTrialBasedConfig default_trials; - auto& trials = field_trials ? *field_trials : default_trials; - return absl::StartsWith(trials.Lookup(name), "Disabled"); -} } // namespace DEPRECATED_RtpSenderEgress::NonPacedPacketSender::NonPacedPacketSender( @@ -72,8 +67,6 @@ DEPRECATED_RtpSenderEgress::DEPRECATED_RtpSenderEgress( flexfec_ssrc_(config.fec_generator ? 
config.fec_generator->FecSsrc() : absl::nullopt), populate_network2_timestamp_(config.populate_network2_timestamp), - send_side_bwe_with_overhead_( - !IsDisabled("WebRTC-SendSideBwe-WithOverhead", config.field_trials)), clock_(config.clock), packet_history_(packet_history), transport_(config.outgoing_transport), @@ -316,16 +309,11 @@ void DEPRECATED_RtpSenderEgress::AddPacketToTransportFeedback( const RtpPacketToSend& packet, const PacedPacketInfo& pacing_info) { if (transport_feedback_observer_) { - size_t packet_size = packet.payload_size() + packet.padding_size(); - if (send_side_bwe_with_overhead_) { - packet_size = packet.size(); - } - RtpPacketSendInfo packet_info; packet_info.media_ssrc = ssrc_; packet_info.transport_sequence_number = packet_id; packet_info.rtp_sequence_number = packet.SequenceNumber(); - packet_info.length = packet_size; + packet_info.length = packet.size(); packet_info.pacing_info = pacing_info; packet_info.packet_type = packet.packet_type(); transport_feedback_observer_->OnAddPacket(packet_info); diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.h b/third_party/libwebrtc/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.h index fd5dfddc025c..609a90d4fe29 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.h +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.h @@ -43,6 +43,7 @@ class DEPRECATED_RtpSenderEgress { void EnqueuePackets( std::vector> packets) override; + void RemovePacketsForSsrc(uint32_t ssrc) override {} private: uint16_t transport_sequence_number_; @@ -110,7 +111,6 @@ class DEPRECATED_RtpSenderEgress { const absl::optional rtx_ssrc_; const absl::optional flexfec_ssrc_; const bool populate_network2_timestamp_; - const bool send_side_bwe_with_overhead_; Clock* const clock_; RtpPacketHistory* const packet_history_; Transport* const transport_; diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/forward_error_correction.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/forward_error_correction.cc index 903d3e7d4523..1462c2f4810a 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/forward_error_correction.cc +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/forward_error_correction.cc @@ -225,10 +225,10 @@ void ForwardErrorCorrection::GenerateFecPayloads( size_t fec_packet_length = fec_header_size + media_payload_length; if (fec_packet_length > fec_packet->data.size()) { - // Recall that XORing with zero (which the FEC packets are prefilled - // with) is the identity operator, thus all prior XORs are - // still correct even though we expand the packet length here. + size_t old_size = fec_packet->data.size(); fec_packet->data.SetSize(fec_packet_length); + memset(fec_packet->data.MutableData() + old_size, 0, + fec_packet_length - old_size); } XorHeaders(*media_packet, fec_packet); XorPayloads(*media_packet, media_payload_length, fec_header_size, @@ -573,7 +573,13 @@ bool ForwardErrorCorrection::FinishPacketRecovery( "typical IP packet, and is thus dropped."; return false; } + size_t old_size = recovered_packet->pkt->data.size(); recovered_packet->pkt->data.SetSize(new_size); + data = recovered_packet->pkt->data.MutableData(); + if (new_size > old_size) { + memset(data + old_size, 0, new_size - old_size); + } + // Set the SN field. ByteWriter::WriteBigEndian(&data[2], recovered_packet->seq_num); // Set the SSRC field. 
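The GenerateFecPayloads and FinishPacketRecovery hunks above, and the XorPayloads hunk that follows, all apply the same fix: after growing a packet buffer with SetSize(), the newly exposed tail is zeroed explicitly instead of relying on the buffer being prefilled with zeros, so later XOR operations still start from a known-zero state. A minimal sketch of that grow-and-zero pattern, assuming only rtc::CopyOnWriteBuffer's size()/SetSize()/MutableData() API; the helper name is illustrative and not part of the patch:

#include <cstring>
#include "rtc_base/copy_on_write_buffer.h"

// Grows `buf` to `new_size` and zero-fills only the newly exposed bytes,
// leaving the existing (already XORed) prefix untouched. Does nothing if the
// buffer is already large enough.
void GrowAndZeroFill(rtc::CopyOnWriteBuffer& buf, size_t new_size) {
  const size_t old_size = buf.size();
  if (new_size <= old_size)
    return;
  buf.SetSize(new_size);
  std::memset(buf.MutableData() + old_size, 0, new_size - old_size);
}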
@@ -613,7 +619,10 @@ void ForwardErrorCorrection::XorPayloads(const Packet& src, RTC_DCHECK_LE(kRtpHeaderSize + payload_length, src.data.size()); RTC_DCHECK_LE(dst_offset + payload_length, dst->data.capacity()); if (dst_offset + payload_length > dst->data.size()) { - dst->data.SetSize(dst_offset + payload_length); + size_t old_size = dst->data.size(); + size_t new_size = dst_offset + payload_length; + dst->data.SetSize(new_size); + memset(dst->data.MutableData() + old_size, 0, new_size - old_size); } uint8_t* dst_data = dst->data.MutableData(); const uint8_t* src_data = src.data.cdata(); diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/frame_transformer_factory_unittest.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/frame_transformer_factory_unittest.cc new file mode 100644 index 000000000000..e011a76ed554 --- /dev/null +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/frame_transformer_factory_unittest.cc @@ -0,0 +1,65 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/frame_transformer_factory.h" + +#include +#include +#include +#include + +#include "absl/memory/memory.h" +#include "api/call/transport.h" +#include "call/video_receive_stream.h" +#include "modules/rtp_rtcp/source/rtp_descriptor_authentication.h" +#include "rtc_base/event.h" +#include "test/gmock.h" +#include "test/gtest.h" +#include "test/mock_frame_transformer.h" + +namespace webrtc { +namespace { + +using testing::NiceMock; +using testing::Return; + +class MockTransformableVideoFrame + : public webrtc::TransformableVideoFrameInterface { + public: + MOCK_METHOD(rtc::ArrayView, GetData, (), (const override)); + MOCK_METHOD(void, SetData, (rtc::ArrayView data), (override)); + MOCK_METHOD(uint8_t, GetPayloadType, (), (const, override)); + MOCK_METHOD(uint32_t, GetSsrc, (), (const, override)); + MOCK_METHOD(uint32_t, GetTimestamp, (), (const, override)); + MOCK_METHOD(TransformableFrameInterface::Direction, + GetDirection, + (), + (const, override)); + MOCK_METHOD(bool, IsKeyFrame, (), (const, override)); + MOCK_METHOD(std::vector, GetAdditionalData, (), (const, override)); + MOCK_METHOD(const webrtc::VideoFrameMetadata&, + GetMetadata, + (), + (const, override)); +}; + +TEST(FrameTransformerFactory, CloneVideoFrame) { + NiceMock original_frame; + uint8_t data[10]; + std::fill_n(data, 10, 5); + rtc::ArrayView data_view(data); + EXPECT_CALL(original_frame, GetData()).WillRepeatedly(Return(data_view)); + auto cloned_frame = CloneVideoFrame(&original_frame); + EXPECT_EQ(cloned_frame->GetData().size(), 10u); + EXPECT_THAT(cloned_frame->GetData(), testing::Each(5u)); +} + +} // namespace +} // namespace webrtc diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc index cee4b9878ec4..9c070bac17dd 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc @@ -265,11 +265,6 @@ void ModuleRtpRtcpImpl::SetMid(absl::string_view mid) { // RTCP, this will need to be passed down to the RTCPSender also. 
} -void ModuleRtpRtcpImpl::SetCsrcs(const std::vector& csrcs) { - rtcp_sender_.SetCsrcs(csrcs); - rtp_sender_->packet_generator.SetCsrcs(csrcs); -} - // TODO(pbos): Handle media and RTX streams separately (separate RTCP // feedbacks). RTCPSender::FeedbackState ModuleRtpRtcpImpl::GetFeedbackState() { diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h index 60fda386e116..6070b67d44de 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h @@ -103,8 +103,6 @@ class ABSL_DEPRECATED("") ModuleRtpRtcpImpl void SetMid(absl::string_view mid) override; - void SetCsrcs(const std::vector& csrcs) override; - RTCPSender::FeedbackState GetFeedbackState(); void SetRtxSendStatus(int mode) override; diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc index b66cb07ad581..66d2e7a44ee3 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc @@ -89,6 +89,8 @@ ModuleRtpRtcpImpl2::ModuleRtpRtcpImpl2(const Configuration& configuration) // Make sure rtcp sender use same timestamp offset as rtp sender. rtcp_sender_.SetTimestampOffset( rtp_sender_->packet_generator.TimestampOffset()); + rtp_sender_->packet_sender.SetTimestampOffset( + rtp_sender_->packet_generator.TimestampOffset()); } // Set default packet size limit. @@ -186,6 +188,7 @@ void ModuleRtpRtcpImpl2::SetRtpState(const RtpState& rtp_state) { rtp_sender_->packet_generator.SetRtpState(rtp_state); rtp_sender_->sequencer.SetRtpState(rtp_state); rtcp_sender_.SetTimestampOffset(rtp_state.start_timestamp); + rtp_sender_->packet_sender.SetTimestampOffset(rtp_state.start_timestamp); } void ModuleRtpRtcpImpl2::SetRtxState(const RtpState& rtp_state) { @@ -227,11 +230,6 @@ void ModuleRtpRtcpImpl2::SetMid(absl::string_view mid) { // RTCP, this will need to be passed down to the RTCPSender also. } -void ModuleRtpRtcpImpl2::SetCsrcs(const std::vector& csrcs) { - rtcp_sender_.SetCsrcs(csrcs); - rtp_sender_->packet_generator.SetCsrcs(csrcs); -} - // TODO(pbos): Handle media and RTX streams separately (separate RTCP // feedbacks). 
RTCPSender::FeedbackState ModuleRtpRtcpImpl2::GetFeedbackState() { diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.h b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.h index 248278988c3e..c43d0c34ba9e 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.h +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.h @@ -114,8 +114,6 @@ class ModuleRtpRtcpImpl2 final : public RtpRtcpInterface, void SetMid(absl::string_view mid) override; - void SetCsrcs(const std::vector& csrcs) override; - RTCPSender::FeedbackState GetFeedbackState(); void SetRtxSendStatus(int mode) override; diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2_unittest.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2_unittest.cc index 4dece662ecb9..918e075be8c3 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2_unittest.cc +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2_unittest.cc @@ -28,6 +28,7 @@ #include "rtc_base/logging.h" #include "rtc_base/rate_limiter.h" #include "rtc_base/strings/string_builder.h" +#include "test/explicit_key_value_config.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/rtcp_packet_parser.h" @@ -43,6 +44,8 @@ using ::testing::Not; using ::testing::Optional; using ::testing::SizeIs; +using webrtc::test::ExplicitKeyValueConfig; + namespace webrtc { namespace { constexpr uint32_t kSenderSsrc = 0x12345; @@ -151,36 +154,6 @@ class SendTransport : public Transport, std::deque rtcp_packets_; }; -struct TestConfig { - explicit TestConfig(bool with_overhead) : with_overhead(with_overhead) {} - - bool with_overhead = false; -}; - -class FieldTrialConfig : public FieldTrialsRegistry { - public: - static FieldTrialConfig GetFromTestConfig(const TestConfig& config) { - FieldTrialConfig trials; - trials.overhead_enabled_ = config.with_overhead; - return trials; - } - - FieldTrialConfig() : overhead_enabled_(false) {} - ~FieldTrialConfig() override {} - - void SetOverHeadEnabled(bool enabled) { overhead_enabled_ = enabled; } - - private: - std::string GetValue(absl::string_view key) const override { - if (key == "WebRTC-SendSideBwe-WithOverhead") { - return overhead_enabled_ ? 
"Enabled" : "Disabled"; - } - return ""; - } - - bool overhead_enabled_; -}; - class RtpRtcpModule : public RtcpPacketTypeCounterObserver, public SendPacketObserver { public: @@ -194,7 +167,7 @@ class RtpRtcpModule : public RtcpPacketTypeCounterObserver, RtpRtcpModule(GlobalSimulatedTimeController* time_controller, bool is_sender, - const FieldTrialConfig& trials) + const FieldTrialsRegistry& trials) : time_controller_(time_controller), is_sender_(is_sender), trials_(trials), @@ -206,7 +179,7 @@ class RtpRtcpModule : public RtcpPacketTypeCounterObserver, TimeController* const time_controller_; const bool is_sender_; - const FieldTrialConfig& trials_; + const FieldTrialsRegistry& trials_; RtcpPacketTypeCounter packets_sent_; RtcpPacketTypeCounter packets_received_; std::unique_ptr receive_statistics_; @@ -289,11 +262,11 @@ class RtpRtcpModule : public RtcpPacketTypeCounterObserver, }; } // namespace -class RtpRtcpImpl2Test : public ::testing::TestWithParam { +class RtpRtcpImpl2Test : public ::testing::Test { protected: RtpRtcpImpl2Test() : time_controller_(Timestamp::Micros(133590000000000)), - field_trials_(FieldTrialConfig::GetFromTestConfig(GetParam())), + field_trials_(""), sender_(&time_controller_, /*is_sender=*/true, field_trials_), @@ -346,7 +319,7 @@ class RtpRtcpImpl2Test : public ::testing::TestWithParam { } GlobalSimulatedTimeController time_controller_; - FieldTrialConfig field_trials_; + test::ExplicitKeyValueConfig field_trials_; RtpRtcpModule sender_; std::unique_ptr sender_video_; RtpRtcpModule receiver_; @@ -403,7 +376,7 @@ class RtpRtcpImpl2Test : public ::testing::TestWithParam { } }; -TEST_P(RtpRtcpImpl2Test, RetransmitsAllLayers) { +TEST_F(RtpRtcpImpl2Test, RetransmitsAllLayers) { // Send frames. EXPECT_EQ(0, sender_.RtpSent()); EXPECT_TRUE(SendFrame(&sender_, sender_video_.get(), @@ -432,7 +405,7 @@ TEST_P(RtpRtcpImpl2Test, RetransmitsAllLayers) { EXPECT_EQ(kSequenceNumber + 2, sender_.LastRtpSequenceNumber()); } -TEST_P(RtpRtcpImpl2Test, Rtt) { +TEST_F(RtpRtcpImpl2Test, Rtt) { RtpPacketReceived packet; packet.SetTimestamp(1); packet.SetSequenceNumber(123); @@ -476,7 +449,7 @@ TEST_P(RtpRtcpImpl2Test, Rtt) { EXPECT_NEAR(2 * kOneWayNetworkDelay.ms(), sender_.impl_->rtt_ms(), 1); } -TEST_P(RtpRtcpImpl2Test, RttForReceiverOnly) { +TEST_F(RtpRtcpImpl2Test, RttForReceiverOnly) { // Receiver module should send a Receiver time reference report (RTRR). EXPECT_EQ(0, receiver_.impl_->SendRTCP(kRtcpReport)); @@ -495,7 +468,7 @@ TEST_P(RtpRtcpImpl2Test, RttForReceiverOnly) { EXPECT_NEAR(2 * kOneWayNetworkDelay.ms(), receiver_.impl_->rtt_ms(), 1); } -TEST_P(RtpRtcpImpl2Test, NoSrBeforeMedia) { +TEST_F(RtpRtcpImpl2Test, NoSrBeforeMedia) { // Ignore fake transport delays in this test. 
sender_.transport_.SimulateNetworkDelay(TimeDelta::Zero()); receiver_.transport_.SimulateNetworkDelay(TimeDelta::Zero()); @@ -512,7 +485,7 @@ TEST_P(RtpRtcpImpl2Test, NoSrBeforeMedia) { EXPECT_EQ(sender_.transport_.NumRtcpSent(), 1u); } -TEST_P(RtpRtcpImpl2Test, RtcpPacketTypeCounter_Nack) { +TEST_F(RtpRtcpImpl2Test, RtcpPacketTypeCounter_Nack) { EXPECT_EQ(0U, sender_.RtcpReceived().nack_packets); EXPECT_EQ(0U, receiver_.RtcpSent().nack_packets); @@ -527,7 +500,7 @@ TEST_P(RtpRtcpImpl2Test, RtcpPacketTypeCounter_Nack) { EXPECT_EQ(1U, sender_.RtcpReceived().nack_packets); } -TEST_P(RtpRtcpImpl2Test, AddStreamDataCounters) { +TEST_F(RtpRtcpImpl2Test, AddStreamDataCounters) { StreamDataCounters rtp; const int64_t kStartTimeMs = 1; rtp.first_packet_time_ms = kStartTimeMs; @@ -570,7 +543,7 @@ TEST_P(RtpRtcpImpl2Test, AddStreamDataCounters) { EXPECT_EQ(kStartTimeMs, sum.first_packet_time_ms); // Holds oldest time. } -TEST_P(RtpRtcpImpl2Test, SendsInitialNackList) { +TEST_F(RtpRtcpImpl2Test, SendsInitialNackList) { // Send module sends a NACK. const uint16_t kNackLength = 1; uint16_t nack_list[kNackLength] = {123}; @@ -582,7 +555,7 @@ TEST_P(RtpRtcpImpl2Test, SendsInitialNackList) { EXPECT_THAT(sender_.LastNackListSent(), ElementsAre(123)); } -TEST_P(RtpRtcpImpl2Test, SendsExtendedNackList) { +TEST_F(RtpRtcpImpl2Test, SendsExtendedNackList) { // Send module sends a NACK. const uint16_t kNackLength = 1; uint16_t nack_list[kNackLength] = {123}; @@ -606,7 +579,7 @@ TEST_P(RtpRtcpImpl2Test, SendsExtendedNackList) { EXPECT_THAT(sender_.LastNackListSent(), ElementsAre(124)); } -TEST_P(RtpRtcpImpl2Test, ReSendsNackListAfterRttMs) { +TEST_F(RtpRtcpImpl2Test, ReSendsNackListAfterRttMs) { sender_.transport_.SimulateNetworkDelay(TimeDelta::Zero()); // Send module sends a NACK. const uint16_t kNackLength = 2; @@ -631,7 +604,7 @@ TEST_P(RtpRtcpImpl2Test, ReSendsNackListAfterRttMs) { EXPECT_THAT(sender_.LastNackListSent(), ElementsAre(123, 125)); } -TEST_P(RtpRtcpImpl2Test, UniqueNackRequests) { +TEST_F(RtpRtcpImpl2Test, UniqueNackRequests) { receiver_.transport_.SimulateNetworkDelay(TimeDelta::Zero()); EXPECT_EQ(0U, receiver_.RtcpSent().nack_packets); EXPECT_EQ(0U, receiver_.RtcpSent().nack_requests); @@ -671,7 +644,7 @@ TEST_P(RtpRtcpImpl2Test, UniqueNackRequests) { EXPECT_EQ(75, sender_.RtcpReceived().UniqueNackRequestsInPercent()); } -TEST_P(RtpRtcpImpl2Test, ConfigurableRtcpReportInterval) { +TEST_F(RtpRtcpImpl2Test, ConfigurableRtcpReportInterval) { const TimeDelta kVideoReportInterval = TimeDelta::Millis(3000); // Recreate sender impl with new configuration, and redo setup. @@ -709,7 +682,37 @@ TEST_P(RtpRtcpImpl2Test, ConfigurableRtcpReportInterval) { EXPECT_EQ(sender_.transport_.NumRtcpSent(), 2u); } -TEST_P(RtpRtcpImpl2Test, StoresPacketInfoForSentPackets) { +TEST_F(RtpRtcpImpl2Test, RtpSenderEgressTimestampOffset) { + // RTP timestamp offset not explicitly set, default to random value. + uint16_t seqno = sender_.impl_->GetRtpState().sequence_number; + uint32_t media_rtp_ts = 1001; + uint32_t rtp_ts = media_rtp_ts + sender_.impl_->StartTimestamp(); + EXPECT_TRUE(SendFrame(&sender_, sender_video_.get(), kBaseLayerTid, rtp_ts, + /*capture_time_ms=*/0)); + AdvanceTime(kOneWayNetworkDelay); + EXPECT_THAT( + sender_.impl_->GetSentRtpPacketInfos(std::vector{seqno}), + ElementsAre(Field(&RtpSequenceNumberMap::Info::timestamp, media_rtp_ts))); + + RtpState saved_rtp_state = sender_.impl_->GetRtpState(); + + // Change RTP timestamp offset. 
+ sender_.impl_->SetStartTimestamp(2000); + + // Restores RtpState and make sure the old timestamp offset is in place. + sender_.impl_->SetRtpState(saved_rtp_state); + seqno = sender_.impl_->GetRtpState().sequence_number; + media_rtp_ts = 1031; + rtp_ts = media_rtp_ts + sender_.impl_->StartTimestamp(); + EXPECT_TRUE(SendFrame(&sender_, sender_video_.get(), kBaseLayerTid, rtp_ts, + /*capture_time_ms=*/0)); + AdvanceTime(kOneWayNetworkDelay); + EXPECT_THAT( + sender_.impl_->GetSentRtpPacketInfos(std::vector{seqno}), + ElementsAre(Field(&RtpSequenceNumberMap::Info::timestamp, media_rtp_ts))); +} + +TEST_F(RtpRtcpImpl2Test, StoresPacketInfoForSentPackets) { const uint32_t kStartTimestamp = 1u; SetUp(); sender_.impl_->SetStartTimestamp(kStartTimestamp); @@ -768,12 +771,12 @@ TEST_P(RtpRtcpImpl2Test, StoresPacketInfoForSentPackets) { } // Checks that the sender report stats are not available if no RTCP SR was sent. -TEST_P(RtpRtcpImpl2Test, SenderReportStatsNotAvailable) { +TEST_F(RtpRtcpImpl2Test, SenderReportStatsNotAvailable) { EXPECT_THAT(receiver_.impl_->GetSenderReportStats(), Eq(absl::nullopt)); } // Checks that the sender report stats are available if an RTCP SR was sent. -TEST_P(RtpRtcpImpl2Test, SenderReportStatsAvailable) { +TEST_F(RtpRtcpImpl2Test, SenderReportStatsAvailable) { // Send a frame in order to send an SR. EXPECT_TRUE(SendFrame(&sender_, sender_video_.get(), kBaseLayerTid)); // Send an SR. @@ -784,7 +787,7 @@ TEST_P(RtpRtcpImpl2Test, SenderReportStatsAvailable) { // Checks that the sender report stats are not available if an RTCP SR with an // unexpected SSRC is received. -TEST_P(RtpRtcpImpl2Test, SenderReportStatsNotUpdatedWithUnexpectedSsrc) { +TEST_F(RtpRtcpImpl2Test, SenderReportStatsNotUpdatedWithUnexpectedSsrc) { constexpr uint32_t kUnexpectedSenderSsrc = 0x87654321; static_assert(kUnexpectedSenderSsrc != kSenderSsrc, ""); // Forge a sender report and pass it to the receiver as if an RTCP SR were @@ -800,7 +803,7 @@ TEST_P(RtpRtcpImpl2Test, SenderReportStatsNotUpdatedWithUnexpectedSsrc) { } // Checks the stats derived from the last received RTCP SR are set correctly. -TEST_P(RtpRtcpImpl2Test, SenderReportStatsCheckStatsFromLastReport) { +TEST_F(RtpRtcpImpl2Test, SenderReportStatsCheckStatsFromLastReport) { using SenderReportStats = RtpRtcpInterface::SenderReportStats; const NtpTime ntp(/*seconds=*/1u, /*fractions=*/1u << 31); constexpr uint32_t kPacketCount = 123u; @@ -823,7 +826,7 @@ TEST_P(RtpRtcpImpl2Test, SenderReportStatsCheckStatsFromLastReport) { } // Checks that the sender report stats count equals the number of sent RTCP SRs. -TEST_P(RtpRtcpImpl2Test, SenderReportStatsCount) { +TEST_F(RtpRtcpImpl2Test, SenderReportStatsCount) { using SenderReportStats = RtpRtcpInterface::SenderReportStats; // Send a frame in order to send an SR. EXPECT_TRUE(SendFrame(&sender_, sender_video_.get(), kBaseLayerTid)); @@ -841,7 +844,7 @@ TEST_P(RtpRtcpImpl2Test, SenderReportStatsCount) { // Checks that the sender report stats include a valid arrival time if an RTCP // SR was sent. -TEST_P(RtpRtcpImpl2Test, SenderReportStatsArrivalTimestampSet) { +TEST_F(RtpRtcpImpl2Test, SenderReportStatsArrivalTimestampSet) { // Send a frame in order to send an SR. EXPECT_TRUE(SendFrame(&sender_, sender_video_.get(), kBaseLayerTid)); // Send an SR. @@ -854,7 +857,7 @@ TEST_P(RtpRtcpImpl2Test, SenderReportStatsArrivalTimestampSet) { // Checks that the packet and byte counters from an RTCP SR are not zero once // a frame is sent. 
-TEST_P(RtpRtcpImpl2Test, SenderReportStatsPacketByteCounters) { +TEST_F(RtpRtcpImpl2Test, SenderReportStatsPacketByteCounters) { using SenderReportStats = RtpRtcpInterface::SenderReportStats; // Send a frame in order to send an SR. EXPECT_TRUE(SendFrame(&sender_, sender_video_.get(), kBaseLayerTid)); @@ -870,14 +873,14 @@ TEST_P(RtpRtcpImpl2Test, SenderReportStatsPacketByteCounters) { Field(&SenderReportStats::bytes_sent, Gt(0u))))); } -TEST_P(RtpRtcpImpl2Test, SendingVideoAdvancesSequenceNumber) { +TEST_F(RtpRtcpImpl2Test, SendingVideoAdvancesSequenceNumber) { const uint16_t sequence_number = sender_.impl_->SequenceNumber(); EXPECT_TRUE(SendFrame(&sender_, sender_video_.get(), kBaseLayerTid)); ASSERT_THAT(sender_.transport_.rtp_packets_sent_, Gt(0)); EXPECT_EQ(sequence_number + 1, sender_.impl_->SequenceNumber()); } -TEST_P(RtpRtcpImpl2Test, SequenceNumberNotAdvancedWhenNotSending) { +TEST_F(RtpRtcpImpl2Test, SequenceNumberNotAdvancedWhenNotSending) { const uint16_t sequence_number = sender_.impl_->SequenceNumber(); sender_.impl_->SetSendingMediaStatus(false); EXPECT_FALSE(SendFrame(&sender_, sender_video_.get(), kBaseLayerTid)); @@ -885,7 +888,7 @@ TEST_P(RtpRtcpImpl2Test, SequenceNumberNotAdvancedWhenNotSending) { EXPECT_EQ(sequence_number, sender_.impl_->SequenceNumber()); } -TEST_P(RtpRtcpImpl2Test, PaddingNotAllowedInMiddleOfFrame) { +TEST_F(RtpRtcpImpl2Test, PaddingNotAllowedInMiddleOfFrame) { constexpr size_t kPaddingSize = 100; // Can't send padding before media. @@ -920,7 +923,7 @@ TEST_P(RtpRtcpImpl2Test, PaddingNotAllowedInMiddleOfFrame) { EXPECT_THAT(sender_.impl_->GeneratePadding(kPaddingSize), SizeIs(Gt(0u))); } -TEST_P(RtpRtcpImpl2Test, PaddingTimestampMatchesMedia) { +TEST_F(RtpRtcpImpl2Test, PaddingTimestampMatchesMedia) { constexpr size_t kPaddingSize = 100; const uint32_t kTimestamp = 123; @@ -941,7 +944,7 @@ TEST_P(RtpRtcpImpl2Test, PaddingTimestampMatchesMedia) { EXPECT_EQ(sender_.last_packet().Timestamp(), kTimestamp); } -TEST_P(RtpRtcpImpl2Test, AssignsTransportSequenceNumber) { +TEST_F(RtpRtcpImpl2Test, AssignsTransportSequenceNumber) { sender_.RegisterHeaderExtension(TransportSequenceNumber::Uri(), kTransportSequenceNumberExtensionId); @@ -958,7 +961,7 @@ TEST_P(RtpRtcpImpl2Test, AssignsTransportSequenceNumber) { EXPECT_EQ(first_transport_seq + 1, second_transport_seq); } -TEST_P(RtpRtcpImpl2Test, AssignsAbsoluteSendTime) { +TEST_F(RtpRtcpImpl2Test, AssignsAbsoluteSendTime) { sender_.RegisterHeaderExtension(AbsoluteSendTime::Uri(), kAbsoluteSendTimeExtensionId); @@ -966,7 +969,7 @@ TEST_P(RtpRtcpImpl2Test, AssignsAbsoluteSendTime) { EXPECT_NE(sender_.last_packet().GetExtension(), 0u); } -TEST_P(RtpRtcpImpl2Test, AssignsTransmissionTimeOffset) { +TEST_F(RtpRtcpImpl2Test, AssignsTransmissionTimeOffset) { sender_.RegisterHeaderExtension(TransmissionOffset::Uri(), kTransmissionOffsetExtensionId); @@ -982,7 +985,7 @@ TEST_P(RtpRtcpImpl2Test, AssignsTransmissionTimeOffset) { kOffset.ms() * kCaptureTimeMsToRtpTimestamp); } -TEST_P(RtpRtcpImpl2Test, PropagatesSentPacketInfo) { +TEST_F(RtpRtcpImpl2Test, PropagatesSentPacketInfo) { sender_.RegisterHeaderExtension(TransportSequenceNumber::Uri(), kTransportSequenceNumberExtensionId); int64_t now_ms = time_controller_.GetClock()->TimeInMilliseconds(); @@ -997,7 +1000,7 @@ TEST_P(RtpRtcpImpl2Test, PropagatesSentPacketInfo) { Field(&RtpRtcpModule::SentPacket::ssrc, Eq(kSenderSsrc))))); } -TEST_P(RtpRtcpImpl2Test, GeneratesFlexfec) { +TEST_F(RtpRtcpImpl2Test, GeneratesFlexfec) { constexpr int kFlexfecPayloadType = 118; constexpr 
uint32_t kFlexfecSsrc = 17; const char kNoMid[] = ""; @@ -1030,7 +1033,7 @@ TEST_P(RtpRtcpImpl2Test, GeneratesFlexfec) { EXPECT_EQ(fec_packet.PayloadType(), kFlexfecPayloadType); } -TEST_P(RtpRtcpImpl2Test, GeneratesUlpfec) { +TEST_F(RtpRtcpImpl2Test, GeneratesUlpfec) { constexpr int kUlpfecPayloadType = 118; constexpr int kRedPayloadType = 119; UlpfecGenerator ulpfec_sender(kRedPayloadType, kUlpfecPayloadType, @@ -1058,7 +1061,7 @@ TEST_P(RtpRtcpImpl2Test, GeneratesUlpfec) { EXPECT_EQ(fec_packet.payload()[0], kUlpfecPayloadType); } -TEST_P(RtpRtcpImpl2Test, RtpStateReflectsCurrentState) { +TEST_F(RtpRtcpImpl2Test, RtpStateReflectsCurrentState) { // Verify that that each of the field of GetRtpState actually reflects // the current state. @@ -1106,7 +1109,7 @@ TEST_P(RtpRtcpImpl2Test, RtpStateReflectsCurrentState) { EXPECT_EQ(state.ssrc_has_acked, true); } -TEST_P(RtpRtcpImpl2Test, RtxRtpStateReflectsCurrentState) { +TEST_F(RtpRtcpImpl2Test, RtxRtpStateReflectsCurrentState) { // Enable RTX. sender_.impl_->SetStorePacketsStatus(/*enable=*/true, /*number_to_store=*/10); sender_.impl_->SetRtxSendPayloadType(kRtxPayloadType, kPayloadType); @@ -1151,9 +1154,4 @@ TEST_P(RtpRtcpImpl2Test, RtxRtpStateReflectsCurrentState) { EXPECT_EQ(rtx_state.sequence_number, rtx_packet.SequenceNumber() + 1); } -INSTANTIATE_TEST_SUITE_P(WithAndWithoutOverhead, - RtpRtcpImpl2Test, - ::testing::Values(TestConfig{false}, - TestConfig{true})); - } // namespace webrtc diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_interface.h b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_interface.h index eb4c005eb20d..cb4a0a427f25 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_interface.h +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_interface.h @@ -261,10 +261,6 @@ class RtpRtcpInterface : public RtcpFeedbackSenderInterface { // Once set, this value can not be changed or removed. virtual void SetMid(absl::string_view mid) = 0; - // Sets CSRC. - // `csrcs` - vector of CSRCs - virtual void SetCsrcs(const std::vector& csrcs) = 0; - // Turns on/off sending RTX (RFC 4588). The modes can be set as a combination // of values of the enumerator RtxMode. virtual void SetRtxSendStatus(int modes) = 0; diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_egress.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_egress.cc index e81ea8da1963..c211b5a1ec15 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_egress.cc +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_egress.cc @@ -28,14 +28,6 @@ constexpr int kBitrateStatisticsWindowMs = 1000; constexpr size_t kRtpSequenceNumberMapMaxEntries = 1 << 13; constexpr TimeDelta kUpdateInterval = TimeDelta::Millis(kBitrateStatisticsWindowMs); - -bool IsTrialSetTo(const FieldTrialsView* field_trials, - absl::string_view name, - absl::string_view value) { - FieldTrialBasedConfig default_trials; - auto& trials = field_trials ? *field_trials : default_trials; - return absl::StartsWith(trials.Lookup(name), value); -} } // namespace RtpSenderEgress::NonPacedPacketSender::NonPacedPacketSender( @@ -81,10 +73,6 @@ RtpSenderEgress::RtpSenderEgress(const RtpRtcpInterface::Configuration& config, flexfec_ssrc_(config.fec_generator ? 
config.fec_generator->FecSsrc() : absl::nullopt), populate_network2_timestamp_(config.populate_network2_timestamp), - send_side_bwe_with_overhead_( - !IsTrialSetTo(config.field_trials, - "WebRTC-SendSideBwe-WithOverhead", - "Disabled")), clock_(config.clock), packet_history_(packet_history), transport_(config.outgoing_transport), @@ -422,15 +410,10 @@ void RtpSenderEgress::AddPacketToTransportFeedback( const RtpPacketToSend& packet, const PacedPacketInfo& pacing_info) { if (transport_feedback_observer_) { - size_t packet_size = packet.payload_size() + packet.padding_size(); - if (send_side_bwe_with_overhead_) { - packet_size = packet.size(); - } - RtpPacketSendInfo packet_info; packet_info.transport_sequence_number = packet_id; packet_info.rtp_timestamp = packet.Timestamp(); - packet_info.length = packet_size; + packet_info.length = packet.size(); packet_info.pacing_info = pacing_info; packet_info.packet_type = packet.packet_type(); diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_egress.h b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_egress.h index c46f6aeb401d..e0a8d966f5a1 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_egress.h +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_egress.h @@ -49,6 +49,8 @@ class RtpSenderEgress { void EnqueuePackets( std::vector> packets) override; + // Since we don't pace packets, there's no pending packets to remove. + void RemovePacketsForSsrc(uint32_t ssrc) override {} private: void PrepareForSend(RtpPacketToSend* packet); @@ -137,7 +139,6 @@ class RtpSenderEgress { const absl::optional rtx_ssrc_; const absl::optional flexfec_ssrc_; const bool populate_network2_timestamp_; - const bool send_side_bwe_with_overhead_; Clock* const clock_; RtpPacketHistory* const packet_history_; Transport* const transport_; diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_egress_unittest.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_egress_unittest.cc index 30e0c64a30e8..cc1c8feb8d7f 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_egress_unittest.cc +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_egress_unittest.cc @@ -26,6 +26,7 @@ #include "modules/rtp_rtcp/source/rtp_packet_history.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" +#include "test/explicit_key_value_config.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/time_controller/simulated_time_controller.h" @@ -53,11 +54,6 @@ enum : int { kVideoTimingExtensionId, }; -struct TestConfig { - explicit TestConfig(bool with_overhead) : with_overhead(with_overhead) {} - bool with_overhead = false; -}; - class MockSendPacketObserver : public SendPacketObserver { public: MOCK_METHOD(void, OnSendPacket, (uint16_t, int64_t, uint32_t), (override)); @@ -85,24 +81,6 @@ class MockSendSideDelayObserver : public SendSideDelayObserver { MOCK_METHOD(void, SendSideDelayUpdated, (int, int, uint32_t), (override)); }; -class FieldTrialConfig : public FieldTrialsRegistry { - public: - FieldTrialConfig() : overhead_enabled_(false) {} - ~FieldTrialConfig() override {} - - void SetOverHeadEnabled(bool enabled) { overhead_enabled_ = enabled; } - - private: - std::string GetValue(absl::string_view key) const override { - if (key == "WebRTC-SendSideBwe-WithOverhead") { - return overhead_enabled_ ? 
"Enabled" : "Disabled"; - } - return ""; - } - - bool overhead_enabled_; -}; - struct TransmittedPacket { TransmittedPacket(rtc::ArrayView data, const PacketOptions& packet_options, @@ -139,16 +117,15 @@ class TestTransport : public Transport { } // namespace -class RtpSenderEgressTest : public ::testing::TestWithParam { +class RtpSenderEgressTest : public ::testing::Test { protected: RtpSenderEgressTest() : time_controller_(kStartTime), clock_(time_controller_.GetClock()), transport_(&header_extensions_), packet_history_(clock_, /*enable_rtx_padding_prioritization=*/true), - sequence_number_(kStartSequenceNumber) { - trials_.SetOverHeadEnabled(GetParam().with_overhead); - } + trials_(""), + sequence_number_(kStartSequenceNumber) {} std::unique_ptr CreateRtpSenderEgress() { return std::make_unique(DefaultConfig(), &packet_history_); @@ -200,11 +177,11 @@ class RtpSenderEgressTest : public ::testing::TestWithParam { RtpHeaderExtensionMap header_extensions_; TestTransport transport_; RtpPacketHistory packet_history_; - FieldTrialConfig trials_; + test::ExplicitKeyValueConfig trials_; uint16_t sequence_number_; }; -TEST_P(RtpSenderEgressTest, TransportFeedbackObserverGetsCorrectByteCount) { +TEST_F(RtpSenderEgressTest, TransportFeedbackObserverGetsCorrectByteCount) { constexpr size_t kRtpOverheadBytesPerPacket = 12 + 8; constexpr size_t kPayloadSize = 1400; const uint16_t kTransportSequenceNumber = 17; @@ -212,9 +189,7 @@ TEST_P(RtpSenderEgressTest, TransportFeedbackObserverGetsCorrectByteCount) { header_extensions_.RegisterByUri(kTransportSequenceNumberExtensionId, TransportSequenceNumber::Uri()); - const size_t expected_bytes = GetParam().with_overhead - ? kPayloadSize + kRtpOverheadBytesPerPacket - : kPayloadSize; + const size_t expected_bytes = kPayloadSize + kRtpOverheadBytesPerPacket; EXPECT_CALL( feedback_observer_, @@ -234,7 +209,7 @@ TEST_P(RtpSenderEgressTest, TransportFeedbackObserverGetsCorrectByteCount) { sender->SendPacket(packet.get(), PacedPacketInfo()); } -TEST_P(RtpSenderEgressTest, PacketOptionsIsRetransmitSetByPacketType) { +TEST_F(RtpSenderEgressTest, PacketOptionsIsRetransmitSetByPacketType) { std::unique_ptr sender = CreateRtpSenderEgress(); std::unique_ptr media_packet = BuildRtpPacket(); @@ -250,7 +225,7 @@ TEST_P(RtpSenderEgressTest, PacketOptionsIsRetransmitSetByPacketType) { EXPECT_TRUE(transport_.last_packet()->options.is_retransmit); } -TEST_P(RtpSenderEgressTest, DoesnSetIncludedInAllocationByDefault) { +TEST_F(RtpSenderEgressTest, DoesnSetIncludedInAllocationByDefault) { std::unique_ptr sender = CreateRtpSenderEgress(); std::unique_ptr packet = BuildRtpPacket(); @@ -259,7 +234,7 @@ TEST_P(RtpSenderEgressTest, DoesnSetIncludedInAllocationByDefault) { EXPECT_FALSE(transport_.last_packet()->options.included_in_allocation); } -TEST_P(RtpSenderEgressTest, +TEST_F(RtpSenderEgressTest, SetsIncludedInFeedbackWhenTransportSequenceNumberExtensionIsRegistered) { std::unique_ptr sender = CreateRtpSenderEgress(); @@ -270,7 +245,7 @@ TEST_P(RtpSenderEgressTest, EXPECT_TRUE(transport_.last_packet()->options.included_in_feedback); } -TEST_P( +TEST_F( RtpSenderEgressTest, SetsIncludedInAllocationWhenTransportSequenceNumberExtensionIsRegistered) { std::unique_ptr sender = CreateRtpSenderEgress(); @@ -282,7 +257,7 @@ TEST_P( EXPECT_TRUE(transport_.last_packet()->options.included_in_allocation); } -TEST_P(RtpSenderEgressTest, +TEST_F(RtpSenderEgressTest, SetsIncludedInAllocationWhenForcedAsPartOfAllocation) { std::unique_ptr sender = CreateRtpSenderEgress(); 
sender->ForceIncludeSendPacketsInAllocation(true); @@ -293,7 +268,7 @@ TEST_P(RtpSenderEgressTest, EXPECT_TRUE(transport_.last_packet()->options.included_in_allocation); } -TEST_P(RtpSenderEgressTest, OnSendSideDelayUpdated) { +TEST_F(RtpSenderEgressTest, OnSendSideDelayUpdated) { StrictMock send_side_delay_observer; RtpRtcpInterface::Configuration config = DefaultConfig(); config.send_side_delay_observer = &send_side_delay_observer; @@ -335,7 +310,7 @@ TEST_P(RtpSenderEgressTest, OnSendSideDelayUpdated) { PacedPacketInfo()); } -TEST_P(RtpSenderEgressTest, WritesPacerExitToTimingExtension) { +TEST_F(RtpSenderEgressTest, WritesPacerExitToTimingExtension) { std::unique_ptr sender = CreateRtpSenderEgress(); header_extensions_.RegisterByUri(kVideoTimingExtensionId, VideoTimingExtension::Uri()); @@ -355,7 +330,7 @@ TEST_P(RtpSenderEgressTest, WritesPacerExitToTimingExtension) { EXPECT_EQ(video_timing.pacer_exit_delta_ms, kStoredTimeInMs); } -TEST_P(RtpSenderEgressTest, WritesNetwork2ToTimingExtension) { +TEST_F(RtpSenderEgressTest, WritesNetwork2ToTimingExtension) { RtpRtcpInterface::Configuration rtp_config = DefaultConfig(); rtp_config.populate_network2_timestamp = true; auto sender = std::make_unique(rtp_config, &packet_history_); @@ -381,7 +356,7 @@ TEST_P(RtpSenderEgressTest, WritesNetwork2ToTimingExtension) { EXPECT_EQ(video_timing.pacer_exit_delta_ms, kPacerExitMs); } -TEST_P(RtpSenderEgressTest, OnSendPacketUpdated) { +TEST_F(RtpSenderEgressTest, OnSendPacketUpdated) { std::unique_ptr sender = CreateRtpSenderEgress(); header_extensions_.RegisterByUri(kTransportSequenceNumberExtensionId, TransportSequenceNumber::Uri()); @@ -395,7 +370,7 @@ TEST_P(RtpSenderEgressTest, OnSendPacketUpdated) { sender->SendPacket(packet.get(), PacedPacketInfo()); } -TEST_P(RtpSenderEgressTest, OnSendPacketNotUpdatedForRetransmits) { +TEST_F(RtpSenderEgressTest, OnSendPacketNotUpdatedForRetransmits) { std::unique_ptr sender = CreateRtpSenderEgress(); header_extensions_.RegisterByUri(kTransportSequenceNumberExtensionId, TransportSequenceNumber::Uri()); @@ -409,7 +384,7 @@ TEST_P(RtpSenderEgressTest, OnSendPacketNotUpdatedForRetransmits) { sender->SendPacket(packet.get(), PacedPacketInfo()); } -TEST_P(RtpSenderEgressTest, ReportsFecRate) { +TEST_F(RtpSenderEgressTest, ReportsFecRate) { constexpr int kNumPackets = 10; constexpr TimeDelta kTimeBetweenPackets = TimeDelta::Millis(33); @@ -437,7 +412,7 @@ TEST_P(RtpSenderEgressTest, ReportsFecRate) { (total_fec_data_sent / (kTimeBetweenPackets * kNumPackets)).bps(), 500); } -TEST_P(RtpSenderEgressTest, BitrateCallbacks) { +TEST_F(RtpSenderEgressTest, BitrateCallbacks) { class MockBitrateStaticsObserver : public BitrateStatisticsObserver { public: MOCK_METHOD(void, Notify, (uint32_t, uint32_t, uint32_t), (override)); @@ -484,7 +459,7 @@ TEST_P(RtpSenderEgressTest, BitrateCallbacks) { } } -TEST_P(RtpSenderEgressTest, DoesNotPutNotRetransmittablePacketsInHistory) { +TEST_F(RtpSenderEgressTest, DoesNotPutNotRetransmittablePacketsInHistory) { std::unique_ptr sender = CreateRtpSenderEgress(); packet_history_.SetStorePacketsStatus( RtpPacketHistory::StorageMode::kStoreAndCull, 10); @@ -495,7 +470,7 @@ TEST_P(RtpSenderEgressTest, DoesNotPutNotRetransmittablePacketsInHistory) { EXPECT_FALSE(packet_history_.GetPacketState(packet->SequenceNumber())); } -TEST_P(RtpSenderEgressTest, PutsRetransmittablePacketsInHistory) { +TEST_F(RtpSenderEgressTest, PutsRetransmittablePacketsInHistory) { std::unique_ptr sender = CreateRtpSenderEgress(); packet_history_.SetStorePacketsStatus( 
RtpPacketHistory::StorageMode::kStoreAndCull, 10); @@ -506,7 +481,7 @@ TEST_P(RtpSenderEgressTest, PutsRetransmittablePacketsInHistory) { EXPECT_TRUE(packet_history_.GetPacketState(packet->SequenceNumber())); } -TEST_P(RtpSenderEgressTest, DoesNotPutNonMediaInHistory) { +TEST_F(RtpSenderEgressTest, DoesNotPutNonMediaInHistory) { std::unique_ptr sender = CreateRtpSenderEgress(); packet_history_.SetStorePacketsStatus( RtpPacketHistory::StorageMode::kStoreAndCull, 10); @@ -535,7 +510,7 @@ TEST_P(RtpSenderEgressTest, DoesNotPutNonMediaInHistory) { EXPECT_FALSE(packet_history_.GetPacketState(padding->SequenceNumber())); } -TEST_P(RtpSenderEgressTest, UpdatesSendStatusOfRetransmittedPackets) { +TEST_F(RtpSenderEgressTest, UpdatesSendStatusOfRetransmittedPackets) { std::unique_ptr sender = CreateRtpSenderEgress(); packet_history_.SetStorePacketsStatus( RtpPacketHistory::StorageMode::kStoreAndCull, 10); @@ -559,7 +534,7 @@ TEST_P(RtpSenderEgressTest, UpdatesSendStatusOfRetransmittedPackets) { EXPECT_TRUE(packet_history_.GetPacketState(media_packet->SequenceNumber())); } -TEST_P(RtpSenderEgressTest, StreamDataCountersCallbacks) { +TEST_F(RtpSenderEgressTest, StreamDataCountersCallbacks) { std::unique_ptr sender = CreateRtpSenderEgress(); const RtpPacketCounter kEmptyCounter; @@ -644,7 +619,7 @@ TEST_P(RtpSenderEgressTest, StreamDataCountersCallbacks) { time_controller_.AdvanceTime(TimeDelta::Zero()); } -TEST_P(RtpSenderEgressTest, StreamDataCountersCallbacksFec) { +TEST_F(RtpSenderEgressTest, StreamDataCountersCallbacksFec) { std::unique_ptr sender = CreateRtpSenderEgress(); const RtpPacketCounter kEmptyCounter; @@ -694,7 +669,7 @@ TEST_P(RtpSenderEgressTest, StreamDataCountersCallbacksFec) { time_controller_.AdvanceTime(TimeDelta::Zero()); } -TEST_P(RtpSenderEgressTest, UpdatesDataCounters) { +TEST_F(RtpSenderEgressTest, UpdatesDataCounters) { std::unique_ptr sender = CreateRtpSenderEgress(); const RtpPacketCounter kEmptyCounter; @@ -735,7 +710,7 @@ TEST_P(RtpSenderEgressTest, UpdatesDataCounters) { EXPECT_EQ(rtx_stats.fec, kEmptyCounter); } -TEST_P(RtpSenderEgressTest, SendPacketUpdatesExtensions) { +TEST_F(RtpSenderEgressTest, SendPacketUpdatesExtensions) { header_extensions_.RegisterByUri(kVideoTimingExtensionId, VideoTimingExtension::Uri()); header_extensions_.RegisterByUri(kAbsoluteSendTimeExtensionId, @@ -764,7 +739,7 @@ TEST_P(RtpSenderEgressTest, SendPacketUpdatesExtensions) { EXPECT_EQ(timing.pacer_exit_delta_ms, kDiffMs); } -TEST_P(RtpSenderEgressTest, SendPacketSetsPacketOptions) { +TEST_F(RtpSenderEgressTest, SendPacketSetsPacketOptions) { const uint16_t kPacketId = 42; std::unique_ptr sender = CreateRtpSenderEgress(); header_extensions_.RegisterByUri(kTransportSequenceNumberExtensionId, @@ -791,7 +766,7 @@ TEST_P(RtpSenderEgressTest, SendPacketSetsPacketOptions) { EXPECT_TRUE(transport_.last_packet()->options.is_retransmit); } -TEST_P(RtpSenderEgressTest, SendPacketUpdatesStats) { +TEST_F(RtpSenderEgressTest, SendPacketUpdatesStats) { const size_t kPayloadSize = 1000; StrictMock send_side_delay_observer; @@ -856,7 +831,7 @@ TEST_P(RtpSenderEgressTest, SendPacketUpdatesStats) { EXPECT_EQ(rtx_stats.retransmitted.packets, 1u); } -TEST_P(RtpSenderEgressTest, TransportFeedbackObserverWithRetransmission) { +TEST_F(RtpSenderEgressTest, TransportFeedbackObserverWithRetransmission) { const uint16_t kTransportSequenceNumber = 17; header_extensions_.RegisterByUri(kTransportSequenceNumberExtensionId, TransportSequenceNumber::Uri()); @@ -878,7 +853,7 @@ TEST_P(RtpSenderEgressTest, 
TransportFeedbackObserverWithRetransmission) { sender->SendPacket(retransmission.get(), PacedPacketInfo()); } -TEST_P(RtpSenderEgressTest, TransportFeedbackObserverWithRtxRetransmission) { +TEST_F(RtpSenderEgressTest, TransportFeedbackObserverWithRtxRetransmission) { const uint16_t kTransportSequenceNumber = 17; header_extensions_.RegisterByUri(kTransportSequenceNumberExtensionId, TransportSequenceNumber::Uri()); @@ -902,7 +877,7 @@ TEST_P(RtpSenderEgressTest, TransportFeedbackObserverWithRtxRetransmission) { sender->SendPacket(rtx_retransmission.get(), PacedPacketInfo()); } -TEST_P(RtpSenderEgressTest, TransportFeedbackObserverPadding) { +TEST_F(RtpSenderEgressTest, TransportFeedbackObserverPadding) { const uint16_t kTransportSequenceNumber = 17; header_extensions_.RegisterByUri(kTransportSequenceNumberExtensionId, TransportSequenceNumber::Uri()); @@ -920,7 +895,7 @@ TEST_P(RtpSenderEgressTest, TransportFeedbackObserverPadding) { sender->SendPacket(padding.get(), PacedPacketInfo()); } -TEST_P(RtpSenderEgressTest, TransportFeedbackObserverRtxPadding) { +TEST_F(RtpSenderEgressTest, TransportFeedbackObserverRtxPadding) { const uint16_t kTransportSequenceNumber = 17; header_extensions_.RegisterByUri(kTransportSequenceNumberExtensionId, TransportSequenceNumber::Uri()); @@ -940,7 +915,7 @@ TEST_P(RtpSenderEgressTest, TransportFeedbackObserverRtxPadding) { sender->SendPacket(rtx_padding.get(), PacedPacketInfo()); } -TEST_P(RtpSenderEgressTest, TransportFeedbackObserverFec) { +TEST_F(RtpSenderEgressTest, TransportFeedbackObserverFec) { const uint16_t kTransportSequenceNumber = 17; header_extensions_.RegisterByUri(kTransportSequenceNumberExtensionId, TransportSequenceNumber::Uri()); @@ -965,7 +940,7 @@ TEST_P(RtpSenderEgressTest, TransportFeedbackObserverFec) { sender->SendPacket(fec_packet.get(), PacedPacketInfo()); } -TEST_P(RtpSenderEgressTest, SupportsAbortingRetransmissions) { +TEST_F(RtpSenderEgressTest, SupportsAbortingRetransmissions) { std::unique_ptr sender = CreateRtpSenderEgress(); packet_history_.SetStorePacketsStatus( RtpPacketHistory::StorageMode::kStoreAndCull, 10); @@ -992,9 +967,4 @@ TEST_P(RtpSenderEgressTest, SupportsAbortingRetransmissions) { EXPECT_TRUE(packet_history_.GetPacketAndMarkAsPending(media_sequence_number)); } -INSTANTIATE_TEST_SUITE_P(WithAndWithoutOverhead, - RtpSenderEgressTest, - ::testing::Values(TestConfig(false), - TestConfig(true))); - } // namespace webrtc diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc index ea9277f612c5..c9e98ff3c3f9 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc @@ -102,6 +102,7 @@ class MockRtpPacketPacer : public RtpPacketSender { EnqueuePackets, (std::vector>), (override)); + MOCK_METHOD(void, RemovePacketsForSsrc, (uint32_t), (override)); }; } // namespace diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video.cc index 3a583c684c86..e1ac4e41c3ac 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video.cc +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video.cc @@ -525,32 +525,41 @@ bool RTPSenderVideo::SendVideo( (use_fec ? FecPacketOverhead() : 0) - (rtp_sender_->RtxStatus() ? 
kRtxHeaderSize : 0); + absl::optional capture_time; + if (capture_time_ms > 0) { + capture_time = Timestamp::Millis(capture_time_ms); + } + std::unique_ptr single_packet = rtp_sender_->AllocatePacket(); RTC_DCHECK_LE(packet_capacity, single_packet->capacity()); single_packet->SetPayloadType(payload_type); single_packet->SetTimestamp(rtp_timestamp); - single_packet->set_capture_time(Timestamp::Millis(capture_time_ms)); + if (capture_time) + single_packet->set_capture_time(*capture_time); // Construct the absolute capture time extension if not provided. - if (!video_header.absolute_capture_time.has_value()) { + if (!video_header.absolute_capture_time.has_value() && + capture_time.has_value()) { video_header.absolute_capture_time.emplace(); video_header.absolute_capture_time->absolute_capture_timestamp = Int64MsToUQ32x32( - clock_->ConvertTimestampToNtpTimeInMilliseconds(capture_time_ms)); + clock_->ConvertTimestampToNtpTime(*capture_time).ToMs()); if (include_capture_clock_offset_) { video_header.absolute_capture_time->estimated_capture_clock_offset = 0; } } // Let `absolute_capture_time_sender_` decide if the extension should be sent. - video_header.absolute_capture_time = - absolute_capture_time_sender_.OnSendPacket( - AbsoluteCaptureTimeSender::GetSource(single_packet->Ssrc(), - single_packet->Csrcs()), - single_packet->Timestamp(), kVideoPayloadTypeFrequency, - video_header.absolute_capture_time->absolute_capture_timestamp, - video_header.absolute_capture_time->estimated_capture_clock_offset); + if (video_header.absolute_capture_time.has_value()) { + video_header.absolute_capture_time = + absolute_capture_time_sender_.OnSendPacket( + AbsoluteCaptureTimeSender::GetSource(single_packet->Ssrc(), + single_packet->Csrcs()), + single_packet->Timestamp(), kVideoPayloadTypeFrequency, + video_header.absolute_capture_time->absolute_capture_timestamp, + video_header.absolute_capture_time->estimated_capture_clock_offset); + } auto first_packet = std::make_unique(*single_packet); auto middle_packet = std::make_unique(*single_packet); diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc index 3d6931fe232c..02194391af50 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc @@ -34,7 +34,7 @@ class TransformableVideoSenderFrame : public TransformableVideoFrameInterface { uint32_t ssrc) : encoded_data_(encoded_image.GetEncodedData()), header_(video_header), - metadata_(header_), + metadata_(header_.GetAsMetadata()), frame_type_(encoded_image._frameType), payload_type_(payload_type), codec_type_(codec_type), @@ -103,9 +103,9 @@ RTPSenderVideoFrameTransformerDelegate::RTPSenderVideoFrameTransformerDelegate( : sender_(sender), frame_transformer_(std::move(frame_transformer)), ssrc_(ssrc), - task_queue_factory_(task_queue_factory) { - RTC_DCHECK(task_queue_factory_); -} + transformation_queue_(task_queue_factory->CreateTaskQueue( + "video_frame_transformer", + TaskQueueFactory::Priority::NORMAL)) {} void RTPSenderVideoFrameTransformerDelegate::Init() { frame_transformer_->RegisterTransformedFrameSinkCallback( @@ -119,29 +119,6 @@ bool RTPSenderVideoFrameTransformerDelegate::TransformFrame( const EncodedImage& encoded_image, RTPVideoHeader video_header, absl::optional expected_retransmission_time_ms) { - 
TaskQueueBase* current = TaskQueueBase::Current(); - if (!encoder_queue_) { - // Save the current task queue to post the transformed frame for sending - // once it is transformed. When there is no current task queue, i.e. - // encoding is done on an external thread (for example in the case of - // hardware encoders), create a new task queue. - if (current) { - encoder_queue_ = current; - } else { - owned_encoder_queue_ = task_queue_factory_->CreateTaskQueue( - "video_frame_transformer", TaskQueueFactory::Priority::NORMAL); - encoder_queue_ = owned_encoder_queue_.get(); - } - } - // DCHECK that the current queue does not change, or if does then it was due - // to a hardware encoder fallback and thus there is an owned queue. - RTC_DCHECK(!current || current == encoder_queue_ || owned_encoder_queue_) - << "Current thread must either be an external thread (nullptr) or be the " - "same as the previous encoder queue. The current thread is " - << (current ? "non-null" : "nullptr") << " and the encoder thread is " - << (current == encoder_queue_ ? "the same queue." - : "not the same queue."); - frame_transformer_->Transform(std::make_unique( encoded_image, video_header, payload_type, codec_type, rtp_timestamp, expected_retransmission_time_ms, ssrc_)); @@ -152,22 +129,20 @@ void RTPSenderVideoFrameTransformerDelegate::OnTransformedFrame( std::unique_ptr frame) { MutexLock lock(&sender_lock_); - // The encoder queue normally gets destroyed after the sender; - // however, it might still be null by the time a previously queued frame - // arrives. - if (!sender_ || !encoder_queue_) + if (!sender_) { return; + } rtc::scoped_refptr delegate(this); - encoder_queue_->PostTask( + transformation_queue_->PostTask( [delegate = std::move(delegate), frame = std::move(frame)]() mutable { - RTC_DCHECK_RUN_ON(delegate->encoder_queue_); + RTC_DCHECK_RUN_ON(delegate->transformation_queue_.get()); delegate->SendVideo(std::move(frame)); }); } void RTPSenderVideoFrameTransformerDelegate::SendVideo( std::unique_ptr transformed_frame) const { - RTC_DCHECK_RUN_ON(encoder_queue_); + RTC_DCHECK_RUN_ON(transformation_queue_.get()); RTC_CHECK_EQ(transformed_frame->GetDirection(), TransformableFrameInterface::Direction::kSender); MutexLock lock(&sender_lock_); @@ -206,4 +181,37 @@ void RTPSenderVideoFrameTransformerDelegate::Reset() { sender_ = nullptr; } } + +std::unique_ptr CloneSenderVideoFrame( + TransformableVideoFrameInterface* original) { + auto encoded_image_buffer = EncodedImageBuffer::Create( + original->GetData().data(), original->GetData().size()); + EncodedImage encoded_image; + encoded_image.SetEncodedData(encoded_image_buffer); + RTPVideoHeader new_header; + absl::optional new_codec_type; + // TODO(bugs.webrtc.org/14708): Figure out a way to get the header information + // without casting to TransformableVideoSenderFrame. + if (original->GetDirection() == + TransformableFrameInterface::Direction::kSender) { + // TODO(bugs.webrtc.org/14708): Figure out a way to bulletproof this cast. 
+ auto original_as_sender = + static_cast(original); + new_header = original_as_sender->GetHeader(); + new_codec_type = original_as_sender->GetCodecType(); + } else { + // TODO(bugs.webrtc.org/14708): Make this codec dependent + new_header.video_type_header.emplace(); + new_codec_type = kVideoCodecVP8; + // TODO(bugs.webrtc.org/14708): Fill in the new_header when it's not + // `Direction::kSender` + } + // TODO(bugs.webrtc.org/14708): Fill in other EncodedImage parameters + return std::make_unique( + encoded_image, new_header, original->GetPayloadType(), new_codec_type, + original->GetTimestamp(), + absl::nullopt, // expected_retransmission_time_ms + original->GetSsrc()); +} + } // namespace webrtc diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h index 65d6d3f6cd59..55f7961e2ded 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h @@ -53,7 +53,7 @@ class RTPSenderVideoFrameTransformerDelegate : public TransformedFrameCallback { // Delegates the call to RTPSendVideo::SendVideo on the `encoder_queue_`. void SendVideo(std::unique_ptr frame) const - RTC_RUN_ON(encoder_queue_); + RTC_RUN_ON(transformation_queue_); // Delegates the call to RTPSendVideo::SetVideoStructureAfterTransformation // under `sender_lock_`. @@ -74,17 +74,21 @@ class RTPSenderVideoFrameTransformerDelegate : public TransformedFrameCallback { ~RTPSenderVideoFrameTransformerDelegate() override = default; private: + void EnsureEncoderQueueCreated(); + mutable Mutex sender_lock_; RTPSenderVideo* sender_ RTC_GUARDED_BY(sender_lock_); rtc::scoped_refptr frame_transformer_; const uint32_t ssrc_; - TaskQueueBase* encoder_queue_ = nullptr; - TaskQueueFactory* task_queue_factory_; // Used when the encoded frames arrives without a current task queue. This can // happen if a hardware encoder was used. 
- std::unique_ptr owned_encoder_queue_; + std::unique_ptr transformation_queue_; }; +// Method to support cloning a Sender frame from another frame +std::unique_ptr CloneSenderVideoFrame( + TransformableVideoFrameInterface* original); + } // namespace webrtc #endif // MODULES_RTP_RTCP_SOURCE_RTP_SENDER_VIDEO_FRAME_TRANSFORMER_DELEGATE_H_ diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video_unittest.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video_unittest.cc index 13527128c900..72dfd0238d39 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video_unittest.cc +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video_unittest.cc @@ -17,6 +17,7 @@ #include "absl/memory/memory.h" #include "api/field_trials_registry.h" +#include "api/frame_transformer_factory.h" #include "api/rtp_headers.h" #include "api/task_queue/task_queue_base.h" #include "api/task_queue/task_queue_factory.h" @@ -38,6 +39,7 @@ #include "modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.h" #include "rtc_base/arraysize.h" #include "rtc_base/checks.h" +#include "rtc_base/logging.h" #include "rtc_base/rate_limiter.h" #include "rtc_base/thread.h" #include "test/gmock.h" @@ -151,9 +153,7 @@ class TestRtpSenderVideo : public RTPSenderVideo { class FieldTrials : public FieldTrialsRegistry { public: - explicit FieldTrials(bool use_send_side_bwe_with_overhead) - : use_send_side_bwe_with_overhead_(use_send_side_bwe_with_overhead), - include_capture_clock_offset_(false) {} + FieldTrials() : include_capture_clock_offset_(false) {} void set_include_capture_clock_offset(bool include_capture_clock_offset) { include_capture_clock_offset_ = include_capture_clock_offset; @@ -161,23 +161,19 @@ class FieldTrials : public FieldTrialsRegistry { private: std::string GetValue(absl::string_view key) const override { - if (key == "WebRTC-SendSideBwe-WithOverhead") { - return use_send_side_bwe_with_overhead_ ? "Enabled" : ""; - } else if (key == "WebRTC-IncludeCaptureClockOffset") { + if (key == "WebRTC-IncludeCaptureClockOffset") { return include_capture_clock_offset_ ? 
"" : "Disabled"; } return ""; } - bool use_send_side_bwe_with_overhead_; bool include_capture_clock_offset_; }; -class RtpSenderVideoTest : public ::testing::TestWithParam { +class RtpSenderVideoTest : public ::testing::Test { public: RtpSenderVideoTest() - : field_trials_(GetParam()), - fake_clock_(kStartTime), + : fake_clock_(kStartTime), retransmission_rate_limiter_(&fake_clock_, 1000), rtp_module_(ModuleRtpRtcpImpl2::Create([&] { RtpRtcpInterface::Configuration config; @@ -210,7 +206,7 @@ class RtpSenderVideoTest : public ::testing::TestWithParam { std::unique_ptr rtp_sender_video_; }; -TEST_P(RtpSenderVideoTest, KeyFrameHasCVO) { +TEST_F(RtpSenderVideoTest, KeyFrameHasCVO) { uint8_t kFrame[kMaxPacketLength]; rtp_module_->RegisterRtpHeaderExtension(VideoOrientation::Uri(), kVideoRotationExtensionId); @@ -227,7 +223,7 @@ TEST_P(RtpSenderVideoTest, KeyFrameHasCVO) { EXPECT_EQ(kVideoRotation_0, rotation); } -TEST_P(RtpSenderVideoTest, TimingFrameHasPacketizationTimstampSet) { +TEST_F(RtpSenderVideoTest, TimingFrameHasPacketizationTimstampSet) { uint8_t kFrame[kMaxPacketLength]; const int64_t kPacketizationTimeMs = 100; const int64_t kEncodeStartDeltaMs = 10; @@ -255,7 +251,7 @@ TEST_P(RtpSenderVideoTest, TimingFrameHasPacketizationTimstampSet) { EXPECT_EQ(kEncodeFinishDeltaMs, timing.encode_finish_delta_ms); } -TEST_P(RtpSenderVideoTest, DeltaFrameHasCVOWhenChanged) { +TEST_F(RtpSenderVideoTest, DeltaFrameHasCVOWhenChanged) { uint8_t kFrame[kMaxPacketLength]; rtp_module_->RegisterRtpHeaderExtension(VideoOrientation::Uri(), kVideoRotationExtensionId); @@ -279,7 +275,7 @@ TEST_P(RtpSenderVideoTest, DeltaFrameHasCVOWhenChanged) { EXPECT_EQ(kVideoRotation_0, rotation); } -TEST_P(RtpSenderVideoTest, DeltaFrameHasCVOWhenNonZero) { +TEST_F(RtpSenderVideoTest, DeltaFrameHasCVOWhenNonZero) { uint8_t kFrame[kMaxPacketLength]; rtp_module_->RegisterRtpHeaderExtension(VideoOrientation::Uri(), kVideoRotationExtensionId); @@ -304,7 +300,7 @@ TEST_P(RtpSenderVideoTest, DeltaFrameHasCVOWhenNonZero) { // Make sure rotation is parsed correctly when the Camera (C) and Flip (F) bits // are set in the CVO byte. -TEST_P(RtpSenderVideoTest, SendVideoWithCameraAndFlipCVO) { +TEST_F(RtpSenderVideoTest, SendVideoWithCameraAndFlipCVO) { // Test extracting rotation when Camera (C) and Flip (F) bits are zero. 
EXPECT_EQ(kVideoRotation_0, ConvertCVOByteToVideoRotation(0)); EXPECT_EQ(kVideoRotation_90, ConvertCVOByteToVideoRotation(1)); @@ -323,7 +319,7 @@ TEST_P(RtpSenderVideoTest, SendVideoWithCameraAndFlipCVO) { ConvertCVOByteToVideoRotation(flip_bit | camera_bit | 3)); } -TEST_P(RtpSenderVideoTest, RetransmissionTypesGeneric) { +TEST_F(RtpSenderVideoTest, RetransmissionTypesGeneric) { RTPVideoHeader header; header.codec = kVideoCodecGeneric; @@ -338,7 +334,7 @@ TEST_P(RtpSenderVideoTest, RetransmissionTypesGeneric) { kDefaultExpectedRetransmissionTimeMs)); } -TEST_P(RtpSenderVideoTest, RetransmissionTypesH264) { +TEST_F(RtpSenderVideoTest, RetransmissionTypesH264) { RTPVideoHeader header; header.video_type_header.emplace().packetization_mode = H264PacketizationMode::NonInterleaved; @@ -355,7 +351,7 @@ TEST_P(RtpSenderVideoTest, RetransmissionTypesH264) { kDefaultExpectedRetransmissionTimeMs)); } -TEST_P(RtpSenderVideoTest, RetransmissionTypesVP8BaseLayer) { +TEST_F(RtpSenderVideoTest, RetransmissionTypesVP8BaseLayer) { RTPVideoHeader header; header.codec = kVideoCodecVP8; auto& vp8_header = header.video_type_header.emplace(); @@ -378,7 +374,7 @@ TEST_P(RtpSenderVideoTest, RetransmissionTypesVP8BaseLayer) { kDefaultExpectedRetransmissionTimeMs)); } -TEST_P(RtpSenderVideoTest, RetransmissionTypesVP8HigherLayers) { +TEST_F(RtpSenderVideoTest, RetransmissionTypesVP8HigherLayers) { RTPVideoHeader header; header.codec = kVideoCodecVP8; @@ -398,7 +394,7 @@ TEST_P(RtpSenderVideoTest, RetransmissionTypesVP8HigherLayers) { } } -TEST_P(RtpSenderVideoTest, RetransmissionTypesVP9) { +TEST_F(RtpSenderVideoTest, RetransmissionTypesVP9) { RTPVideoHeader header; header.codec = kVideoCodecVP9; @@ -418,7 +414,7 @@ TEST_P(RtpSenderVideoTest, RetransmissionTypesVP9) { } } -TEST_P(RtpSenderVideoTest, ConditionalRetransmit) { +TEST_F(RtpSenderVideoTest, ConditionalRetransmit) { const int64_t kFrameIntervalMs = 33; const int64_t kRttMs = (kFrameIntervalMs * 3) / 2; const uint8_t kSettings = @@ -476,7 +472,7 @@ TEST_P(RtpSenderVideoTest, ConditionalRetransmit) { rtp_sender_video_->AllowRetransmission(header, kSettings, kRttMs)); } -TEST_P(RtpSenderVideoTest, ConditionalRetransmitLimit) { +TEST_F(RtpSenderVideoTest, ConditionalRetransmitLimit) { const int64_t kFrameIntervalMs = 200; const int64_t kRttMs = (kFrameIntervalMs * 3) / 2; const int32_t kSettings = @@ -509,7 +505,7 @@ TEST_P(RtpSenderVideoTest, ConditionalRetransmitLimit) { rtp_sender_video_->AllowRetransmission(header, kSettings, kRttMs)); } -TEST_P(RtpSenderVideoTest, SendsDependencyDescriptorWhenVideoStructureIsSet) { +TEST_F(RtpSenderVideoTest, SendsDependencyDescriptorWhenVideoStructureIsSet) { const int64_t kFrameId = 100000; uint8_t kFrame[100]; rtp_module_->RegisterRtpHeaderExtension( @@ -577,7 +573,7 @@ TEST_P(RtpSenderVideoTest, SendsDependencyDescriptorWhenVideoStructureIsSet) { ElementsAre(1, 501)); } -TEST_P(RtpSenderVideoTest, +TEST_F(RtpSenderVideoTest, SkipsDependencyDescriptorOnDeltaFrameWhenFailedToAttachToKeyFrame) { const int64_t kFrameId = 100000; uint8_t kFrame[100]; @@ -631,7 +627,7 @@ TEST_P(RtpSenderVideoTest, .HasExtension()); } -TEST_P(RtpSenderVideoTest, PropagatesChainDiffsIntoDependencyDescriptor) { +TEST_F(RtpSenderVideoTest, PropagatesChainDiffsIntoDependencyDescriptor) { const int64_t kFrameId = 100000; uint8_t kFrame[100]; rtp_module_->RegisterRtpHeaderExtension( @@ -664,7 +660,7 @@ TEST_P(RtpSenderVideoTest, PropagatesChainDiffsIntoDependencyDescriptor) { ContainerEq(generic.chain_diffs)); } -TEST_P(RtpSenderVideoTest, 
+TEST_F(RtpSenderVideoTest, PropagatesActiveDecodeTargetsIntoDependencyDescriptor) { const int64_t kFrameId = 100000; uint8_t kFrame[100]; @@ -698,7 +694,7 @@ TEST_P(RtpSenderVideoTest, EXPECT_EQ(descriptor_key.active_decode_targets_bitmask, 0b01u); } -TEST_P(RtpSenderVideoTest, +TEST_F(RtpSenderVideoTest, SetDiffentVideoStructureAvoidsCollisionWithThePreviousStructure) { const int64_t kFrameId = 100000; uint8_t kFrame[100]; @@ -772,7 +768,7 @@ TEST_P(RtpSenderVideoTest, descriptor_key2.attached_structure.get(), &descriptor_delta)); } -TEST_P(RtpSenderVideoTest, +TEST_F(RtpSenderVideoTest, AuthenticateVideoHeaderWhenDependencyDescriptorExtensionIsUsed) { static constexpr size_t kFrameSize = 100; uint8_t kFrame[kFrameSize] = {1, 2, 3, 4}; @@ -815,7 +811,7 @@ TEST_P(RtpSenderVideoTest, .HasExtension()); } -TEST_P(RtpSenderVideoTest, PopulateGenericFrameDescriptor) { +TEST_F(RtpSenderVideoTest, PopulateGenericFrameDescriptor) { const int64_t kFrameId = 100000; uint8_t kFrame[100]; rtp_module_->RegisterRtpHeaderExtension( @@ -872,17 +868,17 @@ void RtpSenderVideoTest:: EXPECT_EQ(transport_.last_sent_packet().payload_size(), 1 + kFrameSize); } -TEST_P(RtpSenderVideoTest, +TEST_F(RtpSenderVideoTest, UsesMinimalVp8DescriptorWhenGenericFrameDescriptorExtensionIsUsed00) { UsesMinimalVp8DescriptorWhenGenericFrameDescriptorExtensionIsUsed(0); } -TEST_P(RtpSenderVideoTest, +TEST_F(RtpSenderVideoTest, UsesMinimalVp8DescriptorWhenGenericFrameDescriptorExtensionIsUsed01) { UsesMinimalVp8DescriptorWhenGenericFrameDescriptorExtensionIsUsed(1); } -TEST_P(RtpSenderVideoTest, VideoLayersAllocationWithResolutionSentOnKeyFrames) { +TEST_F(RtpSenderVideoTest, VideoLayersAllocationWithResolutionSentOnKeyFrames) { const size_t kFrameSize = 100; uint8_t kFrame[kFrameSize]; rtp_module_->RegisterRtpHeaderExtension( @@ -918,7 +914,7 @@ TEST_P(RtpSenderVideoTest, VideoLayersAllocationWithResolutionSentOnKeyFrames) { .GetExtension(&sent_allocation)); } -TEST_P(RtpSenderVideoTest, +TEST_F(RtpSenderVideoTest, VideoLayersAllocationWithoutResolutionSentOnDeltaWhenUpdated) { const size_t kFrameSize = 100; uint8_t kFrame[kFrameSize]; @@ -966,7 +962,7 @@ TEST_P(RtpSenderVideoTest, SizeIs(1)); } -TEST_P(RtpSenderVideoTest, +TEST_F(RtpSenderVideoTest, VideoLayersAllocationWithResolutionSentOnDeltaWhenSpatialLayerAdded) { const size_t kFrameSize = 100; uint8_t kFrame[kFrameSize]; @@ -1012,7 +1008,7 @@ TEST_P(RtpSenderVideoTest, EXPECT_TRUE(sent_allocation.resolution_and_frame_rate_is_valid); } -TEST_P(RtpSenderVideoTest, +TEST_F(RtpSenderVideoTest, VideoLayersAllocationWithResolutionSentOnLargeFrameRateChange) { const size_t kFrameSize = 100; uint8_t kFrame[kFrameSize]; @@ -1054,7 +1050,7 @@ TEST_P(RtpSenderVideoTest, EXPECT_EQ(sent_allocation.active_spatial_layers[0].frame_rate_fps, 20); } -TEST_P(RtpSenderVideoTest, +TEST_F(RtpSenderVideoTest, VideoLayersAllocationWithoutResolutionSentOnSmallFrameRateChange) { const size_t kFrameSize = 100; uint8_t kFrame[kFrameSize]; @@ -1095,7 +1091,7 @@ TEST_P(RtpSenderVideoTest, EXPECT_FALSE(sent_allocation.resolution_and_frame_rate_is_valid); } -TEST_P(RtpSenderVideoTest, VideoLayersAllocationSentOnDeltaFramesOnlyOnUpdate) { +TEST_F(RtpSenderVideoTest, VideoLayersAllocationSentOnDeltaFramesOnlyOnUpdate) { const size_t kFrameSize = 100; uint8_t kFrame[kFrameSize]; rtp_module_->RegisterRtpHeaderExtension( @@ -1137,7 +1133,7 @@ TEST_P(RtpSenderVideoTest, VideoLayersAllocationSentOnDeltaFramesOnlyOnUpdate) { .GetExtension(&sent_allocation)); } -TEST_P(RtpSenderVideoTest, 
VideoLayersAllocationNotSentOnHigherTemporalLayers) { +TEST_F(RtpSenderVideoTest, VideoLayersAllocationNotSentOnHigherTemporalLayers) { const size_t kFrameSize = 100; uint8_t kFrame[kFrameSize]; rtp_module_->RegisterRtpHeaderExtension( @@ -1173,7 +1169,7 @@ TEST_P(RtpSenderVideoTest, VideoLayersAllocationNotSentOnHigherTemporalLayers) { .HasExtension()); } -TEST_P(RtpSenderVideoTest, AbsoluteCaptureTime) { +TEST_F(RtpSenderVideoTest, AbsoluteCaptureTime) { constexpr int64_t kAbsoluteCaptureTimestampMs = 12345678; uint8_t kFrame[kMaxPacketLength]; rtp_module_->RegisterRtpHeaderExtension(AbsoluteCaptureTimeExtension::Uri(), @@ -1208,9 +1204,27 @@ TEST_P(RtpSenderVideoTest, AbsoluteCaptureTime) { absolute_capture_time->estimated_capture_clock_offset.has_value()); } +TEST_F(RtpSenderVideoTest, + AbsoluteCaptureTimeNotForwardedWhenImageHasNoCaptureTime) { + uint8_t kFrame[kMaxPacketLength]; + rtp_module_->RegisterRtpHeaderExtension(AbsoluteCaptureTimeExtension::Uri(), + kAbsoluteCaptureTimeExtensionId); + + RTPVideoHeader hdr; + hdr.frame_type = VideoFrameType::kVideoFrameKey; + rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, + /*capture_time_ms=*/0, kFrame, hdr, + kDefaultExpectedRetransmissionTimeMs); + // No absolute capture time should be set as the capture_time_ms was the + // default value. + for (const RtpPacketReceived& packet : transport_.sent_packets()) { + EXPECT_FALSE(packet.HasExtension()); + } +} + // Essentially the same test as AbsoluteCaptureTime but with a field trial. // After the field trial is experimented, we will remove AbsoluteCaptureTime. -TEST_P(RtpSenderVideoTest, AbsoluteCaptureTimeWithCaptureClockOffset) { +TEST_F(RtpSenderVideoTest, AbsoluteCaptureTimeWithCaptureClockOffset) { field_trials_.set_include_capture_clock_offset(true); rtp_sender_video_ = std::make_unique( &fake_clock_, rtp_module_->RtpSender(), field_trials_); @@ -1248,7 +1262,7 @@ TEST_P(RtpSenderVideoTest, AbsoluteCaptureTimeWithCaptureClockOffset) { EXPECT_EQ(absolute_capture_time->estimated_capture_clock_offset, 0); } -TEST_P(RtpSenderVideoTest, AbsoluteCaptureTimeWithExtensionProvided) { +TEST_F(RtpSenderVideoTest, AbsoluteCaptureTimeWithExtensionProvided) { constexpr AbsoluteCaptureTime kAbsoluteCaptureTime = { 123, absl::optional(456), @@ -1281,7 +1295,7 @@ TEST_P(RtpSenderVideoTest, AbsoluteCaptureTimeWithExtensionProvided) { EXPECT_EQ(absolute_capture_time, kAbsoluteCaptureTime); } -TEST_P(RtpSenderVideoTest, PopulatesPlayoutDelay) { +TEST_F(RtpSenderVideoTest, PopulatesPlayoutDelay) { // Single packet frames. 
constexpr size_t kPacketSize = 123; uint8_t kFrame[kPacketSize]; @@ -1338,7 +1352,7 @@ TEST_P(RtpSenderVideoTest, PopulatesPlayoutDelay) { EXPECT_EQ(received_delay, kExpectedDelay); } -TEST_P(RtpSenderVideoTest, SendGenericVideo) { +TEST_F(RtpSenderVideoTest, SendGenericVideo) { const uint8_t kPayloadType = 127; const VideoCodecType kCodecType = VideoCodecType::kVideoCodecGeneric; const uint8_t kPayload[] = {47, 11, 32, 93, 89}; @@ -1371,7 +1385,7 @@ TEST_P(RtpSenderVideoTest, SendGenericVideo) { EXPECT_THAT(sent_payload.subview(1), ElementsAreArray(kDeltaPayload)); } -TEST_P(RtpSenderVideoTest, SendRawVideo) { +TEST_F(RtpSenderVideoTest, SendRawVideo) { const uint8_t kPayloadType = 111; const uint8_t kPayload[] = {11, 22, 33, 44, 55}; @@ -1387,10 +1401,6 @@ TEST_P(RtpSenderVideoTest, SendRawVideo) { EXPECT_THAT(sent_payload, ElementsAreArray(kPayload)); } -INSTANTIATE_TEST_SUITE_P(WithAndWithoutOverhead, - RtpSenderVideoTest, - ::testing::Bool()); - class RtpSenderVideoWithFrameTransformerTest : public ::testing::Test { public: RtpSenderVideoWithFrameTransformerTest() @@ -1534,75 +1544,6 @@ TEST_F(RtpSenderVideoWithFrameTransformerTest, OnTransformedFrameSendsVideo) { EXPECT_EQ(transport_.packets_sent(), 2); } -// Task queue which behaves as if it was a hardware encoder thread where no -// CurrentTaskQueue is set. -class HardwareEncoderTaskQueue : public TaskQueueBase { - public: - HardwareEncoderTaskQueue() = default; - - void Delete() override {} - void PostTask(absl::AnyInvocable task) override { - CurrentTaskQueueSetter null_setter(nullptr); - std::move(task)(); - } - void PostDelayedTask(absl::AnyInvocable task, - TimeDelta delay) override { - // Not implemented. - RTC_CHECK_NOTREACHED(); - } - void PostDelayedHighPrecisionTask(absl::AnyInvocable task, - TimeDelta delay) override { - // Not implemented. - RTC_CHECK_NOTREACHED(); - } -}; - -TEST_F(RtpSenderVideoWithFrameTransformerTest, - OnTransformedFrameSendsVideoOnNewQueueForHwEncoders) { - auto mock_frame_transformer = - rtc::make_ref_counted>(); - rtc::scoped_refptr callback; - EXPECT_CALL(*mock_frame_transformer, RegisterTransformedFrameSinkCallback) - .WillOnce(SaveArg<0>(&callback)); - std::unique_ptr rtp_sender_video = - CreateSenderWithFrameTransformer(mock_frame_transformer); - ASSERT_TRUE(callback); - - auto encoded_image = CreateDefaultEncodedImage(); - RTPVideoHeader video_header; - video_header.frame_type = VideoFrameType::kVideoFrameKey; - ON_CALL(*mock_frame_transformer, Transform) - .WillByDefault( - [&callback](std::unique_ptr frame) { - callback->OnTransformedFrame(std::move(frame)); - }); - - // Hardware encoder task queue has no TaskQueue::Current() set, and so a new - // task queue should be created to handle the callback. - HardwareEncoderTaskQueue hw_encoder_queue; - hw_encoder_queue.PostTask([&] { - rtp_sender_video->SendEncodedImage(kPayload, kType, kTimestamp, - *encoded_image, video_header, - kDefaultExpectedRetransmissionTimeMs); - }); - - // No packets sent yet since a task should be posted onto a new task queue. - EXPECT_EQ(transport_.packets_sent(), 0); - time_controller_.AdvanceTime(TimeDelta::Zero()); - EXPECT_EQ(transport_.packets_sent(), 1); - - // Check software encoder fallback. 
- auto encoder_queue = time_controller_.GetTaskQueueFactory()->CreateTaskQueue( - "encoder_queue", TaskQueueFactory::Priority::NORMAL); - encoder_queue->PostTask([&] { - rtp_sender_video->SendEncodedImage(kPayload, kType, kTimestamp, - *encoded_image, video_header, - kDefaultExpectedRetransmissionTimeMs); - }); - time_controller_.AdvanceTime(TimeDelta::Zero()); - EXPECT_EQ(transport_.packets_sent(), 2); -} - TEST_F(RtpSenderVideoWithFrameTransformerTest, TransformableFrameMetadataHasCorrectValue) { auto mock_frame_transformer = @@ -1645,5 +1586,45 @@ TEST_F(RtpSenderVideoWithFrameTransformerTest, kDefaultExpectedRetransmissionTimeMs); } +TEST_F(RtpSenderVideoWithFrameTransformerTest, + OnTransformedFrameSendsVideoWhenCloned) { + auto mock_frame_transformer = + rtc::make_ref_counted>(); + rtc::scoped_refptr callback; + EXPECT_CALL(*mock_frame_transformer, RegisterTransformedFrameSinkCallback) + .WillOnce(SaveArg<0>(&callback)); + std::unique_ptr rtp_sender_video = + CreateSenderWithFrameTransformer(mock_frame_transformer); + ASSERT_TRUE(callback); + + auto encoded_image = CreateDefaultEncodedImage(); + RTPVideoHeader video_header; + video_header.frame_type = VideoFrameType::kVideoFrameKey; + ON_CALL(*mock_frame_transformer, Transform) + .WillByDefault( + [&callback](std::unique_ptr frame) { + auto clone = CloneVideoFrame( + static_cast(frame.get())); + EXPECT_TRUE(clone); + callback->OnTransformedFrame(std::move(clone)); + }); + auto encoder_queue = time_controller_.GetTaskQueueFactory()->CreateTaskQueue( + "encoder_queue", TaskQueueFactory::Priority::NORMAL); + encoder_queue->PostTask([&] { + rtp_sender_video->SendEncodedImage(kPayload, kType, kTimestamp, + *encoded_image, video_header, + kDefaultExpectedRetransmissionTimeMs); + }); + time_controller_.AdvanceTime(TimeDelta::Zero()); + EXPECT_EQ(transport_.packets_sent(), 1); + encoder_queue->PostTask([&] { + rtp_sender_video->SendEncodedImage(kPayload, kType, kTimestamp, + *encoded_image, video_header, + kDefaultExpectedRetransmissionTimeMs); + }); + time_controller_.AdvanceTime(TimeDelta::Zero()); + EXPECT_EQ(transport_.packets_sent(), 2); +} + } // namespace } // namespace webrtc diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_video_header.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_video_header.cc index bb9413ddd5e2..1da43eec2d1e 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_video_header.cc +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_video_header.cc @@ -12,13 +12,33 @@ namespace webrtc { -RTPVideoHeader::RTPVideoHeader() : video_timing() {} -RTPVideoHeader::RTPVideoHeader(const RTPVideoHeader& other) = default; -RTPVideoHeader::~RTPVideoHeader() = default; - RTPVideoHeader::GenericDescriptorInfo::GenericDescriptorInfo() = default; RTPVideoHeader::GenericDescriptorInfo::GenericDescriptorInfo( const GenericDescriptorInfo& other) = default; RTPVideoHeader::GenericDescriptorInfo::~GenericDescriptorInfo() = default; +RTPVideoHeader::RTPVideoHeader() : video_timing() {} +RTPVideoHeader::RTPVideoHeader(const RTPVideoHeader& other) = default; +RTPVideoHeader::~RTPVideoHeader() = default; + +VideoFrameMetadata RTPVideoHeader::GetAsMetadata() const { + VideoFrameMetadata metadata; + metadata.SetFrameType(frame_type); + metadata.SetWidth(width); + metadata.SetHeight(height); + metadata.SetRotation(rotation); + metadata.SetContentType(content_type); + if (generic) { + metadata.SetFrameId(generic->frame_id); + metadata.SetSpatialIndex(generic->spatial_index); + 
metadata.SetTemporalIndex(generic->temporal_index); + metadata.SetFrameDependencies(generic->dependencies); + metadata.SetDecodeTargetIndications(generic->decode_target_indications); + } + metadata.SetIsLastFrameInPicture(is_last_frame_in_picture); + metadata.SetSimulcastIdx(simulcastIdx); + metadata.SetCodec(codec); + return metadata; +} + } // namespace webrtc diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_video_header.h b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_video_header.h index 115b17d36dc2..c65bde9fab68 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_video_header.h +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_video_header.h @@ -21,6 +21,7 @@ #include "api/video/color_space.h" #include "api/video/video_codec_type.h" #include "api/video/video_content_type.h" +#include "api/video/video_frame_metadata.h" #include "api/video/video_frame_type.h" #include "api/video/video_rotation.h" #include "api/video/video_timing.h" @@ -62,6 +63,9 @@ struct RTPVideoHeader { ~RTPVideoHeader(); + // The subset of RTPVideoHeader that is exposed in the Insertable Streams API. + VideoFrameMetadata GetAsMetadata() const; + absl::optional generic; VideoFrameType frame_type = VideoFrameType::kEmptyFrame; diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_video_header_unittest.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_video_header_unittest.cc new file mode 100644 index 000000000000..c8439e1796b6 --- /dev/null +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_video_header_unittest.cc @@ -0,0 +1,166 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/rtp_rtcp/source/rtp_video_header.h" + +#include "api/video/video_frame_metadata.h" +#include "api/video/video_frame_type.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { + +using ::testing::ElementsAre; +using ::testing::IsEmpty; + +TEST(RTPVideoHeaderTest, GetAsMetadataGetFrameType) { + RTPVideoHeader video_header; + video_header.frame_type = VideoFrameType::kVideoFrameKey; + VideoFrameMetadata metadata = video_header.GetAsMetadata(); + EXPECT_EQ(metadata.GetFrameType(), VideoFrameType::kVideoFrameKey); +} + +TEST(RTPVideoHeaderTest, GetAsMetadataGetWidth) { + RTPVideoHeader video_header; + video_header.width = 1280u; + VideoFrameMetadata metadata = video_header.GetAsMetadata(); + EXPECT_EQ(metadata.GetWidth(), video_header.width); +} + +TEST(RTPVideoHeaderTest, GetAsMetadataGetHeight) { + RTPVideoHeader video_header; + video_header.height = 720u; + VideoFrameMetadata metadata = video_header.GetAsMetadata(); + EXPECT_EQ(metadata.GetHeight(), video_header.height); +} + +TEST(RTPVideoHeaderTest, GetAsMetadataGetRotation) { + RTPVideoHeader video_header; + video_header.rotation = VideoRotation::kVideoRotation_90; + VideoFrameMetadata metadata = video_header.GetAsMetadata(); + EXPECT_EQ(metadata.GetRotation(), VideoRotation::kVideoRotation_90); +} + +TEST(RTPVideoHeaderTest, GetAsMetadataGetContentType) { + RTPVideoHeader video_header; + video_header.content_type = VideoContentType::SCREENSHARE; + VideoFrameMetadata metadata = video_header.GetAsMetadata(); + EXPECT_EQ(metadata.GetContentType(), VideoContentType::SCREENSHARE); +} + +TEST(RTPVideoHeaderTest, GetAsMetadataGetFrameId) { + RTPVideoHeader video_header; + RTPVideoHeader::GenericDescriptorInfo& generic = + video_header.generic.emplace(); + generic.frame_id = 10; + VideoFrameMetadata metadata = video_header.GetAsMetadata(); + EXPECT_EQ(metadata.GetFrameId().value(), 10); +} + +TEST(RTPVideoHeaderTest, GetAsMetadataHasNoFrameIdForHeaderWithoutGeneric) { + RTPVideoHeader video_header; + VideoFrameMetadata metadata = video_header.GetAsMetadata(); + ASSERT_FALSE(video_header.generic); + EXPECT_FALSE(metadata.GetFrameId().has_value()); +} + +TEST(RTPVideoHeaderTest, GetAsMetadataGetSpatialIndex) { + RTPVideoHeader video_header; + RTPVideoHeader::GenericDescriptorInfo& generic = + video_header.generic.emplace(); + generic.spatial_index = 2; + VideoFrameMetadata metadata = video_header.GetAsMetadata(); + EXPECT_EQ(metadata.GetSpatialIndex(), 2); +} + +TEST(RTPVideoHeaderTest, + GetAsMetadataSpatialIndexIsZeroForHeaderWithoutGeneric) { + RTPVideoHeader video_header; + VideoFrameMetadata metadata = video_header.GetAsMetadata(); + ASSERT_FALSE(video_header.generic); + EXPECT_EQ(metadata.GetSpatialIndex(), 0); +} + +TEST(RTPVideoHeaderTest, GetAsMetadataGetTemporalIndex) { + RTPVideoHeader video_header; + RTPVideoHeader::GenericDescriptorInfo& generic = + video_header.generic.emplace(); + generic.temporal_index = 3; + VideoFrameMetadata metadata = video_header.GetAsMetadata(); + EXPECT_EQ(metadata.GetTemporalIndex(), 3); +} + +TEST(RTPVideoHeaderTest, + GetAsMetadataTemporalIndexIsZeroForHeaderWithoutGeneric) { + RTPVideoHeader video_header; + VideoFrameMetadata metadata = video_header.GetAsMetadata(); + ASSERT_FALSE(video_header.generic); + EXPECT_EQ(metadata.GetTemporalIndex(), 0); +} + +TEST(RTPVideoHeaderTest, GetAsMetadataGetFrameDependencies) { + RTPVideoHeader video_header; + RTPVideoHeader::GenericDescriptorInfo& generic = + video_header.generic.emplace(); + 
generic.dependencies = {5, 6, 7}; + VideoFrameMetadata metadata = video_header.GetAsMetadata(); + EXPECT_THAT(metadata.GetFrameDependencies(), ElementsAre(5, 6, 7)); +} + +TEST(RTPVideoHeaderTest, + GetAsMetadataFrameDependencyIsEmptyForHeaderWithoutGeneric) { + RTPVideoHeader video_header; + VideoFrameMetadata metadata = video_header.GetAsMetadata(); + ASSERT_FALSE(video_header.generic); + EXPECT_THAT(metadata.GetFrameDependencies(), IsEmpty()); +} + +TEST(RTPVideoHeaderTest, GetAsMetadataGetDecodeTargetIndications) { + RTPVideoHeader video_header; + RTPVideoHeader::GenericDescriptorInfo& generic = + video_header.generic.emplace(); + generic.decode_target_indications = {DecodeTargetIndication::kSwitch}; + VideoFrameMetadata metadata = video_header.GetAsMetadata(); + EXPECT_THAT(metadata.GetDecodeTargetIndications(), + ElementsAre(DecodeTargetIndication::kSwitch)); +} + +TEST(RTPVideoHeaderTest, + GetAsMetadataGetDecodeTargetIndicationsIsEmptyForHeaderWithoutGeneric) { + RTPVideoHeader video_header; + VideoFrameMetadata metadata = video_header.GetAsMetadata(); + ASSERT_FALSE(video_header.generic); + EXPECT_THAT(metadata.GetDecodeTargetIndications(), IsEmpty()); +} + +TEST(RTPVideoHeaderTest, GetAsMetadataGetIsLastFrameInPicture) { + RTPVideoHeader video_header; + video_header.is_last_frame_in_picture = false; + VideoFrameMetadata metadata = video_header.GetAsMetadata(); + EXPECT_FALSE(metadata.GetIsLastFrameInPicture()); +} + +TEST(RTPVideoHeaderTest, GetAsMetadataGetSimulcastIdx) { + RTPVideoHeader video_header; + video_header.simulcastIdx = 123; + VideoFrameMetadata metadata = video_header.GetAsMetadata(); + EXPECT_EQ(metadata.GetSimulcastIdx(), 123); +} + +TEST(RTPVideoHeaderTest, GetAsMetadataGetCodec) { + RTPVideoHeader video_header; + video_header.codec = VideoCodecType::kVideoCodecVP9; + VideoFrameMetadata metadata = video_header.GetAsMetadata(); + EXPECT_EQ(metadata.GetCodec(), VideoCodecType::kVideoCodecVP9); +} + +} // namespace +} // namespace webrtc diff --git a/third_party/libwebrtc/modules/video_capture/device_info_impl.cc b/third_party/libwebrtc/modules/video_capture/device_info_impl.cc index 5313fe90be8e..2a6afb31473d 100644 --- a/third_party/libwebrtc/modules/video_capture/device_info_impl.cc +++ b/third_party/libwebrtc/modules/video_capture/device_info_impl.cc @@ -161,7 +161,8 @@ int32_t DeviceInfoImpl::GetBestMatchedCapability( (capability.videoType == requested.videoType || capability.videoType == VideoType::kI420 || capability.videoType == VideoType::kYUY2 || - capability.videoType == VideoType::kYV12)) { + capability.videoType == VideoType::kYV12 || + capability.videoType == VideoType::kNV12)) { bestVideoType = capability.videoType; bestformatIndex = tmp; } diff --git a/third_party/libwebrtc/modules/video_capture/linux/device_info_v4l2.cc b/third_party/libwebrtc/modules/video_capture/linux/device_info_v4l2.cc index 39852016d385..7651dd665116 100644 --- a/third_party/libwebrtc/modules/video_capture/linux/device_info_v4l2.cc +++ b/third_party/libwebrtc/modules/video_capture/linux/device_info_v4l2.cc @@ -391,9 +391,10 @@ int32_t DeviceInfoV4l2::FillCapabilities(int fd) { video_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; video_fmt.fmt.pix.sizeimage = 0; - int totalFmts = 4; + int totalFmts = 5; unsigned int videoFormats[] = {V4L2_PIX_FMT_MJPEG, V4L2_PIX_FMT_YUV420, - V4L2_PIX_FMT_YUYV, V4L2_PIX_FMT_UYVY}; + V4L2_PIX_FMT_YUYV, V4L2_PIX_FMT_UYVY, + V4L2_PIX_FMT_NV12}; int sizes = 13; unsigned int size[][2] = {{128, 96}, {160, 120}, {176, 144}, {320, 240}, @@ -421,6 +422,8 @@ 
int32_t DeviceInfoV4l2::FillCapabilities(int fd) { cap.videoType = VideoType::kMJPEG; } else if (videoFormats[fmts] == V4L2_PIX_FMT_UYVY) { cap.videoType = VideoType::kUYVY; + } else if (videoFormats[fmts] == V4L2_PIX_FMT_NV12) { + cap.videoType = VideoType::kNV12; } // get fps of current camera mode diff --git a/third_party/libwebrtc/modules/video_capture/linux/video_capture_v4l2.cc b/third_party/libwebrtc/modules/video_capture/linux/video_capture_v4l2.cc index d8b9351227a1..c7dcb722bca2 100644 --- a/third_party/libwebrtc/modules/video_capture/linux/video_capture_v4l2.cc +++ b/third_party/libwebrtc/modules/video_capture/linux/video_capture_v4l2.cc @@ -130,20 +130,22 @@ int32_t VideoCaptureModuleV4L2::StartCapture( // Supported video formats in preferred order. // If the requested resolution is larger than VGA, we prefer MJPEG. Go for // I420 otherwise. - const int nFormats = 5; + const int nFormats = 6; unsigned int fmts[nFormats]; if (capability.width > 640 || capability.height > 480) { fmts[0] = V4L2_PIX_FMT_MJPEG; fmts[1] = V4L2_PIX_FMT_YUV420; fmts[2] = V4L2_PIX_FMT_YUYV; fmts[3] = V4L2_PIX_FMT_UYVY; - fmts[4] = V4L2_PIX_FMT_JPEG; + fmts[4] = V4L2_PIX_FMT_NV12; + fmts[5] = V4L2_PIX_FMT_JPEG; } else { fmts[0] = V4L2_PIX_FMT_YUV420; fmts[1] = V4L2_PIX_FMT_YUYV; fmts[2] = V4L2_PIX_FMT_UYVY; - fmts[3] = V4L2_PIX_FMT_MJPEG; - fmts[4] = V4L2_PIX_FMT_JPEG; + fmts[3] = V4L2_PIX_FMT_NV12; + fmts[4] = V4L2_PIX_FMT_MJPEG; + fmts[5] = V4L2_PIX_FMT_JPEG; } // Enumerate image formats. @@ -188,6 +190,8 @@ int32_t VideoCaptureModuleV4L2::StartCapture( _captureVideoType = VideoType::kI420; else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_UYVY) _captureVideoType = VideoType::kUYVY; + else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_NV12) + _captureVideoType = VideoType::kNV12; else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG || video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_JPEG) _captureVideoType = VideoType::kMJPEG; diff --git a/third_party/libwebrtc/modules/video_coding/BUILD.gn b/third_party/libwebrtc/modules/video_coding/BUILD.gn index c29024628232..3f82a5fa4122 100644 --- a/third_party/libwebrtc/modules/video_coding/BUILD.gn +++ b/third_party/libwebrtc/modules/video_coding/BUILD.gn @@ -337,6 +337,7 @@ rtc_library("video_codec_interface") { ":codec_globals_headers", "../../api/video:video_frame", "../../api/video:video_rtp_headers", + "../../api/video_codecs:scalability_mode", "../../api/video_codecs:video_codecs_api", "../../common_video", "../../common_video/generic_frame_descriptor", @@ -522,6 +523,8 @@ rtc_library("webrtc_h264") { deps = [ ":video_codec_interface", ":video_coding_utility", + "../../api/transport/rtp:dependency_descriptor", + "../../api/video:video_codec_constants", "../../api/video:video_frame", "../../api/video:video_frame_i010", "../../api/video:video_rtp_headers", @@ -550,7 +553,6 @@ rtc_library("webrtc_h264") { if (rtc_use_h264) { deps += [ "//third_party/ffmpeg", - "//third_party/openh264:buildflags", "//third_party/openh264:encoder", ] if (!build_with_mozilla) { @@ -637,6 +639,7 @@ rtc_library("webrtc_vp8") { "../../api/video:encoded_image", "../../api/video:video_frame", "../../api/video:video_rtp_headers", + "../../api/video_codecs:scalability_mode", "../../api/video_codecs:video_codecs_api", "../../api/video_codecs:vp8_temporal_layers_factory", "../../common_video", @@ -784,6 +787,7 @@ rtc_library("webrtc_vp9") { "//third_party/abseil-cpp/absl/container:inlined_vector", "//third_party/abseil-cpp/absl/memory", 
"//third_party/abseil-cpp/absl/strings:strings", + "//third_party/abseil-cpp/absl/types:optional", ] if (rtc_build_libvpx) { deps += [ rtc_libvpx_dir ] @@ -879,6 +883,8 @@ if (rtc_include_tests) { rtc_library("video_codecs_test_framework") { testonly = true sources = [ + "codecs/test/video_codec_analyzer.cc", + "codecs/test/video_codec_analyzer.h", "codecs/test/video_codec_unittest.cc", "codecs/test/video_codec_unittest.h", "codecs/test/videoprocessor.cc", @@ -897,13 +903,17 @@ if (rtc_include_tests) { "../../api:frame_generator_api", "../../api:scoped_refptr", "../../api:sequence_checker", + "../../api:video_codec_tester_api", "../../api:videocodec_test_fixture_api", "../../api/task_queue", + "../../api/task_queue:default_task_queue_factory", "../../api/video:builtin_video_bitrate_allocator_factory", "../../api/video:encoded_image", + "../../api/video:resolution", "../../api/video:video_bitrate_allocation", "../../api/video:video_bitrate_allocator", "../../api/video:video_bitrate_allocator_factory", + "../../api/video:video_codec_constants", "../../api/video:video_frame", "../../api/video:video_rtp_headers", "../../api/video_codecs:video_codecs_api", @@ -912,6 +922,7 @@ if (rtc_include_tests) { "../../rtc_base:checks", "../../rtc_base:macromagic", "../../rtc_base:rtc_event", + "../../rtc_base:task_queue_for_test", "../../rtc_base:timeutils", "../../rtc_base/synchronization:mutex", "../../rtc_base/system:no_unique_address", @@ -960,6 +971,8 @@ if (rtc_include_tests) { rtc_library("videocodec_test_impl") { testonly = true sources = [ + "codecs/test/video_codec_tester_impl.cc", + "codecs/test/video_codec_tester_impl.h", "codecs/test/videocodec_test_fixture_impl.cc", "codecs/test/videocodec_test_fixture_impl.h", ] @@ -971,12 +984,20 @@ if (rtc_include_tests) { ":videocodec_test_stats_impl", ":webrtc_vp9_helpers", "../../api:array_view", + "../../api:video_codec_tester_api", "../../api:videocodec_test_fixture_api", + "../../api/task_queue:default_task_queue_factory", + "../../api/task_queue:task_queue", "../../api/test/metrics:global_metrics_logger_and_exporter", "../../api/test/metrics:metric", "../../api/test/video:function_video_factory", "../../api/transport:field_trial_based_config", + "../../api/units:frequency", + "../../api/units:time_delta", + "../../api/units:timestamp", + "../../api/video:encoded_image", "../../api/video:video_bitrate_allocation", + "../../api/video:video_frame", "../../api/video_codecs:video_codecs_api", "../../api/video_codecs:video_decoder_factory_template", "../../api/video_codecs:video_decoder_factory_template_dav1d_adapter", @@ -995,6 +1016,7 @@ if (rtc_include_tests) { "../../rtc_base:checks", "../../rtc_base:logging", "../../rtc_base:rtc_base_tests_utils", + "../../rtc_base:rtc_event", "../../rtc_base:stringutils", "../../rtc_base:task_queue_for_test", "../../rtc_base:timeutils", @@ -1019,7 +1041,7 @@ if (rtc_include_tests) { "codecs/test/videocodec_test_stats_impl.h", ] deps = [ - "../../api:videocodec_test_fixture_api", + "../../api:videocodec_test_stats_api", "../../api/numerics", "../../rtc_base:checks", "../../rtc_base:rtc_numerics", @@ -1036,6 +1058,7 @@ if (rtc_include_tests) { sources = [ "codecs/h264/test/h264_impl_unittest.cc", "codecs/multiplex/test/multiplex_adapter_unittest.cc", + "codecs/test/video_codec_test.cc", "codecs/test/video_encoder_decoder_instantiation_tests.cc", "codecs/test/videocodec_test_av1.cc", "codecs/test/videocodec_test_libvpx.cc", @@ -1064,18 +1087,27 @@ if (rtc_include_tests) { ":webrtc_vp9", ":webrtc_vp9_helpers", 
"../../api:create_frame_generator", + "../../api:create_video_codec_tester_api", "../../api:create_videocodec_test_fixture_api", "../../api:frame_generator_api", "../../api:mock_video_codec_factory", "../../api:mock_video_decoder", "../../api:mock_video_encoder", "../../api:scoped_refptr", + "../../api:video_codec_tester_api", "../../api:videocodec_test_fixture_api", + "../../api:videocodec_test_stats_api", "../../api/test/video:function_video_factory", + "../../api/units:data_rate", + "../../api/units:frequency", "../../api/video:encoded_image", + "../../api/video:resolution", "../../api/video:video_frame", "../../api/video:video_rtp_headers", + "../../api/video_codecs:builtin_video_decoder_factory", + "../../api/video_codecs:builtin_video_encoder_factory", "../../api/video_codecs:rtc_software_fallback_wrappers", + "../../api/video_codecs:scalability_mode", "../../api/video_codecs:video_codecs_api", "../../common_video", "../../common_video/test:utilities", @@ -1091,11 +1123,14 @@ if (rtc_include_tests) { "../../test:fileutils", "../../test:test_support", "../../test:video_test_common", + "../../test:video_test_support", "../rtp_rtcp:rtp_rtcp_format", "codecs/av1:dav1d_decoder", + "svc:scalability_mode_util", "//third_party/libyuv", ] absl_deps = [ + "//third_party/abseil-cpp/absl/functional:any_invocable", "//third_party/abseil-cpp/absl/memory", "//third_party/abseil-cpp/absl/types:optional", ] @@ -1131,6 +1166,8 @@ if (rtc_include_tests) { sources = [ "chain_diff_calculator_unittest.cc", + "codecs/test/video_codec_analyzer_unittest.cc", + "codecs/test/video_codec_tester_impl_unittest.cc", "codecs/test/videocodec_test_fixture_config_unittest.cc", "codecs/test/videocodec_test_stats_impl_unittest.cc", "codecs/test/videoprocessor_unittest.cc", @@ -1214,9 +1251,11 @@ if (rtc_include_tests) { "../../api:rtp_packet_info", "../../api:scoped_refptr", "../../api:simulcast_test_fixture_api", + "../../api:video_codec_tester_api", "../../api:videocodec_test_fixture_api", "../../api/task_queue", "../../api/task_queue:default_task_queue_factory", + "../../api/task_queue/test:mock_task_queue_base", "../../api/test/video:function_video_factory", "../../api/units:data_size", "../../api/units:frequency", @@ -1224,6 +1263,7 @@ if (rtc_include_tests) { "../../api/units:timestamp", "../../api/video:builtin_video_bitrate_allocator_factory", "../../api/video:encoded_frame", + "../../api/video:encoded_image", "../../api/video:render_resolution", "../../api/video:video_adaptation", "../../api/video:video_bitrate_allocation", @@ -1240,6 +1280,7 @@ if (rtc_include_tests) { "../../media:rtc_media_base", "../../rtc_base", "../../rtc_base:checks", + "../../rtc_base:gunit_helpers", "../../rtc_base:histogram_percentile_counter", "../../rtc_base:platform_thread", "../../rtc_base:random", @@ -1266,6 +1307,7 @@ if (rtc_include_tests) { "../../test:video_test_common", "../../test:video_test_support", "../../test/time_controller:time_controller", + "../../third_party/libyuv:libyuv", "../rtp_rtcp:rtp_rtcp_format", "../rtp_rtcp:rtp_video_header", "codecs/av1:video_coding_codecs_av1_tests", diff --git a/third_party/libwebrtc/modules/video_coding/codecs/av1/BUILD.gn b/third_party/libwebrtc/modules/video_coding/codecs/av1/BUILD.gn index 24be86c0baa5..610f958ad117 100644 --- a/third_party/libwebrtc/modules/video_coding/codecs/av1/BUILD.gn +++ b/third_party/libwebrtc/modules/video_coding/codecs/av1/BUILD.gn @@ -57,10 +57,12 @@ rtc_library("libaom_av1_encoder") { "../../../../api:scoped_refptr", 
"../../../../api/video:encoded_image", "../../../../api/video:video_frame", + "../../../../api/video_codecs:scalability_mode", "../../../../api/video_codecs:video_codecs_api", "../../../../common_video", "../../../../rtc_base:checks", "../../../../rtc_base:logging", + "../../../../rtc_base:rtc_numerics", "../../svc:scalability_structures", "../../svc:scalable_video_controller", "//third_party/libaom", diff --git a/third_party/libwebrtc/modules/video_coding/codecs/av1/libaom_av1_encoder.cc b/third_party/libwebrtc/modules/video_coding/codecs/av1/libaom_av1_encoder.cc index 807513bc7be1..4d8786c82471 100644 --- a/third_party/libwebrtc/modules/video_coding/codecs/av1/libaom_av1_encoder.cc +++ b/third_party/libwebrtc/modules/video_coding/codecs/av1/libaom_av1_encoder.cc @@ -23,6 +23,7 @@ #include "api/video/encoded_image.h" #include "api/video/i420_buffer.h" #include "api/video/video_frame.h" +#include "api/video_codecs/scalability_mode.h" #include "api/video_codecs/video_codec.h" #include "api/video_codecs/video_encoder.h" #include "modules/video_coding/include/video_codec_interface.h" @@ -32,6 +33,7 @@ #include "modules/video_coding/svc/scalable_video_controller_no_layering.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "rtc_base/numerics/sequence_number_util.h" #include "third_party/libaom/source/libaom/aom/aom_codec.h" #include "third_party/libaom/source/libaom/aom/aom_encoder.h" #include "third_party/libaom/source/libaom/aom/aomcx.h" @@ -108,6 +110,7 @@ class LibaomAv1Encoder final : public VideoEncoder { void MaybeRewrapImgWithFormat(const aom_img_fmt_t fmt); std::unique_ptr svc_controller_; + absl::optional scalability_mode_; bool inited_; bool rates_configured_; absl::optional svc_params_; @@ -117,6 +120,7 @@ class LibaomAv1Encoder final : public VideoEncoder { aom_codec_ctx_t ctx_; aom_codec_enc_cfg_t cfg_; EncodedImageCallback* encoded_image_callback_; + SeqNumUnwrapper rtp_timestamp_unwrapper_; }; int32_t VerifyCodecSettings(const VideoCodec& codec_settings) { @@ -183,16 +187,15 @@ int LibaomAv1Encoder::InitEncode(const VideoCodec* codec_settings, RTC_LOG(LS_WARNING) << "Simulcast is not implemented by LibaomAv1Encoder."; return result; } - absl::optional scalability_mode = - encoder_settings_.GetScalabilityMode(); - if (!scalability_mode.has_value()) { + scalability_mode_ = encoder_settings_.GetScalabilityMode(); + if (!scalability_mode_.has_value()) { RTC_LOG(LS_WARNING) << "Scalability mode is not set, using 'L1T1'."; - scalability_mode = ScalabilityMode::kL1T1; + scalability_mode_ = ScalabilityMode::kL1T1; } - svc_controller_ = CreateScalabilityStructure(*scalability_mode); + svc_controller_ = CreateScalabilityStructure(*scalability_mode_); if (svc_controller_ == nullptr) { RTC_LOG(LS_WARNING) << "Failed to set scalability mode " - << static_cast(*scalability_mode); + << static_cast(*scalability_mode_); return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; } @@ -636,9 +639,11 @@ int32_t LibaomAv1Encoder::Encode( layer_frame->TemporalId() > 0 ? 1 : 0); } - // Encode a frame. - aom_codec_err_t ret = aom_codec_encode(&ctx_, frame_for_encode_, - frame.timestamp(), duration, flags); + // Encode a frame. The presentation timestamp `pts` should never wrap, hence + // the unwrapping. 
+ aom_codec_err_t ret = aom_codec_encode( + &ctx_, frame_for_encode_, + rtp_timestamp_unwrapper_.Unwrap(frame.timestamp()), duration, flags); if (ret != AOM_CODEC_OK) { RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::Encode returned " << ret << " on aom_codec_encode."; @@ -705,6 +710,7 @@ int32_t LibaomAv1Encoder::Encode( CodecSpecificInfo codec_specific_info; codec_specific_info.codecType = kVideoCodecAV1; codec_specific_info.end_of_picture = end_of_picture; + codec_specific_info.scalability_mode = scalability_mode_; bool is_keyframe = layer_frame->IsKeyframe(); codec_specific_info.generic_frame_info = svc_controller_->OnEncodeDone(*layer_frame); diff --git a/third_party/libwebrtc/modules/video_coding/codecs/av1/libaom_av1_encoder_unittest.cc b/third_party/libwebrtc/modules/video_coding/codecs/av1/libaom_av1_encoder_unittest.cc index 5243edc1e4ae..d194cef35b2d 100644 --- a/third_party/libwebrtc/modules/video_coding/codecs/av1/libaom_av1_encoder_unittest.cc +++ b/third_party/libwebrtc/modules/video_coding/codecs/av1/libaom_av1_encoder_unittest.cc @@ -10,6 +10,7 @@ #include "modules/video_coding/codecs/av1/libaom_av1_encoder.h" +#include #include #include @@ -235,5 +236,29 @@ TEST(LibaomAv1EncoderTest, PopulatesEncodedFrameSize) { codec_settings.height))))); } +TEST(LibaomAv1EncoderTest, RtpTimestampWrap) { + std::unique_ptr encoder = CreateLibaomAv1Encoder(); + VideoCodec codec_settings = DefaultCodecSettings(); + codec_settings.SetScalabilityMode(ScalabilityMode::kL1T1); + ASSERT_EQ(encoder->InitEncode(&codec_settings, DefaultEncoderSettings()), + WEBRTC_VIDEO_CODEC_OK); + + VideoEncoder::RateControlParameters rate_parameters; + rate_parameters.framerate_fps = 30; + rate_parameters.bitrate.SetBitrate(/*spatial_index=*/0, 0, 300'000); + encoder->SetRates(rate_parameters); + + std::vector encoded_frames = + EncodedVideoFrameProducer(*encoder) + .SetNumInputFrames(2) + .SetRtpTimestamp(std::numeric_limits::max()) + .Encode(); + ASSERT_THAT(encoded_frames, SizeIs(2)); + EXPECT_THAT(encoded_frames[0].encoded_image._frameType, + Eq(VideoFrameType::kVideoFrameKey)); + EXPECT_THAT(encoded_frames[1].encoded_image._frameType, + Eq(VideoFrameType::kVideoFrameDelta)); +} + } // namespace } // namespace webrtc diff --git a/third_party/libwebrtc/modules/video_coding/codecs/av1/libaom_av1_unittest.cc b/third_party/libwebrtc/modules/video_coding/codecs/av1/libaom_av1_unittest.cc index dbb62ea6dc87..86e317f94b58 100644 --- a/third_party/libwebrtc/modules/video_coding/codecs/av1/libaom_av1_unittest.cc +++ b/third_party/libwebrtc/modules/video_coding/codecs/av1/libaom_av1_unittest.cc @@ -44,6 +44,7 @@ using ::testing::Ge; using ::testing::IsEmpty; using ::testing::Not; using ::testing::NotNull; +using ::testing::Optional; using ::testing::Pointwise; using ::testing::SizeIs; using ::testing::Truly; @@ -248,6 +249,8 @@ TEST_P(LibaomAv1SvcTest, EncodeAndDecodeAllDecodeTargets) { requested_ids.push_back(frame_id); decoder.Decode(frame_id, frame.encoded_image); } + EXPECT_THAT(frame.codec_specific_info.scalability_mode, + Optional(param.GetScalabilityMode())); } ASSERT_THAT(requested_ids, SizeIs(Ge(2u))); diff --git a/third_party/libwebrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc b/third_party/libwebrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc index 5029dc976357..b8055ac85f8f 100644 --- a/third_party/libwebrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc +++ b/third_party/libwebrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc @@ -21,6 +21,9 @@ #include #include 
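The encoder hunk above notes that libaom's presentation timestamp must never wrap, so the 32-bit RTP timestamp is now run through the added SeqNumUnwrapper (from rtc_base/numerics/sequence_number_util.h, with uint32_t as the wrapped type) before reaching aom_codec_encode. A small sketch of what that buys at the 2^32 boundary; the timestamp values are made-up examples:

#include <cstdint>
#include <cstdio>
#include <limits>

#include "rtc_base/numerics/sequence_number_util.h"

int main() {
  webrtc::SeqNumUnwrapper<uint32_t> unwrapper;

  // Two frames 3000 RTP ticks apart (one 30 fps frame at a 90 kHz clock),
  // straddling the 32-bit wrap point.
  uint32_t first = std::numeric_limits<uint32_t>::max() - 1000;
  uint32_t second = first + 3000;  // wraps around to a small value

  int64_t pts_first = unwrapper.Unwrap(first);
  int64_t pts_second = unwrapper.Unwrap(second);

  // Unwrapped, the second pts keeps increasing instead of jumping back toward
  // zero, which is what a monotonic presentation timestamp requires.
  std::printf("delta = %lld\n",
              static_cast<long long>(pts_second - pts_first));  // prints 3000
}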
"absl/strings/match.h" +#include "absl/types/optional.h" +#include "api/video/video_codec_constants.h" +#include "api/video_codecs/scalability_mode.h" #include "common_video/libyuv/include/webrtc_libyuv.h" #include "modules/video_coding/svc/create_scalability_structure.h" #include "modules/video_coding/utility/simulcast_rate_allocator.h" @@ -31,19 +34,10 @@ #include "system_wrappers/include/metrics.h" #include "third_party/libyuv/include/libyuv/convert.h" #include "third_party/libyuv/include/libyuv/scale.h" -// TODO(crbug.com/1218384): Remove after new openh264 is rolled. -#include "third_party/openh264/buildflags.h" -#if BUILDFLAG(OPENH264_API_WELS) #include "third_party/openh264/src/codec/api/wels/codec_api.h" #include "third_party/openh264/src/codec/api/wels/codec_app_def.h" #include "third_party/openh264/src/codec/api/wels/codec_def.h" #include "third_party/openh264/src/codec/api/wels/codec_ver.h" -#else -#include "third_party/openh264/src/codec/api/svc/codec_api.h" -#include "third_party/openh264/src/codec/api/svc/codec_app_def.h" -#include "third_party/openh264/src/codec/api/svc/codec_def.h" -#include "third_party/openh264/src/codec/api/svc/codec_ver.h" -#endif namespace webrtc { @@ -95,6 +89,23 @@ VideoFrameType ConvertToVideoFrameType(EVideoFrameType type) { return VideoFrameType::kEmptyFrame; } +absl::optional ScalabilityModeFromTemporalLayers( + int num_temporal_layers) { + switch (num_temporal_layers) { + case 0: + break; + case 1: + return ScalabilityMode::kL1T1; + case 2: + return ScalabilityMode::kL1T2; + case 3: + return ScalabilityMode::kL1T3; + default: + RTC_DCHECK_NOTREACHED(); + } + return absl::nullopt; +} + } // namespace // Helper method used by H264EncoderImpl::Encode. @@ -208,6 +219,7 @@ int32_t H264EncoderImpl::InitEncode(const VideoCodec* inst, encoders_.resize(number_of_streams); pictures_.resize(number_of_streams); svc_controllers_.resize(number_of_streams); + scalability_modes_.resize(number_of_streams); configurations_.resize(number_of_streams); tl0sync_limit_.resize(number_of_streams); @@ -293,25 +305,10 @@ int32_t H264EncoderImpl::InitEncode(const VideoCodec* inst, encoded_images_[i].set_size(0); tl0sync_limit_[i] = configurations_[i].num_temporal_layers; - absl::optional scalability_mode; - switch (configurations_[i].num_temporal_layers) { - case 0: - break; - case 1: - scalability_mode = ScalabilityMode::kL1T1; - break; - case 2: - scalability_mode = ScalabilityMode::kL1T2; - break; - case 3: - scalability_mode = ScalabilityMode::kL1T3; - break; - default: - RTC_DCHECK_NOTREACHED(); - } - if (scalability_mode.has_value()) { - svc_controllers_[i] = - CreateScalabilityStructure(scalability_mode.value()); + scalability_modes_[i] = ScalabilityModeFromTemporalLayers( + configurations_[i].num_temporal_layers); + if (scalability_modes_[i].has_value()) { + svc_controllers_[i] = CreateScalabilityStructure(*scalability_modes_[i]); if (svc_controllers_[i] == nullptr) { RTC_LOG(LS_ERROR) << "Failed to create scalability structure"; Release(); @@ -344,6 +341,7 @@ int32_t H264EncoderImpl::Release() { pictures_.clear(); tl0sync_limit_.clear(); svc_controllers_.clear(); + scalability_modes_.clear(); return WEBRTC_VIDEO_CODEC_OK; } @@ -425,26 +423,17 @@ int32_t H264EncoderImpl::Encode( RTC_CHECK(frame_buffer->type() == VideoFrameBuffer::Type::kI420 || frame_buffer->type() == VideoFrameBuffer::Type::kI420A); - bool send_key_frame = false; + bool is_keyframe_needed = false; for (size_t i = 0; i < configurations_.size(); ++i) { if (configurations_[i].key_frame_request && 
configurations_[i].sending) { - send_key_frame = true; + // This is legacy behavior, generating a keyframe on all layers + // when generating one for a layer that became active for the first time + // or after being disabled. + is_keyframe_needed = true; break; } } - if (!send_key_frame && frame_types) { - for (size_t i = 0; i < configurations_.size(); ++i) { - const size_t simulcast_idx = - static_cast(configurations_[i].simulcast_idx); - if (configurations_[i].sending && simulcast_idx < frame_types->size() && - (*frame_types)[simulcast_idx] == VideoFrameType::kVideoFrameKey) { - send_key_frame = true; - break; - } - } - } - RTC_DCHECK_EQ(configurations_[0].width, frame_buffer->width()); RTC_DCHECK_EQ(configurations_[0].height, frame_buffer->height()); @@ -489,12 +478,20 @@ int32_t H264EncoderImpl::Encode( if (!configurations_[i].sending) { continue; } - if (frame_types != nullptr) { + if (frame_types != nullptr && i < frame_types->size()) { // Skip frame? if ((*frame_types)[i] == VideoFrameType::kEmptyFrame) { continue; } } + // Send a key frame either when this layer is configured to require one + // or we have explicitly been asked to. + const size_t simulcast_idx = + static_cast(configurations_[i].simulcast_idx); + bool send_key_frame = + is_keyframe_needed || + (frame_types && simulcast_idx < frame_types->size() && + (*frame_types)[simulcast_idx] == VideoFrameType::kVideoFrameKey); if (send_key_frame) { // API doc says ForceIntraFrame(false) does nothing, but calling this // function forces a key frame regardless of the `bIDR` argument's value. @@ -578,6 +575,7 @@ int32_t H264EncoderImpl::Encode( codec_specific.template_structure = svc_controllers_[i]->DependencyStructure(); } + codec_specific.scalability_mode = scalability_modes_[i]; } encoded_image_callback_->OnEncodedImage(encoded_images_[i], &codec_specific); diff --git a/third_party/libwebrtc/modules/video_coding/codecs/h264/h264_encoder_impl.h b/third_party/libwebrtc/modules/video_coding/codecs/h264/h264_encoder_impl.h index c1d81915cf44..f02521f0dc8e 100644 --- a/third_party/libwebrtc/modules/video_coding/codecs/h264/h264_encoder_impl.h +++ b/third_party/libwebrtc/modules/video_coding/codecs/h264/h264_encoder_impl.h @@ -24,19 +24,17 @@ #include #include +#include "absl/container/inlined_vector.h" +#include "api/transport/rtp/dependency_descriptor.h" #include "api/video/i420_buffer.h" +#include "api/video/video_codec_constants.h" +#include "api/video_codecs/scalability_mode.h" #include "api/video_codecs/video_encoder.h" #include "common_video/h264/h264_bitstream_parser.h" #include "modules/video_coding/codecs/h264/include/h264.h" #include "modules/video_coding/svc/scalable_video_controller.h" #include "modules/video_coding/utility/quality_scaler.h" -// TODO(crbug.com/1218384): Remove after new openh264 is rolled. 
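The rewritten Encode() above separates two keyframe triggers: a layer that just became active (which, as the added comment says, still forces a keyframe on all layers for legacy reasons) and an explicit per-simulcast-layer request in frame_types. A compact restatement of the per-layer decision with hypothetical names; the real code reads configurations_[i].simulcast_idx and frame_types exactly as shown in the hunk:

#include <cstddef>
#include <vector>

enum class FrameType { kDelta, kKey, kEmpty };

// Hypothetical helper mirroring the patched per-layer logic: send a keyframe
// if any layer just (re)activated, or if the caller explicitly asked for a
// keyframe at this layer's simulcast index.
bool ShouldSendKeyFrame(bool any_layer_needs_keyframe,
                        std::size_t simulcast_idx,
                        const std::vector<FrameType>* frame_types) {
  if (any_layer_needs_keyframe) {
    return true;
  }
  return frame_types != nullptr && simulcast_idx < frame_types->size() &&
         (*frame_types)[simulcast_idx] == FrameType::kKey;
}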
-#include "third_party/openh264/buildflags.h" -#if BUILDFLAG(OPENH264_API_WELS) #include "third_party/openh264/src/codec/api/wels/codec_app_def.h" -#else -#include "third_party/openh264/src/codec/api/svc/codec_app_def.h" -#endif class ISVCEncoder; @@ -105,6 +103,8 @@ class H264EncoderImpl : public H264Encoder { std::vector configurations_; std::vector encoded_images_; std::vector> svc_controllers_; + absl::InlinedVector, kMaxSimulcastStreams> + scalability_modes_; VideoCodec codec_; H264PacketizationMode packetization_mode_; diff --git a/third_party/libwebrtc/modules/video_coding/codecs/h264/h264_simulcast_unittest.cc b/third_party/libwebrtc/modules/video_coding/codecs/h264/h264_simulcast_unittest.cc index 12b5da1404a6..2acb629a7608 100644 --- a/third_party/libwebrtc/modules/video_coding/codecs/h264/h264_simulcast_unittest.cc +++ b/third_party/libwebrtc/modules/video_coding/codecs/h264/h264_simulcast_unittest.cc @@ -35,8 +35,12 @@ std::unique_ptr CreateSpecificSimulcastTestFixture() { } // namespace TEST(TestH264Simulcast, TestKeyFrameRequestsOnAllStreams) { + GTEST_SKIP() << "Not applicable to H264."; +} + +TEST(TestH264Simulcast, TestKeyFrameRequestsOnSpecificStreams) { auto fixture = CreateSpecificSimulcastTestFixture(); - fixture->TestKeyFrameRequestsOnAllStreams(); + fixture->TestKeyFrameRequestsOnSpecificStreams(); } TEST(TestH264Simulcast, TestPaddingAllStreams) { diff --git a/third_party/libwebrtc/modules/video_coding/codecs/test/encoded_video_frame_producer.h b/third_party/libwebrtc/modules/video_coding/codecs/test/encoded_video_frame_producer.h index 2216287b9228..04f4a64950a3 100644 --- a/third_party/libwebrtc/modules/video_coding/codecs/test/encoded_video_frame_producer.h +++ b/third_party/libwebrtc/modules/video_coding/codecs/test/encoded_video_frame_producer.h @@ -47,6 +47,8 @@ class EncodedVideoFrameProducer { EncodedVideoFrameProducer& SetFramerateFps(int value); + EncodedVideoFrameProducer& SetRtpTimestamp(uint32_t value); + // Generates input video frames and encodes them with `encoder` provided in // the constructor. Returns frame passed to the `OnEncodedImage` by wraping // `EncodedImageCallback` underneath. @@ -88,5 +90,11 @@ inline EncodedVideoFrameProducer& EncodedVideoFrameProducer::SetFramerateFps( return *this; } +inline EncodedVideoFrameProducer& EncodedVideoFrameProducer::SetRtpTimestamp( + uint32_t value) { + rtp_timestamp_ = value; + return *this; +} + } // namespace webrtc #endif // MODULES_VIDEO_CODING_CODECS_TEST_ENCODED_VIDEO_FRAME_PRODUCER_H_ diff --git a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_analyzer.cc b/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_analyzer.cc new file mode 100644 index 000000000000..50af417bcf02 --- /dev/null +++ b/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_analyzer.cc @@ -0,0 +1,186 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/video_coding/codecs/test/video_codec_analyzer.h" + +#include + +#include "api/task_queue/default_task_queue_factory.h" +#include "api/test/video_codec_tester.h" +#include "api/video/i420_buffer.h" +#include "api/video/video_codec_constants.h" +#include "api/video/video_frame.h" +#include "rtc_base/checks.h" +#include "rtc_base/event.h" +#include "rtc_base/time_utils.h" +#include "third_party/libyuv/include/libyuv/compare.h" + +namespace webrtc { +namespace test { + +namespace { + +struct Psnr { + double y; + double u; + double v; + double yuv; +}; + +Psnr CalcPsnr(const I420BufferInterface& ref_buffer, + const I420BufferInterface& dec_buffer) { + RTC_CHECK_EQ(ref_buffer.width(), dec_buffer.width()); + RTC_CHECK_EQ(ref_buffer.height(), dec_buffer.height()); + + uint64_t sse_y = libyuv::ComputeSumSquareErrorPlane( + dec_buffer.DataY(), dec_buffer.StrideY(), ref_buffer.DataY(), + ref_buffer.StrideY(), dec_buffer.width(), dec_buffer.height()); + + uint64_t sse_u = libyuv::ComputeSumSquareErrorPlane( + dec_buffer.DataU(), dec_buffer.StrideU(), ref_buffer.DataU(), + ref_buffer.StrideU(), dec_buffer.width() / 2, dec_buffer.height() / 2); + + uint64_t sse_v = libyuv::ComputeSumSquareErrorPlane( + dec_buffer.DataV(), dec_buffer.StrideV(), ref_buffer.DataV(), + ref_buffer.StrideV(), dec_buffer.width() / 2, dec_buffer.height() / 2); + + int num_y_samples = dec_buffer.width() * dec_buffer.height(); + Psnr psnr; + psnr.y = libyuv::SumSquareErrorToPsnr(sse_y, num_y_samples); + psnr.u = libyuv::SumSquareErrorToPsnr(sse_u, num_y_samples / 4); + psnr.v = libyuv::SumSquareErrorToPsnr(sse_v, num_y_samples / 4); + psnr.yuv = libyuv::SumSquareErrorToPsnr(sse_y + sse_u + sse_v, + num_y_samples + num_y_samples / 2); + return psnr; +} + +} // namespace + +VideoCodecAnalyzer::VideoCodecAnalyzer( + rtc::TaskQueue& task_queue, + ReferenceVideoSource* reference_video_source) + : task_queue_(task_queue), reference_video_source_(reference_video_source) { + sequence_checker_.Detach(); +} + +void VideoCodecAnalyzer::StartEncode(const VideoFrame& input_frame) { + int64_t encode_started_ns = rtc::TimeNanos(); + task_queue_.PostTask( + [this, timestamp_rtp = input_frame.timestamp(), encode_started_ns]() { + RTC_DCHECK_RUN_ON(&sequence_checker_); + VideoCodecTestStats::FrameStatistics* fs = + stats_.GetOrAddFrame(timestamp_rtp, /*spatial_idx=*/0); + fs->encode_start_ns = encode_started_ns; + }); +} + +void VideoCodecAnalyzer::FinishEncode(const EncodedImage& frame) { + int64_t encode_finished_ns = rtc::TimeNanos(); + + task_queue_.PostTask([this, timestamp_rtp = frame.Timestamp(), + spatial_idx = frame.SpatialIndex().value_or(0), + temporal_idx = frame.TemporalIndex().value_or(0), + frame_type = frame._frameType, qp = frame.qp_, + frame_size_bytes = frame.size(), encode_finished_ns]() { + RTC_DCHECK_RUN_ON(&sequence_checker_); + VideoCodecTestStats::FrameStatistics* fs = + stats_.GetOrAddFrame(timestamp_rtp, spatial_idx); + VideoCodecTestStats::FrameStatistics* fs_base = + stats_.GetOrAddFrame(timestamp_rtp, 0); + + fs->encode_start_ns = fs_base->encode_start_ns; + fs->spatial_idx = spatial_idx; + fs->temporal_idx = temporal_idx; + fs->frame_type = frame_type; + fs->qp = qp; + + fs->encode_time_us = (encode_finished_ns - fs->encode_start_ns) / + rtc::kNumNanosecsPerMicrosec; + fs->length_bytes = frame_size_bytes; + + fs->encoding_successful = true; + }); +} + +void VideoCodecAnalyzer::StartDecode(const EncodedImage& frame) { + int64_t decode_start_ns = rtc::TimeNanos(); + task_queue_.PostTask([this, 
timestamp_rtp = frame.Timestamp(), + spatial_idx = frame.SpatialIndex().value_or(0), + frame_size_bytes = frame.size(), decode_start_ns]() { + RTC_DCHECK_RUN_ON(&sequence_checker_); + VideoCodecTestStats::FrameStatistics* fs = + stats_.GetOrAddFrame(timestamp_rtp, spatial_idx); + if (fs->length_bytes == 0) { + // In encode-decode test the frame size is set in EncodeFinished. In + // decode-only test set it here. + fs->length_bytes = frame_size_bytes; + } + fs->decode_start_ns = decode_start_ns; + }); +} + +void VideoCodecAnalyzer::FinishDecode(const VideoFrame& frame, + int spatial_idx) { + int64_t decode_finished_ns = rtc::TimeNanos(); + task_queue_.PostTask([this, timestamp_rtp = frame.timestamp(), spatial_idx, + width = frame.width(), height = frame.height(), + decode_finished_ns]() { + RTC_DCHECK_RUN_ON(&sequence_checker_); + VideoCodecTestStats::FrameStatistics* fs = + stats_.GetFrameWithTimestamp(timestamp_rtp, spatial_idx); + fs->decode_time_us = (decode_finished_ns - fs->decode_start_ns) / + rtc::kNumNanosecsPerMicrosec; + fs->decoded_width = width; + fs->decoded_height = height; + fs->decoding_successful = true; + }); + + if (reference_video_source_ != nullptr) { + // Copy hardware-backed frame into main memory to release output buffers + // which number may be limited in hardware decoders. + rtc::scoped_refptr decoded_buffer = + frame.video_frame_buffer()->ToI420(); + + task_queue_.PostTask([this, decoded_buffer, + timestamp_rtp = frame.timestamp(), spatial_idx]() { + RTC_DCHECK_RUN_ON(&sequence_checker_); + VideoFrame ref_frame = reference_video_source_->GetFrame( + timestamp_rtp, {.width = decoded_buffer->width(), + .height = decoded_buffer->height()}); + rtc::scoped_refptr ref_buffer = + ref_frame.video_frame_buffer()->ToI420(); + + Psnr psnr = CalcPsnr(*decoded_buffer, *ref_buffer); + VideoCodecTestStats::FrameStatistics* fs = + this->stats_.GetFrameWithTimestamp(timestamp_rtp, spatial_idx); + fs->psnr_y = static_cast(psnr.y); + fs->psnr_u = static_cast(psnr.u); + fs->psnr_v = static_cast(psnr.v); + fs->psnr = static_cast(psnr.yuv); + + fs->quality_analysis_successful = true; + }); + } +} + +std::unique_ptr VideoCodecAnalyzer::GetStats() { + std::unique_ptr stats; + rtc::Event ready; + task_queue_.PostTask([this, &stats, &ready]() mutable { + RTC_DCHECK_RUN_ON(&sequence_checker_); + stats.reset(new VideoCodecTestStatsImpl(stats_)); + ready.Set(); + }); + ready.Wait(rtc::Event::kForever); + return stats; +} + +} // namespace test +} // namespace webrtc diff --git a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_analyzer.h b/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_analyzer.h new file mode 100644 index 000000000000..63a864e8102e --- /dev/null +++ b/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_analyzer.h @@ -0,0 +1,65 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
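For reference, CalcPsnr() above only accumulates per-plane sums of squared errors; the conversion to decibels is done by libyuv::SumSquareErrorToPsnr, which implements PSNR = 10 * log10(255^2 * num_samples / sse). A small standalone sketch of that relation (the 128 dB cap for a zero error mirrors libyuv's behaviour and is an assumption of this sketch):

#include <cmath>
#include <cstdint>
#include <iostream>

double SsePsnr(uint64_t sse, uint64_t num_samples) {
  if (sse == 0) {
    return 128.0;  // Cap "infinite" PSNR, mirroring libyuv's kMaxPsnr.
  }
  return 10.0 * std::log10(255.0 * 255.0 * static_cast<double>(num_samples) /
                           static_cast<double>(sse));
}

int main() {
  // A 2x2 luma plane where every decoded sample is off by one from the
  // reference gives sse = 4 over 4 samples, i.e. roughly 48 dB; chroma planes
  // off by two and three give roughly 42 dB and 38 dB. These are the values
  // the DecodeFinishedComputesPsnr unit test further below expects.
  std::cout << SsePsnr(4, 4) << " " << SsePsnr(4, 1) << " " << SsePsnr(9, 1)
            << "\n";
}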
+ */
+
+#ifndef MODULES_VIDEO_CODING_CODECS_TEST_VIDEO_CODEC_ANALYZER_H_
+#define MODULES_VIDEO_CODING_CODECS_TEST_VIDEO_CODEC_ANALYZER_H_
+
+#include <memory>
+
+#include "absl/types/optional.h"
+#include "api/sequence_checker.h"
+#include "api/video/encoded_image.h"
+#include "api/video/resolution.h"
+#include "api/video/video_frame.h"
+#include "modules/video_coding/codecs/test/videocodec_test_stats_impl.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/system/no_unique_address.h"
+#include "rtc_base/task_queue_for_test.h"
+
+namespace webrtc {
+namespace test {
+
+// Analyzer measures and collects metrics necessary for evaluation of video
+// codec quality and performance. This class is thread-safe.
+class VideoCodecAnalyzer {
+ public:
+  // An interface that provides reference frames for spatial quality analysis.
+  class ReferenceVideoSource {
+   public:
+    virtual ~ReferenceVideoSource() = default;
+
+    virtual VideoFrame GetFrame(uint32_t timestamp_rtp,
+                                Resolution resolution) = 0;
+  };
+
+  VideoCodecAnalyzer(rtc::TaskQueue& task_queue,
+                     ReferenceVideoSource* reference_video_source = nullptr);
+
+  void StartEncode(const VideoFrame& frame);
+
+  void FinishEncode(const EncodedImage& frame);
+
+  void StartDecode(const EncodedImage& frame);
+
+  void FinishDecode(const VideoFrame& frame, int spatial_idx);
+
+  std::unique_ptr<VideoCodecTestStats> GetStats();
+
+ protected:
+  rtc::TaskQueue& task_queue_;
+  ReferenceVideoSource* const reference_video_source_;
+  VideoCodecTestStatsImpl stats_ RTC_GUARDED_BY(sequence_checker_);
+  RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_;
+};
+
+}  // namespace test
+}  // namespace webrtc
+
+#endif  // MODULES_VIDEO_CODING_CODECS_TEST_VIDEO_CODEC_ANALYZER_H_
diff --git a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_analyzer_unittest.cc b/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_analyzer_unittest.cc
new file mode 100644
index 000000000000..3f9de6dac2fb
--- /dev/null
+++ b/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_analyzer_unittest.cc
@@ -0,0 +1,141 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */ + +#include "modules/video_coding/codecs/test/video_codec_analyzer.h" + +#include "absl/types/optional.h" +#include "api/video/i420_buffer.h" +#include "test/gmock.h" +#include "test/gtest.h" +#include "third_party/libyuv/include/libyuv/planar_functions.h" + +namespace webrtc { +namespace test { + +namespace { +using ::testing::Return; +using ::testing::Values; + +const size_t kTimestamp = 3000; +const size_t kSpatialIdx = 2; + +class MockReferenceVideoSource + : public VideoCodecAnalyzer::ReferenceVideoSource { + public: + MOCK_METHOD(VideoFrame, GetFrame, (uint32_t, Resolution), (override)); +}; + +VideoFrame CreateVideoFrame(uint32_t timestamp_rtp, + uint8_t y = 0, + uint8_t u = 0, + uint8_t v = 0) { + rtc::scoped_refptr buffer(I420Buffer::Create(2, 2)); + + libyuv::I420Rect(buffer->MutableDataY(), buffer->StrideY(), + buffer->MutableDataU(), buffer->StrideU(), + buffer->MutableDataV(), buffer->StrideV(), 0, 0, + buffer->width(), buffer->height(), y, u, v); + + return VideoFrame::Builder() + .set_video_frame_buffer(buffer) + .set_timestamp_rtp(timestamp_rtp) + .build(); +} + +EncodedImage CreateEncodedImage(uint32_t timestamp_rtp, int spatial_idx = 0) { + EncodedImage encoded_image; + encoded_image.SetTimestamp(timestamp_rtp); + encoded_image.SetSpatialIndex(spatial_idx); + return encoded_image; +} +} // namespace + +TEST(VideoCodecAnalyzerTest, EncodeStartedCreatesFrameStats) { + TaskQueueForTest task_queue; + VideoCodecAnalyzer analyzer(task_queue); + analyzer.StartEncode(CreateVideoFrame(kTimestamp)); + + auto fs = analyzer.GetStats()->GetFrameStatistics(); + EXPECT_EQ(1u, fs.size()); + EXPECT_EQ(fs[0].rtp_timestamp, kTimestamp); +} + +TEST(VideoCodecAnalyzerTest, EncodeFinishedUpdatesFrameStats) { + TaskQueueForTest task_queue; + VideoCodecAnalyzer analyzer(task_queue); + analyzer.StartEncode(CreateVideoFrame(kTimestamp)); + + EncodedImage encoded_frame = CreateEncodedImage(kTimestamp, kSpatialIdx); + analyzer.FinishEncode(encoded_frame); + + auto fs = analyzer.GetStats()->GetFrameStatistics(); + EXPECT_EQ(2u, fs.size()); + EXPECT_TRUE(fs[1].encoding_successful); +} + +TEST(VideoCodecAnalyzerTest, DecodeStartedNoFrameStatsCreatesFrameStats) { + TaskQueueForTest task_queue; + VideoCodecAnalyzer analyzer(task_queue); + analyzer.StartDecode(CreateEncodedImage(kTimestamp, kSpatialIdx)); + + auto fs = analyzer.GetStats()->GetFrameStatistics(); + EXPECT_EQ(1u, fs.size()); + EXPECT_EQ(fs[0].rtp_timestamp, kTimestamp); +} + +TEST(VideoCodecAnalyzerTest, DecodeStartedFrameStatsExistsReusesFrameStats) { + TaskQueueForTest task_queue; + VideoCodecAnalyzer analyzer(task_queue); + analyzer.StartEncode(CreateVideoFrame(kTimestamp)); + analyzer.StartDecode(CreateEncodedImage(kTimestamp, /*spatial_idx=*/0)); + + auto fs = analyzer.GetStats()->GetFrameStatistics(); + EXPECT_EQ(1u, fs.size()); +} + +TEST(VideoCodecAnalyzerTest, DecodeFinishedUpdatesFrameStats) { + TaskQueueForTest task_queue; + VideoCodecAnalyzer analyzer(task_queue); + analyzer.StartDecode(CreateEncodedImage(kTimestamp, kSpatialIdx)); + VideoFrame decoded_frame = CreateVideoFrame(kTimestamp); + analyzer.FinishDecode(decoded_frame, kSpatialIdx); + + auto fs = analyzer.GetStats()->GetFrameStatistics(); + EXPECT_EQ(1u, fs.size()); + + EXPECT_TRUE(fs[0].decoding_successful); + EXPECT_EQ(static_cast(fs[0].decoded_width), decoded_frame.width()); + EXPECT_EQ(static_cast(fs[0].decoded_height), decoded_frame.height()); +} + +TEST(VideoCodecAnalyzerTest, DecodeFinishedComputesPsnr) { + TaskQueueForTest task_queue; + 
MockReferenceVideoSource reference_video_source; + VideoCodecAnalyzer analyzer(task_queue, &reference_video_source); + analyzer.StartDecode(CreateEncodedImage(kTimestamp, kSpatialIdx)); + + EXPECT_CALL(reference_video_source, GetFrame) + .WillOnce(Return(CreateVideoFrame(kTimestamp, /*y=*/0, + /*u=*/0, /*v=*/0))); + + analyzer.FinishDecode( + CreateVideoFrame(kTimestamp, /*value_y=*/1, /*value_u=*/2, /*value_v=*/3), + kSpatialIdx); + + auto fs = analyzer.GetStats()->GetFrameStatistics(); + EXPECT_EQ(1u, fs.size()); + + EXPECT_NEAR(fs[0].psnr_y, 48, 1); + EXPECT_NEAR(fs[0].psnr_u, 42, 1); + EXPECT_NEAR(fs[0].psnr_v, 38, 1); +} + +} // namespace test +} // namespace webrtc diff --git a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_test.cc b/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_test.cc new file mode 100644 index 000000000000..bd4c8e07f293 --- /dev/null +++ b/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_test.cc @@ -0,0 +1,456 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/video_codecs/video_codec.h" + +#include +#include +#include +#include + +#include "absl/functional/any_invocable.h" +#include "api/test/create_video_codec_tester.h" +#include "api/test/videocodec_test_stats.h" +#include "api/units/data_rate.h" +#include "api/units/frequency.h" +#include "api/video/i420_buffer.h" +#include "api/video/resolution.h" +#include "api/video_codecs/builtin_video_decoder_factory.h" +#include "api/video_codecs/builtin_video_encoder_factory.h" +#include "api/video_codecs/scalability_mode.h" +#include "api/video_codecs/video_decoder.h" +#include "api/video_codecs/video_encoder.h" +#include "common_video/libyuv/include/webrtc_libyuv.h" +#include "media/base/media_constants.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/video_coding/include/video_error_codes.h" +#include "modules/video_coding/svc/scalability_mode_util.h" +#include "rtc_base/strings/string_builder.h" +#include "test/gtest.h" +#include "test/testsupport/file_utils.h" +#include "test/testsupport/frame_reader.h" + +namespace webrtc { +namespace test { + +namespace { +using ::testing::Combine; +using ::testing::Values; +using Layer = std::pair; + +struct VideoInfo { + std::string name; + Resolution resolution; +}; + +struct CodecInfo { + std::string type; + std::string encoder; + std::string decoder; +}; + +struct EncodingSettings { + ScalabilityMode scalability_mode; + // Spatial layer resolution. + std::map resolution; + // Top temporal layer frame rate. + Frequency framerate; + // Bitrate of spatial and temporal layers. 
+ std::map bitrate; +}; + +struct EncodingTestSettings { + std::string name; + int num_frames = 1; + std::map frame_settings; +}; + +struct DecodingTestSettings { + std::string name; +}; + +struct QualityExpectations { + double min_apsnr_y; +}; + +struct EncodeDecodeTestParams { + CodecInfo codec; + VideoInfo video; + VideoCodecTester::EncoderSettings encoder_settings; + VideoCodecTester::DecoderSettings decoder_settings; + EncodingTestSettings encoding_settings; + DecodingTestSettings decoding_settings; + QualityExpectations quality_expectations; +}; + +const EncodingSettings kQvga64Kbps30Fps = { + .scalability_mode = ScalabilityMode::kL1T1, + .resolution = {{0, {.width = 320, .height = 180}}}, + .framerate = Frequency::Hertz(30), + .bitrate = {{Layer(0, 0), DataRate::KilobitsPerSec(64)}}}; + +const EncodingTestSettings kConstantRateQvga64Kbps30Fps = { + .name = "ConstantRateQvga64Kbps30Fps", + .num_frames = 300, + .frame_settings = {{/*frame_num=*/0, kQvga64Kbps30Fps}}}; + +const QualityExpectations kLowQuality = {.min_apsnr_y = 30}; + +const VideoInfo kFourPeople_1280x720_30 = { + .name = "FourPeople_1280x720_30", + .resolution = {.width = 1280, .height = 720}}; + +const CodecInfo kLibvpxVp8 = {.type = "VP8", + .encoder = "libvpx", + .decoder = "libvpx"}; + +const CodecInfo kLibvpxVp9 = {.type = "VP9", + .encoder = "libvpx", + .decoder = "libvpx"}; + +const CodecInfo kOpenH264 = {.type = "H264", + .encoder = "openh264", + .decoder = "ffmpeg"}; + +class TestRawVideoSource : public VideoCodecTester::RawVideoSource { + public: + static constexpr Frequency k90kHz = Frequency::Hertz(90000); + + TestRawVideoSource(std::unique_ptr frame_reader, + const EncodingTestSettings& test_settings) + : frame_reader_(std::move(frame_reader)), + test_settings_(test_settings), + frame_num_(0), + timestamp_rtp_(0) { + // Ensure settings for the first frame are provided. + RTC_CHECK_GT(test_settings_.frame_settings.size(), 0u); + RTC_CHECK_EQ(test_settings_.frame_settings.begin()->first, 0); + } + + // Pulls next frame. Frame RTP timestamp is set accordingly to + // `EncodingSettings::framerate`. + absl::optional PullFrame() override { + if (frame_num_ >= test_settings_.num_frames) { + // End of stream. + return absl::nullopt; + } + + EncodingSettings frame_settings = + std::prev(test_settings_.frame_settings.upper_bound(frame_num_)) + ->second; + + int pulled_frame; + auto buffer = frame_reader_->PullFrame( + &pulled_frame, frame_settings.resolution.rbegin()->second, + {.num = 30, .den = static_cast(frame_settings.framerate.hertz())}); + RTC_CHECK(buffer) << "Cannot pull frame " << frame_num_; + + auto frame = VideoFrame::Builder() + .set_video_frame_buffer(buffer) + .set_timestamp_rtp(timestamp_rtp_) + .build(); + + pulled_frames_[timestamp_rtp_] = pulled_frame; + timestamp_rtp_ += k90kHz / frame_settings.framerate; + ++frame_num_; + + return frame; + } + + // Reads frame specified by `timestamp_rtp`, scales it to `resolution` and + // returns. Frame with the given `timestamp_rtp` is expected to be pulled + // before. 
+ VideoFrame GetFrame(uint32_t timestamp_rtp, Resolution resolution) override { + RTC_CHECK(pulled_frames_.find(timestamp_rtp) != pulled_frames_.end()) + << "Frame with RTP timestamp " << timestamp_rtp + << " was not pulled before"; + auto buffer = + frame_reader_->ReadFrame(pulled_frames_[timestamp_rtp], resolution); + return VideoFrame::Builder() + .set_video_frame_buffer(buffer) + .set_timestamp_rtp(timestamp_rtp) + .build(); + } + + protected: + std::unique_ptr frame_reader_; + const EncodingTestSettings& test_settings_; + int frame_num_; + uint32_t timestamp_rtp_; + std::map pulled_frames_; +}; + +class TestEncoder : public VideoCodecTester::Encoder, + public EncodedImageCallback { + public: + TestEncoder(std::unique_ptr encoder, + const CodecInfo& codec_info, + const std::map& frame_settings) + : encoder_(std::move(encoder)), + codec_info_(codec_info), + frame_settings_(frame_settings), + frame_num_(0) { + // Ensure settings for the first frame is provided. + RTC_CHECK_GT(frame_settings_.size(), 0u); + RTC_CHECK_EQ(frame_settings_.begin()->first, 0); + + encoder_->RegisterEncodeCompleteCallback(this); + } + + void Encode(const VideoFrame& frame, EncodeCallback callback) override { + callbacks_[frame.timestamp()] = std::move(callback); + + if (auto fs = frame_settings_.find(frame_num_); + fs != frame_settings_.end()) { + if (fs == frame_settings_.begin() || + ConfigChanged(fs->second, std::prev(fs)->second)) { + Configure(fs->second); + } + if (fs == frame_settings_.begin() || + RateChanged(fs->second, std::prev(fs)->second)) { + SetRates(fs->second); + } + } + + int result = encoder_->Encode(frame, nullptr); + RTC_CHECK_EQ(result, WEBRTC_VIDEO_CODEC_OK); + ++frame_num_; + } + + protected: + Result OnEncodedImage(const EncodedImage& encoded_image, + const CodecSpecificInfo* codec_specific_info) override { + auto cb = callbacks_.find(encoded_image.Timestamp()); + RTC_CHECK(cb != callbacks_.end()); + cb->second(encoded_image); + + callbacks_.erase(callbacks_.begin(), cb); + return Result(Result::Error::OK); + } + + void Configure(const EncodingSettings& es) { + VideoCodec vc; + const Resolution& resolution = es.resolution.rbegin()->second; + vc.width = resolution.width; + vc.height = resolution.height; + const DataRate& bitrate = es.bitrate.rbegin()->second; + vc.startBitrate = bitrate.kbps(); + vc.maxBitrate = bitrate.kbps(); + vc.minBitrate = 0; + vc.maxFramerate = static_cast(es.framerate.hertz()); + vc.active = true; + vc.qpMax = 0; + vc.numberOfSimulcastStreams = 0; + vc.mode = webrtc::VideoCodecMode::kRealtimeVideo; + vc.SetFrameDropEnabled(true); + + vc.codecType = PayloadStringToCodecType(codec_info_.type); + if (vc.codecType == kVideoCodecVP8) { + *(vc.VP8()) = VideoEncoder::GetDefaultVp8Settings(); + } else if (vc.codecType == kVideoCodecVP9) { + *(vc.VP9()) = VideoEncoder::GetDefaultVp9Settings(); + } else if (vc.codecType == kVideoCodecH264) { + *(vc.H264()) = VideoEncoder::GetDefaultH264Settings(); + } + + VideoEncoder::Settings ves( + VideoEncoder::Capabilities(/*loss_notification=*/false), + /*number_of_cores=*/1, + /*max_payload_size=*/1440); + + int result = encoder_->InitEncode(&vc, ves); + RTC_CHECK_EQ(result, WEBRTC_VIDEO_CODEC_OK); + } + + void SetRates(const EncodingSettings& es) { + VideoEncoder::RateControlParameters rc; + int num_spatial_layers = + ScalabilityModeToNumSpatialLayers(es.scalability_mode); + int num_temporal_layers = + ScalabilityModeToNumSpatialLayers(es.scalability_mode); + for (int sidx = 0; sidx < num_spatial_layers; ++sidx) { + for (int tidx = 
0; tidx < num_temporal_layers; ++tidx) { + RTC_CHECK(es.bitrate.find(Layer(sidx, tidx)) != es.bitrate.end()) + << "Bitrate for layer S=" << sidx << " T=" << tidx << " is not set"; + rc.bitrate.SetBitrate(sidx, tidx, + es.bitrate.at(Layer(sidx, tidx)).bps()); + } + } + + rc.framerate_fps = es.framerate.millihertz() / 1000.0; + encoder_->SetRates(rc); + } + + bool ConfigChanged(const EncodingSettings& es, + const EncodingSettings& prev_es) const { + return es.scalability_mode != prev_es.scalability_mode || + es.resolution != prev_es.resolution; + } + + bool RateChanged(const EncodingSettings& es, + const EncodingSettings& prev_es) const { + return es.bitrate != prev_es.bitrate || es.framerate != prev_es.framerate; + } + + std::unique_ptr encoder_; + const CodecInfo& codec_info_; + const std::map& frame_settings_; + int frame_num_; + std::map callbacks_; +}; + +class TestDecoder : public VideoCodecTester::Decoder, + public DecodedImageCallback { + public: + TestDecoder(std::unique_ptr decoder, + const CodecInfo& codec_info) + : decoder_(std::move(decoder)), codec_info_(codec_info), frame_num_(0) { + decoder_->RegisterDecodeCompleteCallback(this); + } + void Decode(const EncodedImage& frame, DecodeCallback callback) override { + callbacks_[frame.Timestamp()] = std::move(callback); + + if (frame_num_ == 0) { + Configure(); + } + + decoder_->Decode(frame, /*missing_frames=*/false, + /*render_time_ms=*/0); + ++frame_num_; + } + + void Configure() { + VideoDecoder::Settings ds; + ds.set_codec_type(PayloadStringToCodecType(codec_info_.type)); + ds.set_number_of_cores(1); + + bool result = decoder_->Configure(ds); + RTC_CHECK(result); + } + + protected: + int Decoded(VideoFrame& decoded_frame) override { + auto cb = callbacks_.find(decoded_frame.timestamp()); + RTC_CHECK(cb != callbacks_.end()); + cb->second(decoded_frame); + + callbacks_.erase(callbacks_.begin(), cb); + return WEBRTC_VIDEO_CODEC_OK; + } + + std::unique_ptr decoder_; + const CodecInfo& codec_info_; + int frame_num_; + std::map callbacks_; +}; + +std::unique_ptr CreateEncoder( + const CodecInfo& codec_info, + const std::map& frame_settings) { + auto factory = CreateBuiltinVideoEncoderFactory(); + auto encoder = factory->CreateVideoEncoder(SdpVideoFormat(codec_info.type)); + return std::make_unique(std::move(encoder), codec_info, + frame_settings); +} + +std::unique_ptr CreateDecoder( + const CodecInfo& codec_info) { + auto factory = CreateBuiltinVideoDecoderFactory(); + auto decoder = factory->CreateVideoDecoder(SdpVideoFormat(codec_info.type)); + return std::make_unique(std::move(decoder), codec_info); +} + +} // namespace + +class EncodeDecodeTest + : public ::testing::TestWithParam { + public: + EncodeDecodeTest() : test_params_(GetParam()) {} + + void SetUp() override { + std::unique_ptr frame_reader = + CreateYuvFrameReader(ResourcePath(test_params_.video.name, "yuv"), + test_params_.video.resolution, + YuvFrameReaderImpl::RepeatMode::kPingPong); + video_source_ = std::make_unique( + std::move(frame_reader), test_params_.encoding_settings); + + encoder_ = CreateEncoder(test_params_.codec, + test_params_.encoding_settings.frame_settings); + decoder_ = CreateDecoder(test_params_.codec); + + tester_ = CreateVideoCodecTester(); + } + + static std::string TestParametersToStr( + const ::testing::TestParamInfo& info) { + return std::string(info.param.encoding_settings.name + + info.param.codec.type + info.param.codec.encoder + + info.param.codec.decoder); + } + + protected: + EncodeDecodeTestParams test_params_; + std::unique_ptr 
video_source_; + std::unique_ptr encoder_; + std::unique_ptr decoder_; + std::unique_ptr tester_; +}; + +TEST_P(EncodeDecodeTest, DISABLED_TestEncodeDecode) { + std::unique_ptr stats = tester_->RunEncodeDecodeTest( + std::move(video_source_), std::move(encoder_), std::move(decoder_), + test_params_.encoder_settings, test_params_.decoder_settings); + + const auto& frame_settings = test_params_.encoding_settings.frame_settings; + for (auto fs = frame_settings.begin(); fs != frame_settings.end(); ++fs) { + int first_frame = fs->first; + int last_frame = std::next(fs) != frame_settings.end() + ? std::next(fs)->first - 1 + : test_params_.encoding_settings.num_frames - 1; + + const EncodingSettings& encoding_settings = fs->second; + auto metrics = stats->CalcVideoStatistic( + first_frame, last_frame, encoding_settings.bitrate.rbegin()->second, + encoding_settings.framerate); + + EXPECT_GE(metrics.avg_psnr_y, + test_params_.quality_expectations.min_apsnr_y); + } +} + +std::list ConstantRateTestParameters() { + std::list test_params; + std::vector codecs = {kLibvpxVp8}; + std::vector videos = {kFourPeople_1280x720_30}; + std::vector> + encoding_settings = {{kConstantRateQvga64Kbps30Fps, kLowQuality}}; + for (const CodecInfo& codec : codecs) { + for (const VideoInfo& video : videos) { + for (const auto& es : encoding_settings) { + EncodeDecodeTestParams p; + p.codec = codec; + p.video = video; + p.encoding_settings = es.first; + p.quality_expectations = es.second; + test_params.push_back(p); + } + } + } + return test_params; +} + +INSTANTIATE_TEST_SUITE_P(ConstantRate, + EncodeDecodeTest, + ::testing::ValuesIn(ConstantRateTestParameters()), + EncodeDecodeTest::TestParametersToStr); +} // namespace test + +} // namespace webrtc diff --git a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_tester_impl.cc b/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_tester_impl.cc new file mode 100644 index 000000000000..3000c1adeed3 --- /dev/null +++ b/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_tester_impl.cc @@ -0,0 +1,325 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/video_coding/codecs/test/video_codec_tester_impl.h" + +#include +#include +#include + +#include "api/task_queue/default_task_queue_factory.h" +#include "api/units/frequency.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "api/video/encoded_image.h" +#include "api/video/i420_buffer.h" +#include "api/video/video_frame.h" +#include "modules/video_coding/codecs/test/video_codec_analyzer.h" +#include "rtc_base/event.h" +#include "rtc_base/time_utils.h" +#include "system_wrappers/include/sleep.h" + +namespace webrtc { +namespace test { + +namespace { +using RawVideoSource = VideoCodecTester::RawVideoSource; +using CodedVideoSource = VideoCodecTester::CodedVideoSource; +using Decoder = VideoCodecTester::Decoder; +using Encoder = VideoCodecTester::Encoder; +using EncoderSettings = VideoCodecTester::EncoderSettings; +using DecoderSettings = VideoCodecTester::DecoderSettings; +using PacingSettings = VideoCodecTester::PacingSettings; +using PacingMode = PacingSettings::PacingMode; + +constexpr Frequency k90kHz = Frequency::Hertz(90000); + +// A thread-safe wrapper for video source to be shared with the quality analyzer +// that reads reference frames from a separate thread. +class SyncRawVideoSource : public VideoCodecAnalyzer::ReferenceVideoSource { + public: + explicit SyncRawVideoSource(std::unique_ptr video_source) + : video_source_(std::move(video_source)) {} + + absl::optional PullFrame() { + MutexLock lock(&mutex_); + return video_source_->PullFrame(); + } + + VideoFrame GetFrame(uint32_t timestamp_rtp, Resolution resolution) override { + MutexLock lock(&mutex_); + return video_source_->GetFrame(timestamp_rtp, resolution); + } + + protected: + std::unique_ptr video_source_ RTC_GUARDED_BY(mutex_); + Mutex mutex_; +}; + +// Pacer calculates delay necessary to keep frame encode or decode call spaced +// from the previous calls by the pacing time. `Delay` is expected to be called +// as close as possible to posting frame encode or decode task. This class is +// not thread safe. +class Pacer { + public: + explicit Pacer(PacingSettings settings) + : settings_(settings), delay_(TimeDelta::Zero()) {} + TimeDelta Delay(Timestamp beat) { + if (settings_.mode == PacingMode::kNoPacing) { + return TimeDelta::Zero(); + } + + Timestamp now = Timestamp::Micros(rtc::TimeMicros()); + if (prev_time_.has_value()) { + delay_ += PacingTime(beat); + delay_ -= (now - *prev_time_); + if (delay_.ns() < 0) { + delay_ = TimeDelta::Zero(); + } + } + + prev_beat_ = beat; + prev_time_ = now; + return delay_; + } + + private: + TimeDelta PacingTime(Timestamp beat) { + if (settings_.mode == PacingMode::kRealTime) { + return beat - *prev_beat_; + } + RTC_CHECK_EQ(PacingMode::kConstantRate, settings_.mode); + return 1 / settings_.constant_rate; + } + + PacingSettings settings_; + absl::optional prev_beat_; + absl::optional prev_time_; + TimeDelta delay_; +}; + +// Task queue that keeps the number of queued tasks below a certain limit. If +// the limit is reached, posting of a next task is blocked until execution of a +// previously posted task starts. This class is not thread-safe. +class LimitedTaskQueue { + public: + // The codec tester reads frames from video source in the main thread. + // Encoding and decoding are done in separate threads. If encoding or + // decoding is slow, the reading may go far ahead and may buffer too many + // frames in memory. To prevent this we limit the encoding/decoding queue + // size. 
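To make the pacing arithmetic above concrete: in kConstantRate mode the pacing time is 1 / constant_rate, so at 20 Hz consecutive frames are scheduled 50 ms apart, and a frame whose capture already took longer than the pacing interval gets no additional delay because the accumulated delay is clamped at zero. A minimal stand-alone sketch of that accumulation, using plain millisecond integers instead of the webrtc unit types:

#include <algorithm>
#include <cstdint>
#include <iostream>

// Mirrors the constant-rate branch of Pacer::Delay() above: the delay grows by
// the pacing interval and shrinks by the wall-clock time already spent.
int64_t NextDelayMs(int64_t& accumulated_delay_ms,
                    int64_t elapsed_since_prev_ms,
                    int64_t pacing_interval_ms) {
  accumulated_delay_ms += pacing_interval_ms - elapsed_since_prev_ms;
  accumulated_delay_ms = std::max<int64_t>(accumulated_delay_ms, 0);
  return accumulated_delay_ms;
}

int main() {
  int64_t delay_ms = 0;
  // 20 Hz pacing, second frame captured instantly: paced 50 ms after the first.
  std::cout << NextDelayMs(delay_ms, /*elapsed_since_prev_ms=*/0,
                           /*pacing_interval_ms=*/50)
            << "\n";
  delay_ms = 0;
  // 20 Hz pacing, but capture of the next frame already took 200 ms: no extra
  // delay is added on top.
  std::cout << NextDelayMs(delay_ms, /*elapsed_since_prev_ms=*/200,
                           /*pacing_interval_ms=*/50)
            << "\n";
}

These are the same 50 ms and 0 ms figures that the VideoCodecTesterImplPacingTest parameterization further below checks via expected_frame_start_ms.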
When the queue is full, the main thread and, hence, reading frames + // from video source is blocked until a previously posted encoding/decoding + // task starts. + static constexpr int kMaxTaskQueueSize = 3; + + explicit LimitedTaskQueue(rtc::TaskQueue& task_queue) + : task_queue_(task_queue), queue_size_(0) {} + + void PostDelayedTask(absl::AnyInvocable task, TimeDelta delay) { + ++queue_size_; + task_queue_.PostDelayedTask( + [this, task = std::move(task)]() mutable { + std::move(task)(); + --queue_size_; + task_executed_.Set(); + }, + delay); + + task_executed_.Reset(); + if (queue_size_ > kMaxTaskQueueSize) { + task_executed_.Wait(rtc::Event::kForever); + } + RTC_CHECK(queue_size_ <= kMaxTaskQueueSize); + } + + void WaitForPreviouslyPostedTasks() { + while (queue_size_ > 0) { + task_executed_.Wait(rtc::Event::kForever); + task_executed_.Reset(); + } + } + + rtc::TaskQueue& task_queue_; + std::atomic_int queue_size_; + rtc::Event task_executed_; +}; + +class TesterDecoder { + public: + TesterDecoder(std::unique_ptr decoder, + VideoCodecAnalyzer* analyzer, + const DecoderSettings& settings, + rtc::TaskQueue& task_queue) + : decoder_(std::move(decoder)), + analyzer_(analyzer), + settings_(settings), + pacer_(settings.pacing), + task_queue_(task_queue) { + RTC_CHECK(analyzer_) << "Analyzer must be provided"; + } + + void Decode(const EncodedImage& frame) { + Timestamp timestamp = Timestamp::Micros((frame.Timestamp() / k90kHz).us()); + + task_queue_.PostDelayedTask( + [this, frame] { + analyzer_->StartDecode(frame); + decoder_->Decode(frame, [this](const VideoFrame& decoded_frame) { + this->analyzer_->FinishDecode(decoded_frame, /*spatial_idx=*/0); + }); + }, + pacer_.Delay(timestamp)); + } + + void Flush() { task_queue_.WaitForPreviouslyPostedTasks(); } + + protected: + std::unique_ptr decoder_; + VideoCodecAnalyzer* const analyzer_; + const DecoderSettings& settings_; + Pacer pacer_; + LimitedTaskQueue task_queue_; +}; + +class TesterEncoder { + public: + TesterEncoder(std::unique_ptr encoder, + TesterDecoder* decoder, + VideoCodecAnalyzer* analyzer, + const EncoderSettings& settings, + rtc::TaskQueue& task_queue) + : encoder_(std::move(encoder)), + decoder_(decoder), + analyzer_(analyzer), + settings_(settings), + pacer_(settings.pacing), + task_queue_(task_queue) { + RTC_CHECK(analyzer_) << "Analyzer must be provided"; + } + + void Encode(const VideoFrame& frame) { + Timestamp timestamp = Timestamp::Micros((frame.timestamp() / k90kHz).us()); + + task_queue_.PostDelayedTask( + [this, frame] { + analyzer_->StartEncode(frame); + encoder_->Encode(frame, [this](const EncodedImage& encoded_frame) { + this->analyzer_->FinishEncode(encoded_frame); + if (decoder_ != nullptr) { + this->decoder_->Decode(encoded_frame); + } + }); + }, + pacer_.Delay(timestamp)); + } + + void Flush() { task_queue_.WaitForPreviouslyPostedTasks(); } + + protected: + std::unique_ptr encoder_; + TesterDecoder* const decoder_; + VideoCodecAnalyzer* const analyzer_; + const EncoderSettings& settings_; + Pacer pacer_; + LimitedTaskQueue task_queue_; +}; + +} // namespace + +VideoCodecTesterImpl::VideoCodecTesterImpl() + : VideoCodecTesterImpl(/*task_queue_factory=*/nullptr) {} + +VideoCodecTesterImpl::VideoCodecTesterImpl(TaskQueueFactory* task_queue_factory) + : task_queue_factory_(task_queue_factory) { + if (task_queue_factory_ == nullptr) { + owned_task_queue_factory_ = CreateDefaultTaskQueueFactory(); + task_queue_factory_ = owned_task_queue_factory_.get(); + } +} + +std::unique_ptr 
VideoCodecTesterImpl::RunDecodeTest( + std::unique_ptr video_source, + std::unique_ptr decoder, + const DecoderSettings& decoder_settings) { + rtc::TaskQueue analyser_task_queue(task_queue_factory_->CreateTaskQueue( + "Analyzer", TaskQueueFactory::Priority::NORMAL)); + rtc::TaskQueue decoder_task_queue(task_queue_factory_->CreateTaskQueue( + "Decoder", TaskQueueFactory::Priority::NORMAL)); + + VideoCodecAnalyzer perf_analyzer(analyser_task_queue); + TesterDecoder tester_decoder(std::move(decoder), &perf_analyzer, + decoder_settings, decoder_task_queue); + + while (auto frame = video_source->PullFrame()) { + tester_decoder.Decode(*frame); + } + + tester_decoder.Flush(); + + return perf_analyzer.GetStats(); +} + +std::unique_ptr VideoCodecTesterImpl::RunEncodeTest( + std::unique_ptr video_source, + std::unique_ptr encoder, + const EncoderSettings& encoder_settings) { + rtc::TaskQueue analyser_task_queue(task_queue_factory_->CreateTaskQueue( + "Analyzer", TaskQueueFactory::Priority::NORMAL)); + rtc::TaskQueue encoder_task_queue(task_queue_factory_->CreateTaskQueue( + "Encoder", TaskQueueFactory::Priority::NORMAL)); + + SyncRawVideoSource sync_source(std::move(video_source)); + VideoCodecAnalyzer perf_analyzer(analyser_task_queue); + TesterEncoder tester_encoder(std::move(encoder), /*decoder=*/nullptr, + &perf_analyzer, encoder_settings, + encoder_task_queue); + + while (auto frame = sync_source.PullFrame()) { + tester_encoder.Encode(*frame); + } + + tester_encoder.Flush(); + + return perf_analyzer.GetStats(); +} + +std::unique_ptr VideoCodecTesterImpl::RunEncodeDecodeTest( + std::unique_ptr video_source, + std::unique_ptr encoder, + std::unique_ptr decoder, + const EncoderSettings& encoder_settings, + const DecoderSettings& decoder_settings) { + rtc::TaskQueue analyser_task_queue(task_queue_factory_->CreateTaskQueue( + "Analyzer", TaskQueueFactory::Priority::NORMAL)); + rtc::TaskQueue decoder_task_queue(task_queue_factory_->CreateTaskQueue( + "Decoder", TaskQueueFactory::Priority::NORMAL)); + rtc::TaskQueue encoder_task_queue(task_queue_factory_->CreateTaskQueue( + "Encoder", TaskQueueFactory::Priority::NORMAL)); + + SyncRawVideoSource sync_source(std::move(video_source)); + VideoCodecAnalyzer perf_analyzer(analyser_task_queue, &sync_source); + TesterDecoder tester_decoder(std::move(decoder), &perf_analyzer, + decoder_settings, decoder_task_queue); + TesterEncoder tester_encoder(std::move(encoder), &tester_decoder, + &perf_analyzer, encoder_settings, + encoder_task_queue); + + while (auto frame = sync_source.PullFrame()) { + tester_encoder.Encode(*frame); + } + + tester_encoder.Flush(); + tester_decoder.Flush(); + + return perf_analyzer.GetStats(); +} + +} // namespace test +} // namespace webrtc diff --git a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_tester_impl.h b/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_tester_impl.h new file mode 100644 index 000000000000..b64adeb882c4 --- /dev/null +++ b/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_tester_impl.h @@ -0,0 +1,53 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */
+
+#ifndef MODULES_VIDEO_CODING_CODECS_TEST_VIDEO_CODEC_TESTER_IMPL_H_
+#define MODULES_VIDEO_CODING_CODECS_TEST_VIDEO_CODEC_TESTER_IMPL_H_
+
+#include <memory>
+
+#include "api/task_queue/task_queue_factory.h"
+#include "api/test/video_codec_tester.h"
+
+namespace webrtc {
+namespace test {
+
+// A stateless implementation of `VideoCodecTester`. This class is thread safe.
+class VideoCodecTesterImpl : public VideoCodecTester {
+ public:
+  VideoCodecTesterImpl();
+  explicit VideoCodecTesterImpl(TaskQueueFactory* task_queue_factory);
+
+  std::unique_ptr<VideoCodecTestStats> RunDecodeTest(
+      std::unique_ptr<CodedVideoSource> video_source,
+      std::unique_ptr<Decoder> decoder,
+      const DecoderSettings& decoder_settings) override;
+
+  std::unique_ptr<VideoCodecTestStats> RunEncodeTest(
+      std::unique_ptr<RawVideoSource> video_source,
+      std::unique_ptr<Encoder> encoder,
+      const EncoderSettings& encoder_settings) override;
+
+  std::unique_ptr<VideoCodecTestStats> RunEncodeDecodeTest(
+      std::unique_ptr<RawVideoSource> video_source,
+      std::unique_ptr<Encoder> encoder,
+      std::unique_ptr<Decoder> decoder,
+      const EncoderSettings& encoder_settings,
+      const DecoderSettings& decoder_settings) override;
+
+ protected:
+  std::unique_ptr<TaskQueueFactory> owned_task_queue_factory_;
+  TaskQueueFactory* task_queue_factory_;
+};
+
+}  // namespace test
+}  // namespace webrtc
+
+#endif  // MODULES_VIDEO_CODING_CODECS_TEST_VIDEO_CODEC_TESTER_IMPL_H_
diff --git a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_tester_impl_unittest.cc b/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_tester_impl_unittest.cc
new file mode 100644
index 000000000000..29fb006fb5b0
--- /dev/null
+++ b/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_tester_impl_unittest.cc
@@ -0,0 +1,259 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */ + +#include "modules/video_coding/codecs/test/video_codec_tester_impl.h" + +#include +#include +#include +#include + +#include "api/task_queue/task_queue_factory.h" +#include "api/task_queue/test/mock_task_queue_base.h" +#include "api/units/frequency.h" +#include "api/units/time_delta.h" +#include "api/video/encoded_image.h" +#include "api/video/i420_buffer.h" +#include "api/video/video_frame.h" +#include "rtc_base/fake_clock.h" +#include "rtc_base/gunit.h" +#include "rtc_base/task_queue_for_test.h" +#include "rtc_base/time_utils.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace test { + +namespace { +using ::testing::_; +using ::testing::Invoke; +using ::testing::InvokeWithoutArgs; +using ::testing::Return; + +using Decoder = VideoCodecTester::Decoder; +using Encoder = VideoCodecTester::Encoder; +using CodedVideoSource = VideoCodecTester::CodedVideoSource; +using RawVideoSource = VideoCodecTester::RawVideoSource; +using DecoderSettings = VideoCodecTester::DecoderSettings; +using EncoderSettings = VideoCodecTester::EncoderSettings; +using PacingSettings = VideoCodecTester::PacingSettings; +using PacingMode = PacingSettings::PacingMode; + +constexpr Frequency k90kHz = Frequency::Hertz(90000); + +VideoFrame CreateVideoFrame(uint32_t timestamp_rtp) { + rtc::scoped_refptr buffer(I420Buffer::Create(2, 2)); + return VideoFrame::Builder() + .set_video_frame_buffer(buffer) + .set_timestamp_rtp(timestamp_rtp) + .build(); +} + +EncodedImage CreateEncodedImage(uint32_t timestamp_rtp) { + EncodedImage encoded_image; + encoded_image.SetTimestamp(timestamp_rtp); + return encoded_image; +} + +class MockRawVideoSource : public RawVideoSource { + public: + MOCK_METHOD(absl::optional, PullFrame, (), (override)); + MOCK_METHOD(VideoFrame, + GetFrame, + (uint32_t timestamp_rtp, Resolution), + (override)); +}; + +class MockCodedVideoSource : public CodedVideoSource { + public: + MOCK_METHOD(absl::optional, PullFrame, (), (override)); +}; + +class MockDecoder : public Decoder { + public: + MOCK_METHOD(void, + Decode, + (const EncodedImage& frame, DecodeCallback callback), + (override)); +}; + +class MockEncoder : public Encoder { + public: + MOCK_METHOD(void, + Encode, + (const VideoFrame& frame, EncodeCallback callback), + (override)); +}; + +class MockTaskQueueFactory : public TaskQueueFactory { + public: + explicit MockTaskQueueFactory(TaskQueueBase& task_queue) + : task_queue_(task_queue) {} + + std::unique_ptr CreateTaskQueue( + absl::string_view name, + Priority priority) const override { + return std::unique_ptr(&task_queue_); + } + + protected: + TaskQueueBase& task_queue_; +}; +} // namespace + +class VideoCodecTesterImplPacingTest + : public ::testing::TestWithParam, + std::vector, + std::vector>> { + public: + VideoCodecTesterImplPacingTest() + : pacing_settings_(std::get<0>(GetParam())), + frame_timestamp_ms_(std::get<1>(GetParam())), + frame_capture_delay_ms_(std::get<2>(GetParam())), + expected_frame_start_ms_(std::get<3>(GetParam())), + num_frames_(frame_timestamp_ms_.size()), + task_queue_factory_(task_queue_) {} + + void SetUp() override { + ON_CALL(task_queue_, PostTask) + .WillByDefault(Invoke( + [](absl::AnyInvocable task) { std::move(task)(); })); + + ON_CALL(task_queue_, PostDelayedTask) + .WillByDefault( + Invoke([&](absl::AnyInvocable task, TimeDelta delay) { + clock_.AdvanceTime(delay); + std::move(task)(); + })); + } + + protected: + PacingSettings pacing_settings_; + std::vector frame_timestamp_ms_; + std::vector frame_capture_delay_ms_; + 
std::vector expected_frame_start_ms_; + size_t num_frames_; + + rtc::ScopedFakeClock clock_; + MockTaskQueueBase task_queue_; + MockTaskQueueFactory task_queue_factory_; +}; + +TEST_P(VideoCodecTesterImplPacingTest, PaceEncode) { + auto video_source = std::make_unique(); + + size_t frame_num = 0; + EXPECT_CALL(*video_source, PullFrame).WillRepeatedly(Invoke([&]() mutable { + if (frame_num >= num_frames_) { + return absl::optional(); + } + clock_.AdvanceTime(TimeDelta::Millis(frame_capture_delay_ms_[frame_num])); + + uint32_t timestamp_rtp = frame_timestamp_ms_[frame_num] * k90kHz.hertz() / + rtc::kNumMillisecsPerSec; + ++frame_num; + return absl::optional(CreateVideoFrame(timestamp_rtp)); + })); + + auto encoder = std::make_unique(); + EncoderSettings encoder_settings; + encoder_settings.pacing = pacing_settings_; + + VideoCodecTesterImpl tester(&task_queue_factory_); + auto fs = tester + .RunEncodeTest(std::move(video_source), std::move(encoder), + encoder_settings) + ->GetFrameStatistics(); + ASSERT_EQ(fs.size(), num_frames_); + + for (size_t i = 0; i < fs.size(); ++i) { + int encode_start_ms = (fs[i].encode_start_ns - fs[0].encode_start_ns) / + rtc::kNumNanosecsPerMillisec; + EXPECT_NEAR(encode_start_ms, expected_frame_start_ms_[i], 10); + } +} + +TEST_P(VideoCodecTesterImplPacingTest, PaceDecode) { + auto video_source = std::make_unique(); + + size_t frame_num = 0; + EXPECT_CALL(*video_source, PullFrame).WillRepeatedly(Invoke([&]() mutable { + if (frame_num >= num_frames_) { + return absl::optional(); + } + clock_.AdvanceTime(TimeDelta::Millis(frame_capture_delay_ms_[frame_num])); + + uint32_t timestamp_rtp = frame_timestamp_ms_[frame_num] * k90kHz.hertz() / + rtc::kNumMillisecsPerSec; + ++frame_num; + return absl::optional(CreateEncodedImage(timestamp_rtp)); + })); + + auto decoder = std::make_unique(); + DecoderSettings decoder_settings; + decoder_settings.pacing = pacing_settings_; + + VideoCodecTesterImpl tester(&task_queue_factory_); + auto fs = tester + .RunDecodeTest(std::move(video_source), std::move(decoder), + decoder_settings) + ->GetFrameStatistics(); + ASSERT_EQ(fs.size(), num_frames_); + + for (size_t i = 0; i < fs.size(); ++i) { + int decode_start_ms = (fs[i].decode_start_ns - fs[0].decode_start_ns) / + rtc::kNumNanosecsPerMillisec; + EXPECT_NEAR(decode_start_ms, expected_frame_start_ms_[i], 10); + } +} + +INSTANTIATE_TEST_SUITE_P( + All, + VideoCodecTesterImplPacingTest, + ::testing::ValuesIn( + {std::make_tuple(PacingSettings({.mode = PacingMode::kNoPacing}), + /*frame_timestamp_ms=*/std::vector{0, 100}, + /*frame_capture_delay_ms=*/std::vector{0, 0}, + /*expected_frame_start_ms=*/std::vector{0, 0}), + // Pace with rate equal to the source frame rate. Frames are captured + // instantly. Verify that frames are paced with the source frame rate. + std::make_tuple(PacingSettings({.mode = PacingMode::kRealTime}), + /*frame_timestamp_ms=*/std::vector{0, 100}, + /*frame_capture_delay_ms=*/std::vector{0, 0}, + /*expected_frame_start_ms=*/std::vector{0, 100}), + // Pace with rate equal to the source frame rate. Frame capture is + // delayed by more than pacing time. Verify that no extra delay is + // added. + std::make_tuple(PacingSettings({.mode = PacingMode::kRealTime}), + /*frame_timestamp_ms=*/std::vector{0, 100}, + /*frame_capture_delay_ms=*/std::vector{0, 200}, + /*expected_frame_start_ms=*/std::vector{0, 200}), + // Pace with constant rate less then source frame rate. Frames are + // captured instantly. Verify that frames are paced with the requested + // constant rate. 
+ std::make_tuple( + PacingSettings({.mode = PacingMode::kConstantRate, + .constant_rate = Frequency::Hertz(20)}), + /*frame_timestamp_ms=*/std::vector{0, 100}, + /*frame_capture_delay_ms=*/std::vector{0, 0}, + /*expected_frame_start_ms=*/std::vector{0, 50}), + // Pace with constant rate less then source frame rate. Frame capture + // is delayed by more than the pacing time. Verify that no extra delay + // is added. + std::make_tuple( + PacingSettings({.mode = PacingMode::kConstantRate, + .constant_rate = Frequency::Hertz(20)}), + /*frame_timestamp_ms=*/std::vector{0, 100}, + /*frame_capture_delay_ms=*/std::vector{0, 200}, + /*expected_frame_start_ms=*/std::vector{0, 200})})); +} // namespace test +} // namespace webrtc diff --git a/third_party/libwebrtc/modules/video_coding/codecs/test/videocodec_test_fixture_impl.cc b/third_party/libwebrtc/modules/video_coding/codecs/test/videocodec_test_fixture_impl.cc index e7028f6fe1f6..e56e8a92af4c 100644 --- a/third_party/libwebrtc/modules/video_coding/codecs/test/videocodec_test_fixture_impl.cc +++ b/third_party/libwebrtc/modules/video_coding/codecs/test/videocodec_test_fixture_impl.cc @@ -736,13 +736,10 @@ bool VideoCodecTestFixtureImpl::SetUpAndInitObjects( int clip_height = config_.clip_height.value_or(config_.codec_settings.height); // Create file objects for quality analysis. - source_frame_reader_.reset(new YuvFrameReaderImpl( - config_.filepath, clip_width, clip_height, - config_.reference_width.value_or(clip_width), - config_.reference_height.value_or(clip_height), - YuvFrameReaderImpl::RepeatMode::kPingPong, config_.clip_fps, - config_.codec_settings.maxFramerate)); - EXPECT_TRUE(source_frame_reader_->Init()); + source_frame_reader_ = CreateYuvFrameReader( + config_.filepath, + Resolution({.width = clip_width, .height = clip_height}), + YuvFrameReaderImpl::RepeatMode::kPingPong); RTC_DCHECK(encoded_frame_writers_.empty()); RTC_DCHECK(decoded_frame_writers_.empty()); @@ -820,7 +817,7 @@ void VideoCodecTestFixtureImpl::ReleaseAndCloseObjects( DestroyEncoderAndDecoder(); }); - source_frame_reader_->Close(); + source_frame_reader_.reset(); // Close visualization files. 
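The fixture change above also switches from the old YuvFrameReaderImpl Init()/Close() lifecycle to a factory-created reader that is released simply by destroying it. A hedged sketch of the reader calls as they are used in this patch (CreateYuvFrameReader, PullFrame with a source-to-target framerate ratio, ReadFrame for re-reading a specific frame); the 320x180 resolution and the 30-to-15 fps conversion are arbitrary example values, not part of the change:

#include <memory>
#include <string>

#include "api/video/resolution.h"
#include "test/testsupport/frame_reader.h"

namespace webrtc {
namespace test {

void ReadSomeFrames(const std::string& filepath) {
  Resolution resolution = {.width = 320, .height = 180};
  std::unique_ptr<FrameReader> reader = CreateYuvFrameReader(
      filepath, resolution, YuvFrameReaderImpl::RepeatMode::kPingPong);

  // Pull the next frame; num/den is the source-to-target framerate ratio
  // (a 30 fps clip encoded at 15 fps here), as in videoprocessor.cc below.
  int frame_num = 0;
  auto buffer = reader->PullFrame(&frame_num, resolution,
                                  FrameReader::Ratio({.num = 30, .den = 15}));

  // Re-read the same frame later, e.g. to compare against decoded output.
  auto reference = reader->ReadFrame(frame_num, resolution);

  // Destroying the reader replaces the old explicit Close() call.
  reader.reset();
}

}  // namespace test
}  // namespace webrtc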
for (auto& encoded_frame_writer : encoded_frame_writers_) { diff --git a/third_party/libwebrtc/modules/video_coding/codecs/test/videocodec_test_stats_impl.cc b/third_party/libwebrtc/modules/video_coding/codecs/test/videocodec_test_stats_impl.cc index efb7502e5d51..390348b97aa6 100644 --- a/third_party/libwebrtc/modules/video_coding/codecs/test/videocodec_test_stats_impl.cc +++ b/third_party/libwebrtc/modules/video_coding/codecs/test/videocodec_test_stats_impl.cc @@ -58,7 +58,20 @@ FrameStatistics* VideoCodecTestStatsImpl::GetFrameWithTimestamp( return GetFrame(rtp_timestamp_to_frame_num_[layer_idx][timestamp], layer_idx); } -std::vector VideoCodecTestStatsImpl::GetFrameStatistics() { +FrameStatistics* VideoCodecTestStatsImpl::GetOrAddFrame(size_t timestamp_rtp, + size_t spatial_idx) { + if (rtp_timestamp_to_frame_num_[spatial_idx].count(timestamp_rtp) > 0) { + return GetFrameWithTimestamp(timestamp_rtp, spatial_idx); + } + + size_t frame_num = layer_stats_[spatial_idx].size(); + AddFrame(FrameStatistics(frame_num, timestamp_rtp, spatial_idx)); + + return GetFrameWithTimestamp(timestamp_rtp, spatial_idx); +} + +std::vector VideoCodecTestStatsImpl::GetFrameStatistics() + const { size_t capacity = 0; for (const auto& layer_stat : layer_stats_) { capacity += layer_stat.second.size(); @@ -92,7 +105,8 @@ VideoCodecTestStatsImpl::SliceAndCalcLayerVideoStatistic( for (size_t temporal_idx = 0; temporal_idx < num_temporal_layers; ++temporal_idx) { VideoStatistics layer_stat = SliceAndCalcVideoStatistic( - first_frame_num, last_frame_num, spatial_idx, temporal_idx, false); + first_frame_num, last_frame_num, spatial_idx, temporal_idx, false, + /*target_bitrate=*/absl::nullopt, /*target_framerate=*/absl::nullopt); layer_stats.push_back(layer_stat); } } @@ -110,9 +124,24 @@ VideoStatistics VideoCodecTestStatsImpl::SliceAndCalcAggregatedVideoStatistic( RTC_CHECK_GT(num_spatial_layers, 0); RTC_CHECK_GT(num_temporal_layers, 0); - return SliceAndCalcVideoStatistic(first_frame_num, last_frame_num, - num_spatial_layers - 1, - num_temporal_layers - 1, true); + return SliceAndCalcVideoStatistic( + first_frame_num, last_frame_num, num_spatial_layers - 1, + num_temporal_layers - 1, true, /*target_bitrate=*/absl::nullopt, + /*target_framerate=*/absl::nullopt); +} + +VideoStatistics VideoCodecTestStatsImpl::CalcVideoStatistic( + size_t first_frame_num, + size_t last_frame_num, + DataRate target_bitrate, + Frequency target_framerate) { + size_t num_spatial_layers = 0; + size_t num_temporal_layers = 0; + GetNumberOfEncodedLayers(first_frame_num, last_frame_num, &num_spatial_layers, + &num_temporal_layers); + return SliceAndCalcVideoStatistic( + first_frame_num, last_frame_num, num_spatial_layers - 1, + num_temporal_layers - 1, true, target_bitrate, target_framerate); } size_t VideoCodecTestStatsImpl::Size(size_t spatial_idx) { @@ -175,7 +204,9 @@ VideoStatistics VideoCodecTestStatsImpl::SliceAndCalcVideoStatistic( size_t last_frame_num, size_t spatial_idx, size_t temporal_idx, - bool aggregate_independent_layers) { + bool aggregate_independent_layers, + absl::optional target_bitrate, + absl::optional target_framerate) { VideoStatistics video_stat; float buffer_level_bits = 0.0f; @@ -200,8 +231,11 @@ VideoStatistics VideoCodecTestStatsImpl::SliceAndCalcVideoStatistic( FrameStatistics last_successfully_decoded_frame(0, 0, 0); const size_t target_bitrate_kbps = - CalcLayerTargetBitrateKbps(first_frame_num, last_frame_num, spatial_idx, - temporal_idx, aggregate_independent_layers); + target_bitrate.has_value() + ? 
target_bitrate->kbps() + : CalcLayerTargetBitrateKbps(first_frame_num, last_frame_num, + spatial_idx, temporal_idx, + aggregate_independent_layers); const size_t target_bitrate_bps = 1000 * target_bitrate_kbps; RTC_CHECK_GT(target_bitrate_kbps, 0); // We divide by `target_bitrate_kbps`. @@ -303,7 +337,9 @@ VideoStatistics VideoCodecTestStatsImpl::SliceAndCalcVideoStatistic( GetFrame(first_frame_num, spatial_idx)->rtp_timestamp; RTC_CHECK_GT(timestamp_delta, 0); const float input_framerate_fps = - 1.0 * kVideoPayloadTypeFrequency / timestamp_delta; + target_framerate.has_value() + ? target_framerate->millihertz() / 1000.0 + : 1.0 * kVideoPayloadTypeFrequency / timestamp_delta; RTC_CHECK_GT(input_framerate_fps, 0); const float duration_sec = num_frames / input_framerate_fps; diff --git a/third_party/libwebrtc/modules/video_coding/codecs/test/videocodec_test_stats_impl.h b/third_party/libwebrtc/modules/video_coding/codecs/test/videocodec_test_stats_impl.h index 61850d362212..1a7980aa0a03 100644 --- a/third_party/libwebrtc/modules/video_coding/codecs/test/videocodec_test_stats_impl.h +++ b/third_party/libwebrtc/modules/video_coding/codecs/test/videocodec_test_stats_impl.h @@ -35,8 +35,12 @@ class VideoCodecTestStatsImpl : public VideoCodecTestStats { FrameStatistics* GetFrame(size_t frame_number, size_t spatial_idx); FrameStatistics* GetFrameWithTimestamp(size_t timestamp, size_t spatial_idx); + // Creates FrameStatisticts if it doesn't exists and/or returns + // created/existing FrameStatisticts. + FrameStatistics* GetOrAddFrame(size_t timestamp_rtp, size_t spatial_idx); + // Implements VideoCodecTestStats. - std::vector GetFrameStatistics() override; + std::vector GetFrameStatistics() const override; std::vector SliceAndCalcLayerVideoStatistic( size_t first_frame_num, size_t last_frame_num) override; @@ -44,6 +48,11 @@ class VideoCodecTestStatsImpl : public VideoCodecTestStats { VideoStatistics SliceAndCalcAggregatedVideoStatistic(size_t first_frame_num, size_t last_frame_num); + VideoStatistics CalcVideoStatistic(size_t first_frame, + size_t last_frame, + DataRate target_bitrate, + Frequency target_framerate) override; + size_t Size(size_t spatial_idx); void Clear(); @@ -65,7 +74,9 @@ class VideoCodecTestStatsImpl : public VideoCodecTestStats { size_t last_frame_num, size_t spatial_idx, size_t temporal_idx, - bool aggregate_independent_layers); + bool aggregate_independent_layers, + absl::optional target_bitrate, + absl::optional target_framerate); void GetNumberOfEncodedLayers(size_t first_frame_num, size_t last_frame_num, diff --git a/third_party/libwebrtc/modules/video_coding/codecs/test/videocodec_test_stats_impl_unittest.cc b/third_party/libwebrtc/modules/video_coding/codecs/test/videocodec_test_stats_impl_unittest.cc index 6477b6ab8c42..89e7d2e1c415 100644 --- a/third_party/libwebrtc/modules/video_coding/codecs/test/videocodec_test_stats_impl_unittest.cc +++ b/third_party/libwebrtc/modules/video_coding/codecs/test/videocodec_test_stats_impl_unittest.cc @@ -38,6 +38,21 @@ TEST(StatsTest, AddAndGetFrame) { EXPECT_EQ(kTimestamp, frame_stat->rtp_timestamp); } +TEST(StatsTest, GetOrAddFrame_noFrame_createsNewFrameStat) { + VideoCodecTestStatsImpl stats; + stats.GetOrAddFrame(kTimestamp, 0); + FrameStatistics* frame_stat = stats.GetFrameWithTimestamp(kTimestamp, 0); + EXPECT_EQ(kTimestamp, frame_stat->rtp_timestamp); +} + +TEST(StatsTest, GetOrAddFrame_frameExists_returnsExistingFrameStat) { + VideoCodecTestStatsImpl stats; + stats.AddFrame(FrameStatistics(0, kTimestamp, 0)); + 
FrameStatistics* frame_stat1 = stats.GetFrameWithTimestamp(kTimestamp, 0); + FrameStatistics* frame_stat2 = stats.GetOrAddFrame(kTimestamp, 0); + EXPECT_EQ(frame_stat1, frame_stat2); +} + TEST(StatsTest, AddAndGetFrames) { VideoCodecTestStatsImpl stats; const size_t kNumFrames = 1000; diff --git a/third_party/libwebrtc/modules/video_coding/codecs/test/videoprocessor.cc b/third_party/libwebrtc/modules/video_coding/codecs/test/videoprocessor.cc index 353a00df7967..13266c40df3b 100644 --- a/third_party/libwebrtc/modules/video_coding/codecs/test/videoprocessor.cc +++ b/third_party/libwebrtc/modules/video_coding/codecs/test/videoprocessor.cc @@ -153,7 +153,6 @@ VideoProcessor::VideoProcessor(webrtc::VideoEncoder* encoder, bitrate_allocator_( CreateBuiltinVideoBitrateAllocatorFactory() ->CreateVideoBitrateAllocator(config_.codec_settings)), - framerate_fps_(0), encode_callback_(this), input_frame_reader_(input_frame_reader), merged_encoded_frames_(num_simulcast_or_spatial_layers_), @@ -231,15 +230,27 @@ void VideoProcessor::ProcessFrame() { RTC_DCHECK_RUN_ON(&sequence_checker_); RTC_DCHECK(!is_finalized_); + RTC_DCHECK_GT(target_rates_.size(), 0u); + RTC_DCHECK_EQ(target_rates_.begin()->first, 0u); + RateProfile target_rate = + std::prev(target_rates_.upper_bound(last_inputed_frame_num_))->second; + const size_t frame_number = last_inputed_frame_num_++; // Get input frame and store for future quality calculation. + Resolution resolution = Resolution({.width = config_.codec_settings.width, + .height = config_.codec_settings.height}); + FrameReader::Ratio framerate_scale = FrameReader::Ratio( + {.num = config_.clip_fps.value_or(config_.codec_settings.maxFramerate), + .den = static_cast(config_.codec_settings.maxFramerate)}); rtc::scoped_refptr buffer = - input_frame_reader_->ReadFrame(); + input_frame_reader_->PullFrame( + /*frame_num*/ nullptr, resolution, framerate_scale); + RTC_CHECK(buffer) << "Tried to read too many frames from the file."; const size_t timestamp = last_inputed_timestamp_ + - static_cast(kVideoPayloadTypeFrequency / framerate_fps_); + static_cast(kVideoPayloadTypeFrequency / target_rate.input_fps); VideoFrame input_frame = VideoFrame::Builder() .set_video_frame_buffer(buffer) @@ -303,8 +314,10 @@ void VideoProcessor::ProcessFrame() { // Encode. const std::vector frame_types = (frame_number == 0) - ? std::vector{VideoFrameType::kVideoFrameKey} - : std::vector{VideoFrameType::kVideoFrameDelta}; + ? 
std::vector(num_simulcast_or_spatial_layers_, + VideoFrameType::kVideoFrameKey) + : std::vector(num_simulcast_or_spatial_layers_, + VideoFrameType::kVideoFrameDelta); const int encode_return_code = encoder_->Encode(input_frame, &frame_types); for (size_t i = 0; i < num_simulcast_or_spatial_layers_; ++i) { FrameStatistics* frame_stat = stats_->GetFrame(frame_number, i); @@ -316,12 +329,14 @@ void VideoProcessor::SetRates(size_t bitrate_kbps, double framerate_fps) { RTC_DCHECK_RUN_ON(&sequence_checker_); RTC_DCHECK(!is_finalized_); - framerate_fps_ = framerate_fps; - bitrate_allocation_ = + target_rates_[last_inputed_frame_num_] = + RateProfile({.target_kbps = bitrate_kbps, .input_fps = framerate_fps}); + + auto bitrate_allocation = bitrate_allocator_->Allocate(VideoBitrateAllocationParameters( - static_cast(bitrate_kbps * 1000), framerate_fps_)); + static_cast(bitrate_kbps * 1000), framerate_fps)); encoder_->SetRates( - VideoEncoder::RateControlParameters(bitrate_allocation_, framerate_fps_)); + VideoEncoder::RateControlParameters(bitrate_allocation, framerate_fps)); } int32_t VideoProcessor::VideoProcessorDecodeCompleteCallback::Decoded( @@ -389,13 +404,20 @@ void VideoProcessor::FrameEncoded( first_encoded_frame_[spatial_idx] = false; last_encoded_frame_num_[spatial_idx] = frame_number; + RateProfile target_rate = + std::prev(target_rates_.upper_bound(frame_number))->second; + auto bitrate_allocation = + bitrate_allocator_->Allocate(VideoBitrateAllocationParameters( + static_cast(target_rate.target_kbps * 1000), + target_rate.input_fps)); + // Update frame statistics. frame_stat->encoding_successful = true; frame_stat->encode_time_us = GetElapsedTimeMicroseconds( frame_stat->encode_start_ns, encode_stop_ns - post_encode_time_ns_); frame_stat->target_bitrate_kbps = - bitrate_allocation_.GetTemporalLayerSum(spatial_idx, temporal_idx) / 1000; - frame_stat->target_framerate_fps = framerate_fps_; + bitrate_allocation.GetTemporalLayerSum(spatial_idx, temporal_idx) / 1000; + frame_stat->target_framerate_fps = target_rate.input_fps; frame_stat->length_bytes = encoded_image.size(); frame_stat->frame_type = encoded_image._frameType; frame_stat->temporal_idx = temporal_idx; diff --git a/third_party/libwebrtc/modules/video_coding/codecs/test/videoprocessor.h b/third_party/libwebrtc/modules/video_coding/codecs/test/videoprocessor.h index 4c89c790a9f9..0a5fdf862278 100644 --- a/third_party/libwebrtc/modules/video_coding/codecs/test/videoprocessor.h +++ b/third_party/libwebrtc/modules/video_coding/codecs/test/videoprocessor.h @@ -25,6 +25,7 @@ #include "api/test/videocodec_test_fixture.h" #include "api/video/encoded_image.h" #include "api/video/i420_buffer.h" +#include "api/video/resolution.h" #include "api/video/video_bitrate_allocation.h" #include "api/video/video_bitrate_allocator.h" #include "api/video/video_frame.h" @@ -191,8 +192,9 @@ class VideoProcessor { webrtc::VideoEncoder* const encoder_; VideoDecoderList* const decoders_; const std::unique_ptr bitrate_allocator_; - VideoBitrateAllocation bitrate_allocation_ RTC_GUARDED_BY(sequence_checker_); - double framerate_fps_ RTC_GUARDED_BY(sequence_checker_); + + // Target bitrate and framerate per frame. + std::map target_rates_ RTC_GUARDED_BY(sequence_checker_); // Adapters for the codec callbacks. 
VideoProcessorEncodeCompleteCallback encode_callback_; diff --git a/third_party/libwebrtc/modules/video_coding/codecs/test/videoprocessor_unittest.cc b/third_party/libwebrtc/modules/video_coding/codecs/test/videoprocessor_unittest.cc index 6af775cecefd..f1774af5df12 100644 --- a/third_party/libwebrtc/modules/video_coding/codecs/test/videoprocessor_unittest.cc +++ b/third_party/libwebrtc/modules/video_coding/codecs/test/videoprocessor_unittest.cc @@ -38,7 +38,6 @@ namespace { const int kWidth = 352; const int kHeight = 288; -const int kFrameSize = kWidth * kHeight * 3 / 2; // I420. } // namespace @@ -52,8 +51,6 @@ class VideoProcessorTest : public ::testing::Test { decoders_.push_back(std::unique_ptr(decoder_mock_)); ExpectInit(); - EXPECT_CALL(frame_reader_mock_, FrameLength()) - .WillRepeatedly(Return(kFrameSize)); q_.SendTask( [this] { video_processor_ = std::make_unique( @@ -107,7 +104,7 @@ TEST_F(VideoProcessorTest, ProcessFrames_FixedFramerate) { .Times(1); q_.SendTask([=] { video_processor_->SetRates(kBitrateKbps, kFramerateFps); }); - EXPECT_CALL(frame_reader_mock_, ReadFrame()) + EXPECT_CALL(frame_reader_mock_, PullFrame(_, _, _)) .WillRepeatedly(Return(I420Buffer::Create(kWidth, kHeight))); EXPECT_CALL( encoder_mock_, @@ -136,7 +133,7 @@ TEST_F(VideoProcessorTest, ProcessFrames_VariableFramerate) { q_.SendTask( [=] { video_processor_->SetRates(kBitrateKbps, kStartFramerateFps); }); - EXPECT_CALL(frame_reader_mock_, ReadFrame()) + EXPECT_CALL(frame_reader_mock_, PullFrame(_, _, _)) .WillRepeatedly(Return(I420Buffer::Create(kWidth, kHeight))); EXPECT_CALL(encoder_mock_, Encode(Property(&VideoFrame::timestamp, kStartTimestamp), _)) diff --git a/third_party/libwebrtc/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc b/third_party/libwebrtc/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc index ddd56d9eebca..cc84605ce7f8 100644 --- a/third_party/libwebrtc/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc +++ b/third_party/libwebrtc/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc @@ -25,6 +25,7 @@ #include "api/video/video_content_type.h" #include "api/video/video_frame_buffer.h" #include "api/video/video_timing.h" +#include "api/video_codecs/scalability_mode.h" #include "api/video_codecs/vp8_temporal_layers.h" #include "api/video_codecs/vp8_temporal_layers_factory.h" #include "modules/video_coding/codecs/interface/common_constants.h" @@ -1103,6 +1104,17 @@ void LibvpxVp8Encoder::PopulateCodecSpecific(CodecSpecificInfo* codec_specific, codec_specific->template_structure->resolutions = { RenderResolution(pkt.data.frame.width[0], pkt.data.frame.height[0])}; } + switch (vpx_configs_[encoder_idx].ts_number_layers) { + case 1: + codec_specific->scalability_mode = ScalabilityMode::kL1T1; + break; + case 2: + codec_specific->scalability_mode = ScalabilityMode::kL1T2; + break; + case 3: + codec_specific->scalability_mode = ScalabilityMode::kL1T3; + break; + } } int LibvpxVp8Encoder::GetEncodedPartitions(const VideoFrame& input_image, diff --git a/third_party/libwebrtc/modules/video_coding/codecs/vp8/libvpx_vp8_simulcast_test.cc b/third_party/libwebrtc/modules/video_coding/codecs/vp8/libvpx_vp8_simulcast_test.cc index 67c9110b3c61..4ca3de20d551 100644 --- a/third_party/libwebrtc/modules/video_coding/codecs/vp8/libvpx_vp8_simulcast_test.cc +++ b/third_party/libwebrtc/modules/video_coding/codecs/vp8/libvpx_vp8_simulcast_test.cc @@ -39,6 +39,10 @@ TEST(LibvpxVp8SimulcastTest, TestKeyFrameRequestsOnAllStreams) { fixture->TestKeyFrameRequestsOnAllStreams(); } 
+TEST(LibvpxVp8SimulcastTest, TestKeyFrameRequestsOnSpecificStreams) { + GTEST_SKIP() << "Not applicable to VP8."; +} + TEST(LibvpxVp8SimulcastTest, TestPaddingAllStreams) { auto fixture = CreateSpecificSimulcastTestFixture(); fixture->TestPaddingAllStreams(); diff --git a/third_party/libwebrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc b/third_party/libwebrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc index 8cf761742ed1..c5a8b659c442 100644 --- a/third_party/libwebrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc +++ b/third_party/libwebrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc @@ -640,7 +640,7 @@ TEST(LibvpxVp8EncoderTest, GetEncoderInfoReturnsStaticInformation) { EXPECT_FALSE(info.is_hardware_accelerated); EXPECT_TRUE(info.supports_simulcast); EXPECT_EQ(info.implementation_name, "libvpx"); - EXPECT_EQ(info.requested_resolution_alignment, 1); + EXPECT_EQ(info.requested_resolution_alignment, 1u); EXPECT_THAT(info.preferred_pixel_formats, testing::UnorderedElementsAre(VideoFrameBuffer::Type::kNV12, VideoFrameBuffer::Type::kI420)); @@ -655,7 +655,7 @@ TEST(LibvpxVp8EncoderTest, RequestedResolutionAlignmentFromFieldTrial) { LibvpxVp8Encoder encoder((std::unique_ptr(vpx)), VP8Encoder::Settings()); - EXPECT_EQ(encoder.GetEncoderInfo().requested_resolution_alignment, 10); + EXPECT_EQ(encoder.GetEncoderInfo().requested_resolution_alignment, 10u); EXPECT_FALSE( encoder.GetEncoderInfo().apply_alignment_to_all_simulcast_layers); EXPECT_TRUE(encoder.GetEncoderInfo().resolution_bitrate_limits.empty()); diff --git a/third_party/libwebrtc/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc b/third_party/libwebrtc/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc index ec306d690a93..5877373b7650 100644 --- a/third_party/libwebrtc/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc +++ b/third_party/libwebrtc/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc @@ -9,23 +9,26 @@ * */ +#include #ifdef RTC_ENABLE_VP9 -#include "modules/video_coding/codecs/vp9/libvpx_vp9_encoder.h" - #include #include +#include #include #include #include "absl/algorithm/container.h" #include "absl/memory/memory.h" #include "absl/strings/match.h" +#include "absl/types/optional.h" #include "api/video/color_space.h" #include "api/video/i010_buffer.h" +#include "api/video_codecs/scalability_mode.h" #include "common_video/include/video_frame_buffer.h" #include "common_video/libyuv/include/webrtc_libyuv.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/video_coding/codecs/vp9/libvpx_vp9_encoder.h" #include "modules/video_coding/svc/create_scalability_structure.h" #include "modules/video_coding/svc/scalability_mode_util.h" #include "modules/video_coding/svc/scalable_video_controller.h" @@ -79,13 +82,17 @@ std::pair GetActiveLayers( return {0, 0}; } -std::unique_ptr CreateVp9ScalabilityStructure( +using Vp9ScalabilityStructure = + std::tuple, ScalabilityMode>; +absl::optional CreateVp9ScalabilityStructure( const VideoCodec& codec) { int num_spatial_layers = codec.VP9().numberOfSpatialLayers; int num_temporal_layers = std::max(1, int{codec.VP9().numberOfTemporalLayers}); if (num_spatial_layers == 1 && num_temporal_layers == 1) { - return std::make_unique(); + return absl::make_optional( + std::make_unique(), + ScalabilityMode::kL1T1); } char name[20]; @@ -93,7 +100,7 @@ std::unique_ptr CreateVp9ScalabilityStructure( if (codec.mode == VideoCodecMode::kScreensharing) { // TODO(bugs.webrtc.org/11999): Compose names of the structures when 
they // are implemented. - return nullptr; + return absl::nullopt; } else if (codec.VP9().interLayerPred == InterLayerPredMode::kOn || num_spatial_layers == 1) { ss << "L" << num_spatial_layers << "T" << num_temporal_layers; @@ -110,7 +117,7 @@ std::unique_ptr CreateVp9ScalabilityStructure( codec.height != codec.spatialLayers[num_spatial_layers - 1].height) { RTC_LOG(LS_WARNING) << "Top layer resolution expected to match overall resolution"; - return nullptr; + return absl::nullopt; } // Check if the ratio is one of the supported. int numerator; @@ -128,7 +135,7 @@ std::unique_ptr CreateVp9ScalabilityStructure( RTC_LOG(LS_WARNING) << "Unsupported scalability ratio " << codec.spatialLayers[0].width << ":" << codec.spatialLayers[1].width; - return nullptr; + return absl::nullopt; } // Validate ratio is consistent for all spatial layer transitions. for (int sid = 1; sid < num_spatial_layers; ++sid) { @@ -138,7 +145,7 @@ std::unique_ptr CreateVp9ScalabilityStructure( codec.spatialLayers[sid - 1].height * denominator) { RTC_LOG(LS_WARNING) << "Inconsistent scalability ratio " << numerator << ":" << denominator; - return nullptr; + return absl::nullopt; } } } @@ -147,7 +154,7 @@ std::unique_ptr CreateVp9ScalabilityStructure( ScalabilityModeFromString(name); if (!scalability_mode.has_value()) { RTC_LOG(LS_WARNING) << "Invalid scalability mode " << name; - return nullptr; + return absl::nullopt; } auto scalability_structure_controller = CreateScalabilityStructure(*scalability_mode); @@ -156,7 +163,8 @@ std::unique_ptr CreateVp9ScalabilityStructure( } else { RTC_LOG(LS_INFO) << "Created scalability structure " << name; } - return scalability_structure_controller; + return absl::make_optional( + std::move(scalability_structure_controller), *scalability_mode); } vpx_svc_ref_frame_config_t Vp9References( @@ -572,12 +580,12 @@ int LibvpxVp9Encoder::InitEncode(const VideoCodec* inst, pics_since_key_ = 0; num_cores_ = settings.number_of_cores; - absl::optional scalability_mode = inst->GetScalabilityMode(); - if (scalability_mode.has_value()) { + scalability_mode_ = inst->GetScalabilityMode(); + if (scalability_mode_.has_value()) { // Use settings from `ScalabilityMode` identifier. 
RTC_LOG(LS_INFO) << "Create scalability structure " - << ScalabilityModeToString(*scalability_mode); - svc_controller_ = CreateScalabilityStructure(*scalability_mode); + << ScalabilityModeToString(*scalability_mode_); + svc_controller_ = CreateScalabilityStructure(*scalability_mode_); if (!svc_controller_) { RTC_LOG(LS_WARNING) << "Failed to create scalability structure."; return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; @@ -586,7 +594,7 @@ int LibvpxVp9Encoder::InitEncode(const VideoCodec* inst, svc_controller_->StreamConfig(); num_spatial_layers_ = info.num_spatial_layers; num_temporal_layers_ = info.num_temporal_layers; - inter_layer_pred_ = ScalabilityModeToInterLayerPredMode(*scalability_mode); + inter_layer_pred_ = ScalabilityModeToInterLayerPredMode(*scalability_mode_); } else { num_spatial_layers_ = inst->VP9().numberOfSpatialLayers; RTC_DCHECK_GT(num_spatial_layers_, 0); @@ -595,7 +603,14 @@ int LibvpxVp9Encoder::InitEncode(const VideoCodec* inst, num_temporal_layers_ = 1; } inter_layer_pred_ = inst->VP9().interLayerPred; - svc_controller_ = CreateVp9ScalabilityStructure(*inst); + auto vp9_scalability = CreateVp9ScalabilityStructure(*inst); + if (vp9_scalability.has_value()) { + std::tie(svc_controller_, scalability_mode_) = + std::move(vp9_scalability.value()); + } else { + svc_controller_ = nullptr; + scalability_mode_ = absl::nullopt; + } } framerate_controller_ = std::vector( @@ -1495,6 +1510,7 @@ bool LibvpxVp9Encoder::PopulateCodecSpecific(CodecSpecificInfo* codec_specific, } } } + codec_specific->scalability_mode = scalability_mode_; return true; } diff --git a/third_party/libwebrtc/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.h b/third_party/libwebrtc/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.h index 427e721c1bc8..bb871f849890 100644 --- a/third_party/libwebrtc/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.h +++ b/third_party/libwebrtc/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.h @@ -20,6 +20,7 @@ #include "api/fec_controller_override.h" #include "api/field_trials_view.h" +#include "api/video_codecs/scalability_mode.h" #include "api/video_codecs/video_encoder.h" #include "api/video_codecs/vp9_profile.h" #include "common_video/include/video_frame_buffer_pool.h" @@ -155,6 +156,7 @@ class LibvpxVp9Encoder : public VP9Encoder { uint8_t num_cores_; std::unique_ptr svc_controller_; + absl::optional scalability_mode_; std::vector framerate_controller_; // Used for flexible mode. 
diff --git a/third_party/libwebrtc/modules/video_coding/include/video_codec_interface.h b/third_party/libwebrtc/modules/video_coding/include/video_codec_interface.h
index 261ffb11c1fd..46ae0d29e16f 100644
--- a/third_party/libwebrtc/modules/video_coding/include/video_codec_interface.h
+++ b/third_party/libwebrtc/modules/video_coding/include/video_codec_interface.h
@@ -16,6 +16,7 @@
 #include "absl/base/attributes.h"
 #include "absl/types/optional.h"
 #include "api/video/video_frame.h"
+#include "api/video_codecs/scalability_mode.h"
 #include "api/video_codecs/video_decoder.h"
 #include "api/video_codecs/video_encoder.h"
 #include "common_video/generic_frame_descriptor/generic_frame_info.h"
@@ -112,6 +113,7 @@ struct RTC_EXPORT CodecSpecificInfo {
   bool end_of_picture = true;
   absl::optional<GenericFrameInfo> generic_frame_info;
   absl::optional<FrameDependencyStructure> template_structure;
+  absl::optional<ScalabilityMode> scalability_mode;
 };

 }  // namespace webrtc
diff --git a/third_party/libwebrtc/modules/video_coding/utility/ivf_file_reader.cc b/third_party/libwebrtc/modules/video_coding/utility/ivf_file_reader.cc
index 85d1fa00d7cf..13092b5e24dc 100644
--- a/third_party/libwebrtc/modules/video_coding/utility/ivf_file_reader.cc
+++ b/third_party/libwebrtc/modules/video_coding/utility/ivf_file_reader.cc
@@ -30,6 +30,9 @@ constexpr uint8_t kVp9Header[kCodecTypeBytesCount] = {'V', 'P', '9', '0'};
 constexpr uint8_t kAv1Header[kCodecTypeBytesCount] = {'A', 'V', '0', '1'};
 constexpr uint8_t kH264Header[kCodecTypeBytesCount] = {'H', '2', '6', '4'};

+// The RTP standard requires a 90 kHz clock rate.
+constexpr int32_t kRtpClockRateHz = 90000;
+
 }  // namespace

 std::unique_ptr<IvfFileReader> IvfFileReader::Create(FileWrapper file) {
@@ -77,13 +80,9 @@ bool IvfFileReader::Reset() {
     return false;
   }

-  uint32_t time_scale = ByteReader<uint32_t>::ReadLittleEndian(&ivf_header[16]);
-  if (time_scale == 1000) {
-    using_capture_timestamps_ = true;
-  } else if (time_scale == 90000) {
-    using_capture_timestamps_ = false;
-  } else {
-    RTC_LOG(LS_ERROR) << "Invalid IVF header: Unknown time scale";
+  time_scale_ = ByteReader<uint32_t>::ReadLittleEndian(&ivf_header[16]);
+  if (time_scale_ == 0) {
+    RTC_LOG(LS_ERROR) << "Invalid IVF header: time scale can't be 0";
     return false;
   }

@@ -106,8 +105,7 @@ bool IvfFileReader::Reset() {
   const char* codec_name = CodecTypeToPayloadString(codec_type_);
   RTC_LOG(LS_INFO) << "Opened IVF file with codec data of type " << codec_name
                    << " at resolution " << width_ << " x " << height_
-                   << ", using " << (using_capture_timestamps_ ?
"1" : "90") - << "kHz clock resolution."; + << ", using " << time_scale_ << "Hz clock resolution."; return true; } @@ -157,12 +155,9 @@ absl::optional IvfFileReader::NextFrame() { } EncodedImage image; - if (using_capture_timestamps_) { - image.capture_time_ms_ = current_timestamp; - image.SetTimestamp(static_cast(90 * current_timestamp)); - } else { - image.SetTimestamp(static_cast(current_timestamp)); - } + image.capture_time_ms_ = current_timestamp; + image.SetTimestamp( + static_cast(current_timestamp * kRtpClockRateHz / time_scale_)); image.SetEncodedData(payload); image.SetSpatialIndex(static_cast(layer_sizes.size()) - 1); for (size_t i = 0; i < layer_sizes.size(); ++i) { diff --git a/third_party/libwebrtc/modules/video_coding/utility/ivf_file_reader.h b/third_party/libwebrtc/modules/video_coding/utility/ivf_file_reader.h index 75f2e3ac8cd1..db4fc255755e 100644 --- a/third_party/libwebrtc/modules/video_coding/utility/ivf_file_reader.h +++ b/third_party/libwebrtc/modules/video_coding/utility/ivf_file_reader.h @@ -70,7 +70,7 @@ class IvfFileReader { size_t num_read_frames_; uint16_t width_; uint16_t height_; - bool using_capture_timestamps_; + uint32_t time_scale_; FileWrapper file_; absl::optional next_frame_header_; diff --git a/third_party/libwebrtc/modules/video_coding/utility/simulcast_test_fixture_impl.cc b/third_party/libwebrtc/modules/video_coding/utility/simulcast_test_fixture_impl.cc index 84cd2e158989..35224b17edd1 100644 --- a/third_party/libwebrtc/modules/video_coding/utility/simulcast_test_fixture_impl.cc +++ b/third_party/libwebrtc/modules/video_coding/utility/simulcast_test_fixture_impl.cc @@ -44,6 +44,7 @@ const int kMaxBitrates[kNumberOfSimulcastStreams] = {150, 600, 1200}; const int kMinBitrates[kNumberOfSimulcastStreams] = {50, 150, 600}; const int kTargetBitrates[kNumberOfSimulcastStreams] = {100, 450, 1000}; const float kMaxFramerates[kNumberOfSimulcastStreams] = {30, 30, 30}; +const int kScaleResolutionDownBy[kNumberOfSimulcastStreams] = {4, 2, 1}; const int kDefaultTemporalLayerProfile[3] = {3, 3, 3}; const int kNoTemporalLayerProfile[3] = {0, 0, 0}; @@ -333,45 +334,30 @@ void SimulcastTestFixtureImpl::UpdateActiveStreams( EXPECT_EQ(0, encoder_->InitEncode(&settings_, kSettings)); } +void SimulcastTestFixtureImpl::ExpectStream(VideoFrameType frame_type, + int scaleResolutionDownBy) { + EXPECT_CALL( + encoder_callback_, + OnEncodedImage(AllOf(Field(&EncodedImage::_frameType, frame_type), + Field(&EncodedImage::_encodedWidth, + kDefaultWidth / scaleResolutionDownBy), + Field(&EncodedImage::_encodedHeight, + kDefaultHeight / scaleResolutionDownBy)), + _)) + .Times(1) + .WillRepeatedly(Return( + EncodedImageCallback::Result(EncodedImageCallback::Result::OK, 0))); +} + void SimulcastTestFixtureImpl::ExpectStreams( VideoFrameType frame_type, const std::vector expected_streams_active) { ASSERT_EQ(static_cast(expected_streams_active.size()), kNumberOfSimulcastStreams); - if (expected_streams_active[0]) { - EXPECT_CALL( - encoder_callback_, - OnEncodedImage( - AllOf(Field(&EncodedImage::_frameType, frame_type), - Field(&EncodedImage::_encodedWidth, kDefaultWidth / 4), - Field(&EncodedImage::_encodedHeight, kDefaultHeight / 4)), - _)) - .Times(1) - .WillRepeatedly(Return( - EncodedImageCallback::Result(EncodedImageCallback::Result::OK, 0))); - } - if (expected_streams_active[1]) { - EXPECT_CALL( - encoder_callback_, - OnEncodedImage( - AllOf(Field(&EncodedImage::_frameType, frame_type), - Field(&EncodedImage::_encodedWidth, kDefaultWidth / 2), - 
Field(&EncodedImage::_encodedHeight, kDefaultHeight / 2)), - _)) - .Times(1) - .WillRepeatedly(Return( - EncodedImageCallback::Result(EncodedImageCallback::Result::OK, 0))); - } - if (expected_streams_active[2]) { - EXPECT_CALL(encoder_callback_, - OnEncodedImage( - AllOf(Field(&EncodedImage::_frameType, frame_type), - Field(&EncodedImage::_encodedWidth, kDefaultWidth), - Field(&EncodedImage::_encodedHeight, kDefaultHeight)), - _)) - .Times(1) - .WillRepeatedly(Return( - EncodedImageCallback::Result(EncodedImageCallback::Result::OK, 0))); + for (size_t i = 0; i < kNumberOfSimulcastStreams; i++) { + if (expected_streams_active[i]) { + ExpectStream(frame_type, kScaleResolutionDownBy[i]); + } } } @@ -400,8 +386,8 @@ void SimulcastTestFixtureImpl::VerifyTemporalIdxAndSyncForAllSpatialLayers( } } -// We currently expect all active streams to generate a key frame even though -// a key frame was only requested for some of them. +// For some codecs (VP8) expect all active streams to generate a key frame even +// though a key frame was only requested for some of them. void SimulcastTestFixtureImpl::TestKeyFrameRequestsOnAllStreams() { SetRates(kMaxBitrates[2], 30); // To get all three streams. std::vector frame_types(kNumberOfSimulcastStreams, @@ -439,6 +425,69 @@ void SimulcastTestFixtureImpl::TestKeyFrameRequestsOnAllStreams() { EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); } +// For some codecs (H264) expect only particular active streams to generate a +// key frame when a key frame was only requested for some of them. +void SimulcastTestFixtureImpl::TestKeyFrameRequestsOnSpecificStreams() { + SetRates(kMaxBitrates[2], 30); // To get all three streams. + std::vector frame_types(kNumberOfSimulcastStreams, + VideoFrameType::kVideoFrameDelta); + ExpectStreams(VideoFrameType::kVideoFrameKey, kNumberOfSimulcastStreams); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); + + ExpectStreams(VideoFrameType::kVideoFrameDelta, kNumberOfSimulcastStreams); + input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); + + frame_types[0] = VideoFrameType::kVideoFrameKey; + ExpectStream(VideoFrameType::kVideoFrameKey, kScaleResolutionDownBy[0]); + ExpectStream(VideoFrameType::kVideoFrameDelta, kScaleResolutionDownBy[1]); + ExpectStream(VideoFrameType::kVideoFrameDelta, kScaleResolutionDownBy[2]); + input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); + + std::fill(frame_types.begin(), frame_types.end(), + VideoFrameType::kVideoFrameDelta); + frame_types[1] = VideoFrameType::kVideoFrameKey; + ExpectStream(VideoFrameType::kVideoFrameDelta, kScaleResolutionDownBy[0]); + ExpectStream(VideoFrameType::kVideoFrameKey, kScaleResolutionDownBy[1]); + ExpectStream(VideoFrameType::kVideoFrameDelta, kScaleResolutionDownBy[2]); + input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); + + std::fill(frame_types.begin(), frame_types.end(), + VideoFrameType::kVideoFrameDelta); + frame_types[2] = VideoFrameType::kVideoFrameKey; + ExpectStream(VideoFrameType::kVideoFrameDelta, kScaleResolutionDownBy[0]); + ExpectStream(VideoFrameType::kVideoFrameDelta, kScaleResolutionDownBy[1]); + ExpectStream(VideoFrameType::kVideoFrameKey, kScaleResolutionDownBy[2]); + input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); + + 
std::fill(frame_types.begin(), frame_types.end(), + VideoFrameType::kVideoFrameDelta); + frame_types[0] = VideoFrameType::kVideoFrameKey; + frame_types[2] = VideoFrameType::kVideoFrameKey; + ExpectStream(VideoFrameType::kVideoFrameKey, kScaleResolutionDownBy[0]); + ExpectStream(VideoFrameType::kVideoFrameDelta, kScaleResolutionDownBy[1]); + ExpectStream(VideoFrameType::kVideoFrameKey, kScaleResolutionDownBy[2]); + input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); + + std::fill(frame_types.begin(), frame_types.end(), + VideoFrameType::kVideoFrameKey); + ExpectStream(VideoFrameType::kVideoFrameKey, kScaleResolutionDownBy[0]); + ExpectStream(VideoFrameType::kVideoFrameKey, kScaleResolutionDownBy[1]); + ExpectStream(VideoFrameType::kVideoFrameKey, kScaleResolutionDownBy[2]); + input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); + + std::fill(frame_types.begin(), frame_types.end(), + VideoFrameType::kVideoFrameDelta); + ExpectStreams(VideoFrameType::kVideoFrameDelta, kNumberOfSimulcastStreams); + input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); +} + void SimulcastTestFixtureImpl::TestPaddingAllStreams() { // We should always encode the base layer. SetRates(kMinBitrates[0] - 1, 30); diff --git a/third_party/libwebrtc/modules/video_coding/utility/simulcast_test_fixture_impl.h b/third_party/libwebrtc/modules/video_coding/utility/simulcast_test_fixture_impl.h index cdfdc609d5b1..f142ab481375 100644 --- a/third_party/libwebrtc/modules/video_coding/utility/simulcast_test_fixture_impl.h +++ b/third_party/libwebrtc/modules/video_coding/utility/simulcast_test_fixture_impl.h @@ -35,6 +35,7 @@ class SimulcastTestFixtureImpl final : public SimulcastTestFixture { // Implements SimulcastTestFixture. 
void TestKeyFrameRequestsOnAllStreams() override; + void TestKeyFrameRequestsOnSpecificStreams() override; void TestPaddingAllStreams() override; void TestPaddingTwoStreams() override; void TestPaddingTwoStreamsOneMaxedOut() override; @@ -66,6 +67,7 @@ class SimulcastTestFixtureImpl final : public SimulcastTestFixture { void SetRates(uint32_t bitrate_kbps, uint32_t fps); void RunActiveStreamsTest(std::vector active_streams); void UpdateActiveStreams(std::vector active_streams); + void ExpectStream(VideoFrameType frame_type, int scaleResolutionDownBy); void ExpectStreams(VideoFrameType frame_type, std::vector expected_streams_active); void ExpectStreams(VideoFrameType frame_type, int expected_video_streams); diff --git a/third_party/libwebrtc/moz.build b/third_party/libwebrtc/moz.build index 08c1b1e5cece..976cf373047c 100644 --- a/third_party/libwebrtc/moz.build +++ b/third_party/libwebrtc/moz.build @@ -26,8 +26,6 @@ DIRS += [ "/third_party/libwebrtc/api/audio_codecs/ilbc/audio_decoder_ilbc_gn", "/third_party/libwebrtc/api/audio_codecs/ilbc/audio_encoder_ilbc_config_gn", "/third_party/libwebrtc/api/audio_codecs/ilbc/audio_encoder_ilbc_gn", - "/third_party/libwebrtc/api/audio_codecs/isac/audio_decoder_isac_gn", - "/third_party/libwebrtc/api/audio_codecs/isac/audio_encoder_isac_gn", "/third_party/libwebrtc/api/audio_codecs/L16/audio_decoder_L16_gn", "/third_party/libwebrtc/api/audio_codecs/L16/audio_encoder_L16_gn", "/third_party/libwebrtc/api/audio_codecs/opus/audio_decoder_multiopus_gn", @@ -66,6 +64,7 @@ DIRS += [ "/third_party/libwebrtc/api/rtp_headers_gn", "/third_party/libwebrtc/api/rtp_packet_info_gn", "/third_party/libwebrtc/api/rtp_parameters_gn", + "/third_party/libwebrtc/api/rtp_sender_setparameters_callback_gn", "/third_party/libwebrtc/api/rtp_transceiver_direction_gn", "/third_party/libwebrtc/api/scoped_refptr_gn", "/third_party/libwebrtc/api/sequence_checker_gn", @@ -167,7 +166,6 @@ DIRS += [ "/third_party/libwebrtc/modules/audio_coding/ilbc_c_gn", "/third_party/libwebrtc/modules/audio_coding/ilbc_gn", "/third_party/libwebrtc/modules/audio_coding/isac_bwinfo_gn", - "/third_party/libwebrtc/modules/audio_coding/isac_common_gn", "/third_party/libwebrtc/modules/audio_coding/isac_vad_gn", "/third_party/libwebrtc/modules/audio_coding/legacy_encoded_audio_frame_gn", "/third_party/libwebrtc/modules/audio_coding/neteq_gn", @@ -198,7 +196,7 @@ DIRS += [ "/third_party/libwebrtc/modules/audio_processing/agc/gain_control_interface_gn", "/third_party/libwebrtc/modules/audio_processing/agc/legacy_agc_gn", "/third_party/libwebrtc/modules/audio_processing/agc/level_estimation_gn", - "/third_party/libwebrtc/modules/audio_processing/agc2/adaptive_digital_gn", + "/third_party/libwebrtc/modules/audio_processing/agc2/adaptive_digital_gain_controller_gn", "/third_party/libwebrtc/modules/audio_processing/agc2/biquad_filter_gn", "/third_party/libwebrtc/modules/audio_processing/agc2/clipping_predictor_gn", "/third_party/libwebrtc/modules/audio_processing/agc2/common_gn", @@ -220,6 +218,7 @@ DIRS += [ "/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_spectral_features_gn", "/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_symmetric_matrix_buffer_gn", "/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/vector_math_gn", + "/third_party/libwebrtc/modules/audio_processing/agc2/saturation_protector_gn", "/third_party/libwebrtc/modules/audio_processing/agc2/speech_level_estimator_gn", "/third_party/libwebrtc/modules/audio_processing/agc2/vad_wrapper_gn", 
"/third_party/libwebrtc/modules/audio_processing/api_gn", @@ -458,10 +457,6 @@ if CONFIG["OS_TARGET"] == "Android": if CONFIG["OS_TARGET"] == "Darwin": DIRS += [ - "/third_party/libwebrtc/api/audio_codecs/isac/audio_decoder_isac_float_gn", - "/third_party/libwebrtc/api/audio_codecs/isac/audio_encoder_isac_float_gn", - "/third_party/libwebrtc/modules/audio_coding/isac_c_gn", - "/third_party/libwebrtc/modules/audio_coding/isac_gn", "/third_party/libwebrtc/modules/desktop_capture/desktop_capture_gn", "/third_party/libwebrtc/modules/desktop_capture/desktop_capture_objc_gn", "/third_party/libwebrtc/modules/desktop_capture/primitives_gn", @@ -476,10 +471,6 @@ if CONFIG["OS_TARGET"] == "Darwin": if CONFIG["OS_TARGET"] == "OpenBSD": DIRS += [ - "/third_party/libwebrtc/api/audio_codecs/isac/audio_decoder_isac_float_gn", - "/third_party/libwebrtc/api/audio_codecs/isac/audio_encoder_isac_float_gn", - "/third_party/libwebrtc/modules/audio_coding/isac_c_gn", - "/third_party/libwebrtc/modules/audio_coding/isac_gn", "/third_party/libwebrtc/modules/desktop_capture/desktop_capture_gn", "/third_party/libwebrtc/modules/desktop_capture/primitives_gn" ] @@ -487,10 +478,6 @@ if CONFIG["OS_TARGET"] == "OpenBSD": if CONFIG["OS_TARGET"] == "WINNT": DIRS += [ - "/third_party/libwebrtc/api/audio_codecs/isac/audio_decoder_isac_float_gn", - "/third_party/libwebrtc/api/audio_codecs/isac/audio_encoder_isac_float_gn", - "/third_party/libwebrtc/modules/audio_coding/isac_c_gn", - "/third_party/libwebrtc/modules/audio_coding/isac_gn", "/third_party/libwebrtc/modules/desktop_capture/desktop_capture_gn", "/third_party/libwebrtc/modules/desktop_capture/primitives_gn", "/third_party/libwebrtc/rtc_base/win/create_direct3d_device_gn", @@ -503,38 +490,23 @@ if CONFIG["OS_TARGET"] == "WINNT": if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Android": DIRS += [ - "/third_party/libwebrtc/api/audio_codecs/isac/audio_decoder_isac_float_gn", - "/third_party/libwebrtc/api/audio_codecs/isac/audio_encoder_isac_float_gn", "/third_party/libwebrtc/common_audio/common_audio_neon_c_gn", - "/third_party/libwebrtc/common_audio/common_audio_neon_gn", - "/third_party/libwebrtc/modules/audio_coding/isac_c_gn", - "/third_party/libwebrtc/modules/audio_coding/isac_gn" + "/third_party/libwebrtc/common_audio/common_audio_neon_gn" ] if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Android": DIRS += [ - "/third_party/libwebrtc/api/audio_codecs/isac/audio_decoder_isac_fix_gn", - "/third_party/libwebrtc/api/audio_codecs/isac/audio_encoder_isac_fix_gn", "/third_party/libwebrtc/common_audio/common_audio_neon_c_gn", "/third_party/libwebrtc/common_audio/common_audio_neon_gn", - "/third_party/libwebrtc/modules/audio_coding/isac_fix_c_arm_asm_gn", - "/third_party/libwebrtc/modules/audio_coding/isac_fix_c_gn", - "/third_party/libwebrtc/modules/audio_coding/isac_fix_common_gn", - "/third_party/libwebrtc/modules/audio_coding/isac_fix_gn", - "/third_party/libwebrtc/modules/audio_coding/isac_neon_gn", "/third_party/libwebrtc/rtc_base/system/asm_defines_gn" ] if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Android": DIRS += [ - "/third_party/libwebrtc/api/audio_codecs/isac/audio_decoder_isac_float_gn", - "/third_party/libwebrtc/api/audio_codecs/isac/audio_encoder_isac_float_gn", "/third_party/libwebrtc/common_audio/common_audio_avx2_gn", "/third_party/libwebrtc/common_audio/common_audio_sse2_gn", - "/third_party/libwebrtc/modules/audio_coding/isac_c_gn", - "/third_party/libwebrtc/modules/audio_coding/isac_gn", 
"/third_party/libwebrtc/modules/audio_processing/aec3/aec3_avx2_gn", "/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/vector_math_avx2_gn" ] @@ -542,12 +514,8 @@ if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Android": if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Android": DIRS += [ - "/third_party/libwebrtc/api/audio_codecs/isac/audio_decoder_isac_float_gn", - "/third_party/libwebrtc/api/audio_codecs/isac/audio_encoder_isac_float_gn", "/third_party/libwebrtc/common_audio/common_audio_avx2_gn", "/third_party/libwebrtc/common_audio/common_audio_sse2_gn", - "/third_party/libwebrtc/modules/audio_coding/isac_c_gn", - "/third_party/libwebrtc/modules/audio_coding/isac_gn", "/third_party/libwebrtc/modules/audio_processing/aec3/aec3_avx2_gn", "/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/vector_math_avx2_gn" ] @@ -572,14 +540,11 @@ if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Darwin": if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Linux": DIRS += [ - "/third_party/libwebrtc/api/audio_codecs/isac/audio_decoder_isac_float_gn", - "/third_party/libwebrtc/api/audio_codecs/isac/audio_encoder_isac_float_gn", "/third_party/libwebrtc/common_audio/common_audio_neon_c_gn", "/third_party/libwebrtc/common_audio/common_audio_neon_gn", - "/third_party/libwebrtc/modules/audio_coding/isac_c_gn", - "/third_party/libwebrtc/modules/audio_coding/isac_gn", "/third_party/libwebrtc/modules/desktop_capture/desktop_capture_gn", "/third_party/libwebrtc/modules/desktop_capture/primitives_gn", + "/third_party/libwebrtc/modules/portal/portal_gn", "/third_party/libwebrtc/third_party/drm/drm_gn", "/third_party/libwebrtc/third_party/gbm/gbm_gn", "/third_party/libwebrtc/third_party/libepoxy/libepoxy_gn", @@ -589,17 +554,11 @@ if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Linux": if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Linux": DIRS += [ - "/third_party/libwebrtc/api/audio_codecs/isac/audio_decoder_isac_fix_gn", - "/third_party/libwebrtc/api/audio_codecs/isac/audio_encoder_isac_fix_gn", "/third_party/libwebrtc/common_audio/common_audio_neon_c_gn", "/third_party/libwebrtc/common_audio/common_audio_neon_gn", - "/third_party/libwebrtc/modules/audio_coding/isac_fix_c_arm_asm_gn", - "/third_party/libwebrtc/modules/audio_coding/isac_fix_c_gn", - "/third_party/libwebrtc/modules/audio_coding/isac_fix_common_gn", - "/third_party/libwebrtc/modules/audio_coding/isac_fix_gn", - "/third_party/libwebrtc/modules/audio_coding/isac_neon_gn", "/third_party/libwebrtc/modules/desktop_capture/desktop_capture_gn", "/third_party/libwebrtc/modules/desktop_capture/primitives_gn", + "/third_party/libwebrtc/modules/portal/portal_gn", "/third_party/libwebrtc/rtc_base/system/asm_defines_gn", "/third_party/libwebrtc/third_party/drm/drm_gn", "/third_party/libwebrtc/third_party/gbm/gbm_gn", @@ -607,29 +566,17 @@ if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Linux": "/third_party/libwebrtc/third_party/pipewire/pipewire_gn" ] -if CONFIG["CPU_ARCH"] == "ppc64" and CONFIG["OS_TARGET"] == "Linux": - - DIRS += [ - "/third_party/libwebrtc/api/audio_codecs/isac/audio_decoder_isac_float_gn", - "/third_party/libwebrtc/api/audio_codecs/isac/audio_encoder_isac_float_gn", - "/third_party/libwebrtc/modules/audio_coding/isac_c_gn", - "/third_party/libwebrtc/modules/audio_coding/isac_gn" - ] - if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux": DIRS += [ - 
"/third_party/libwebrtc/api/audio_codecs/isac/audio_decoder_isac_float_gn", - "/third_party/libwebrtc/api/audio_codecs/isac/audio_encoder_isac_float_gn", "/third_party/libwebrtc/common_audio/common_audio_avx2_gn", "/third_party/libwebrtc/common_audio/common_audio_sse2_gn", - "/third_party/libwebrtc/modules/audio_coding/isac_c_gn", - "/third_party/libwebrtc/modules/audio_coding/isac_gn", "/third_party/libwebrtc/modules/audio_processing/aec3/aec3_avx2_gn", "/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/vector_math_avx2_gn", "/third_party/libwebrtc/modules/desktop_capture/desktop_capture_differ_sse2_gn", "/third_party/libwebrtc/modules/desktop_capture/desktop_capture_gn", "/third_party/libwebrtc/modules/desktop_capture/primitives_gn", + "/third_party/libwebrtc/modules/portal/portal_gn", "/third_party/libwebrtc/third_party/drm/drm_gn", "/third_party/libwebrtc/third_party/gbm/gbm_gn", "/third_party/libwebrtc/third_party/libepoxy/libepoxy_gn", @@ -639,17 +586,14 @@ if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux": if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Linux": DIRS += [ - "/third_party/libwebrtc/api/audio_codecs/isac/audio_decoder_isac_float_gn", - "/third_party/libwebrtc/api/audio_codecs/isac/audio_encoder_isac_float_gn", "/third_party/libwebrtc/common_audio/common_audio_avx2_gn", "/third_party/libwebrtc/common_audio/common_audio_sse2_gn", - "/third_party/libwebrtc/modules/audio_coding/isac_c_gn", - "/third_party/libwebrtc/modules/audio_coding/isac_gn", "/third_party/libwebrtc/modules/audio_processing/aec3/aec3_avx2_gn", "/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/vector_math_avx2_gn", "/third_party/libwebrtc/modules/desktop_capture/desktop_capture_differ_sse2_gn", "/third_party/libwebrtc/modules/desktop_capture/desktop_capture_gn", "/third_party/libwebrtc/modules/desktop_capture/primitives_gn", + "/third_party/libwebrtc/modules/portal/portal_gn", "/third_party/libwebrtc/third_party/drm/drm_gn", "/third_party/libwebrtc/third_party/gbm/gbm_gn", "/third_party/libwebrtc/third_party/libepoxy/libepoxy_gn", diff --git a/third_party/libwebrtc/net/dcsctp/common/sequence_numbers.h b/third_party/libwebrtc/net/dcsctp/common/sequence_numbers.h index 919fc5014a7f..c3422c2ccd9f 100644 --- a/third_party/libwebrtc/net/dcsctp/common/sequence_numbers.h +++ b/third_party/libwebrtc/net/dcsctp/common/sequence_numbers.h @@ -119,6 +119,14 @@ class UnwrappedSequenceNumber { return value_ <= other.value_; } + // Const accessors for underlying value. + constexpr const int64_t* operator->() const { return &value_; } + constexpr const int64_t& operator*() const& { return value_; } + constexpr const int64_t&& operator*() const&& { return std::move(value_); } + constexpr const int64_t& value() const& { return value_; } + constexpr const int64_t&& value() const&& { return std::move(value_); } + constexpr explicit operator const int64_t&() const& { return value_; } + // Increments the value. 
void Increment() { ++value_; } diff --git a/third_party/libwebrtc/p2p/base/dtls_transport.cc b/third_party/libwebrtc/p2p/base/dtls_transport.cc index 7d9c0b5cce15..af16efad7836 100644 --- a/third_party/libwebrtc/p2p/base/dtls_transport.cc +++ b/third_party/libwebrtc/p2p/base/dtls_transport.cc @@ -72,10 +72,9 @@ StreamInterfaceChannel::StreamInterfaceChannel( state_(rtc::SS_OPEN), packets_(kMaxPendingPackets, kMaxDtlsPacketLen) {} -rtc::StreamResult StreamInterfaceChannel::Read(void* buffer, - size_t buffer_len, - size_t* read, - int* error) { +rtc::StreamResult StreamInterfaceChannel::Read(rtc::ArrayView buffer, + size_t& read, + int& error) { RTC_DCHECK_RUN_ON(&sequence_checker_); if (state_ == rtc::SS_CLOSED) @@ -83,27 +82,25 @@ rtc::StreamResult StreamInterfaceChannel::Read(void* buffer, if (state_ == rtc::SS_OPENING) return rtc::SR_BLOCK; - if (!packets_.ReadFront(buffer, buffer_len, read)) { + if (!packets_.ReadFront(buffer.data(), buffer.size(), &read)) { return rtc::SR_BLOCK; } return rtc::SR_SUCCESS; } -rtc::StreamResult StreamInterfaceChannel::Write(const void* data, - size_t data_len, - size_t* written, - int* error) { +rtc::StreamResult StreamInterfaceChannel::Write( + rtc::ArrayView data, + size_t& written, + int& error) { RTC_DCHECK_RUN_ON(&sequence_checker_); // Always succeeds, since this is an unreliable transport anyway. // TODO(zhihuang): Should this block if ice_transport_'s temporarily // unwritable? rtc::PacketOptions packet_options; - ice_transport_->SendPacket(static_cast(data), data_len, - packet_options); - if (written) { - *written = data_len; - } + ice_transport_->SendPacket(reinterpret_cast(data.data()), + data.size(), packet_options); + written = data.size(); return rtc::SR_SUCCESS; } @@ -761,7 +758,9 @@ void DtlsTransport::MaybeStartDtls() { set_dtls_state(webrtc::DtlsTransportState::kFailed); return; } - RTC_LOG(LS_INFO) << ToString() << ": DtlsTransport: Started DTLS handshake"; + RTC_LOG(LS_INFO) << ToString() + << ": DtlsTransport: Started DTLS handshake active=" + << IsDtlsActive(); set_dtls_state(webrtc::DtlsTransportState::kConnecting); // Now that the handshake has started, we can process a cached ClientHello // (if one exists). diff --git a/third_party/libwebrtc/p2p/base/dtls_transport.h b/third_party/libwebrtc/p2p/base/dtls_transport.h index 2b26e2553f92..4e21410b762e 100644 --- a/third_party/libwebrtc/p2p/base/dtls_transport.h +++ b/third_party/libwebrtc/p2p/base/dtls_transport.h @@ -49,14 +49,12 @@ class StreamInterfaceChannel : public rtc::StreamInterface { // Implementations of StreamInterface rtc::StreamState GetState() const override; void Close() override; - rtc::StreamResult Read(void* buffer, - size_t buffer_len, - size_t* read, - int* error) override; - rtc::StreamResult Write(const void* data, - size_t data_len, - size_t* written, - int* error) override; + rtc::StreamResult Read(rtc::ArrayView buffer, + size_t& read, + int& error) override; + rtc::StreamResult Write(rtc::ArrayView data, + size_t& written, + int& error) override; private: RTC_NO_UNIQUE_ADDRESS webrtc::SequenceChecker sequence_checker_; diff --git a/third_party/libwebrtc/p2p/base/pseudo_tcp_unittest.cc b/third_party/libwebrtc/p2p/base/pseudo_tcp_unittest.cc index debddb217ed1..e56c6fa2c55e 100644 --- a/third_party/libwebrtc/p2p/base/pseudo_tcp_unittest.cc +++ b/third_party/libwebrtc/p2p/base/pseudo_tcp_unittest.cc @@ -232,8 +232,10 @@ class PseudoTcpTest : public PseudoTcpTestBase { // Create some dummy data to send. 
send_stream_.ReserveSize(size); for (int i = 0; i < size; ++i) { - char ch = static_cast(i); - send_stream_.Write(&ch, 1, NULL, NULL); + uint8_t ch = static_cast(i); + size_t written; + int error; + send_stream_.Write(rtc::MakeArrayView(&ch, 1), written, error); } send_stream_.Rewind(); // Prepare the receive stream. @@ -296,7 +298,11 @@ class PseudoTcpTest : public PseudoTcpTestBase { do { rcvd = remote_.Recv(block, sizeof(block)); if (rcvd != -1) { - recv_stream_.Write(block, rcvd, NULL, NULL); + size_t written; + int error; + recv_stream_.Write( + rtc::MakeArrayView(reinterpret_cast(block), rcvd), + written, error); recv_stream_.GetPosition(&position); RTC_LOG(LS_VERBOSE) << "Received: " << position; } @@ -308,8 +314,10 @@ class PseudoTcpTest : public PseudoTcpTestBase { char block[kBlockSize]; do { send_stream_.GetPosition(&position); - if (send_stream_.Read(block, sizeof(block), &tosend, NULL) != - rtc::SR_EOS) { + int error; + if (send_stream_.Read( + rtc::MakeArrayView(reinterpret_cast(block), kBlockSize), + tosend, error) != rtc::SR_EOS) { sent = local_.Send(block, tosend); UpdateLocalClock(); if (sent != -1) { @@ -347,8 +355,10 @@ class PseudoTcpTestPingPong : public PseudoTcpTestBase { // Create some dummy data to send. send_stream_.ReserveSize(size); for (int i = 0; i < size; ++i) { - char ch = static_cast(i); - send_stream_.Write(&ch, 1, NULL, NULL); + uint8_t ch = static_cast(i); + size_t written; + int error; + send_stream_.Write(rtc::MakeArrayView(&ch, 1), written, error); } send_stream_.Rewind(); // Prepare the receive stream. @@ -411,7 +421,11 @@ class PseudoTcpTestPingPong : public PseudoTcpTestBase { do { rcvd = receiver_->Recv(block, sizeof(block)); if (rcvd != -1) { - recv_stream_.Write(block, rcvd, NULL, NULL); + size_t written; + int error; + recv_stream_.Write( + rtc::MakeArrayView(reinterpret_cast(block), rcvd), + written, error); recv_stream_.GetPosition(&position); RTC_LOG(LS_VERBOSE) << "Received: " << position; } @@ -424,7 +438,10 @@ class PseudoTcpTestPingPong : public PseudoTcpTestBase { do { send_stream_.GetPosition(&position); tosend = bytes_per_send_ ? bytes_per_send_ : sizeof(block); - if (send_stream_.Read(block, tosend, &tosend, NULL) != rtc::SR_EOS) { + int error; + if (send_stream_.Read( + rtc::MakeArrayView(reinterpret_cast(block), tosend), + tosend, error) != rtc::SR_EOS) { sent = sender_->Send(block, tosend); UpdateLocalClock(); if (sent != -1) { @@ -458,8 +475,10 @@ class PseudoTcpTestReceiveWindow : public PseudoTcpTestBase { // Create some dummy data to send. 
send_stream_.ReserveSize(size); for (int i = 0; i < size; ++i) { - char ch = static_cast(i); - send_stream_.Write(&ch, 1, NULL, NULL); + uint8_t ch = static_cast(i); + size_t written; + int error; + send_stream_.Write(rtc::MakeArrayView(&ch, 1), written, error); } send_stream_.Rewind(); @@ -510,7 +529,11 @@ class PseudoTcpTestReceiveWindow : public PseudoTcpTestBase { do { rcvd = remote_.Recv(block, sizeof(block)); if (rcvd != -1) { - recv_stream_.Write(block, rcvd, NULL, NULL); + size_t written; + int error; + recv_stream_.Write( + rtc::MakeArrayView(reinterpret_cast(block), rcvd), + written, error); recv_stream_.GetPosition(&position); RTC_LOG(LS_VERBOSE) << "Received: " << position; } @@ -534,8 +557,11 @@ class PseudoTcpTestReceiveWindow : public PseudoTcpTestBase { char block[kBlockSize]; do { send_stream_.GetPosition(&position); - if (send_stream_.Read(block, sizeof(block), &tosend, NULL) != - rtc::SR_EOS) { + int error; + if (send_stream_.Read( + rtc::MakeArrayView(reinterpret_cast(block), + sizeof(block)), + tosend, error) != rtc::SR_EOS) { sent = local_.Send(block, tosend); UpdateLocalClock(); if (sent != -1) { diff --git a/third_party/libwebrtc/p2p/base/stun_port.cc b/third_party/libwebrtc/p2p/base/stun_port.cc index 5d57d1ac542d..fdb7edce57af 100644 --- a/third_party/libwebrtc/p2p/base/stun_port.cc +++ b/third_party/libwebrtc/p2p/base/stun_port.cc @@ -550,11 +550,12 @@ void UDPPort::OnStunBindingRequestSucceeded( } bind_request_succeeded_servers_.insert(stun_server_addr); // If socket is shared and `stun_reflected_addr` is equal to local socket - // address, or if the same address has been added by another STUN server, - // then discarding the stun address. + // address and mDNS obfuscation is not enabled, or if the same address has + // been added by another STUN server, then discarding the stun address. // For STUN, related address is the local socket address. - if ((!SharedSocket() || stun_reflected_addr != socket_->GetLocalAddress()) && - !HasCandidateWithAddress(stun_reflected_addr)) { + if ((!SharedSocket() || stun_reflected_addr != socket_->GetLocalAddress() || + Network()->GetMdnsResponder() != nullptr) && + !HasStunCandidateWithAddress(stun_reflected_addr)) { rtc::SocketAddress related_address = socket_->GetLocalAddress(); // If we can't stamp the related address correctly, empty it to avoid leak. if (!MaybeSetDefaultLocalAddress(&related_address)) { @@ -637,11 +638,12 @@ void UDPPort::OnSendPacket(const void* data, size_t size, StunRequest* req) { stats_.stun_binding_requests_sent++; } -bool UDPPort::HasCandidateWithAddress(const rtc::SocketAddress& addr) const { +bool UDPPort::HasStunCandidateWithAddress( + const rtc::SocketAddress& addr) const { const std::vector& existing_candidates = Candidates(); std::vector::const_iterator it = existing_candidates.begin(); for (; it != existing_candidates.end(); ++it) { - if (it->address() == addr) + if (it->type() == STUN_PORT_TYPE && it->address() == addr) return true; } return false; diff --git a/third_party/libwebrtc/p2p/base/stun_port.h b/third_party/libwebrtc/p2p/base/stun_port.h index 06b5e1ae1c52..13970070eda8 100644 --- a/third_party/libwebrtc/p2p/base/stun_port.h +++ b/third_party/libwebrtc/p2p/base/stun_port.h @@ -234,7 +234,7 @@ class UDPPort : public Port { // changed to SignalPortReady. 
void MaybeSetPortCompleteOrError(); - bool HasCandidateWithAddress(const rtc::SocketAddress& addr) const; + bool HasStunCandidateWithAddress(const rtc::SocketAddress& addr) const; // If this is a low-cost network, it will keep on sending STUN binding // requests indefinitely to keep the NAT binding alive. Otherwise, stop diff --git a/third_party/libwebrtc/p2p/base/stun_port_unittest.cc b/third_party/libwebrtc/p2p/base/stun_port_unittest.cc index 9c8cd929c4b9..3d56636a9b28 100644 --- a/third_party/libwebrtc/p2p/base/stun_port_unittest.cc +++ b/third_party/libwebrtc/p2p/base/stun_port_unittest.cc @@ -58,6 +58,29 @@ static const int kHighCostPortKeepaliveLifetimeMs = 2 * 60 * 1000; constexpr uint64_t kTiebreakerDefault = 44444; +class FakeMdnsResponder : public webrtc::MdnsResponderInterface { + public: + void CreateNameForAddress(const rtc::IPAddress& addr, + NameCreatedCallback callback) override { + callback(addr, std::string("unittest-mdns-host-name.local")); + } + + void RemoveNameForAddress(const rtc::IPAddress& addr, + NameRemovedCallback callback) override {} +}; + +class FakeMdnsResponderProvider : public rtc::MdnsResponderProvider { + public: + FakeMdnsResponderProvider() : mdns_responder_(new FakeMdnsResponder()) {} + + webrtc::MdnsResponderInterface* GetMdnsResponder() const override { + return mdns_responder_.get(); + } + + private: + std::unique_ptr mdns_responder_; +}; + // Base class for tests connecting a StunPort to a fake STUN server // (cricket::StunServer). class StunPortTestBase : public ::testing::Test, public sigslot::has_slots<> { @@ -74,6 +97,7 @@ class StunPortTestBase : public ::testing::Test, public sigslot::has_slots<> { socket_factory_(ss_.get()), stun_server_1_(cricket::TestStunServer::Create(ss_.get(), kStunAddr1)), stun_server_2_(cricket::TestStunServer::Create(ss_.get(), kStunAddr2)), + mdns_responder_provider_(new FakeMdnsResponderProvider()), done_(false), error_(false), stun_keepalive_delay_(1), @@ -169,6 +193,10 @@ class StunPortTestBase : public ::testing::Test, public sigslot::has_slots<> { /* packet_time_us */ -1); } + void EnableMdnsObfuscation() { + network_.set_mdns_responder_provider(mdns_responder_provider_.get()); + } + protected: static void SetUpTestSuite() { // Ensure the RNG is inited. @@ -206,6 +234,7 @@ class StunPortTestBase : public ::testing::Test, public sigslot::has_slots<> { std::unique_ptr stun_server_1_; std::unique_ptr stun_server_2_; std::unique_ptr socket_; + std::unique_ptr mdns_responder_provider_; bool done_; bool error_; int stun_keepalive_delay_; @@ -356,6 +385,41 @@ TEST_F(StunPortTestWithRealClock, // No crash is success. } +// Test that a stun candidate (srflx candidate) is discarded whose address is +// equal to that of a local candidate if mDNS obfuscation is not enabled. +TEST_F(StunPortTest, TestStunCandidateDiscardedWithMdnsObfuscationNotEnabled) { + CreateSharedUdpPort(kStunAddr1, nullptr); + PrepareAddress(); + EXPECT_TRUE_SIMULATED_WAIT(done(), kTimeoutMs, fake_clock); + ASSERT_EQ(1U, port()->Candidates().size()); + EXPECT_TRUE(kLocalAddr.EqualIPs(port()->Candidates()[0].address())); + EXPECT_EQ(port()->Candidates()[0].type(), cricket::LOCAL_PORT_TYPE); +} + +// Test that a stun candidate (srflx candidate) is generated whose address is +// equal to that of a local candidate if mDNS obfuscation is enabled. 
+TEST_F(StunPortTest, TestStunCandidateGeneratedWithMdnsObfuscationEnabled) { + EnableMdnsObfuscation(); + CreateSharedUdpPort(kStunAddr1, nullptr); + PrepareAddress(); + EXPECT_TRUE_SIMULATED_WAIT(done(), kTimeoutMs, fake_clock); + ASSERT_EQ(2U, port()->Candidates().size()); + + // The addresses of the candidates are both equal to kLocalAddr. + EXPECT_TRUE(kLocalAddr.EqualIPs(port()->Candidates()[0].address())); + EXPECT_TRUE(kLocalAddr.EqualIPs(port()->Candidates()[1].address())); + + // One of the generated candidates is a local candidate and the other is a + // stun candidate. + EXPECT_NE(port()->Candidates()[0].type(), port()->Candidates()[1].type()); + if (port()->Candidates()[0].type() == cricket::LOCAL_PORT_TYPE) { + EXPECT_EQ(port()->Candidates()[1].type(), cricket::STUN_PORT_TYPE); + } else { + EXPECT_EQ(port()->Candidates()[0].type(), cricket::STUN_PORT_TYPE); + EXPECT_EQ(port()->Candidates()[1].type(), cricket::LOCAL_PORT_TYPE); + } +} + // Test that the same address is added only once if two STUN servers are in // use. TEST_F(StunPortTest, TestNoDuplicatedAddressWithTwoStunServers) { diff --git a/third_party/libwebrtc/pc/BUILD.gn b/third_party/libwebrtc/pc/BUILD.gn index 8126fedb8339..15c87dda25d6 100644 --- a/third_party/libwebrtc/pc/BUILD.gn +++ b/third_party/libwebrtc/pc/BUILD.gn @@ -415,6 +415,7 @@ rtc_source_set("rtp_sender_proxy") { deps = [ ":proxy", "../api:libjingle_peerconnection_api", + "../api:rtp_sender_interface", ] } @@ -1007,6 +1008,7 @@ rtc_source_set("rtc_stats_collector") { "../api/task_queue:task_queue", "../api/units:time_delta", "../api/video:video_rtp_headers", + "../api/video_codecs:scalability_mode", "../call:call_interfaces", "../common_video:common_video", "../media:rtc_media_base", @@ -1091,6 +1093,7 @@ rtc_source_set("sdp_offer_answer") { "../api:media_stream_interface", "../api:rtc_error", "../api:rtp_parameters", + "../api:rtp_sender_interface", "../api:rtp_transceiver_direction", "../api:scoped_refptr", "../api:sequence_checker", @@ -1181,6 +1184,7 @@ rtc_source_set("peer_connection") { "../api:rtc_error", "../api:rtc_stats_api", "../api:rtp_parameters", + "../api:rtp_sender_interface", "../api:rtp_transceiver_direction", "../api:scoped_refptr", "../api:sequence_checker", @@ -1276,6 +1280,7 @@ rtc_source_set("legacy_stats_collector") { "../api:libjingle_peerconnection_api", "../api:media_stream_interface", "../api:rtp_parameters", + "../api:rtp_sender_interface", "../api:scoped_refptr", "../api:sequence_checker", "../api/audio_codecs:audio_codecs_api", @@ -1544,6 +1549,7 @@ rtc_library("rtp_transceiver") { "../api:libjingle_peerconnection_api", "../api:rtc_error", "../api:rtp_parameters", + "../api:rtp_sender_interface", "../api:rtp_transceiver_direction", "../api:scoped_refptr", "../api:sequence_checker", @@ -1588,6 +1594,7 @@ rtc_library("rtp_transmission_manager") { "../api:media_stream_interface", "../api:rtc_error", "../api:rtp_parameters", + "../api:rtp_sender_interface", "../api:rtp_transceiver_direction", "../api:scoped_refptr", "../api:sequence_checker", @@ -1618,6 +1625,7 @@ rtc_library("transceiver_list") { "../api:libjingle_peerconnection_api", "../api:rtc_error", "../api:rtp_parameters", + "../api:rtp_sender_interface", "../api:scoped_refptr", "../api:sequence_checker", "../rtc_base:checks", @@ -1853,12 +1861,14 @@ rtc_library("rtp_sender") { ":legacy_stats_collector_interface", "../api:audio_options_api", "../api:dtls_transport_interface", + "../api:dtmf_sender_interface", "../api:frame_transformer_interface", 
"../api:libjingle_peerconnection_api", "../api:media_stream_interface", "../api:priority", "../api:rtc_error", "../api:rtp_parameters", + "../api:rtp_sender_interface", "../api:scoped_refptr", "../api:sequence_checker", "../api/crypto:frame_encryptor_interface", @@ -1912,6 +1922,7 @@ rtc_library("dtmf_sender") { ] deps = [ ":proxy", + "../api:dtmf_sender_interface", "../api:libjingle_peerconnection_api", "../api:scoped_refptr", "../api:sequence_checker", @@ -2179,6 +2190,7 @@ if (rtc_include_tests && !build_with_chromium) { "../api:rtc_error", "../api:rtc_stats_api", "../api:rtp_parameters", + "../api:rtp_sender_interface", "../api:scoped_refptr", "../rtc_base:checks", "../rtc_base:gunit_helpers", @@ -2193,6 +2205,7 @@ if (rtc_include_tests && !build_with_chromium) { deps = [ ":integration_test_helpers", ":pc_test_utils", + "../api:dtmf_sender_interface", "../api:libjingle_peerconnection_api", "../api:scoped_refptr", "../api/units:time_delta", @@ -2314,6 +2327,7 @@ if (rtc_include_tests && !build_with_chromium) { "../api:candidate", "../api:create_peerconnection_factory", "../api:dtls_transport_interface", + "../api:dtmf_sender_interface", "../api:fake_frame_decryptor", "../api:fake_frame_encryptor", "../api:field_trials_view", @@ -2329,6 +2343,7 @@ if (rtc_include_tests && !build_with_chromium) { "../api:packet_socket_factory", "../api:priority", "../api:rtc_error", + "../api:rtp_sender_interface", "../api:rtp_transceiver_direction", "../api:scoped_refptr", "../api/adaptation:resource_adaptation_api", @@ -2353,6 +2368,7 @@ if (rtc_include_tests && !build_with_chromium) { "../api/video:video_codec_constants", "../api/video:video_frame", "../api/video:video_rtp_headers", + "../api/video_codecs:scalability_mode", "../call/adaptation:resource_adaptation_test_utilities", "../common_video", "../logging:fake_rtc_event_log", @@ -2523,6 +2539,7 @@ if (rtc_include_tests && !build_with_chromium) { "../api:rtc_error", "../api:rtc_stats_api", "../api:rtp_parameters", + "../api:rtp_sender_interface", "../api:rtp_transceiver_direction", "../api:scoped_refptr", "../api/audio:audio_mixer_api", @@ -2711,6 +2728,7 @@ if (rtc_include_tests && !build_with_chromium) { "../api:media_stream_interface", "../api:network_emulation_manager_api", "../api:peer_connection_quality_test_fixture_api", + "../api:rtc_stats_api", "../api:simulated_network_api", "../api:time_controller", "../api/test/metrics:global_metrics_logger_and_exporter", @@ -2727,8 +2745,8 @@ if (rtc_include_tests && !build_with_chromium) { "../test:fileutils", "../test:test_main", "../test:test_support", - "../test/pc/e2e:default_video_quality_analyzer", "../test/pc/e2e:network_quality_metrics_reporter", + "../test/pc/e2e/analyzer/video:default_video_quality_analyzer", ] if (is_ios) { diff --git a/third_party/libwebrtc/pc/audio_rtp_receiver.cc b/third_party/libwebrtc/pc/audio_rtp_receiver.cc index 0dbdf0b71398..7af460b80e2c 100644 --- a/third_party/libwebrtc/pc/audio_rtp_receiver.cc +++ b/third_party/libwebrtc/pc/audio_rtp_receiver.cc @@ -28,7 +28,7 @@ AudioRtpReceiver::AudioRtpReceiver( std::string receiver_id, std::vector stream_ids, bool is_unified_plan, - cricket::VoiceMediaChannel* voice_channel /*= nullptr*/) + cricket::VoiceMediaReceiveChannelInterface* voice_channel /*= nullptr*/) : AudioRtpReceiver(worker_thread, receiver_id, CreateStreamsFromIds(std::move(stream_ids)), @@ -40,7 +40,7 @@ AudioRtpReceiver::AudioRtpReceiver( const std::string& receiver_id, const std::vector>& streams, bool is_unified_plan, - cricket::VoiceMediaChannel* 
voice_channel /*= nullptr*/) + cricket::VoiceMediaReceiveChannelInterface* voice_channel /*= nullptr*/) : worker_thread_(worker_thread), id_(receiver_id), source_(rtc::make_ref_counted( @@ -314,7 +314,8 @@ void AudioRtpReceiver::SetJitterBufferMinimumDelay( media_channel_->SetBaseMinimumPlayoutDelayMs(*ssrc_, delay_.GetMs()); } -void AudioRtpReceiver::SetMediaChannel(cricket::MediaChannel* media_channel) { +void AudioRtpReceiver::SetMediaChannel( + cricket::MediaReceiveChannelInterface* media_channel) { RTC_DCHECK_RUN_ON(worker_thread_); RTC_DCHECK(media_channel == nullptr || media_channel->media_type() == media_type()); @@ -323,7 +324,8 @@ void AudioRtpReceiver::SetMediaChannel(cricket::MediaChannel* media_channel) { media_channel ? worker_thread_safety_->SetAlive() : worker_thread_safety_->SetNotAlive(); - media_channel_ = static_cast(media_channel); + media_channel_ = + static_cast(media_channel); } void AudioRtpReceiver::NotifyFirstPacketReceived() { diff --git a/third_party/libwebrtc/pc/audio_rtp_receiver.h b/third_party/libwebrtc/pc/audio_rtp_receiver.h index c68315882caf..2e0f77c85c52 100644 --- a/third_party/libwebrtc/pc/audio_rtp_receiver.h +++ b/third_party/libwebrtc/pc/audio_rtp_receiver.h @@ -50,18 +50,19 @@ class AudioRtpReceiver : public ObserverInterface, // However, when using that, the assumption is that right after construction, // a call to either `SetupUnsignaledMediaChannel` or `SetupMediaChannel` // will be made, which will internally start the source on the worker thread. - AudioRtpReceiver(rtc::Thread* worker_thread, - std::string receiver_id, - std::vector stream_ids, - bool is_unified_plan, - cricket::VoiceMediaChannel* voice_channel = nullptr); + AudioRtpReceiver( + rtc::Thread* worker_thread, + std::string receiver_id, + std::vector stream_ids, + bool is_unified_plan, + cricket::VoiceMediaReceiveChannelInterface* voice_channel = nullptr); // TODO(https://crbug.com/webrtc/9480): Remove this when streams() is removed. 
AudioRtpReceiver( rtc::Thread* worker_thread, const std::string& receiver_id, const std::vector>& streams, bool is_unified_plan, - cricket::VoiceMediaChannel* media_channel = nullptr); + cricket::VoiceMediaReceiveChannelInterface* media_channel = nullptr); virtual ~AudioRtpReceiver(); // ObserverInterface implementation @@ -111,7 +112,8 @@ class AudioRtpReceiver : public ObserverInterface, void SetJitterBufferMinimumDelay( absl::optional delay_seconds) override; - void SetMediaChannel(cricket::MediaChannel* media_channel) override; + void SetMediaChannel( + cricket::MediaReceiveChannelInterface* media_channel) override; std::vector GetSources() const override; int AttachmentId() const override { return attachment_id_; } @@ -134,8 +136,8 @@ class AudioRtpReceiver : public ObserverInterface, const std::string id_; const rtc::scoped_refptr source_; const rtc::scoped_refptr> track_; - cricket::VoiceMediaChannel* media_channel_ RTC_GUARDED_BY(worker_thread_) = - nullptr; + cricket::VoiceMediaReceiveChannelInterface* media_channel_ + RTC_GUARDED_BY(worker_thread_) = nullptr; absl::optional ssrc_ RTC_GUARDED_BY(worker_thread_); std::vector> streams_ RTC_GUARDED_BY(&signaling_thread_checker_); diff --git a/third_party/libwebrtc/pc/audio_rtp_receiver_unittest.cc b/third_party/libwebrtc/pc/audio_rtp_receiver_unittest.cc index bab6b74f9f0a..eb77212b2a7c 100644 --- a/third_party/libwebrtc/pc/audio_rtp_receiver_unittest.cc +++ b/third_party/libwebrtc/pc/audio_rtp_receiver_unittest.cc @@ -66,7 +66,7 @@ TEST_F(AudioRtpReceiverTest, SetOutputVolumeIsCalled) { receiver_->track(); receiver_->track()->set_enabled(true); - receiver_->SetMediaChannel(&media_channel_); + receiver_->SetMediaChannel(media_channel_.AsVoiceReceiveChannel()); EXPECT_CALL(media_channel_, SetDefaultRawAudioSink(_)).Times(0); receiver_->SetupMediaChannel(kSsrc); @@ -86,7 +86,7 @@ TEST_F(AudioRtpReceiverTest, VolumesSetBeforeStartingAreRespected) { receiver_->OnSetVolume(kVolume); receiver_->track()->set_enabled(true); - receiver_->SetMediaChannel(&media_channel_); + receiver_->SetMediaChannel(media_channel_.AsVoiceReceiveChannel()); // The previosly set initial volume should be propagated to the provided // media_channel_ as soon as SetupMediaChannel is called. diff --git a/third_party/libwebrtc/pc/channel.cc b/third_party/libwebrtc/pc/channel.cc index 0e2345b2c94f..4c078def17bb 100644 --- a/third_party/libwebrtc/pc/channel.cc +++ b/third_party/libwebrtc/pc/channel.cc @@ -154,7 +154,7 @@ std::string BaseChannel::ToString() const { bool BaseChannel::ConnectToRtpTransport_n() { RTC_DCHECK(rtp_transport_); - RTC_DCHECK(media_channel()); + RTC_DCHECK(media_send_channel()); // We don't need to call OnDemuxerCriteriaUpdatePending/Complete because // there's no previous criteria to worry about. @@ -174,7 +174,7 @@ bool BaseChannel::ConnectToRtpTransport_n() { void BaseChannel::DisconnectFromRtpTransport_n() { RTC_DCHECK(rtp_transport_); - RTC_DCHECK(media_channel()); + RTC_DCHECK(media_send_channel()); rtp_transport_->UnregisterRtpDemuxerSink(this); rtp_transport_->SignalReadyToSend.disconnect(this); rtp_transport_->SignalNetworkRouteChanged.disconnect(this); @@ -458,7 +458,7 @@ bool BaseChannel::MaybeUpdateDemuxerAndRtpExtensions_w( // TODO(bugs.webrtc.org/13536): See if we can do this asynchronously. 
if (update_demuxer) - media_channel()->OnDemuxerCriteriaUpdatePending(); + media_receive_channel()->OnDemuxerCriteriaUpdatePending(); bool success = network_thread()->BlockingCall([&]() mutable { RTC_DCHECK_RUN_ON(network_thread()); @@ -481,7 +481,7 @@ bool BaseChannel::MaybeUpdateDemuxerAndRtpExtensions_w( }); if (update_demuxer) - media_channel()->OnDemuxerCriteriaUpdateComplete(); + media_receive_channel()->OnDemuxerCriteriaUpdateComplete(); return success; } @@ -584,7 +584,7 @@ bool BaseChannel::SetPayloadTypeDemuxingEnabled_w(bool enabled) { // were matched to this channel by MID or RID. Ideally we'd remove only the // streams that were matched based on payload type alone, but currently // there is no straightforward way to identify those streams. - media_channel()->ResetUnsignaledRecvStream(); + media_receive_channel()->ResetUnsignaledRecvStream(); if (!demuxer_criteria_.payload_types().empty()) { config_changed = true; demuxer_criteria_.payload_types().clear(); @@ -629,7 +629,7 @@ bool BaseChannel::UpdateLocalStreams_w(const std::vector& streams, GetStream(streams, StreamFinder(&old_stream))) { continue; } - if (!media_channel()->RemoveSendStream(old_stream.first_ssrc())) { + if (!media_send_channel()->RemoveSendStream(old_stream.first_ssrc())) { error_desc = StringFormat( "Failed to remove send stream with ssrc %u from m-section with " "mid='%s'.", @@ -672,7 +672,7 @@ bool BaseChannel::UpdateLocalStreams_w(const std::vector& streams, /* flex_fec = */ false, ssrc_generator_); } - if (media_channel()->AddSendStream(new_stream)) { + if (media_send_channel()->AddSendStream(new_stream)) { RTC_LOG(LS_INFO) << "Add send stream ssrc: " << new_stream.ssrcs[0] << " into " << ToString(); } else { @@ -709,12 +709,12 @@ bool BaseChannel::UpdateRemoteStreams_w(const MediaContentDescription* content, // If we no longer have an unsignaled stream, we would like to remove // the unsignaled stream params that are cached. if (!old_stream.has_ssrcs() && !new_has_unsignaled_ssrcs) { - media_channel()->ResetUnsignaledRecvStream(); + media_receive_channel()->ResetUnsignaledRecvStream(); RTC_LOG(LS_INFO) << "Reset unsignaled remote stream for " << ToString() << "."; } else if (old_stream.has_ssrcs() && !GetStreamBySsrc(streams, old_stream.first_ssrc())) { - if (media_channel()->RemoveRecvStream(old_stream.first_ssrc())) { + if (media_receive_channel()->RemoveRecvStream(old_stream.first_ssrc())) { RTC_LOG(LS_INFO) << "Remove remote ssrc: " << old_stream.first_ssrc() << " from " << ToString() << "."; } else { @@ -735,7 +735,7 @@ bool BaseChannel::UpdateRemoteStreams_w(const MediaContentDescription* content, // stream received later. if ((!new_stream.has_ssrcs() && !old_has_unsignaled_ssrcs) || !GetStreamBySsrc(remote_streams_, new_stream.first_ssrc())) { - if (media_channel()->AddRecvStream(new_stream)) { + if (media_receive_channel()->AddRecvStream(new_stream)) { RTC_LOG(LS_INFO) << "Add remote ssrc: " << (new_stream.has_ssrcs() ? std::to_string(new_stream.first_ssrc()) @@ -808,7 +808,7 @@ bool BaseChannel::ClearHandledPayloadTypes() { void BaseChannel::SignalSentPacket_n(const rtc::SentPacket& sent_packet) { RTC_DCHECK_RUN_ON(network_thread()); RTC_DCHECK(network_initialized()); - media_channel()->OnPacketSent(sent_packet); + media_send_channel()->OnPacketSent(sent_packet); } VoiceChannel::VoiceChannel(rtc::Thread* worker_thread, @@ -839,12 +839,12 @@ void VoiceChannel::UpdateMediaSendRecvState_w() { // content. We receive data on the default channel and multiplexed streams. 
bool ready_to_receive = enabled() && webrtc::RtpTransceiverDirectionHasRecv( local_content_direction()); - media_channel()->SetPlayout(ready_to_receive); + media_receive_channel()->SetPlayout(ready_to_receive); // Send outgoing data if we're the active call, we have the remote content, // and we have had some form of connectivity. bool send = IsReadyToSendMedia_w(); - media_channel()->SetSend(send); + media_send_channel()->SetSend(send); RTC_LOG(LS_INFO) << "Changing voice state, recv=" << ready_to_receive << " send=" << send << " for " << ToString(); @@ -861,7 +861,7 @@ bool VoiceChannel::SetLocalContent_w(const MediaContentDescription* content, RtpHeaderExtensions header_extensions = GetDeduplicatedRtpHeaderExtensions(content->rtp_header_extensions()); bool update_header_extensions = true; - media_channel()->SetExtmapAllowMixed(content->extmap_allow_mixed()); + media_send_channel()->SetExtmapAllowMixed(content->extmap_allow_mixed()); AudioRecvParameters recv_params = last_recv_params_; RtpParametersFromMediaDescription( @@ -869,7 +869,7 @@ bool VoiceChannel::SetLocalContent_w(const MediaContentDescription* content, webrtc::RtpTransceiverDirectionHasRecv(content->direction()), &recv_params); - if (!media_channel()->SetRecvParameters(recv_params)) { + if (!media_receive_channel()->SetRecvParameters(recv_params)) { error_desc = StringFormat( "Failed to set local audio description recv parameters for m-section " "with mid='%s'.", @@ -921,7 +921,8 @@ bool VoiceChannel::SetRemoteContent_w(const MediaContentDescription* content, extensions_filter(), &send_params); send_params.mid = mid(); - bool parameters_applied = media_channel()->SetSendParameters(send_params); + bool parameters_applied = + media_send_channel()->SetSendParameters(send_params); if (!parameters_applied) { error_desc = StringFormat( "Failed to set remote audio description send parameters for m-section " @@ -961,7 +962,7 @@ void VideoChannel::UpdateMediaSendRecvState_w() { // Send outgoing data if we're the active call, we have the remote content, // and we have had some form of connectivity. 
bool send = IsReadyToSendMedia_w(); - media_channel()->SetSend(send); + media_send_channel()->SetSend(send); RTC_LOG(LS_INFO) << "Changing video state, send=" << send << " for " << ToString(); } @@ -977,7 +978,7 @@ bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content, RtpHeaderExtensions header_extensions = GetDeduplicatedRtpHeaderExtensions(content->rtp_header_extensions()); bool update_header_extensions = true; - media_channel()->SetExtmapAllowMixed(content->extmap_allow_mixed()); + media_send_channel()->SetExtmapAllowMixed(content->extmap_allow_mixed()); VideoRecvParameters recv_params = last_recv_params_; @@ -1007,7 +1008,7 @@ bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content, } } - if (!media_channel()->SetRecvParameters(recv_params)) { + if (!media_receive_channel()->SetRecvParameters(recv_params)) { error_desc = StringFormat( "Failed to set local video description recv parameters for m-section " "with mid='%s'.", @@ -1026,7 +1027,7 @@ bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content, last_recv_params_ = recv_params; if (needs_send_params_update) { - if (!media_channel()->SetSendParameters(send_params)) { + if (!media_send_channel()->SetSendParameters(send_params)) { error_desc = StringFormat( "Failed to set send parameters for m-section with mid='%s'.", mid().c_str()); @@ -1092,7 +1093,7 @@ bool VideoChannel::SetRemoteContent_w(const MediaContentDescription* content, } } - if (!media_channel()->SetSendParameters(send_params)) { + if (!media_send_channel()->SetSendParameters(send_params)) { error_desc = StringFormat( "Failed to set remote video description send parameters for m-section " "with mid='%s'.", @@ -1102,7 +1103,7 @@ bool VideoChannel::SetRemoteContent_w(const MediaContentDescription* content, last_send_params_ = send_params; if (needs_recv_params_update) { - if (!media_channel()->SetRecvParameters(recv_params)) { + if (!media_receive_channel()->SetRecvParameters(recv_params)) { error_desc = StringFormat( "Failed to set recv parameters for m-section with mid='%s'.", mid().c_str()); diff --git a/third_party/libwebrtc/pc/channel.h b/third_party/libwebrtc/pc/channel.h index 5bf4823be25e..08a66793c7ee 100644 --- a/third_party/libwebrtc/pc/channel.h +++ b/third_party/libwebrtc/pc/channel.h @@ -32,6 +32,7 @@ #include "call/rtp_demuxer.h" #include "call/rtp_packet_sink_interface.h" #include "media/base/media_channel.h" +#include "media/base/media_channel_impl.h" #include "media/base/stream_params.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "pc/channel_interface.h" @@ -70,7 +71,7 @@ class BaseChannel : public ChannelInterface, public sigslot::has_slots<>, // TODO(tommi): Consider implementing these interfaces // via composition. - public MediaChannel::NetworkInterface, + public MediaChannelNetworkInterface, public webrtc::RtpPacketSinkInterface { public: // If `srtp_required` is true, the channel will not send or receive any @@ -155,14 +156,29 @@ class BaseChannel : public ChannelInterface, // RtpPacketSinkInterface overrides. 
void OnRtpPacket(const webrtc::RtpPacketReceived& packet) override; - MediaChannel* media_channel() const override { - return media_channel_.get(); + MediaChannel* media_channel() const override { return media_channel_.get(); } + + MediaSendChannelInterface* media_send_channel() const override { + return media_channel_->AsSendChannel(); } - VideoMediaChannel* video_media_channel() const override { + VideoMediaSendChannelInterface* video_media_send_channel() const override { RTC_CHECK(false) << "Attempt to fetch video channel from non-video"; return nullptr; } - VoiceMediaChannel* voice_media_channel() const override { + VoiceMediaSendChannelInterface* voice_media_send_channel() const override { + RTC_CHECK(false) << "Attempt to fetch voice channel from non-voice"; + return nullptr; + } + MediaReceiveChannelInterface* media_receive_channel() const override { + return media_channel_->AsReceiveChannel(); + } + VideoMediaReceiveChannelInterface* video_media_receive_channel() + const override { + RTC_CHECK(false) << "Attempt to fetch video channel from non-video"; + return nullptr; + } + VoiceMediaReceiveChannelInterface* voice_media_receive_channel() + const override { RTC_CHECK(false) << "Attempt to fetch voice channel from non-voice"; return nullptr; } @@ -368,12 +384,22 @@ class VoiceChannel : public BaseChannel { ~VoiceChannel(); // downcasts a MediaChannel - VoiceMediaChannel* media_channel() const override { - return static_cast(BaseChannel::media_channel()); + VoiceMediaSendChannelInterface* media_send_channel() const override { + return media_channel()->AsVoiceChannel()->AsVoiceSendChannel(); } - VoiceMediaChannel* voice_media_channel() const override { - return static_cast(media_channel()); + VoiceMediaSendChannelInterface* voice_media_send_channel() const override { + return media_send_channel(); + } + + // downcasts a MediaChannel + VoiceMediaReceiveChannelInterface* media_receive_channel() const override { + return media_channel()->AsVoiceChannel()->AsVoiceReceiveChannel(); + } + + VoiceMediaReceiveChannelInterface* voice_media_receive_channel() + const override { + return media_receive_channel(); } cricket::MediaType media_type() const override { @@ -414,12 +440,22 @@ class VideoChannel : public BaseChannel { ~VideoChannel(); // downcasts a MediaChannel - VideoMediaChannel* media_channel() const override { - return static_cast(BaseChannel::media_channel()); + VideoMediaSendChannelInterface* media_send_channel() const override { + return media_channel()->AsVideoChannel()->AsVideoSendChannel(); } - VideoMediaChannel* video_media_channel() const override { - return static_cast(media_channel()); + VideoMediaSendChannelInterface* video_media_send_channel() const override { + return media_send_channel(); + } + + // downcasts a MediaChannel + VideoMediaReceiveChannelInterface* media_receive_channel() const override { + return media_channel()->AsVideoChannel()->AsVideoReceiveChannel(); + } + + VideoMediaReceiveChannelInterface* video_media_receive_channel() + const override { + return media_receive_channel(); } cricket::MediaType media_type() const override { diff --git a/third_party/libwebrtc/pc/channel_interface.h b/third_party/libwebrtc/pc/channel_interface.h index 3c6ca6fe6aca..445712b41fa5 100644 --- a/third_party/libwebrtc/pc/channel_interface.h +++ b/third_party/libwebrtc/pc/channel_interface.h @@ -28,6 +28,7 @@ class VideoBitrateAllocatorFactory; namespace cricket { +class MediaChannel; class MediaContentDescription; struct MediaConfig; @@ -47,11 +48,20 @@ class ChannelInterface { 
virtual ~ChannelInterface() = default; virtual cricket::MediaType media_type() const = 0; + // Temporary fix while MediaChannel is being reconstructed virtual MediaChannel* media_channel() const = 0; + virtual MediaSendChannelInterface* media_send_channel() const = 0; // Typecasts of media_channel(). Will cause an exception if the // channel is of the wrong type. - virtual VideoMediaChannel* video_media_channel() const = 0; - virtual VoiceMediaChannel* voice_media_channel() const = 0; + virtual VideoMediaSendChannelInterface* video_media_send_channel() const = 0; + virtual VoiceMediaSendChannelInterface* voice_media_send_channel() const = 0; + virtual MediaReceiveChannelInterface* media_receive_channel() const = 0; + // Typecasts of media_channel(). Will cause an exception if the + // channel is of the wrong type. + virtual VideoMediaReceiveChannelInterface* video_media_receive_channel() + const = 0; + virtual VoiceMediaReceiveChannelInterface* voice_media_receive_channel() + const = 0; // Returns a string view for the transport name. Fetching the transport name // must be done on the network thread only and note that the lifetime of diff --git a/third_party/libwebrtc/pc/channel_unittest.cc b/third_party/libwebrtc/pc/channel_unittest.cc index 2dd5d090bfd2..583c2923dfa3 100644 --- a/third_party/libwebrtc/pc/channel_unittest.cc +++ b/third_party/libwebrtc/pc/channel_unittest.cc @@ -429,7 +429,7 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { } void SendRtp1(rtc::Buffer data) { - SendRtp(media_channel1(), std::move(data)); + SendRtp(media_send_channel1(), std::move(data)); } void SendRtp2() { @@ -449,7 +449,8 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { } bool CheckRtp1() { - return media_channel1()->CheckRtp(rtp_packet_.data(), rtp_packet_.size()); + return media_send_channel1()->CheckRtp(rtp_packet_.data(), + rtp_packet_.size()); } bool CheckRtp2() { return media_channel2()->CheckRtp(rtp_packet_.data(), rtp_packet_.size()); @@ -457,7 +458,7 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { // Methods to check custom data. 
bool CheckCustomRtp1(uint32_t ssrc, int sequence_number, int pl_type = -1) { rtc::Buffer data = CreateRtpData(ssrc, sequence_number, pl_type); - return media_channel1()->CheckRtp(data.data(), data.size()); + return media_send_channel1()->CheckRtp(data.data(), data.size()); } bool CheckCustomRtp2(uint32_t ssrc, int sequence_number, int pl_type = -1) { rtc::Buffer data = CreateRtpData(ssrc, sequence_number, pl_type); @@ -474,7 +475,7 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { return data; } - bool CheckNoRtp1() { return media_channel1()->CheckNoRtp(); } + bool CheckNoRtp1() { return media_send_channel1()->CheckNoRtp(); } bool CheckNoRtp2() { return media_channel2()->CheckNoRtp(); } void CreateContent(int flags, @@ -557,13 +558,13 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { void TestInit() { CreateChannels(0, 0); EXPECT_FALSE(IsSrtpActive(channel1_)); - EXPECT_FALSE(media_channel1()->sending()); + EXPECT_FALSE(media_send_channel1()->sending()); if (verify_playout_) { - EXPECT_FALSE(media_channel1()->playout()); + EXPECT_FALSE(media_send_channel1()->playout()); } - EXPECT_TRUE(media_channel1()->codecs().empty()); - EXPECT_TRUE(media_channel1()->recv_streams().empty()); - EXPECT_TRUE(media_channel1()->rtp_packets().empty()); + EXPECT_TRUE(media_send_channel1()->codecs().empty()); + EXPECT_TRUE(media_send_channel1()->recv_streams().empty()); + EXPECT_TRUE(media_send_channel1()->rtp_packets().empty()); } // Test that SetLocalContent and SetRemoteContent properly configure @@ -574,11 +575,11 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { CreateContent(0, kPcmuCodec, kH264Codec, &content); std::string err; EXPECT_TRUE(channel1_->SetLocalContent(&content, SdpType::kOffer, err)); - EXPECT_EQ(0U, media_channel1()->codecs().size()); + EXPECT_EQ(0U, media_send_channel1()->codecs().size()); EXPECT_TRUE(channel1_->SetRemoteContent(&content, SdpType::kAnswer, err)); - ASSERT_EQ(1U, media_channel1()->codecs().size()); + ASSERT_EQ(1U, media_send_channel1()->codecs().size()); EXPECT_TRUE( - CodecMatches(content.codecs()[0], media_channel1()->codecs()[0])); + CodecMatches(content.codecs()[0], media_send_channel1()->codecs()[0])); } // Test that SetLocalContent and SetRemoteContent properly configure @@ -596,7 +597,7 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { EXPECT_TRUE(channel1_->SetLocalContent(&content, SdpType::kOffer, err)); content.set_extmap_allow_mixed_enum(answer_enum); EXPECT_TRUE(channel1_->SetRemoteContent(&content, SdpType::kAnswer, err)); - EXPECT_EQ(answer, media_channel1()->ExtmapAllowMixed()); + EXPECT_EQ(answer, media_send_channel1()->ExtmapAllowMixed()); } void TestSetContentsExtmapAllowMixedCallee(bool offer, bool answer) { // For a callee, SetRemoteContent() is called first with an offer and next @@ -611,7 +612,7 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { EXPECT_TRUE(channel1_->SetRemoteContent(&content, SdpType::kOffer, err)); content.set_extmap_allow_mixed_enum(answer_enum); EXPECT_TRUE(channel1_->SetLocalContent(&content, SdpType::kAnswer, err)); - EXPECT_EQ(answer, media_channel1()->ExtmapAllowMixed()); + EXPECT_EQ(answer, media_send_channel1()->ExtmapAllowMixed()); } // Test that SetLocalContent and SetRemoteContent properly deals @@ -622,11 +623,11 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { std::string err; EXPECT_TRUE(channel1_->SetLocalContent(&content, SdpType::kOffer, err)); 
CreateContent(0, kPcmuCodec, kH264Codec, &content); - EXPECT_EQ(0U, media_channel1()->codecs().size()); + EXPECT_EQ(0U, media_send_channel1()->codecs().size()); EXPECT_TRUE(channel1_->SetRemoteContent(&content, SdpType::kAnswer, err)); - ASSERT_EQ(1U, media_channel1()->codecs().size()); + ASSERT_EQ(1U, media_send_channel1()->codecs().size()); EXPECT_TRUE( - CodecMatches(content.codecs()[0], media_channel1()->codecs()[0])); + CodecMatches(content.codecs()[0], media_send_channel1()->codecs()[0])); } // Test that SetLocalContent and SetRemoteContent properly set RTCP @@ -668,7 +669,7 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { std::string err; EXPECT_TRUE(channel1_->SetLocalContent(&content1, SdpType::kOffer, err)); channel1_->Enable(true); - EXPECT_EQ(1u, media_channel1()->send_streams().size()); + EXPECT_EQ(1u, media_send_channel1()->send_streams().size()); EXPECT_TRUE(channel2_->SetRemoteContent(&content1, SdpType::kOffer, err)); EXPECT_EQ(1u, media_channel2()->recv_streams().size()); @@ -678,7 +679,7 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { typename T::Content content2; CreateContent(0, kPcmuCodec, kH264Codec, &content2); EXPECT_TRUE(channel1_->SetRemoteContent(&content2, SdpType::kAnswer, err)); - EXPECT_EQ(0u, media_channel1()->recv_streams().size()); + EXPECT_EQ(0u, media_send_channel1()->recv_streams().size()); EXPECT_TRUE(channel2_->SetLocalContent(&content2, SdpType::kAnswer, err)); channel2_->Enable(true); EXPECT_EQ(0u, media_channel2()->send_streams().size()); @@ -696,14 +697,14 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { EXPECT_EQ(stream2, media_channel2()->send_streams()[0]); EXPECT_TRUE(channel1_->SetRemoteContent(&content3, SdpType::kOffer, err)); - ASSERT_EQ(1u, media_channel1()->recv_streams().size()); - EXPECT_EQ(stream2, media_channel1()->recv_streams()[0]); + ASSERT_EQ(1u, media_send_channel1()->recv_streams().size()); + EXPECT_EQ(stream2, media_send_channel1()->recv_streams()[0]); // Channel 1 replies but stop sending stream1. 
typename T::Content content4; CreateContent(0, kPcmuCodec, kH264Codec, &content4); EXPECT_TRUE(channel1_->SetLocalContent(&content4, SdpType::kAnswer, err)); - EXPECT_EQ(0u, media_channel1()->send_streams().size()); + EXPECT_EQ(0u, media_send_channel1()->send_streams().size()); EXPECT_TRUE(channel2_->SetRemoteContent(&content4, SdpType::kAnswer, err)); EXPECT_EQ(0u, media_channel2()->recv_streams().size()); @@ -717,9 +718,9 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { void TestPlayoutAndSendingStates() { CreateChannels(0, 0); if (verify_playout_) { - EXPECT_FALSE(media_channel1()->playout()); + EXPECT_FALSE(media_send_channel1()->playout()); } - EXPECT_FALSE(media_channel1()->sending()); + EXPECT_FALSE(media_send_channel1()->sending()); if (verify_playout_) { EXPECT_FALSE(media_channel2()->playout()); } @@ -727,16 +728,16 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { channel1_->Enable(true); FlushCurrentThread(); if (verify_playout_) { - EXPECT_FALSE(media_channel1()->playout()); + EXPECT_FALSE(media_send_channel1()->playout()); } - EXPECT_FALSE(media_channel1()->sending()); + EXPECT_FALSE(media_send_channel1()->sending()); std::string err; EXPECT_TRUE(channel1_->SetLocalContent(&local_media_content1_, SdpType::kOffer, err)); if (verify_playout_) { - EXPECT_TRUE(media_channel1()->playout()); + EXPECT_TRUE(media_send_channel1()->playout()); } - EXPECT_FALSE(media_channel1()->sending()); + EXPECT_FALSE(media_send_channel1()->sending()); EXPECT_TRUE(channel2_->SetRemoteContent(&local_media_content1_, SdpType::kOffer, err)); if (verify_playout_) { @@ -751,9 +752,9 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { EXPECT_FALSE(media_channel2()->sending()); ConnectFakeTransports(); if (verify_playout_) { - EXPECT_TRUE(media_channel1()->playout()); + EXPECT_TRUE(media_send_channel1()->playout()); } - EXPECT_FALSE(media_channel1()->sending()); + EXPECT_FALSE(media_send_channel1()->sending()); if (verify_playout_) { EXPECT_FALSE(media_channel2()->playout()); } @@ -767,9 +768,9 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { EXPECT_TRUE(channel1_->SetRemoteContent(&local_media_content2_, SdpType::kAnswer, err)); if (verify_playout_) { - EXPECT_TRUE(media_channel1()->playout()); + EXPECT_TRUE(media_send_channel1()->playout()); } - EXPECT_TRUE(media_channel1()->sending()); + EXPECT_TRUE(media_send_channel1()->sending()); } // Test that changing the MediaContentDirection in the local and remote @@ -787,9 +788,9 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { channel2_->Enable(true); FlushCurrentThread(); if (verify_playout_) { - EXPECT_FALSE(media_channel1()->playout()); + EXPECT_FALSE(media_send_channel1()->playout()); } - EXPECT_FALSE(media_channel1()->sending()); + EXPECT_FALSE(media_send_channel1()->sending()); if (verify_playout_) { EXPECT_FALSE(media_channel2()->playout()); } @@ -804,9 +805,9 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { ConnectFakeTransports(); if (verify_playout_) { - EXPECT_TRUE(media_channel1()->playout()); + EXPECT_TRUE(media_send_channel1()->playout()); } - EXPECT_FALSE(media_channel1()->sending()); // remote InActive + EXPECT_FALSE(media_send_channel1()->sending()); // remote InActive if (verify_playout_) { EXPECT_FALSE(media_channel2()->playout()); // local InActive } @@ -819,9 +820,9 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { 
channel1_->SetRemoteContent(&content2, SdpType::kPrAnswer, err)); if (verify_playout_) { - EXPECT_TRUE(media_channel1()->playout()); + EXPECT_TRUE(media_send_channel1()->playout()); } - EXPECT_TRUE(media_channel1()->sending()); + EXPECT_TRUE(media_send_channel1()->sending()); if (verify_playout_) { EXPECT_TRUE(media_channel2()->playout()); // local RecvOnly } @@ -833,9 +834,9 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { EXPECT_TRUE(channel1_->SetRemoteContent(&content2, SdpType::kAnswer, err)); if (verify_playout_) { - EXPECT_TRUE(media_channel1()->playout()); + EXPECT_TRUE(media_send_channel1()->playout()); } - EXPECT_TRUE(media_channel1()->sending()); + EXPECT_TRUE(media_send_channel1()->sending()); if (verify_playout_) { EXPECT_TRUE(media_channel2()->playout()); } @@ -855,15 +856,15 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { CreateChannels(DTLS, DTLS); SendInitiate(); - typename T::MediaChannel* media_channel1 = - static_cast(channel1_->media_channel()); - ASSERT_TRUE(media_channel1); + typename T::MediaChannel* media_send_channel1 = + static_cast(channel1_->media_send_channel()); + ASSERT_TRUE(media_send_channel1); // Need to wait for the threads before calling // `set_num_network_route_changes` because the network route would be set // when creating the channel. WaitForThreads(); - media_channel1->set_num_network_route_changes(0); + media_send_channel1->set_num_network_route_changes(0); SendTask(network_thread_, [this] { rtc::NetworkRoute network_route; // The transport channel becomes disconnected. @@ -871,9 +872,9 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { absl::optional(network_route)); }); WaitForThreads(); - EXPECT_EQ(1, media_channel1->num_network_route_changes()); - EXPECT_FALSE(media_channel1->last_network_route().connected); - media_channel1->set_num_network_route_changes(0); + EXPECT_EQ(1, media_send_channel1->num_network_route_changes()); + EXPECT_FALSE(media_send_channel1->last_network_route().connected); + media_send_channel1->set_num_network_route_changes(0); SendTask(network_thread_, [this] { rtc::NetworkRoute network_route; @@ -890,16 +891,16 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { absl::optional(network_route)); }); WaitForThreads(); - EXPECT_EQ(1, media_channel1->num_network_route_changes()); - EXPECT_TRUE(media_channel1->last_network_route().connected); + EXPECT_EQ(1, media_send_channel1->num_network_route_changes()); + EXPECT_TRUE(media_send_channel1->last_network_route().connected); EXPECT_EQ(kLocalNetId, - media_channel1->last_network_route().local.network_id()); + media_send_channel1->last_network_route().local.network_id()); EXPECT_EQ(kRemoteNetId, - media_channel1->last_network_route().remote.network_id()); + media_send_channel1->last_network_route().remote.network_id()); EXPECT_EQ(kLastPacketId, - media_channel1->last_network_route().last_sent_packet_id); + media_send_channel1->last_network_route().last_sent_packet_id); EXPECT_EQ(kTransportOverheadPerPacket + kSrtpOverheadPerPacket, - media_channel1->transport_overhead_per_packet()); + media_send_channel1->transport_overhead_per_packet()); } // Test setting up a call. 
@@ -908,13 +909,13 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { EXPECT_FALSE(IsSrtpActive(channel1_)); EXPECT_TRUE(SendInitiate()); if (verify_playout_) { - EXPECT_TRUE(media_channel1()->playout()); + EXPECT_TRUE(media_send_channel1()->playout()); } - EXPECT_FALSE(media_channel1()->sending()); + EXPECT_FALSE(media_send_channel1()->sending()); EXPECT_TRUE(SendAccept()); EXPECT_FALSE(IsSrtpActive(channel1_)); - EXPECT_TRUE(media_channel1()->sending()); - EXPECT_EQ(1U, media_channel1()->codecs().size()); + EXPECT_TRUE(media_send_channel1()->sending()); + EXPECT_EQ(1U, media_send_channel1()->codecs().size()); if (verify_playout_) { EXPECT_TRUE(media_channel2()->playout()); } @@ -1046,7 +1047,7 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { // Regain writability SendTask(network_thread_, [this] { fake_rtp_dtls_transport1_->SetWritable(true); }); - EXPECT_TRUE(media_channel1()->sending()); + EXPECT_TRUE(media_send_channel1()->sending()); SendRtp1(); SendRtp2(); WaitForThreads(); @@ -1060,7 +1061,7 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { bool asymmetric = true; fake_rtp_dtls_transport1_->SetDestination(nullptr, asymmetric); }); - EXPECT_TRUE(media_channel1()->sending()); + EXPECT_TRUE(media_send_channel1()->sending()); // Should fail also. SendRtp1(); @@ -1076,7 +1077,7 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { fake_rtp_dtls_transport1_->SetDestination(fake_rtp_dtls_transport2_.get(), asymmetric); }); - EXPECT_TRUE(media_channel1()->sending()); + EXPECT_TRUE(media_send_channel1()->sending()); SendRtp1(); SendRtp2(); WaitForThreads(); @@ -1129,17 +1130,17 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { std::unique_ptr content( CreateMediaContentWithStream(1)); - media_channel1()->set_fail_set_recv_codecs(true); + media_send_channel1()->set_fail_set_recv_codecs(true); EXPECT_FALSE( channel1_->SetLocalContent(content.get(), SdpType::kOffer, err)); EXPECT_FALSE( channel1_->SetLocalContent(content.get(), SdpType::kAnswer, err)); - media_channel1()->set_fail_set_send_codecs(true); + media_send_channel1()->set_fail_set_send_codecs(true); EXPECT_FALSE( channel1_->SetRemoteContent(content.get(), SdpType::kOffer, err)); - media_channel1()->set_fail_set_send_codecs(true); + media_send_channel1()->set_fail_set_send_codecs(true); EXPECT_FALSE( channel1_->SetRemoteContent(content.get(), SdpType::kAnswer, err)); } @@ -1152,14 +1153,14 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { CreateMediaContentWithStream(1)); EXPECT_TRUE( channel1_->SetLocalContent(content1.get(), SdpType::kOffer, err)); - EXPECT_TRUE(media_channel1()->HasSendStream(1)); + EXPECT_TRUE(media_send_channel1()->HasSendStream(1)); std::unique_ptr content2( CreateMediaContentWithStream(2)); EXPECT_TRUE( channel1_->SetLocalContent(content2.get(), SdpType::kOffer, err)); - EXPECT_FALSE(media_channel1()->HasSendStream(1)); - EXPECT_TRUE(media_channel1()->HasSendStream(2)); + EXPECT_FALSE(media_send_channel1()->HasSendStream(1)); + EXPECT_TRUE(media_send_channel1()->HasSendStream(2)); } void TestReceiveTwoOffers() { @@ -1170,14 +1171,14 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { CreateMediaContentWithStream(1)); EXPECT_TRUE( channel1_->SetRemoteContent(content1.get(), SdpType::kOffer, err)); - EXPECT_TRUE(media_channel1()->HasRecvStream(1)); + EXPECT_TRUE(media_send_channel1()->HasRecvStream(1)); std::unique_ptr content2( 
CreateMediaContentWithStream(2)); EXPECT_TRUE( channel1_->SetRemoteContent(content2.get(), SdpType::kOffer, err)); - EXPECT_FALSE(media_channel1()->HasRecvStream(1)); - EXPECT_TRUE(media_channel1()->HasRecvStream(2)); + EXPECT_FALSE(media_send_channel1()->HasRecvStream(1)); + EXPECT_TRUE(media_send_channel1()->HasRecvStream(2)); } void TestSendPrAnswer() { @@ -1189,24 +1190,24 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { CreateMediaContentWithStream(1)); EXPECT_TRUE( channel1_->SetRemoteContent(content1.get(), SdpType::kOffer, err)); - EXPECT_TRUE(media_channel1()->HasRecvStream(1)); + EXPECT_TRUE(media_send_channel1()->HasRecvStream(1)); // Send PR answer std::unique_ptr content2( CreateMediaContentWithStream(2)); EXPECT_TRUE( channel1_->SetLocalContent(content2.get(), SdpType::kPrAnswer, err)); - EXPECT_TRUE(media_channel1()->HasRecvStream(1)); - EXPECT_TRUE(media_channel1()->HasSendStream(2)); + EXPECT_TRUE(media_send_channel1()->HasRecvStream(1)); + EXPECT_TRUE(media_send_channel1()->HasSendStream(2)); // Send answer std::unique_ptr content3( CreateMediaContentWithStream(3)); EXPECT_TRUE( channel1_->SetLocalContent(content3.get(), SdpType::kAnswer, err)); - EXPECT_TRUE(media_channel1()->HasRecvStream(1)); - EXPECT_FALSE(media_channel1()->HasSendStream(2)); - EXPECT_TRUE(media_channel1()->HasSendStream(3)); + EXPECT_TRUE(media_send_channel1()->HasRecvStream(1)); + EXPECT_FALSE(media_send_channel1()->HasSendStream(2)); + EXPECT_TRUE(media_send_channel1()->HasSendStream(3)); } void TestReceivePrAnswer() { @@ -1218,39 +1219,39 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { CreateMediaContentWithStream(1)); EXPECT_TRUE( channel1_->SetLocalContent(content1.get(), SdpType::kOffer, err)); - EXPECT_TRUE(media_channel1()->HasSendStream(1)); + EXPECT_TRUE(media_send_channel1()->HasSendStream(1)); // Receive PR answer std::unique_ptr content2( CreateMediaContentWithStream(2)); EXPECT_TRUE( channel1_->SetRemoteContent(content2.get(), SdpType::kPrAnswer, err)); - EXPECT_TRUE(media_channel1()->HasSendStream(1)); - EXPECT_TRUE(media_channel1()->HasRecvStream(2)); + EXPECT_TRUE(media_send_channel1()->HasSendStream(1)); + EXPECT_TRUE(media_send_channel1()->HasRecvStream(2)); // Receive answer std::unique_ptr content3( CreateMediaContentWithStream(3)); EXPECT_TRUE( channel1_->SetRemoteContent(content3.get(), SdpType::kAnswer, err)); - EXPECT_TRUE(media_channel1()->HasSendStream(1)); - EXPECT_FALSE(media_channel1()->HasRecvStream(2)); - EXPECT_TRUE(media_channel1()->HasRecvStream(3)); + EXPECT_TRUE(media_send_channel1()->HasSendStream(1)); + EXPECT_FALSE(media_send_channel1()->HasRecvStream(2)); + EXPECT_TRUE(media_send_channel1()->HasRecvStream(3)); } void TestOnTransportReadyToSend() { CreateChannels(0, 0); - EXPECT_FALSE(media_channel1()->ready_to_send()); + EXPECT_FALSE(media_send_channel1()->ready_to_send()); network_thread_->PostTask( [this] { channel1_->OnTransportReadyToSend(true); }); WaitForThreads(); - EXPECT_TRUE(media_channel1()->ready_to_send()); + EXPECT_TRUE(media_send_channel1()->ready_to_send()); network_thread_->PostTask( [this] { channel1_->OnTransportReadyToSend(false); }); WaitForThreads(); - EXPECT_FALSE(media_channel1()->ready_to_send()); + EXPECT_FALSE(media_send_channel1()->ready_to_send()); } bool SetRemoteContentWithBitrateLimit(int remote_limit) { @@ -1279,8 +1280,8 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { std::string err; 
EXPECT_TRUE(channel1_->SetLocalContent(&local_media_content1_, SdpType::kOffer, err)); - EXPECT_EQ(media_channel1()->max_bps(), -1); - VerifyMaxBitrate(media_channel1()->GetRtpSendParameters(kSsrc1), + EXPECT_EQ(media_send_channel1()->max_bps(), -1); + VerifyMaxBitrate(media_send_channel1()->GetRtpSendParameters(kSsrc1), absl::nullopt); } @@ -1397,16 +1398,18 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { ProcessThreadQueue(rtc::Thread::Current()); } - typename T::MediaChannel* media_channel1() { + typename T::MediaChannel* media_send_channel1() { RTC_DCHECK(channel1_); - RTC_DCHECK(channel1_->media_channel()); - return static_cast(channel1_->media_channel()); + RTC_DCHECK(channel1_->media_send_channel()); + return static_cast( + channel1_->media_send_channel()); } typename T::MediaChannel* media_channel2() { RTC_DCHECK(channel2_); - RTC_DCHECK(channel2_->media_channel()); - return static_cast(channel2_->media_channel()); + RTC_DCHECK(channel2_->media_send_channel()); + return static_cast( + channel2_->media_send_channel()); } rtc::AutoThread main_thread_; @@ -1595,8 +1598,8 @@ class VideoChannelDoubleThreadTest : public ChannelTest { TEST_F(VoiceChannelSingleThreadTest, TestInit) { Base::TestInit(); - EXPECT_FALSE(media_channel1()->IsStreamMuted(0)); - EXPECT_TRUE(media_channel1()->dtmf_info_queue().empty()); + EXPECT_FALSE(media_send_channel1()->IsStreamMuted(0)); + EXPECT_TRUE(media_send_channel1()->dtmf_info_queue().empty()); } TEST_F(VoiceChannelSingleThreadTest, TestDeinit) { @@ -1732,8 +1735,8 @@ TEST_F(VoiceChannelSingleThreadTest, SocketOptionsMergedOnSetTransport) { // VoiceChannelDoubleThreadTest TEST_F(VoiceChannelDoubleThreadTest, TestInit) { Base::TestInit(); - EXPECT_FALSE(media_channel1()->IsStreamMuted(0)); - EXPECT_TRUE(media_channel1()->dtmf_info_queue().empty()); + EXPECT_FALSE(media_send_channel1()->IsStreamMuted(0)); + EXPECT_TRUE(media_send_channel1()->dtmf_info_queue().empty()); } TEST_F(VoiceChannelDoubleThreadTest, TestDeinit) { @@ -2016,14 +2019,15 @@ TEST_F(VideoChannelSingleThreadTest, TestSetLocalOfferWithPacketization) { std::string err; EXPECT_TRUE(channel1_->SetLocalContent(&video, SdpType::kOffer, err)); - EXPECT_THAT(media_channel1()->send_codecs(), testing::IsEmpty()); - ASSERT_THAT(media_channel1()->recv_codecs(), testing::SizeIs(2)); - EXPECT_TRUE( - media_channel1()->recv_codecs()[0].Matches(kVp8Codec, &field_trials_)); - EXPECT_EQ(media_channel1()->recv_codecs()[0].packetization, absl::nullopt); - EXPECT_TRUE( - media_channel1()->recv_codecs()[1].Matches(vp9_codec, &field_trials_)); - EXPECT_EQ(media_channel1()->recv_codecs()[1].packetization, + EXPECT_THAT(media_send_channel1()->send_codecs(), testing::IsEmpty()); + ASSERT_THAT(media_send_channel1()->recv_codecs(), testing::SizeIs(2)); + EXPECT_TRUE(media_send_channel1()->recv_codecs()[0].Matches(kVp8Codec, + &field_trials_)); + EXPECT_EQ(media_send_channel1()->recv_codecs()[0].packetization, + absl::nullopt); + EXPECT_TRUE(media_send_channel1()->recv_codecs()[1].Matches(vp9_codec, + &field_trials_)); + EXPECT_EQ(media_send_channel1()->recv_codecs()[1].packetization, cricket::kPacketizationParamRaw); } @@ -2039,14 +2043,15 @@ TEST_F(VideoChannelSingleThreadTest, TestSetRemoteOfferWithPacketization) { std::string err; EXPECT_TRUE(channel1_->SetRemoteContent(&video, SdpType::kOffer, err)); EXPECT_TRUE(err.empty()); - EXPECT_THAT(media_channel1()->recv_codecs(), testing::IsEmpty()); - ASSERT_THAT(media_channel1()->send_codecs(), testing::SizeIs(2)); - EXPECT_TRUE( - 
media_channel1()->send_codecs()[0].Matches(kVp8Codec, &field_trials_)); - EXPECT_EQ(media_channel1()->send_codecs()[0].packetization, absl::nullopt); - EXPECT_TRUE( - media_channel1()->send_codecs()[1].Matches(vp9_codec, &field_trials_)); - EXPECT_EQ(media_channel1()->send_codecs()[1].packetization, + EXPECT_THAT(media_send_channel1()->recv_codecs(), testing::IsEmpty()); + ASSERT_THAT(media_send_channel1()->send_codecs(), testing::SizeIs(2)); + EXPECT_TRUE(media_send_channel1()->send_codecs()[0].Matches(kVp8Codec, + &field_trials_)); + EXPECT_EQ(media_send_channel1()->send_codecs()[0].packetization, + absl::nullopt); + EXPECT_TRUE(media_send_channel1()->send_codecs()[1].Matches(vp9_codec, + &field_trials_)); + EXPECT_EQ(media_send_channel1()->send_codecs()[1].packetization, cricket::kPacketizationParamRaw); } @@ -2064,21 +2069,23 @@ TEST_F(VideoChannelSingleThreadTest, TestSetAnswerWithPacketization) { EXPECT_TRUE(err.empty()); EXPECT_TRUE(channel1_->SetRemoteContent(&video, SdpType::kAnswer, err)); EXPECT_TRUE(err.empty()); - ASSERT_THAT(media_channel1()->recv_codecs(), testing::SizeIs(2)); - EXPECT_TRUE( - media_channel1()->recv_codecs()[0].Matches(kVp8Codec, &field_trials_)); - EXPECT_EQ(media_channel1()->recv_codecs()[0].packetization, absl::nullopt); - EXPECT_TRUE( - media_channel1()->recv_codecs()[1].Matches(vp9_codec, &field_trials_)); - EXPECT_EQ(media_channel1()->recv_codecs()[1].packetization, + ASSERT_THAT(media_send_channel1()->recv_codecs(), testing::SizeIs(2)); + EXPECT_TRUE(media_send_channel1()->recv_codecs()[0].Matches(kVp8Codec, + &field_trials_)); + EXPECT_EQ(media_send_channel1()->recv_codecs()[0].packetization, + absl::nullopt); + EXPECT_TRUE(media_send_channel1()->recv_codecs()[1].Matches(vp9_codec, + &field_trials_)); + EXPECT_EQ(media_send_channel1()->recv_codecs()[1].packetization, cricket::kPacketizationParamRaw); - EXPECT_THAT(media_channel1()->send_codecs(), testing::SizeIs(2)); - EXPECT_TRUE( - media_channel1()->send_codecs()[0].Matches(kVp8Codec, &field_trials_)); - EXPECT_EQ(media_channel1()->send_codecs()[0].packetization, absl::nullopt); - EXPECT_TRUE( - media_channel1()->send_codecs()[1].Matches(vp9_codec, &field_trials_)); - EXPECT_EQ(media_channel1()->send_codecs()[1].packetization, + EXPECT_THAT(media_send_channel1()->send_codecs(), testing::SizeIs(2)); + EXPECT_TRUE(media_send_channel1()->send_codecs()[0].Matches(kVp8Codec, + &field_trials_)); + EXPECT_EQ(media_send_channel1()->send_codecs()[0].packetization, + absl::nullopt); + EXPECT_TRUE(media_send_channel1()->send_codecs()[1].Matches(vp9_codec, + &field_trials_)); + EXPECT_EQ(media_send_channel1()->send_codecs()[1].packetization, cricket::kPacketizationParamRaw); } @@ -2096,10 +2103,12 @@ TEST_F(VideoChannelSingleThreadTest, TestSetLocalAnswerWithoutPacketization) { std::string err; EXPECT_TRUE(channel1_->SetRemoteContent(&remote_video, SdpType::kOffer, err)); EXPECT_TRUE(channel1_->SetLocalContent(&local_video, SdpType::kAnswer, err)); - ASSERT_THAT(media_channel1()->recv_codecs(), testing::SizeIs(1)); - EXPECT_EQ(media_channel1()->recv_codecs()[0].packetization, absl::nullopt); - ASSERT_THAT(media_channel1()->send_codecs(), testing::SizeIs(1)); - EXPECT_EQ(media_channel1()->send_codecs()[0].packetization, absl::nullopt); + ASSERT_THAT(media_send_channel1()->recv_codecs(), testing::SizeIs(1)); + EXPECT_EQ(media_send_channel1()->recv_codecs()[0].packetization, + absl::nullopt); + ASSERT_THAT(media_send_channel1()->send_codecs(), testing::SizeIs(1)); + 
EXPECT_EQ(media_send_channel1()->send_codecs()[0].packetization, + absl::nullopt); } TEST_F(VideoChannelSingleThreadTest, TestSetRemoteAnswerWithoutPacketization) { @@ -2117,10 +2126,12 @@ TEST_F(VideoChannelSingleThreadTest, TestSetRemoteAnswerWithoutPacketization) { EXPECT_TRUE(channel1_->SetLocalContent(&local_video, SdpType::kOffer, err)); EXPECT_TRUE( channel1_->SetRemoteContent(&remote_video, SdpType::kAnswer, err)); - ASSERT_THAT(media_channel1()->recv_codecs(), testing::SizeIs(1)); - EXPECT_EQ(media_channel1()->recv_codecs()[0].packetization, absl::nullopt); - ASSERT_THAT(media_channel1()->send_codecs(), testing::SizeIs(1)); - EXPECT_EQ(media_channel1()->send_codecs()[0].packetization, absl::nullopt); + ASSERT_THAT(media_send_channel1()->recv_codecs(), testing::SizeIs(1)); + EXPECT_EQ(media_send_channel1()->recv_codecs()[0].packetization, + absl::nullopt); + ASSERT_THAT(media_send_channel1()->send_codecs(), testing::SizeIs(1)); + EXPECT_EQ(media_send_channel1()->send_codecs()[0].packetization, + absl::nullopt); } TEST_F(VideoChannelSingleThreadTest, @@ -2142,10 +2153,10 @@ TEST_F(VideoChannelSingleThreadTest, EXPECT_FALSE( channel1_->SetRemoteContent(&remote_video, SdpType::kAnswer, err)); EXPECT_FALSE(err.empty()); - ASSERT_THAT(media_channel1()->recv_codecs(), testing::SizeIs(1)); - EXPECT_EQ(media_channel1()->recv_codecs()[0].packetization, + ASSERT_THAT(media_send_channel1()->recv_codecs(), testing::SizeIs(1)); + EXPECT_EQ(media_send_channel1()->recv_codecs()[0].packetization, cricket::kPacketizationParamRaw); - EXPECT_THAT(media_channel1()->send_codecs(), testing::IsEmpty()); + EXPECT_THAT(media_send_channel1()->send_codecs(), testing::IsEmpty()); } TEST_F(VideoChannelSingleThreadTest, @@ -2165,9 +2176,10 @@ TEST_F(VideoChannelSingleThreadTest, EXPECT_TRUE(err.empty()); EXPECT_FALSE(channel1_->SetLocalContent(&local_video, SdpType::kAnswer, err)); EXPECT_FALSE(err.empty()); - EXPECT_THAT(media_channel1()->recv_codecs(), testing::IsEmpty()); - ASSERT_THAT(media_channel1()->send_codecs(), testing::SizeIs(1)); - EXPECT_EQ(media_channel1()->send_codecs()[0].packetization, absl::nullopt); + EXPECT_THAT(media_send_channel1()->recv_codecs(), testing::IsEmpty()); + ASSERT_THAT(media_send_channel1()->send_codecs(), testing::SizeIs(1)); + EXPECT_EQ(media_send_channel1()->send_codecs()[0].packetization, + absl::nullopt); } // VideoChannelDoubleThreadTest diff --git a/third_party/libwebrtc/pc/legacy_stats_collector.cc b/third_party/libwebrtc/pc/legacy_stats_collector.cc index b710bc16e6a3..ad9f7ad00718 100644 --- a/third_party/libwebrtc/pc/legacy_stats_collector.cc +++ b/third_party/libwebrtc/pc/legacy_stats_collector.cc @@ -34,6 +34,7 @@ #include "api/video/video_timing.h" #include "call/call.h" #include "media/base/media_channel.h" +#include "media/base/media_channel_impl.h" #include "modules/audio_processing/include/audio_processing_statistics.h" #include "p2p/base/ice_transport_internal.h" #include "p2p/base/p2p_constants.h" @@ -1043,7 +1044,7 @@ void LegacyStatsCollector::ExtractBweInfo() { auto* video_channel = transceiver->internal()->channel(); if (video_channel) { video_media_channels.push_back(static_cast( - video_channel->media_channel())); + video_channel->video_media_send_channel())); } } @@ -1155,11 +1156,11 @@ std::unique_ptr CreateMediaChannelStatsGatherer( RTC_DCHECK(channel); if (channel->media_type() == cricket::MEDIA_TYPE_AUDIO) { return std::make_unique( - static_cast(channel)); + channel->AsVoiceChannel()); } else { RTC_DCHECK_EQ(channel->media_type(), 
cricket::MEDIA_TYPE_VIDEO); return std::make_unique( - static_cast(channel)); + channel->AsVideoChannel()); } } diff --git a/third_party/libwebrtc/pc/legacy_stats_collector.h b/third_party/libwebrtc/pc/legacy_stats_collector.h index 21f51c5143a3..cedd36c8537b 100644 --- a/third_party/libwebrtc/pc/legacy_stats_collector.h +++ b/third_party/libwebrtc/pc/legacy_stats_collector.h @@ -27,10 +27,10 @@ #include "absl/types/optional.h" #include "api/field_trials_view.h" +#include "api/legacy_stats_types.h" #include "api/media_stream_interface.h" #include "api/peer_connection_interface.h" #include "api/scoped_refptr.h" -#include "api/stats_types.h" #include "p2p/base/connection_info.h" #include "p2p/base/port.h" #include "pc/legacy_stats_collector_interface.h" diff --git a/third_party/libwebrtc/pc/legacy_stats_collector_interface.h b/third_party/libwebrtc/pc/legacy_stats_collector_interface.h index 3cddb284f816..a0c6f3bd65af 100644 --- a/third_party/libwebrtc/pc/legacy_stats_collector_interface.h +++ b/third_party/libwebrtc/pc/legacy_stats_collector_interface.h @@ -17,8 +17,8 @@ #include +#include "api/legacy_stats_types.h" #include "api/media_stream_interface.h" -#include "api/stats_types.h" namespace webrtc { diff --git a/third_party/libwebrtc/pc/peer_connection.cc b/third_party/libwebrtc/pc/peer_connection.cc index 9e78bac671b4..5de77fee9d2e 100644 --- a/third_party/libwebrtc/pc/peer_connection.cc +++ b/third_party/libwebrtc/pc/peer_connection.cc @@ -297,7 +297,6 @@ bool PeerConnectionInterface::RTCConfiguration::operator==( RtcpMuxPolicy rtcp_mux_policy; std::vector> certificates; int ice_candidate_pool_size; - bool DEPRECATED_disable_ipv6; bool disable_ipv6_on_wifi; int max_ipv6_networks; bool disable_link_local_networks; @@ -343,6 +342,7 @@ bool PeerConnectionInterface::RTCConfiguration::operator==( webrtc::VpnPreference vpn_preference; std::vector vpn_list; PortAllocatorConfig port_allocator_config; + absl::optional pacer_burst_interval; }; static_assert(sizeof(stuff_being_tested_for_equality) == sizeof(*this), "Did you add something to RTCConfiguration and forget to " @@ -366,7 +366,6 @@ bool PeerConnectionInterface::RTCConfiguration::operator==( prioritize_most_likely_ice_candidate_pairs == o.prioritize_most_likely_ice_candidate_pairs && media_config == o.media_config && - DEPRECATED_disable_ipv6 == o.DEPRECATED_disable_ipv6 && disable_ipv6_on_wifi == o.disable_ipv6_on_wifi && max_ipv6_networks == o.max_ipv6_networks && disable_link_local_networks == o.disable_link_local_networks && @@ -409,7 +408,8 @@ bool PeerConnectionInterface::RTCConfiguration::operator==( vpn_preference == o.vpn_preference && vpn_list == o.vpn_list && port_allocator_config.min_port == o.port_allocator_config.min_port && port_allocator_config.max_port == o.port_allocator_config.max_port && - port_allocator_config.flags == o.port_allocator_config.flags; + port_allocator_config.flags == o.port_allocator_config.flags && + pacer_burst_interval == o.pacer_burst_interval; } bool PeerConnectionInterface::RTCConfiguration::operator!=( @@ -603,6 +603,16 @@ RTCError PeerConnection::Initialize( return parse_error; } + // Restrict number of TURN servers. 
+ if (!trials().IsDisabled("WebRTC-LimitTurnServers") && + turn_servers.size() > cricket::kMaxTurnServers) { + RTC_LOG(LS_WARNING) << "Number of configured TURN servers is " + << turn_servers.size() + << " which exceeds the maximum allowed number of " + << cricket::kMaxTurnServers; + turn_servers.resize(cricket::kMaxTurnServers); + } + // Add the turn logging id to all turn servers for (cricket::RelayServerConfig& turn_server : turn_servers) { turn_server.turn_logging_id = configuration.turn_logging_id; @@ -1164,14 +1174,14 @@ rtc::scoped_refptr PeerConnection::CreateSender( auto audio_sender = AudioRtpSender::Create(worker_thread(), rtc::CreateRandomUuid(), legacy_stats_.get(), rtp_manager()); - audio_sender->SetMediaChannel(rtp_manager()->voice_media_channel()); + audio_sender->SetMediaChannel(rtp_manager()->voice_media_send_channel()); new_sender = RtpSenderProxyWithInternal::Create( signaling_thread(), audio_sender); rtp_manager()->GetAudioTransceiver()->internal()->AddSender(new_sender); } else if (kind == MediaStreamTrackInterface::kVideoKind) { auto video_sender = VideoRtpSender::Create( worker_thread(), rtc::CreateRandomUuid(), rtp_manager()); - video_sender->SetMediaChannel(rtp_manager()->video_media_channel()); + video_sender->SetMediaChannel(rtp_manager()->video_media_send_channel()); new_sender = RtpSenderProxyWithInternal::Create( signaling_thread(), video_sender); rtp_manager()->GetVideoTransceiver()->internal()->AddSender(new_sender); @@ -1547,6 +1557,17 @@ RTCError PeerConnection::SetConfiguration( if (!parse_error.ok()) { return parse_error; } + + // Restrict number of TURN servers. + if (!trials().IsDisabled("WebRTC-LimitTurnServers") && + turn_servers.size() > cricket::kMaxTurnServers) { + RTC_LOG(LS_WARNING) << "Number of configured TURN servers is " + << turn_servers.size() + << " which exceeds the maximum allowed number of " + << cricket::kMaxTurnServers; + turn_servers.resize(cricket::kMaxTurnServers); + } + // Add the turn logging id to all turn servers for (cricket::RelayServerConfig& turn_server : turn_servers) { turn_server.turn_logging_id = configuration.turn_logging_id; @@ -1608,15 +1629,16 @@ RTCError PeerConnection::SetConfiguration( } if (modified_config.allow_codec_switching.has_value()) { - std::vector channels; + std::vector channels; for (const auto& transceiver : rtp_manager()->transceivers()->List()) { if (transceiver->media_type() != cricket::MEDIA_TYPE_VIDEO) continue; auto* video_channel = transceiver->internal()->channel(); if (video_channel) - channels.push_back(static_cast( - video_channel->media_channel())); + channels.push_back( + static_cast( + video_channel->media_send_channel())); } worker_thread()->BlockingCall( @@ -2093,11 +2115,7 @@ PeerConnection::InitializePortAllocator_n( port_allocator_flags |= cricket::PORTALLOCATOR_ENABLE_SHARED_SOCKET | cricket::PORTALLOCATOR_ENABLE_IPV6 | cricket::PORTALLOCATOR_ENABLE_IPV6_ON_WIFI; - // If the disable-IPv6 flag was specified, we'll not override it - // by experiment. 
- if (configuration.DEPRECATED_disable_ipv6) { - port_allocator_flags &= ~(cricket::PORTALLOCATOR_ENABLE_IPV6); - } else if (trials().IsDisabled("WebRTC-IPv6Default")) { + if (trials().IsDisabled("WebRTC-IPv6Default")) { port_allocator_flags &= ~(cricket::PORTALLOCATOR_ENABLE_IPV6); } if (configuration.disable_ipv6_on_wifi) { diff --git a/third_party/libwebrtc/pc/peer_connection_factory.cc b/third_party/libwebrtc/pc/peer_connection_factory.cc index 1e1c8185e0c3..afebdd79a559 100644 --- a/third_party/libwebrtc/pc/peer_connection_factory.cc +++ b/third_party/libwebrtc/pc/peer_connection_factory.cc @@ -243,8 +243,8 @@ PeerConnectionFactory::CreatePeerConnectionOrError( const FieldTrialsView* trials = dependencies.trials ? dependencies.trials.get() : &field_trials(); std::unique_ptr call = - worker_thread()->BlockingCall([this, &event_log, trials] { - return CreateCall_w(event_log.get(), *trials); + worker_thread()->BlockingCall([this, &event_log, trials, &configuration] { + return CreateCall_w(event_log.get(), *trials, configuration); }); auto result = PeerConnection::Create(context_, options_, std::move(event_log), @@ -303,7 +303,8 @@ std::unique_ptr PeerConnectionFactory::CreateRtcEventLog_w() { std::unique_ptr PeerConnectionFactory::CreateCall_w( RtcEventLog* event_log, - const FieldTrialsView& field_trials) { + const FieldTrialsView& field_trials, + const PeerConnectionInterface::RTCConfiguration& configuration) { RTC_DCHECK_RUN_ON(worker_thread()); webrtc::Call::Config call_config(event_log, network_thread()); @@ -346,6 +347,7 @@ std::unique_ptr PeerConnectionFactory::CreateCall_w( call_config.rtp_transport_controller_send_factory = transport_controller_send_factory_.get(); call_config.metronome = metronome_.get(); + call_config.pacer_burst_interval = configuration.pacer_burst_interval; return std::unique_ptr( context_->call_factory()->CreateCall(call_config)); } diff --git a/third_party/libwebrtc/pc/peer_connection_factory.h b/third_party/libwebrtc/pc/peer_connection_factory.h index 2851954a2f75..dac3702e37a5 100644 --- a/third_party/libwebrtc/pc/peer_connection_factory.h +++ b/third_party/libwebrtc/pc/peer_connection_factory.h @@ -136,8 +136,10 @@ class PeerConnectionFactory : public PeerConnectionFactoryInterface { bool IsTrialEnabled(absl::string_view key) const; std::unique_ptr CreateRtcEventLog_w(); - std::unique_ptr CreateCall_w(RtcEventLog* event_log, - const FieldTrialsView& field_trials); + std::unique_ptr CreateCall_w( + RtcEventLog* event_log, + const FieldTrialsView& field_trials, + const PeerConnectionInterface::RTCConfiguration& configuration); rtc::scoped_refptr context_; PeerConnectionFactoryInterface::Options options_ diff --git a/third_party/libwebrtc/pc/peer_connection_interface_unittest.cc b/third_party/libwebrtc/pc/peer_connection_interface_unittest.cc index e4c1ce25df44..dfca4868b592 100644 --- a/third_party/libwebrtc/pc/peer_connection_interface_unittest.cc +++ b/third_party/libwebrtc/pc/peer_connection_interface_unittest.cc @@ -115,7 +115,7 @@ static const char kSdpStringWithStream1PlanB[] = "o=- 0 0 IN IP4 127.0.0.1\r\n" "s=-\r\n" "t=0 0\r\n" - "m=audio 1 RTP/AVPF 103\r\n" + "m=audio 1 RTP/AVPF 111\r\n" "a=ice-ufrag:e5785931\r\n" "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n" "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:" @@ -123,7 +123,7 @@ static const char kSdpStringWithStream1PlanB[] = "a=mid:audio\r\n" "a=sendrecv\r\n" "a=rtcp-mux\r\n" - "a=rtpmap:103 ISAC/16000\r\n" + "a=rtpmap:111 OPUS/48000/2\r\n" "a=ssrc:1 
cname:stream1\r\n" "a=ssrc:1 msid:stream1 audiotrack0\r\n" "m=video 1 RTP/AVPF 120\r\n" @@ -145,7 +145,7 @@ static const char kSdpStringWithStream1UnifiedPlan[] = "o=- 0 0 IN IP4 127.0.0.1\r\n" "s=-\r\n" "t=0 0\r\n" - "m=audio 1 RTP/AVPF 103\r\n" + "m=audio 1 RTP/AVPF 111\r\n" "a=ice-ufrag:e5785931\r\n" "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n" "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:" @@ -153,7 +153,7 @@ static const char kSdpStringWithStream1UnifiedPlan[] = "a=mid:0\r\n" "a=sendrecv\r\n" "a=rtcp-mux\r\n" - "a=rtpmap:103 ISAC/16000\r\n" + "a=rtpmap:111 OPUS/48000/2\r\n" "a=msid:stream1 audiotrack0\r\n" "a=ssrc:1 cname:stream1\r\n" "m=video 1 RTP/AVPF 120\r\n" @@ -175,14 +175,14 @@ static const char kSdpStringWithStream1AudioTrackOnly[] = "o=- 0 0 IN IP4 127.0.0.1\r\n" "s=-\r\n" "t=0 0\r\n" - "m=audio 1 RTP/AVPF 103\r\n" + "m=audio 1 RTP/AVPF 111\r\n" "a=ice-ufrag:e5785931\r\n" "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n" "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:" "BD:AB:AE:40:EF:CE:9A:51:2C:2A:B1:9B:8B:78:84\r\n" "a=mid:audio\r\n" "a=sendrecv\r\n" - "a=rtpmap:103 ISAC/16000\r\n" + "a=rtpmap:111 OPUS/48000/2\r\n" "a=ssrc:1 cname:stream1\r\n" "a=ssrc:1 msid:stream1 audiotrack0\r\n" "a=rtcp-mux\r\n"; @@ -196,7 +196,7 @@ static const char kSdpStringWithStream1And2PlanB[] = "s=-\r\n" "t=0 0\r\n" "a=msid-semantic: WMS stream1 stream2\r\n" - "m=audio 1 RTP/AVPF 103\r\n" + "m=audio 1 RTP/AVPF 111\r\n" "a=ice-ufrag:e5785931\r\n" "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n" "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:" @@ -204,7 +204,7 @@ static const char kSdpStringWithStream1And2PlanB[] = "a=mid:audio\r\n" "a=sendrecv\r\n" "a=rtcp-mux\r\n" - "a=rtpmap:103 ISAC/16000\r\n" + "a=rtpmap:111 OPUS/48000/2\r\n" "a=ssrc:1 cname:stream1\r\n" "a=ssrc:1 msid:stream1 audiotrack0\r\n" "a=ssrc:3 cname:stream2\r\n" @@ -228,7 +228,7 @@ static const char kSdpStringWithStream1And2UnifiedPlan[] = "s=-\r\n" "t=0 0\r\n" "a=msid-semantic: WMS stream1 stream2\r\n" - "m=audio 1 RTP/AVPF 103\r\n" + "m=audio 1 RTP/AVPF 111\r\n" "a=ice-ufrag:e5785931\r\n" "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n" "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:" @@ -236,7 +236,7 @@ static const char kSdpStringWithStream1And2UnifiedPlan[] = "a=mid:0\r\n" "a=sendrecv\r\n" "a=rtcp-mux\r\n" - "a=rtpmap:103 ISAC/16000\r\n" + "a=rtpmap:111 OPUS/48000/2\r\n" "a=ssrc:1 cname:stream1\r\n" "a=ssrc:1 msid:stream1 audiotrack0\r\n" "m=video 1 RTP/AVPF 120\r\n" @@ -250,7 +250,7 @@ static const char kSdpStringWithStream1And2UnifiedPlan[] = "a=rtpmap:120 VP8/0\r\n" "a=ssrc:2 cname:stream1\r\n" "a=ssrc:2 msid:stream1 videotrack0\r\n" - "m=audio 1 RTP/AVPF 103\r\n" + "m=audio 1 RTP/AVPF 111\r\n" "a=ice-ufrag:e5785931\r\n" "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n" "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:" @@ -258,7 +258,7 @@ static const char kSdpStringWithStream1And2UnifiedPlan[] = "a=mid:2\r\n" "a=sendrecv\r\n" "a=rtcp-mux\r\n" - "a=rtpmap:103 ISAC/16000\r\n" + "a=rtpmap:111 OPUS/48000/2\r\n" "a=ssrc:3 cname:stream2\r\n" "a=ssrc:3 msid:stream2 audiotrack1\r\n" "m=video 1 RTP/AVPF 120\r\n" @@ -279,7 +279,7 @@ static const char kSdpStringWithoutStreams[] = "o=- 0 0 IN IP4 127.0.0.1\r\n" "s=-\r\n" "t=0 0\r\n" - "m=audio 1 RTP/AVPF 103\r\n" + "m=audio 1 RTP/AVPF 111\r\n" "a=ice-ufrag:e5785931\r\n" "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n" "a=fingerprint:sha-256 
58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:" @@ -287,7 +287,7 @@ static const char kSdpStringWithoutStreams[] = "a=mid:audio\r\n" "a=sendrecv\r\n" "a=rtcp-mux\r\n" - "a=rtpmap:103 ISAC/16000\r\n" + "a=rtpmap:111 OPUS/48000/2\r\n" "m=video 1 RTP/AVPF 120\r\n" "a=ice-ufrag:e5785931\r\n" "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n" @@ -305,7 +305,7 @@ static const char kSdpStringWithMsidWithoutStreams[] = "s=-\r\n" "t=0 0\r\n" "a=msid-semantic: WMS\r\n" - "m=audio 1 RTP/AVPF 103\r\n" + "m=audio 1 RTP/AVPF 111\r\n" "a=ice-ufrag:e5785931\r\n" "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n" "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:" @@ -313,7 +313,7 @@ static const char kSdpStringWithMsidWithoutStreams[] = "a=mid:audio\r\n" "a=sendrecv\r\n" "a=rtcp-mux\r\n" - "a=rtpmap:103 ISAC/16000\r\n" + "a=rtpmap:111 OPUS/48000/2\r\n" "m=video 1 RTP/AVPF 120\r\n" "a=ice-ufrag:e5785931\r\n" "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n" @@ -330,7 +330,7 @@ static const char kSdpStringWithoutStreamsAudioOnly[] = "o=- 0 0 IN IP4 127.0.0.1\r\n" "s=-\r\n" "t=0 0\r\n" - "m=audio 1 RTP/AVPF 103\r\n" + "m=audio 1 RTP/AVPF 111\r\n" "a=ice-ufrag:e5785931\r\n" "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n" "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:" @@ -338,7 +338,7 @@ static const char kSdpStringWithoutStreamsAudioOnly[] = "a=mid:audio\r\n" "a=sendrecv\r\n" "a=rtcp-mux\r\n" - "a=rtpmap:103 ISAC/16000\r\n"; + "a=rtpmap:111 OPUS/48000/2\r\n"; // Reference SENDONLY SDP without MediaStreams. Msid is not supported. static const char kSdpStringSendOnlyWithoutStreams[] = @@ -346,7 +346,7 @@ static const char kSdpStringSendOnlyWithoutStreams[] = "o=- 0 0 IN IP4 127.0.0.1\r\n" "s=-\r\n" "t=0 0\r\n" - "m=audio 1 RTP/AVPF 103\r\n" + "m=audio 1 RTP/AVPF 111\r\n" "a=ice-ufrag:e5785931\r\n" "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n" "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:" @@ -355,7 +355,7 @@ static const char kSdpStringSendOnlyWithoutStreams[] = "a=sendrecv\r\n" "a=sendonly\r\n" "a=rtcp-mux\r\n" - "a=rtpmap:103 ISAC/16000\r\n" + "a=rtpmap:111 OPUS/48000/2\r\n" "m=video 1 RTP/AVPF 120\r\n" "a=ice-ufrag:e5785931\r\n" "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n" @@ -375,7 +375,7 @@ static const char kSdpStringInit[] = "a=msid-semantic: WMS\r\n"; static const char kSdpStringAudio[] = - "m=audio 1 RTP/AVPF 103\r\n" + "m=audio 1 RTP/AVPF 111\r\n" "a=ice-ufrag:e5785931\r\n" "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n" "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:" @@ -383,7 +383,7 @@ static const char kSdpStringAudio[] = "a=mid:audio\r\n" "a=sendrecv\r\n" "a=rtcp-mux\r\n" - "a=rtpmap:103 ISAC/16000\r\n"; + "a=rtpmap:111 OPUS/48000/2\r\n"; static const char kSdpStringVideo[] = "m=video 1 RTP/AVPF 120\r\n" @@ -1334,7 +1334,6 @@ TEST_P(PeerConnectionInterfaceTest, CreatePeerConnectionWithPooledCandidates) { server.uri = kStunAddressOnly; config.servers.push_back(server); config.type = PeerConnectionInterface::kRelay; - config.DEPRECATED_disable_ipv6 = true; config.tcp_candidate_policy = PeerConnectionInterface::kTcpCandidatePolicyDisabled; config.candidate_network_policy = @@ -1347,7 +1346,6 @@ TEST_P(PeerConnectionInterfaceTest, CreatePeerConnectionWithPooledCandidates) { port_allocator_->GetPooledSession()); ASSERT_NE(nullptr, session); EXPECT_EQ(1UL, session->stun_servers().size()); - EXPECT_EQ(0U, session->flags() & cricket::PORTALLOCATOR_ENABLE_IPV6); EXPECT_LT(0U, 
session->flags() & cricket::PORTALLOCATOR_DISABLE_TCP); EXPECT_LT(0U, session->flags() & cricket::PORTALLOCATOR_DISABLE_COSTLY_NETWORKS); @@ -3828,10 +3826,6 @@ TEST(RTCConfigurationTest, ComparisonOperators) { f.ice_connection_receiving_timeout = 1337; EXPECT_NE(a, f); - PeerConnectionInterface::RTCConfiguration g; - g.DEPRECATED_disable_ipv6 = true; - EXPECT_NE(a, g); - PeerConnectionInterface::RTCConfiguration h( PeerConnectionInterface::RTCConfigurationType::kAggressive); EXPECT_NE(a, h); diff --git a/third_party/libwebrtc/pc/peer_connection_message_handler.cc b/third_party/libwebrtc/pc/peer_connection_message_handler.cc index 2d674aad4d55..8ddeddea5841 100644 --- a/third_party/libwebrtc/pc/peer_connection_message_handler.cc +++ b/third_party/libwebrtc/pc/peer_connection_message_handler.cc @@ -13,11 +13,11 @@ #include #include "api/jsep.h" +#include "api/legacy_stats_types.h" #include "api/media_stream_interface.h" #include "api/peer_connection_interface.h" #include "api/scoped_refptr.h" #include "api/sequence_checker.h" -#include "api/stats_types.h" #include "api/task_queue/pending_task_safety_flag.h" #include "pc/legacy_stats_collector_interface.h" #include "rtc_base/checks.h" diff --git a/third_party/libwebrtc/pc/peer_connection_message_handler.h b/third_party/libwebrtc/pc/peer_connection_message_handler.h index 1351a279b6e3..8bd0e5ebb149 100644 --- a/third_party/libwebrtc/pc/peer_connection_message_handler.h +++ b/third_party/libwebrtc/pc/peer_connection_message_handler.h @@ -14,10 +14,10 @@ #include #include "api/jsep.h" +#include "api/legacy_stats_types.h" #include "api/media_stream_interface.h" #include "api/peer_connection_interface.h" #include "api/rtc_error.h" -#include "api/stats_types.h" #include "api/task_queue/pending_task_safety_flag.h" #include "api/task_queue/task_queue_base.h" #include "pc/legacy_stats_collector_interface.h" diff --git a/third_party/libwebrtc/pc/remote_audio_source.cc b/third_party/libwebrtc/pc/remote_audio_source.cc index 1058d1cbf98c..a516c576174e 100644 --- a/third_party/libwebrtc/pc/remote_audio_source.cc +++ b/third_party/libwebrtc/pc/remote_audio_source.cc @@ -70,8 +70,9 @@ RemoteAudioSource::~RemoteAudioSource() { } } -void RemoteAudioSource::Start(cricket::VoiceMediaChannel* media_channel, - absl::optional ssrc) { +void RemoteAudioSource::Start( + cricket::VoiceMediaReceiveChannelInterface* media_channel, + absl::optional ssrc) { RTC_DCHECK_RUN_ON(worker_thread_); // Register for callbacks immediately before AddSink so that we always get @@ -84,8 +85,9 @@ void RemoteAudioSource::Start(cricket::VoiceMediaChannel* media_channel, std::make_unique(this)); } -void RemoteAudioSource::Stop(cricket::VoiceMediaChannel* media_channel, - absl::optional ssrc) { +void RemoteAudioSource::Stop( + cricket::VoiceMediaReceiveChannelInterface* media_channel, + absl::optional ssrc) { RTC_DCHECK_RUN_ON(worker_thread_); RTC_DCHECK(media_channel); ssrc ? media_channel->SetRawAudioSink(*ssrc, nullptr) diff --git a/third_party/libwebrtc/pc/remote_audio_source.h b/third_party/libwebrtc/pc/remote_audio_source.h index d294a0f0fb5c..0fac606ad48a 100644 --- a/third_party/libwebrtc/pc/remote_audio_source.h +++ b/third_party/libwebrtc/pc/remote_audio_source.h @@ -49,9 +49,9 @@ class RemoteAudioSource : public Notifier { // Register and unregister remote audio source with the underlying media // engine. 
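// Start()/Stop() now take the receive-side channel interface plus an optional
// SSRC: a concrete SSRC attaches the raw audio sink to that stream, while
// nullopt is assumed here to refer to the default/unsignalled stream. A
// simplified sketch of that shape, using a hypothetical ReceiveChannel
// interface rather than the real cricket::VoiceMediaReceiveChannelInterface.
#include <cstdint>
#include <memory>
#include <optional>

struct AudioSink {};  // stand-in for an audio sink interface

struct ReceiveChannel {
  virtual ~ReceiveChannel() = default;
  virtual void SetRawAudioSink(uint32_t ssrc,
                               std::unique_ptr<AudioSink> sink) = 0;
  virtual void SetDefaultRawAudioSink(std::unique_ptr<AudioSink> sink) = 0;
};

void StartSource(ReceiveChannel& channel, std::optional<uint32_t> ssrc) {
  auto sink = std::make_unique<AudioSink>();
  ssrc ? channel.SetRawAudioSink(*ssrc, std::move(sink))
       : channel.SetDefaultRawAudioSink(std::move(sink));
}

void StopSource(ReceiveChannel& channel, std::optional<uint32_t> ssrc) {
  // Passing nullptr detaches the sink before the channel goes away.
  ssrc ? channel.SetRawAudioSink(*ssrc, nullptr)
       : channel.SetDefaultRawAudioSink(nullptr);
}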
- void Start(cricket::VoiceMediaChannel* media_channel, + void Start(cricket::VoiceMediaReceiveChannelInterface* media_channel, absl::optional ssrc); - void Stop(cricket::VoiceMediaChannel* media_channel, + void Stop(cricket::VoiceMediaReceiveChannelInterface* media_channel, absl::optional ssrc); void SetState(SourceState new_state); diff --git a/third_party/libwebrtc/pc/rtc_stats_collector.cc b/third_party/libwebrtc/pc/rtc_stats_collector.cc index c1ef9acc201a..1d88566f0444 100644 --- a/third_party/libwebrtc/pc/rtc_stats_collector.cc +++ b/third_party/libwebrtc/pc/rtc_stats_collector.cc @@ -33,8 +33,10 @@ #include "api/stats/rtcstats_objects.h" #include "api/units/time_delta.h" #include "api/video/video_content_type.h" +#include "api/video_codecs/scalability_mode.h" #include "common_video/include/quality_limitation_reason.h" #include "media/base/media_channel.h" +#include "media/base/media_channel_impl.h" #include "modules/audio_processing/include/audio_processing_statistics.h" #include "modules/rtp_rtcp/include/report_block_data.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" @@ -792,6 +794,10 @@ void SetOutboundRTPStreamStatsFromVideoSenderInfo( outbound_video->power_efficient_encoder = video_sender_info.power_efficient_encoder.value(); } + if (video_sender_info.scalability_mode) { + outbound_video->scalability_mode = std::string( + ScalabilityModeToString(*video_sender_info.scalability_mode)); + } } std::unique_ptr @@ -815,9 +821,11 @@ ProduceRemoteInboundRtpStreamStatsFromReportBlockData( remote_inbound->packets_lost = report_block.packets_lost; remote_inbound->fraction_lost = static_cast(report_block.fraction_lost) / (1 << 8); - remote_inbound->round_trip_time = - static_cast(report_block_data.last_rtt_ms()) / - rtc::kNumMillisecsPerSec; + if (report_block_data.num_rtts() > 0) { + remote_inbound->round_trip_time = + static_cast(report_block_data.last_rtt_ms()) / + rtc::kNumMillisecsPerSec; + } remote_inbound->total_round_trip_time = static_cast(report_block_data.sum_rtt_ms()) / rtc::kNumMillisecsPerSec; @@ -2359,13 +2367,15 @@ void RTCStatsCollector::PrepareTransceiverStatsInfosAndCallStats_s_w_n() { if (media_type == cricket::MEDIA_TYPE_AUDIO) { cricket::VoiceMediaChannel* voice_channel = - static_cast(channel->media_channel()); + static_cast( + channel->voice_media_send_channel()); RTC_DCHECK(voice_stats.find(voice_channel) == voice_stats.end()); voice_stats.insert( std::make_pair(voice_channel, cricket::VoiceMediaInfo())); } else if (media_type == cricket::MEDIA_TYPE_VIDEO) { cricket::VideoMediaChannel* video_channel = - static_cast(channel->media_channel()); + static_cast( + channel->video_media_send_channel()); RTC_DCHECK(video_stats.find(video_channel) == video_stats.end()); video_stats.insert( std::make_pair(video_channel, cricket::VideoMediaInfo())); @@ -2405,13 +2415,13 @@ void RTCStatsCollector::PrepareTransceiverStatsInfosAndCallStats_s_w_n() { if (media_type == cricket::MEDIA_TYPE_AUDIO) { cricket::VoiceMediaChannel* voice_channel = static_cast( - channel->media_channel()); + channel->voice_media_send_channel()); RTC_DCHECK(voice_stats.find(voice_channel) != voice_stats.end()); voice_media_info = std::move(voice_stats[voice_channel]); } else if (media_type == cricket::MEDIA_TYPE_VIDEO) { cricket::VideoMediaChannel* video_channel = static_cast( - channel->media_channel()); + channel->video_media_send_channel()); RTC_DCHECK(video_stats.find(video_channel) != video_stats.end()); video_media_info = std::move(video_stats[video_channel]); } diff --git 
a/third_party/libwebrtc/pc/rtc_stats_collector_unittest.cc b/third_party/libwebrtc/pc/rtc_stats_collector_unittest.cc index a3a17801c15b..5388355eb791 100644 --- a/third_party/libwebrtc/pc/rtc_stats_collector_unittest.cc +++ b/third_party/libwebrtc/pc/rtc_stats_collector_unittest.cc @@ -39,6 +39,7 @@ #include "api/video/video_sink_interface.h" #include "api/video/video_source_interface.h" #include "api/video/video_timing.h" +#include "api/video_codecs/scalability_mode.h" #include "common_video/include/quality_limitation_reason.h" #include "media/base/media_channel.h" #include "modules/audio_processing/include/audio_processing_statistics.h" @@ -2835,6 +2836,7 @@ TEST_F(RTCStatsCollectorTest, CollectRTCOutboundRTPStreamStats_Video) { video_media_info.senders[0].frames_sent = 5; video_media_info.senders[0].huge_frames_sent = 2; video_media_info.senders[0].active = false; + video_media_info.senders[0].scalability_mode = ScalabilityMode::kL3T3_KEY; video_media_info.aggregated_senders.push_back(video_media_info.senders[0]); RtpCodecParameters codec_parameters; codec_parameters.payload_type = 42; @@ -2894,6 +2896,7 @@ TEST_F(RTCStatsCollectorTest, CollectRTCOutboundRTPStreamStats_Video) { expected_video.huge_frames_sent = 2; expected_video.active = false; expected_video.power_efficient_encoder = false; + expected_video.scalability_mode = "L3T3_KEY"; // `expected_video.content_type` should be undefined. // `expected_video.qp_sum` should be undefined. // `expected_video.encoder_implementation` should be undefined. @@ -3557,6 +3560,32 @@ TEST_P(RTCStatsCollectorTestWithParamKind, } } +TEST_P(RTCStatsCollectorTestWithParamKind, + RTCRemoteInboundRtpStreamStatsRttMissingBeforeMeasurement) { + constexpr int64_t kReportBlockTimestampUtcUs = 123456789; + + RTCPReportBlock report_block; + // The remote-inbound-rtp SSRC and the outbound-rtp SSRC is the same as the + // `source_ssrc`, "SSRC of the RTP packet sender". + report_block.source_ssrc = 12; + ReportBlockData report_block_data; // AddRoundTripTimeSample() not called. 
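// This new test pins down the behaviour changed above in rtc_stats_collector.cc:
// remote-inbound-rtp round_trip_time stays undefined until a report block has
// carried at least one RTT sample (num_rtts() > 0), instead of reporting a
// misleading 0. A small generic sketch of that guard, with ReportBlockStats as
// an illustrative stand-in type:
#include <cstdint>
#include <optional>

struct ReportBlockStats {
  int num_rtts = 0;         // how many RTT samples have been observed
  int64_t last_rtt_ms = 0;  // only meaningful once num_rtts > 0
};

std::optional<double> RoundTripTimeSeconds(const ReportBlockStats& stats) {
  if (stats.num_rtts == 0) {
    return std::nullopt;  // leave the stat undefined before the first sample
  }
  return static_cast<double>(stats.last_rtt_ms) / 1000.0;
}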
+ report_block_data.SetReportBlock(report_block, kReportBlockTimestampUtcUs); + + AddSenderInfoAndMediaChannel("TransportName", {report_block_data}, + absl::nullopt); + + rtc::scoped_refptr report = stats_->GetStatsReport(); + + std::string remote_inbound_rtp_id = "RI" + MediaTypeCharStr() + "12"; + ASSERT_TRUE(report->Get(remote_inbound_rtp_id)); + auto& remote_inbound_rtp = report->Get(remote_inbound_rtp_id) + ->cast_to(); + + EXPECT_TRUE(remote_inbound_rtp.round_trip_time_measurements.is_defined()); + EXPECT_EQ(0, *remote_inbound_rtp.round_trip_time_measurements); + EXPECT_FALSE(remote_inbound_rtp.round_trip_time.is_defined()); +} + TEST_P(RTCStatsCollectorTestWithParamKind, RTCRemoteInboundRtpStreamStatsWithTimestampFromReportBlock) { const int64_t kReportBlockTimestampUtcUs = 123456789; diff --git a/third_party/libwebrtc/pc/rtc_stats_integrationtest.cc b/third_party/libwebrtc/pc/rtc_stats_integrationtest.cc index c63083dbfcdb..b5abad9d6374 100644 --- a/third_party/libwebrtc/pc/rtc_stats_integrationtest.cc +++ b/third_party/libwebrtc/pc/rtc_stats_integrationtest.cc @@ -984,6 +984,7 @@ class RTCStatsReportVerifier { verifier.TestMemberIsNonNegative( outbound_stream.huge_frames_sent); verifier.MarkMemberTested(outbound_stream.rid, true); + verifier.TestMemberIsDefined(outbound_stream.scalability_mode); } else { verifier.TestMemberIsUndefined(outbound_stream.frames_encoded); verifier.TestMemberIsUndefined(outbound_stream.key_frames_encoded); @@ -1005,6 +1006,7 @@ class RTCStatsReportVerifier { verifier.TestMemberIsUndefined(outbound_stream.frame_width); verifier.TestMemberIsUndefined(outbound_stream.frames_sent); verifier.TestMemberIsUndefined(outbound_stream.huge_frames_sent); + verifier.TestMemberIsUndefined(outbound_stream.scalability_mode); } return verifier.ExpectAllMembersSuccessfullyTested(); } diff --git a/third_party/libwebrtc/pc/rtp_receiver.h b/third_party/libwebrtc/pc/rtp_receiver.h index 8c49f56b7564..7622139f83b4 100644 --- a/third_party/libwebrtc/pc/rtp_receiver.h +++ b/third_party/libwebrtc/pc/rtp_receiver.h @@ -53,7 +53,8 @@ class RtpReceiverInternal : public RtpReceiverInterface { // * SetMediaChannel(nullptr) must be called before the media channel is // destroyed. // * This method must be invoked on the worker thread. - virtual void SetMediaChannel(cricket::MediaChannel* media_channel) = 0; + virtual void SetMediaChannel( + cricket::MediaReceiveChannelInterface* media_channel) = 0; // Configures the RtpReceiver with the underlying media channel, with the // given SSRC as the stream identifier. 
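// The rtp_sender.cc hunk below introduces SignalingThreadCallback: a move-only
// wrapper that guarantees the SetParameters completion callback is resolved on
// the signaling thread, posting a task when it is invoked from the worker
// thread instead. Generic sketch of the idea; TaskRunner and its
// PostTask/IsCurrent methods are illustrative stand-ins, not the rtc::Thread API.
#include <functional>
#include <utility>

struct Error { bool ok = true; };

struct TaskRunner {
  virtual ~TaskRunner() = default;
  virtual bool IsCurrent() const = 0;
  virtual void PostTask(std::function<void()> task) = 0;
};

class ThreadBoundCallback {
 public:
  ThreadBoundCallback(TaskRunner* home, std::function<void(Error)> callback)
      : home_(home), callback_(std::move(callback)) {}

  void operator()(Error error) {
    if (!home_->IsCurrent()) {
      // Hop back to the home thread before resolving the callback.
      home_->PostTask([cb = std::move(callback_), error]() { cb(error); });
      callback_ = nullptr;
      return;
    }
    callback_(error);
    callback_ = nullptr;
  }

 private:
  TaskRunner* home_;
  std::function<void(Error)> callback_;
};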
diff --git a/third_party/libwebrtc/pc/rtp_sender.cc b/third_party/libwebrtc/pc/rtp_sender.cc index 698d7ffe619b..b3330d3897c4 100644 --- a/third_party/libwebrtc/pc/rtp_sender.cc +++ b/third_party/libwebrtc/pc/rtp_sender.cc @@ -88,6 +88,46 @@ RtpParameters RestoreEncodingLayers( return result; } +class SignalingThreadCallback { + public: + SignalingThreadCallback(rtc::Thread* signaling_thread, + SetParametersCallback callback) + : signaling_thread_(signaling_thread), callback_(std::move(callback)) {} + SignalingThreadCallback(SignalingThreadCallback&& other) + : signaling_thread_(other.signaling_thread_), + callback_(std::move(other.callback_)) { + other.callback_ = nullptr; + } + + ~SignalingThreadCallback() { + if (callback_) { + Resolve(RTCError(RTCErrorType::INTERNAL_ERROR)); + + RTC_CHECK_NOTREACHED(); + } + } + + void operator()(const RTCError& error) { Resolve(error); } + + private: + void Resolve(const RTCError& error) { + if (!signaling_thread_->IsCurrent()) { + signaling_thread_->PostTask( + [callback = std::move(callback_), error]() mutable { + webrtc::InvokeSetParametersCallback(callback, error); + }); + callback_ = nullptr; + return; + } + + webrtc::InvokeSetParametersCallback(callback_, error); + callback_ = nullptr; + } + + rtc::Thread* signaling_thread_; + SetParametersCallback callback_; +}; + } // namespace // Returns true if any RtpParameters member that isn't implemented contains a @@ -146,7 +186,8 @@ void RtpSenderBase::SetEncoderSelectorOnChannel() { } } -void RtpSenderBase::SetMediaChannel(cricket::MediaChannel* media_channel) { +void RtpSenderBase::SetMediaChannel( + cricket::MediaSendChannelInterface* media_channel) { RTC_DCHECK(media_channel == nullptr || media_channel->media_type() == media_type()); media_channel_ = media_channel; @@ -189,14 +230,20 @@ RtpParameters RtpSenderBase::GetParameters() const { return result; } -RTCError RtpSenderBase::SetParametersInternal(const RtpParameters& parameters) { +void RtpSenderBase::SetParametersInternal(const RtpParameters& parameters, + SetParametersCallback callback, + bool blocking) { RTC_DCHECK_RUN_ON(signaling_thread_); RTC_DCHECK(!stopped_); if (UnimplementedRtpParameterHasValue(parameters)) { - LOG_AND_RETURN_ERROR( + RTCError error( RTCErrorType::UNSUPPORTED_PARAMETER, "Attempted to set an unimplemented parameter of RtpParameters."); + RTC_LOG(LS_ERROR) << error.message() << " (" + << ::webrtc::ToString(error.type()) << ")"; + webrtc::InvokeSetParametersCallback(callback, error); + return; } if (!media_channel_ || !ssrc_) { auto result = cricket::CheckRtpParametersInvalidModificationAndValues( @@ -204,9 +251,11 @@ RTCError RtpSenderBase::SetParametersInternal(const RtpParameters& parameters) { if (result.ok()) { init_parameters_ = parameters; } - return result; + webrtc::InvokeSetParametersCallback(callback, result); + return; } - return worker_thread_->BlockingCall([&] { + auto task = [&, callback = std::move(callback), + parameters = std::move(parameters)]() mutable { RtpParameters rtp_parameters = parameters; RtpParameters old_parameters = media_channel_->GetRtpSendParameters(ssrc_); if (!disabled_rids_.empty()) { @@ -215,17 +264,26 @@ RTCError RtpSenderBase::SetParametersInternal(const RtpParameters& parameters) { old_parameters.encodings); } - auto result = cricket::CheckRtpParametersInvalidModificationAndValues( + RTCError result = cricket::CheckRtpParametersInvalidModificationAndValues( old_parameters, rtp_parameters); - if (!result.ok()) - return result; + if (!result.ok()) { + 
webrtc::InvokeSetParametersCallback(callback, result); + return; + } result = CheckSVCParameters(rtp_parameters); - if (!result.ok()) - return result; + if (!result.ok()) { + webrtc::InvokeSetParametersCallback(callback, result); + return; + } - return media_channel_->SetRtpSendParameters(ssrc_, rtp_parameters); - }); + media_channel_->SetRtpSendParameters(ssrc_, rtp_parameters, + std::move(callback)); + }; + if (blocking) + worker_thread_->BlockingCall(task); + else + worker_thread_->PostTask(std::move(task)); } RTCError RtpSenderBase::SetParametersInternalWithAllLayers( @@ -248,13 +306,12 @@ RTCError RtpSenderBase::SetParametersInternalWithAllLayers( } return worker_thread_->BlockingCall([&] { RtpParameters rtp_parameters = parameters; - return media_channel_->SetRtpSendParameters(ssrc_, rtp_parameters); + return media_channel_->SetRtpSendParameters(ssrc_, rtp_parameters, nullptr); }); } -RTCError RtpSenderBase::SetParameters(const RtpParameters& parameters) { +RTCError RtpSenderBase::CheckSetParameters(const RtpParameters& parameters) { RTC_DCHECK_RUN_ON(signaling_thread_); - TRACE_EVENT0("webrtc", "RtpSenderBase::SetParameters"); if (is_transceiver_stopped_) { LOG_AND_RETURN_ERROR( RTCErrorType::INVALID_STATE, @@ -264,10 +321,6 @@ RTCError RtpSenderBase::SetParameters(const RtpParameters& parameters) { LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_STATE, "Cannot set parameters on a stopped sender."); } - if (stopped_) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_STATE, - "Cannot set parameters on a stopped sender."); - } if (!last_transaction_id_) { LOG_AND_RETURN_ERROR( RTCErrorType::INVALID_STATE, @@ -281,11 +334,55 @@ RTCError RtpSenderBase::SetParameters(const RtpParameters& parameters) { " the last value returned from getParameters()"); } - RTCError result = SetParametersInternal(parameters); + return RTCError::OK(); +} + +RTCError RtpSenderBase::SetParameters(const RtpParameters& parameters) { + RTC_DCHECK_RUN_ON(signaling_thread_); + TRACE_EVENT0("webrtc", "RtpSenderBase::SetParameters"); + RTCError result = CheckSetParameters(parameters); + if (!result.ok()) + return result; + + // Some tests rely on working in single thread mode without a run loop and a + // blocking call is required to keep them working. The encoder configuration + // also involves another thread with an asynchronous task, thus we still do + // need to wait for the callback to be resolved this way. 
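// Sketch of the sync-over-async shape used here: the blocking SetParameters()
// is now just the asynchronous internal path plus a wait until the callback
// fires. Generic illustration with std::promise/std::future; the real code
// uses rtc::Event and RTCError, and must not block a thread the callback
// itself needs to run on.
#include <functional>
#include <future>
#include <memory>
#include <string>
#include <thread>

struct Result { bool ok = true; std::string message; };

// Hypothetical component that only exposes an asynchronous setter, resolving
// its callback on some other thread.
void SetAsync(int value, std::function<void(Result)> on_done) {
  std::thread([value, cb = std::move(on_done)]() {
    cb(Result{value >= 0, value >= 0 ? "" : "negative value"});
  }).detach();
}

Result SetBlocking(int value) {
  auto done = std::make_shared<std::promise<Result>>();
  std::future<Result> result = done->get_future();
  SetAsync(value, [done](Result r) { done->set_value(std::move(r)); });
  return result.get();  // Block until the async callback has resolved.
}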
+ std::unique_ptr done_event = std::make_unique(); + SetParametersInternal( + parameters, + [done = done_event.get(), &result](RTCError error) { + result = error; + done->Set(); + }, + true); + done_event->Wait(rtc::Event::kForever); last_transaction_id_.reset(); return result; } +void RtpSenderBase::SetParametersAsync(const RtpParameters& parameters, + SetParametersCallback callback) { + RTC_DCHECK_RUN_ON(signaling_thread_); + RTC_DCHECK(callback); + TRACE_EVENT0("webrtc", "RtpSenderBase::SetParametersAsync"); + RTCError result = CheckSetParameters(parameters); + if (!result.ok()) { + webrtc::InvokeSetParametersCallback(callback, result); + return; + } + + SetParametersInternal( + parameters, + SignalingThreadCallback( + signaling_thread_, + [this, callback = std::move(callback)](RTCError error) mutable { + last_transaction_id_.reset(); + webrtc::InvokeSetParametersCallback(callback, error); + }), + false); +} + void RtpSenderBase::SetStreams(const std::vector& stream_ids) { set_stream_ids(stream_ids); if (set_streams_observer_) @@ -372,7 +469,7 @@ void RtpSenderBase::SetSsrc(uint32_t ssrc) { } current_parameters.degradation_preference = init_parameters_.degradation_preference; - media_channel_->SetRtpSendParameters(ssrc_, current_parameters); + media_channel_->SetRtpSendParameters(ssrc_, current_parameters, nullptr); init_parameters_.encodings.clear(); init_parameters_.degradation_preference = absl::nullopt; }); diff --git a/third_party/libwebrtc/pc/rtp_sender.h b/third_party/libwebrtc/pc/rtp_sender.h index 70a9c947aa6c..29e5f16cfea2 100644 --- a/third_party/libwebrtc/pc/rtp_sender.h +++ b/third_party/libwebrtc/pc/rtp_sender.h @@ -54,7 +54,8 @@ class RtpSenderInternal : public RtpSenderInterface { // A VoiceMediaChannel should be used for audio RtpSenders and // a VideoMediaChannel should be used for video RtpSenders. // Must call SetMediaChannel(nullptr) before the media channel is destroyed. - virtual void SetMediaChannel(cricket::MediaChannel* media_channel) = 0; + virtual void SetMediaChannel( + cricket::MediaSendChannelInterface* media_channel) = 0; // Used to set the SSRC of the sender, once a local description has been set. // If `ssrc` is 0, this indiates that the sender should disconnect from the @@ -73,7 +74,9 @@ class RtpSenderInternal : public RtpSenderInterface { // `GetParameters` and `SetParameters` operate with a transactional model. // Allow access to get/set parameters without invalidating transaction id. virtual RtpParameters GetParametersInternal() const = 0; - virtual RTCError SetParametersInternal(const RtpParameters& parameters) = 0; + virtual void SetParametersInternal(const RtpParameters& parameters, + SetParametersCallback, + bool blocking) = 0; // GetParameters and SetParameters will remove deactivated simulcast layers // and restore them on SetParameters. This is probably a Bad Idea, but we @@ -118,7 +121,8 @@ class RtpSenderBase : public RtpSenderInternal, public ObserverInterface { // A VoiceMediaChannel should be used for audio RtpSenders and // a VideoMediaChannel should be used for video RtpSenders. // Must call SetMediaChannel(nullptr) before the media channel is destroyed. 
- void SetMediaChannel(cricket::MediaChannel* media_channel) override; + void SetMediaChannel( + cricket::MediaSendChannelInterface* media_channel) override; bool SetTrack(MediaStreamTrackInterface* track) override; rtc::scoped_refptr track() const override { @@ -130,11 +134,16 @@ class RtpSenderBase : public RtpSenderInternal, public ObserverInterface { RtpParameters GetParameters() const override; RTCError SetParameters(const RtpParameters& parameters) override; + void SetParametersAsync(const RtpParameters& parameters, + SetParametersCallback callback) override; // `GetParameters` and `SetParameters` operate with a transactional model. // Allow access to get/set parameters without invalidating transaction id. RtpParameters GetParametersInternal() const override; - RTCError SetParametersInternal(const RtpParameters& parameters) override; + void SetParametersInternal(const RtpParameters& parameters, + SetParametersCallback callback = nullptr, + bool blocking = true) override; + RTCError CheckSetParameters(const RtpParameters& parameters); RtpParameters GetParametersInternalWithAllLayers() const override; RTCError SetParametersInternalWithAllLayers( const RtpParameters& parameters) override; @@ -260,7 +269,7 @@ class RtpSenderBase : public RtpSenderInternal, public ObserverInterface { // a guard or lock. Internally there are also several Invoke()s that we could // remove since the upstream code may already be performing several operations // on the worker thread. - cricket::MediaChannel* media_channel_ = nullptr; + cricket::MediaSendChannelInterface* media_channel_ = nullptr; rtc::scoped_refptr track_; rtc::scoped_refptr dtls_transport_; @@ -369,8 +378,8 @@ class AudioRtpSender : public DtmfProviderInterface, public RtpSenderBase { void RemoveTrackFromStats() override; private: - cricket::VoiceMediaChannel* voice_media_channel() { - return static_cast(media_channel_); + cricket::VoiceMediaSendChannelInterface* voice_media_channel() { + return media_channel_->AsVoiceSendChannel(); } rtc::scoped_refptr audio_track() const { return rtc::scoped_refptr( @@ -427,8 +436,8 @@ class VideoRtpSender : public RtpSenderBase { void AttachTrack() override; private: - cricket::VideoMediaChannel* video_media_channel() { - return static_cast(media_channel_); + cricket::VideoMediaSendChannelInterface* video_media_channel() { + return media_channel_->AsVideoSendChannel(); } rtc::scoped_refptr video_track() const { return rtc::scoped_refptr( diff --git a/third_party/libwebrtc/pc/rtp_sender_proxy.h b/third_party/libwebrtc/pc/rtp_sender_proxy.h index a38c8af71566..236ac10fa2ea 100644 --- a/third_party/libwebrtc/pc/rtp_sender_proxy.h +++ b/third_party/libwebrtc/pc/rtp_sender_proxy.h @@ -35,6 +35,10 @@ PROXY_CONSTMETHOD0(std::vector, stream_ids) PROXY_CONSTMETHOD0(std::vector, init_send_encodings) PROXY_CONSTMETHOD0(RtpParameters, GetParameters) PROXY_METHOD1(RTCError, SetParameters, const RtpParameters&) +PROXY_METHOD2(void, + SetParametersAsync, + const RtpParameters&, + SetParametersCallback) PROXY_CONSTMETHOD0(rtc::scoped_refptr, GetDtmfSender) PROXY_METHOD1(void, SetFrameEncryptor, diff --git a/third_party/libwebrtc/pc/rtp_sender_receiver_unittest.cc b/third_party/libwebrtc/pc/rtp_sender_receiver_unittest.cc index 5df57958c821..a189e6517bec 100644 --- a/third_party/libwebrtc/pc/rtp_sender_receiver_unittest.cc +++ b/third_party/libwebrtc/pc/rtp_sender_receiver_unittest.cc @@ -204,7 +204,7 @@ class RtpSenderReceiverTest ASSERT_TRUE(audio_rtp_sender_->SetTrack(audio_track_.get())); 
EXPECT_CALL(*set_streams_observer, OnSetStreams()); audio_rtp_sender_->SetStreams({local_stream_->id()}); - audio_rtp_sender_->SetMediaChannel(voice_media_channel()); + audio_rtp_sender_->SetMediaChannel(voice_media_channel()->AsSendChannel()); audio_rtp_sender_->SetSsrc(kAudioSsrc); VerifyVoiceChannelInput(); } @@ -212,7 +212,8 @@ class RtpSenderReceiverTest void CreateAudioRtpSenderWithNoTrack() { audio_rtp_sender_ = AudioRtpSender::Create(worker_thread_, /*id=*/"", nullptr, nullptr); - audio_rtp_sender_->SetMediaChannel(voice_media_channel()); + audio_rtp_sender_->SetMediaChannel( + voice_media_channel()->AsVoiceSendChannel()); } void CreateVideoRtpSender(uint32_t ssrc) { @@ -264,14 +265,16 @@ class RtpSenderReceiverTest ASSERT_TRUE(video_rtp_sender_->SetTrack(video_track_.get())); EXPECT_CALL(*set_streams_observer, OnSetStreams()); video_rtp_sender_->SetStreams({local_stream_->id()}); - video_rtp_sender_->SetMediaChannel(video_media_channel()); + video_rtp_sender_->SetMediaChannel( + video_media_channel()->AsVideoSendChannel()); video_rtp_sender_->SetSsrc(ssrc); VerifyVideoChannelInput(ssrc); } void CreateVideoRtpSenderWithNoTrack() { video_rtp_sender_ = VideoRtpSender::Create(worker_thread_, /*id=*/"", nullptr); - video_rtp_sender_->SetMediaChannel(video_media_channel()); + video_rtp_sender_->SetMediaChannel( + video_media_channel()->AsVideoSendChannel()); } void DestroyAudioRtpSender() { @@ -289,7 +292,8 @@ class RtpSenderReceiverTest audio_rtp_receiver_ = rtc::make_ref_counted( rtc::Thread::Current(), kAudioTrackId, streams, /*is_unified_plan=*/true); - audio_rtp_receiver_->SetMediaChannel(voice_media_channel()); + audio_rtp_receiver_->SetMediaChannel( + voice_media_channel()->AsVoiceReceiveChannel()); audio_rtp_receiver_->SetupMediaChannel(kAudioSsrc); audio_track_ = audio_rtp_receiver_->audio_track(); VerifyVoiceChannelOutput(); @@ -299,7 +303,8 @@ class RtpSenderReceiverTest std::vector> streams = {}) { video_rtp_receiver_ = rtc::make_ref_counted( rtc::Thread::Current(), kVideoTrackId, streams); - video_rtp_receiver_->SetMediaChannel(video_media_channel()); + video_rtp_receiver_->SetMediaChannel( + video_media_channel()->AsVideoReceiveChannel()); video_rtp_receiver_->SetupMediaChannel(kVideoSsrc); video_track_ = video_rtp_receiver_->video_track(); VerifyVideoChannelOutput(); @@ -319,7 +324,8 @@ class RtpSenderReceiverTest video_rtp_receiver_ = rtc::make_ref_counted( rtc::Thread::Current(), kVideoTrackId, streams); - video_rtp_receiver_->SetMediaChannel(video_media_channel()); + video_rtp_receiver_->SetMediaChannel( + video_media_channel()->AsVideoReceiveChannel()); video_rtp_receiver_->SetupMediaChannel(primary_ssrc); video_track_ = video_rtp_receiver_->video_track(); } @@ -689,15 +695,17 @@ TEST_F(RtpSenderReceiverTest, RemoteAudioTrackSetVolume) { TEST_F(RtpSenderReceiverTest, AudioRtpReceiverDelay) { CreateAudioRtpReceiver(); - VerifyRtpReceiverDelayBehaviour(voice_media_channel(), - audio_rtp_receiver_.get(), kAudioSsrc); + VerifyRtpReceiverDelayBehaviour( + voice_media_channel()->AsVoiceReceiveChannel(), audio_rtp_receiver_.get(), + kAudioSsrc); DestroyAudioRtpReceiver(); } TEST_F(RtpSenderReceiverTest, VideoRtpReceiverDelay) { CreateVideoRtpReceiver(); - VerifyRtpReceiverDelayBehaviour(video_media_channel(), - video_rtp_receiver_.get(), kVideoSsrc); + VerifyRtpReceiverDelayBehaviour( + video_media_channel()->AsVideoReceiveChannel(), video_rtp_receiver_.get(), + kVideoSsrc); DestroyVideoRtpReceiver(); } @@ -855,6 +863,20 @@ TEST_F(RtpSenderReceiverTest, 
AudioSenderCanSetParameters) { DestroyAudioRtpSender(); } +TEST_F(RtpSenderReceiverTest, AudioSenderCanSetParametersAsync) { + CreateAudioRtpSender(); + + RtpParameters params = audio_rtp_sender_->GetParameters(); + EXPECT_EQ(1u, params.encodings.size()); + absl::optional result; + audio_rtp_sender_->SetParametersAsync( + params, [&result](webrtc::RTCError error) { result = error; }); + run_loop_.Flush(); + EXPECT_TRUE(result->ok()); + + DestroyAudioRtpSender(); +} + TEST_F(RtpSenderReceiverTest, AudioSenderCanSetParametersBeforeNegotiation) { audio_rtp_sender_ = AudioRtpSender::Create(worker_thread_, /*id=*/"", nullptr, nullptr); @@ -865,8 +887,34 @@ TEST_F(RtpSenderReceiverTest, AudioSenderCanSetParametersBeforeNegotiation) { EXPECT_TRUE(audio_rtp_sender_->SetParameters(params).ok()); params = audio_rtp_sender_->GetParameters(); - EXPECT_TRUE(audio_rtp_sender_->SetParameters(params).ok()); EXPECT_EQ(params.encodings[0].max_bitrate_bps, 90000); + EXPECT_TRUE(audio_rtp_sender_->SetParameters(params).ok()); + + DestroyAudioRtpSender(); +} + +TEST_F(RtpSenderReceiverTest, + AudioSenderCanSetParametersAsyncBeforeNegotiation) { + audio_rtp_sender_ = + AudioRtpSender::Create(worker_thread_, /*id=*/"", nullptr, nullptr); + + absl::optional result; + RtpParameters params = audio_rtp_sender_->GetParameters(); + ASSERT_EQ(1u, params.encodings.size()); + params.encodings[0].max_bitrate_bps = 90000; + + audio_rtp_sender_->SetParametersAsync( + params, [&result](webrtc::RTCError error) { result = error; }); + run_loop_.Flush(); + EXPECT_TRUE(result->ok()); + + params = audio_rtp_sender_->GetParameters(); + EXPECT_EQ(params.encodings[0].max_bitrate_bps, 90000); + + audio_rtp_sender_->SetParametersAsync( + params, [&result](webrtc::RTCError error) { result = error; }); + run_loop_.Flush(); + EXPECT_TRUE(result->ok()); DestroyAudioRtpSender(); } @@ -896,7 +944,8 @@ TEST_F(RtpSenderReceiverTest, AudioSenderInitParametersMovedAfterNegotiation) { cricket::StreamParams stream_params = cricket::CreateSimStreamParams("cname", ssrcs); voice_media_channel()->AddSendStream(stream_params); - audio_rtp_sender_->SetMediaChannel(voice_media_channel()); + audio_rtp_sender_->SetMediaChannel( + voice_media_channel()->AsVoiceSendChannel()); audio_rtp_sender_->SetSsrc(1); params = audio_rtp_sender_->GetParameters(); @@ -941,6 +990,25 @@ TEST_F(RtpSenderReceiverTest, DestroyAudioRtpSender(); } +TEST_F(RtpSenderReceiverTest, + AudioSenderSetParametersAsyncInvalidatesTransactionId) { + CreateAudioRtpSender(); + + RtpParameters params = audio_rtp_sender_->GetParameters(); + EXPECT_EQ(1u, params.encodings.size()); + absl::optional result; + audio_rtp_sender_->SetParametersAsync( + params, [&result](webrtc::RTCError error) { result = error; }); + run_loop_.Flush(); + EXPECT_TRUE(result->ok()); + audio_rtp_sender_->SetParametersAsync( + params, [&result](webrtc::RTCError error) { result = error; }); + run_loop_.Flush(); + EXPECT_EQ(RTCErrorType::INVALID_STATE, result->type()); + + DestroyAudioRtpSender(); +} + TEST_F(RtpSenderReceiverTest, AudioSenderDetectTransactionIdModification) { CreateAudioRtpSender(); @@ -1047,6 +1115,20 @@ TEST_F(RtpSenderReceiverTest, VideoSenderCanSetParameters) { DestroyVideoRtpSender(); } +TEST_F(RtpSenderReceiverTest, VideoSenderCanSetParametersAsync) { + CreateVideoRtpSender(); + + RtpParameters params = video_rtp_sender_->GetParameters(); + EXPECT_EQ(1u, params.encodings.size()); + absl::optional result; + video_rtp_sender_->SetParametersAsync( + params, [&result](webrtc::RTCError error) { result 
= error; }); + run_loop_.Flush(); + EXPECT_TRUE(result->ok()); + + DestroyVideoRtpSender(); +} + TEST_F(RtpSenderReceiverTest, VideoSenderCanSetParametersBeforeNegotiation) { video_rtp_sender_ = VideoRtpSender::Create(worker_thread_, /*id=*/"", nullptr); @@ -1063,6 +1145,30 @@ TEST_F(RtpSenderReceiverTest, VideoSenderCanSetParametersBeforeNegotiation) { DestroyVideoRtpSender(); } +TEST_F(RtpSenderReceiverTest, + VideoSenderCanSetParametersAsyncBeforeNegotiation) { + video_rtp_sender_ = + VideoRtpSender::Create(worker_thread_, /*id=*/"", nullptr); + + absl::optional result; + RtpParameters params = video_rtp_sender_->GetParameters(); + ASSERT_EQ(1u, params.encodings.size()); + params.encodings[0].max_bitrate_bps = 90000; + video_rtp_sender_->SetParametersAsync( + params, [&result](webrtc::RTCError error) { result = error; }); + run_loop_.Flush(); + EXPECT_TRUE(result->ok()); + + params = video_rtp_sender_->GetParameters(); + EXPECT_EQ(params.encodings[0].max_bitrate_bps, 90000); + video_rtp_sender_->SetParametersAsync( + params, [&result](webrtc::RTCError error) { result = error; }); + run_loop_.Flush(); + EXPECT_TRUE(result->ok()); + + DestroyVideoRtpSender(); +} + TEST_F(RtpSenderReceiverTest, VideoSenderInitParametersMovedAfterNegotiation) { AddVideoTrack(false); @@ -1092,7 +1198,8 @@ TEST_F(RtpSenderReceiverTest, VideoSenderInitParametersMovedAfterNegotiation) { cricket::StreamParams stream_params = cricket::CreateSimStreamParams("cname", ssrcs); video_media_channel()->AddSendStream(stream_params); - video_rtp_sender_->SetMediaChannel(video_media_channel()); + video_rtp_sender_->SetMediaChannel( + video_media_channel()->AsVideoSendChannel()); video_rtp_sender_->SetSsrc(kVideoSsrcSimulcast); params = video_rtp_sender_->GetParameters(); @@ -1132,7 +1239,8 @@ TEST_F(RtpSenderReceiverTest, cricket::StreamParams stream_params = cricket::CreateSimStreamParams("cname", ssrcs); video_media_channel()->AddSendStream(stream_params); - video_rtp_sender_->SetMediaChannel(video_media_channel()); + video_rtp_sender_->SetMediaChannel( + video_media_channel()->AsVideoSendChannel()); video_rtp_sender_->SetSsrc(kVideoSsrcSimulcast); params = video_rtp_sender_->GetParameters(); @@ -1175,7 +1283,8 @@ TEST_F(RtpSenderReceiverDeathTest, cricket::StreamParams stream_params = cricket::StreamParams::CreateLegacy(kVideoSsrc); video_media_channel()->AddSendStream(stream_params); - video_rtp_sender_->SetMediaChannel(video_media_channel()); + video_rtp_sender_->SetMediaChannel( + video_media_channel()->AsVideoSendChannel()); EXPECT_DEATH(video_rtp_sender_->SetSsrc(kVideoSsrcSimulcast), ""); } #endif @@ -1215,6 +1324,25 @@ TEST_F(RtpSenderReceiverTest, DestroyVideoRtpSender(); } +TEST_F(RtpSenderReceiverTest, + VideoSenderSetParametersAsyncInvalidatesTransactionId) { + CreateVideoRtpSender(); + + RtpParameters params = video_rtp_sender_->GetParameters(); + EXPECT_EQ(1u, params.encodings.size()); + absl::optional result; + video_rtp_sender_->SetParametersAsync( + params, [&result](webrtc::RTCError error) { result = error; }); + run_loop_.Flush(); + EXPECT_TRUE(result->ok()); + video_rtp_sender_->SetParametersAsync( + params, [&result](webrtc::RTCError error) { result = error; }); + run_loop_.Flush(); + EXPECT_EQ(RTCErrorType::INVALID_STATE, result->type()); + + DestroyVideoRtpSender(); +} + TEST_F(RtpSenderReceiverTest, VideoSenderDetectTransactionIdModification) { CreateVideoRtpSender(); @@ -1571,7 +1699,8 @@ TEST_F(RtpSenderReceiverTest, ASSERT_TRUE(video_rtp_sender_->SetTrack(video_track_.get())); 
EXPECT_CALL(*set_streams_observer, OnSetStreams()); video_rtp_sender_->SetStreams({local_stream_->id()}); - video_rtp_sender_->SetMediaChannel(video_media_channel()); + video_rtp_sender_->SetMediaChannel( + video_media_channel()->AsVideoSendChannel()); video_track_->set_enabled(true); // Sender is not ready to send (no SSRC) so no option should have been set. @@ -1745,9 +1874,9 @@ TEST_F(RtpSenderReceiverTest, RtpParameters parameters = video_rtp_sender_->GetParameters(); RtpParameters new_parameters = video_rtp_sender_->GetParametersInternal(); new_parameters.encodings[0].active = false; - video_rtp_sender_->SetParametersInternal(new_parameters); + video_rtp_sender_->SetParametersInternal(new_parameters, nullptr, true); new_parameters.encodings[0].active = true; - video_rtp_sender_->SetParametersInternal(new_parameters); + video_rtp_sender_->SetParametersInternal(new_parameters, nullptr, true); parameters.encodings[0].active = false; EXPECT_TRUE(video_rtp_sender_->SetParameters(parameters).ok()); } diff --git a/third_party/libwebrtc/pc/rtp_transceiver.cc b/third_party/libwebrtc/pc/rtp_transceiver.cc index 8b65dbf4feac..5f9e876b4cf3 100644 --- a/third_party/libwebrtc/pc/rtp_transceiver.cc +++ b/third_party/libwebrtc/pc/rtp_transceiver.cc @@ -339,13 +339,16 @@ void RtpTransceiver::PushNewMediaChannelAndDeleteChannel( } context()->worker_thread()->BlockingCall([&]() { // Push down the new media_channel, if any, otherwise clear it. - auto* media_channel = channel_ ? channel_->media_channel() : nullptr; + auto* media_send_channel = + channel_ ? channel_->media_send_channel() : nullptr; for (const auto& sender : senders_) { - sender->internal()->SetMediaChannel(media_channel); + sender->internal()->SetMediaChannel(media_send_channel); } + auto* media_receive_channel = + channel_ ? 
channel_->media_receive_channel() : nullptr; for (const auto& receiver : receivers_) { - receiver->internal()->SetMediaChannel(media_channel); + receiver->internal()->SetMediaChannel(media_receive_channel); } // Destroy the channel, if we had one, now _after_ updating the receivers diff --git a/third_party/libwebrtc/pc/rtp_transceiver_unittest.cc b/third_party/libwebrtc/pc/rtp_transceiver_unittest.cc index 7961747b6462..a2f2c362dd48 100644 --- a/third_party/libwebrtc/pc/rtp_transceiver_unittest.cc +++ b/third_party/libwebrtc/pc/rtp_transceiver_unittest.cc @@ -344,7 +344,8 @@ TEST_F(RtpTransceiverTestForHeaderExtensions, EXPECT_CALL(*mock_channel, SetFirstPacketReceivedCallback(_)); EXPECT_CALL(*mock_channel, media_type()) .WillRepeatedly(Return(cricket::MediaType::MEDIA_TYPE_AUDIO)); - EXPECT_CALL(*mock_channel, media_channel()).WillRepeatedly(Return(nullptr)); + EXPECT_CALL(*mock_channel, media_send_channel()) + .WillRepeatedly(Return(nullptr)); EXPECT_CALL(*mock_channel, mid()).WillRepeatedly(ReturnRef(content_name)); EXPECT_CALL(*mock_channel, SetRtpTransport(_)).WillRepeatedly(Return(true)); transceiver_->SetChannel(std::move(mock_channel), @@ -368,7 +369,8 @@ TEST_F(RtpTransceiverTestForHeaderExtensions, ReturnsNegotiatedHdrExts) { EXPECT_CALL(*mock_channel, SetFirstPacketReceivedCallback(_)); EXPECT_CALL(*mock_channel, media_type()) .WillRepeatedly(Return(cricket::MediaType::MEDIA_TYPE_AUDIO)); - EXPECT_CALL(*mock_channel, media_channel()).WillRepeatedly(Return(nullptr)); + EXPECT_CALL(*mock_channel, media_send_channel()) + .WillRepeatedly(Return(nullptr)); EXPECT_CALL(*mock_channel, mid()).WillRepeatedly(ReturnRef(content_name)); EXPECT_CALL(*mock_channel, SetRtpTransport(_)).WillRepeatedly(Return(true)); diff --git a/third_party/libwebrtc/pc/rtp_transmission_manager.cc b/third_party/libwebrtc/pc/rtp_transmission_manager.cc index a81f17a95be5..96b748b4b452 100644 --- a/third_party/libwebrtc/pc/rtp_transmission_manager.cc +++ b/third_party/libwebrtc/pc/rtp_transmission_manager.cc @@ -72,25 +72,48 @@ PeerConnectionObserver* RtpTransmissionManager::Observer() const { return observer_; } -cricket::VoiceMediaChannel* RtpTransmissionManager::voice_media_channel() - const { +cricket::VoiceMediaSendChannelInterface* +RtpTransmissionManager::voice_media_send_channel() const { RTC_DCHECK_RUN_ON(signaling_thread()); RTC_DCHECK(!IsUnifiedPlan()); auto* voice_channel = GetAudioTransceiver()->internal()->channel(); if (voice_channel) { - return voice_channel->voice_media_channel(); + return voice_channel->voice_media_send_channel(); } else { return nullptr; } } -cricket::VideoMediaChannel* RtpTransmissionManager::video_media_channel() - const { +cricket::VideoMediaSendChannelInterface* +RtpTransmissionManager::video_media_send_channel() const { RTC_DCHECK_RUN_ON(signaling_thread()); RTC_DCHECK(!IsUnifiedPlan()); auto* video_channel = GetVideoTransceiver()->internal()->channel(); if (video_channel) { - return video_channel->video_media_channel(); + return video_channel->video_media_send_channel(); + } else { + return nullptr; + } +} +cricket::VoiceMediaReceiveChannelInterface* +RtpTransmissionManager::voice_media_receive_channel() const { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK(!IsUnifiedPlan()); + auto* voice_channel = GetAudioTransceiver()->internal()->channel(); + if (voice_channel) { + return voice_channel->voice_media_receive_channel(); + } else { + return nullptr; + } +} + +cricket::VideoMediaReceiveChannelInterface* +RtpTransmissionManager::video_media_receive_channel() const 
{ + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK(!IsUnifiedPlan()); + auto* video_channel = GetVideoTransceiver()->internal()->channel(); + if (video_channel) { + return video_channel->video_media_receive_channel(); } else { return nullptr; } @@ -132,7 +155,7 @@ RtpTransmissionManager::AddTrackPlanB( init_send_encodings ? *init_send_encodings : std::vector()); if (track->kind() == MediaStreamTrackInterface::kAudioKind) { - new_sender->internal()->SetMediaChannel(voice_media_channel()); + new_sender->internal()->SetMediaChannel(voice_media_send_channel()); GetAudioTransceiver()->internal()->AddSender(new_sender); const RtpSenderInfo* sender_info = FindSenderInfo(local_audio_sender_infos_, @@ -142,7 +165,7 @@ RtpTransmissionManager::AddTrackPlanB( } } else { RTC_DCHECK_EQ(MediaStreamTrackInterface::kVideoKind, track->kind()); - new_sender->internal()->SetMediaChannel(video_media_channel()); + new_sender->internal()->SetMediaChannel(video_media_send_channel()); GetVideoTransceiver()->internal()->AddSender(new_sender); const RtpSenderInfo* sender_info = FindSenderInfo(local_video_sender_infos_, @@ -389,7 +412,7 @@ void RtpTransmissionManager::AddAudioTrack(AudioTrackInterface* track, auto new_sender = CreateSender(cricket::MEDIA_TYPE_AUDIO, track->id(), rtc::scoped_refptr(track), {stream->id()}, {}); - new_sender->internal()->SetMediaChannel(voice_media_channel()); + new_sender->internal()->SetMediaChannel(voice_media_send_channel()); GetAudioTransceiver()->internal()->AddSender(new_sender); // If the sender has already been configured in SDP, we call SetSsrc, // which will connect the sender to the underlying transport. This can @@ -436,7 +459,7 @@ void RtpTransmissionManager::AddVideoTrack(VideoTrackInterface* track, auto new_sender = CreateSender(cricket::MEDIA_TYPE_VIDEO, track->id(), rtc::scoped_refptr(track), {stream->id()}, {}); - new_sender->internal()->SetMediaChannel(video_media_channel()); + new_sender->internal()->SetMediaChannel(video_media_send_channel()); GetVideoTransceiver()->internal()->AddSender(new_sender); const RtpSenderInfo* sender_info = FindSenderInfo(local_video_sender_infos_, stream->id(), track->id()); @@ -468,7 +491,7 @@ void RtpTransmissionManager::CreateAudioReceiver( // the constructor taking stream IDs instead. auto audio_receiver = rtc::make_ref_counted( worker_thread(), remote_sender_info.sender_id, streams, IsUnifiedPlan(), - voice_media_channel()); + voice_media_receive_channel()); if (remote_sender_info.sender_id == kDefaultAudioSenderId) { audio_receiver->SetupUnsignaledMediaChannel(); } else { @@ -497,7 +520,7 @@ void RtpTransmissionManager::CreateVideoReceiver( remote_sender_info.sender_id == kDefaultVideoSenderId ? absl::nullopt : absl::optional(remote_sender_info.first_ssrc), - video_media_channel()); + video_media_receive_channel()); auto receiver = RtpReceiverProxyWithInternal::Create( signaling_thread(), worker_thread(), std::move(video_receiver)); diff --git a/third_party/libwebrtc/pc/rtp_transmission_manager.h b/third_party/libwebrtc/pc/rtp_transmission_manager.h index 06ce4feeaf6c..b41848c91735 100644 --- a/third_party/libwebrtc/pc/rtp_transmission_manager.h +++ b/third_party/libwebrtc/pc/rtp_transmission_manager.h @@ -204,8 +204,12 @@ class RtpTransmissionManager : public RtpSenderBase::SetStreamsObserver { // Plan B helpers for getting the voice/video media channels for the single // audio/video transceiver, if it exists. 
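// Below, the single voice/video channel getters are split into send and
// receive variants, mirroring the wider MediaChannel split in this patch:
// senders only ever see the send half, receivers only the receive half.
// Simplified sketch of that split; the interface and method names are
// stand-ins, not the real cricket classes.
#include <cstdint>

struct SendChannelInterface {
  virtual ~SendChannelInterface() = default;
  virtual bool SetRtpSendParameters(uint32_t ssrc) = 0;
};

struct ReceiveChannelInterface {
  virtual ~ReceiveChannelInterface() = default;
  virtual void SetReceiverDelay(uint32_t ssrc, int delay_ms) = 0;
};

// A concrete channel still implements both halves, but hands each side out
// through a narrow accessor so callers cannot reach the other direction.
class CombinedChannel : public SendChannelInterface,
                        public ReceiveChannelInterface {
 public:
  SendChannelInterface* AsSendChannel() { return this; }
  ReceiveChannelInterface* AsReceiveChannel() { return this; }

  bool SetRtpSendParameters(uint32_t /*ssrc*/) override { return true; }
  void SetReceiverDelay(uint32_t /*ssrc*/, int /*delay_ms*/) override {}
};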
- cricket::VoiceMediaChannel* voice_media_channel() const; - cricket::VideoMediaChannel* video_media_channel() const; + cricket::VoiceMediaSendChannelInterface* voice_media_send_channel() const; + cricket::VideoMediaSendChannelInterface* video_media_send_channel() const; + cricket::VoiceMediaReceiveChannelInterface* voice_media_receive_channel() + const; + cricket::VideoMediaReceiveChannelInterface* video_media_receive_channel() + const; private: rtc::Thread* signaling_thread() const { return context_->signaling_thread(); } diff --git a/third_party/libwebrtc/pc/sdp_offer_answer.cc b/third_party/libwebrtc/pc/sdp_offer_answer.cc index 758a50c2c911..8a9849d815d6 100644 --- a/third_party/libwebrtc/pc/sdp_offer_answer.cc +++ b/third_party/libwebrtc/pc/sdp_offer_answer.cc @@ -120,8 +120,7 @@ const char kSimulcastDisabled[] = "WebRTC.PeerConnection.Simulcast.Disabled"; static const int kRtcpCnameLength = 16; // The maximum length of the MID attribute. -// TODO(bugs.webrtc.org/12517) - reduce to 16 again. -static constexpr size_t kMidMaxSize = 32; +static constexpr size_t kMidMaxSize = 16; const char kDefaultStreamId[] = "default"; // NOTE: Duplicated in peer_connection.cc: @@ -408,25 +407,88 @@ bool VerifyIceUfragPwdPresent( RTCError ValidateMids(const cricket::SessionDescription& description) { std::set mids; - size_t max_length = 0; for (const cricket::ContentInfo& content : description.contents()) { if (content.name.empty()) { LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, "A media section is missing a MID attribute."); } - max_length = std::max(max_length, content.name.size()); if (content.name.size() > kMidMaxSize) { LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, "The MID attribute exceeds the maximum supported " - "length of 32 characters."); + "length of 16 characters."); } if (!mids.insert(content.name).second) { LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, "Duplicate a=mid value '" + content.name + "'."); } } - RTC_HISTOGRAM_COUNTS_LINEAR("WebRTC.PeerConnection.Mid.Size", max_length, 0, - 31, 32); + return RTCError::OK(); +} + +RTCError FindDuplicateCodecParameters( + const RtpCodecParameters codec_parameters, + std::map& payload_to_codec_parameters) { + auto existing_codec_parameters = + payload_to_codec_parameters.find(codec_parameters.payload_type); + if (existing_codec_parameters != payload_to_codec_parameters.end() && + codec_parameters != existing_codec_parameters->second) { + return RTCError(RTCErrorType::INVALID_PARAMETER, + "A BUNDLE group contains a codec collision for " + "payload_type='" + + rtc::ToString(codec_parameters.payload_type) + + ". All codecs must share the same type, " + "encoding name, clock rate and parameters."); + } + payload_to_codec_parameters.insert( + std::make_pair(codec_parameters.payload_type, codec_parameters)); + return RTCError::OK(); +} + +RTCError ValidateBundledPayloadTypes( + const cricket::SessionDescription& description) { + // https://www.rfc-editor.org/rfc/rfc8843#name-payload-type-pt-value-reuse + // ... all codecs associated with the payload type number MUST share an + // identical codec configuration. This means that the codecs MUST share + // the same media type, encoding name, clock rate, and any parameter + // that can affect the codec configuration and packetization. 
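// The new ValidateBundledPayloadTypes() check enforces this RFC 8843 rule by
// walking every m= section in a BUNDLE group and recording the first codec
// seen for each payload type; any later, different codec on the same number
// is a collision. Generic sketch of that duplicate check, with a simplified
// Codec type standing in for webrtc::RtpCodecParameters:
#include <map>
#include <optional>
#include <string>
#include <vector>

struct Codec {
  int payload_type = 0;
  std::string name;   // e.g. "opus" or "H264"
  int clock_rate = 0;
  std::string fmtp;   // parameters that affect packetization
  bool operator==(const Codec& o) const {
    return payload_type == o.payload_type && name == o.name &&
           clock_rate == o.clock_rate && fmtp == o.fmtp;
  }
};

// Returns a description of the first collision, or nullopt if the bundled
// codecs are consistent.
std::optional<std::string> FindPayloadTypeCollision(
    const std::vector<Codec>& bundled_codecs) {
  std::map<int, Codec> seen;
  for (const Codec& codec : bundled_codecs) {
    auto [it, inserted] = seen.emplace(codec.payload_type, codec);
    if (!inserted && !(it->second == codec)) {
      return "payload type " + std::to_string(codec.payload_type) +
             " is reused with a different codec configuration";
    }
  }
  return std::nullopt;
}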
+ std::map payload_to_codec_parameters; + std::vector bundle_groups = + description.GetGroupsByName(cricket::GROUP_TYPE_BUNDLE); + for (const cricket::ContentGroup* bundle_group : bundle_groups) { + std::map payload_to_codec_parameters; + for (const std::string& content_name : bundle_group->content_names()) { + const cricket::MediaContentDescription* media_description = + description.GetContentDescriptionByName(content_name); + if (!media_description) { + return RTCError(RTCErrorType::INVALID_PARAMETER, + "A BUNDLE group contains a MID='" + content_name + + "' matching no m= section."); + } + if (!media_description->has_codecs()) { + continue; + } + const auto type = media_description->type(); + if (type == cricket::MEDIA_TYPE_AUDIO) { + RTC_DCHECK(media_description->as_audio()); + for (const auto& c : media_description->as_audio()->codecs()) { + auto error = FindDuplicateCodecParameters( + c.ToCodecParameters(), payload_to_codec_parameters); + if (!error.ok()) { + return error; + } + } + } else if (type == cricket::MEDIA_TYPE_VIDEO) { + RTC_DCHECK(media_description->as_video()); + for (const auto& c : media_description->as_video()->codecs()) { + auto error = FindDuplicateCodecParameters( + c.ToCodecParameters(), payload_to_codec_parameters); + if (!error.ok()) { + return error; + } + } + } + } + } return RTCError::OK(); } @@ -3310,6 +3372,12 @@ RTCError SdpOfferAnswerHandler::ValidateSessionDescription( return RTCError(RTCErrorType::INVALID_PARAMETER, kSdpWithoutIceUfragPwd); } + // Validate bundle, payload types and that there are no collisions. + error = ValidateBundledPayloadTypes(*sdesc->description()); + // TODO(bugs.webrtc.org/14420): actually reject. + RTC_HISTOGRAM_BOOLEAN("WebRTC.PeerConnection.ValidBundledPayloadTypes", + error.ok()); + if (!pc_->ValidateBundleSettings(sdesc->description(), bundle_groups_by_mid)) { return RTCError(RTCErrorType::INVALID_PARAMETER, kBundleWithoutRtcpMux); diff --git a/third_party/libwebrtc/pc/sdp_offer_answer_unittest.cc b/third_party/libwebrtc/pc/sdp_offer_answer_unittest.cc index 4f16de43ac19..ecac7f8de8c4 100644 --- a/third_party/libwebrtc/pc/sdp_offer_answer_unittest.cc +++ b/third_party/libwebrtc/pc/sdp_offer_answer_unittest.cc @@ -114,4 +114,154 @@ TEST_F(SdpOfferAnswerTest, OnTrackReturnsProxiedObject) { transceiver->stopped(); } +TEST_F(SdpOfferAnswerTest, BundleRejectsCodecCollisionsAudioVideo) { + auto pc = CreatePeerConnection(); + std::string sdp = + "v=0\r\n" + "o=- 0 3 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "a=group:BUNDLE 0 1\r\n" + "a=fingerprint:sha-1 " + "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n" + "a=setup:actpass\r\n" + "a=ice-ufrag:ETEn\r\n" + "a=ice-pwd:OtSK0WpNtpUjkY4+86js7Z/l\r\n" + "m=audio 9 UDP/TLS/RTP/SAVPF 111\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtcp-mux\r\n" + "a=sendonly\r\n" + "a=mid:0\r\n" + "a=rtpmap:111 opus/48000/2\r\n" + "m=video 9 UDP/TLS/RTP/SAVPF 111\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtcp-mux\r\n" + "a=sendonly\r\n" + "a=mid:1\r\n" + "a=rtpmap:111 H264/90000\r\n" + "a=fmtp:111 " + "level-asymmetry-allowed=1;packetization-mode=0;profile-level-id=" + "42e01f\r\n"; + + auto desc = CreateSessionDescription(SdpType::kOffer, sdp); + ASSERT_NE(desc, nullptr); + RTCError error; + pc->SetRemoteDescription(std::move(desc), &error); + EXPECT_TRUE(error.ok()); + EXPECT_METRIC_EQ( + 1, webrtc::metrics::NumEvents( + "WebRTC.PeerConnection.ValidBundledPayloadTypes", false)); +} + +TEST_F(SdpOfferAnswerTest, BundleRejectsCodecCollisionsVideoFmtp) { + auto pc = CreatePeerConnection(); + 
std::string sdp = + "v=0\r\n" + "o=- 0 3 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "a=group:BUNDLE 0 1\r\n" + "a=fingerprint:sha-1 " + "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n" + "a=setup:actpass\r\n" + "a=ice-ufrag:ETEn\r\n" + "a=ice-pwd:OtSK0WpNtpUjkY4+86js7Z/l\r\n" + "m=video 9 UDP/TLS/RTP/SAVPF 111\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtcp-mux\r\n" + "a=sendonly\r\n" + "a=mid:0\r\n" + "a=rtpmap:111 H264/90000\r\n" + "a=fmtp:111 " + "level-asymmetry-allowed=1;packetization-mode=0;profile-level-id=" + "42e01f\r\n" + "m=video 9 UDP/TLS/RTP/SAVPF 111\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtcp-mux\r\n" + "a=sendonly\r\n" + "a=mid:1\r\n" + "a=rtpmap:111 H264/90000\r\n" + "a=fmtp:111 " + "level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=" + "42e01f\r\n"; + + auto desc = CreateSessionDescription(SdpType::kOffer, sdp); + ASSERT_NE(desc, nullptr); + RTCError error; + pc->SetRemoteDescription(std::move(desc), &error); + EXPECT_TRUE(error.ok()); + EXPECT_METRIC_EQ( + 1, webrtc::metrics::NumEvents( + "WebRTC.PeerConnection.ValidBundledPayloadTypes", false)); +} + +TEST_F(SdpOfferAnswerTest, BundleCodecCollisionInDifferentBundlesAllowed) { + auto pc = CreatePeerConnection(); + std::string sdp = + "v=0\r\n" + "o=- 0 3 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "a=group:BUNDLE 0\r\n" + "a=group:BUNDLE 1\r\n" + "a=fingerprint:sha-1 " + "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n" + "a=setup:actpass\r\n" + "a=ice-ufrag:ETEn\r\n" + "a=ice-pwd:OtSK0WpNtpUjkY4+86js7Z/l\r\n" + "m=video 9 UDP/TLS/RTP/SAVPF 111\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtcp-mux\r\n" + "a=sendonly\r\n" + "a=mid:0\r\n" + "a=rtpmap:111 H264/90000\r\n" + "a=fmtp:111 " + "level-asymmetry-allowed=1;packetization-mode=0;profile-level-id=" + "42e01f\r\n" + "m=video 9 UDP/TLS/RTP/SAVPF 111\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtcp-mux\r\n" + "a=sendonly\r\n" + "a=mid:1\r\n" + "a=rtpmap:111 H264/90000\r\n" + "a=fmtp:111 " + "level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=" + "42e01f\r\n"; + + auto desc = CreateSessionDescription(SdpType::kOffer, sdp); + ASSERT_NE(desc, nullptr); + RTCError error; + pc->SetRemoteDescription(std::move(desc), &error); + EXPECT_TRUE(error.ok()); + EXPECT_METRIC_EQ( + 0, webrtc::metrics::NumEvents( + "WebRTC.PeerConnection.ValidBundledPayloadTypes", false)); +} + +TEST_F(SdpOfferAnswerTest, LargeMidsAreRejected) { + auto pc = CreatePeerConnection(); + std::string sdp = + "v=0\r\n" + "o=- 0 3 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "a=fingerprint:sha-1 " + "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n" + "a=setup:actpass\r\n" + "a=ice-ufrag:ETEn\r\n" + "a=ice-pwd:OtSK0WpNtpUjkY4+86js7Z/l\r\n" + "m=video 9 UDP/TLS/RTP/SAVPF 111\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtcp-mux\r\n" + "a=sendonly\r\n" + "a=rtpmap:111 VP8/90000\r\n" + "a=mid:01234567890123456\r\n"; + auto desc = CreateSessionDescription(SdpType::kOffer, sdp); + ASSERT_NE(desc, nullptr); + RTCError error; + pc->SetRemoteDescription(std::move(desc), &error); + EXPECT_FALSE(error.ok()); + EXPECT_EQ(error.type(), RTCErrorType::INVALID_PARAMETER); +} + } // namespace webrtc diff --git a/third_party/libwebrtc/pc/session_description.h b/third_party/libwebrtc/pc/session_description.h index a7259e1f1dc0..f68e044db2a3 100644 --- a/third_party/libwebrtc/pc/session_description.h +++ b/third_party/libwebrtc/pc/session_description.h @@ -95,45 +95,41 @@ class MediaContentDescription { // `protocol` is the expected media transport protocol, such as RTP/AVPF, // 
RTP/SAVPF or SCTP/DTLS. - virtual std::string protocol() const { return protocol_; } + std::string protocol() const { return protocol_; } virtual void set_protocol(absl::string_view protocol) { protocol_ = std::string(protocol); } - virtual webrtc::RtpTransceiverDirection direction() const { - return direction_; - } - virtual void set_direction(webrtc::RtpTransceiverDirection direction) { + webrtc::RtpTransceiverDirection direction() const { return direction_; } + void set_direction(webrtc::RtpTransceiverDirection direction) { direction_ = direction; } - virtual bool rtcp_mux() const { return rtcp_mux_; } - virtual void set_rtcp_mux(bool mux) { rtcp_mux_ = mux; } + bool rtcp_mux() const { return rtcp_mux_; } + void set_rtcp_mux(bool mux) { rtcp_mux_ = mux; } - virtual bool rtcp_reduced_size() const { return rtcp_reduced_size_; } - virtual void set_rtcp_reduced_size(bool reduced_size) { + bool rtcp_reduced_size() const { return rtcp_reduced_size_; } + void set_rtcp_reduced_size(bool reduced_size) { rtcp_reduced_size_ = reduced_size; } // Indicates support for the remote network estimate packet type. This // functionality is experimental and subject to change without notice. - virtual bool remote_estimate() const { return remote_estimate_; } - virtual void set_remote_estimate(bool remote_estimate) { + bool remote_estimate() const { return remote_estimate_; } + void set_remote_estimate(bool remote_estimate) { remote_estimate_ = remote_estimate; } - virtual int bandwidth() const { return bandwidth_; } - virtual void set_bandwidth(int bandwidth) { bandwidth_ = bandwidth; } - virtual std::string bandwidth_type() const { return bandwidth_type_; } - virtual void set_bandwidth_type(std::string bandwidth_type) { + int bandwidth() const { return bandwidth_; } + void set_bandwidth(int bandwidth) { bandwidth_ = bandwidth; } + std::string bandwidth_type() const { return bandwidth_type_; } + void set_bandwidth_type(std::string bandwidth_type) { bandwidth_type_ = bandwidth_type; } - virtual const std::vector& cryptos() const { return cryptos_; } - virtual void AddCrypto(const CryptoParams& params) { - cryptos_.push_back(params); - } - virtual void set_cryptos(const std::vector& cryptos) { + const std::vector& cryptos() const { return cryptos_; } + void AddCrypto(const CryptoParams& params) { cryptos_.push_back(params); } + void set_cryptos(const std::vector& cryptos) { cryptos_ = cryptos; } @@ -142,19 +138,18 @@ class MediaContentDescription { // are present. // Use RtpExtension::FindHeaderExtensionByUri for finding and // RtpExtension::DeduplicateHeaderExtensions for filtering. - virtual const RtpHeaderExtensions& rtp_header_extensions() const { + const RtpHeaderExtensions& rtp_header_extensions() const { return rtp_header_extensions_; } - virtual void set_rtp_header_extensions( - const RtpHeaderExtensions& extensions) { + void set_rtp_header_extensions(const RtpHeaderExtensions& extensions) { rtp_header_extensions_ = extensions; rtp_header_extensions_set_ = true; } - virtual void AddRtpHeaderExtension(const webrtc::RtpExtension& ext) { + void AddRtpHeaderExtension(const webrtc::RtpExtension& ext) { rtp_header_extensions_.push_back(ext); rtp_header_extensions_set_ = true; } - virtual void ClearRtpHeaderExtensions() { + void ClearRtpHeaderExtensions() { rtp_header_extensions_.clear(); rtp_header_extensions_set_ = true; } @@ -163,14 +158,12 @@ class MediaContentDescription { // signal them. 
For now we assume an empty list means no signaling, but // provide the ClearRtpHeaderExtensions method to allow "no support" to be // clearly indicated (i.e. when derived from other information). - virtual bool rtp_header_extensions_set() const { - return rtp_header_extensions_set_; - } - virtual const StreamParamsVec& streams() const { return send_streams_; } + bool rtp_header_extensions_set() const { return rtp_header_extensions_set_; } + const StreamParamsVec& streams() const { return send_streams_; } // TODO(pthatcher): Remove this by giving mediamessage.cc access // to MediaContentDescription - virtual StreamParamsVec& mutable_streams() { return send_streams_; } - virtual void AddStream(const StreamParams& stream) { + StreamParamsVec& mutable_streams() { return send_streams_; } + void AddStream(const StreamParams& stream) { send_streams_.push_back(stream); } // Legacy streams have an ssrc, but nothing else. @@ -183,37 +176,36 @@ class MediaContentDescription { AddStream(sp); } - virtual uint32_t first_ssrc() const { + uint32_t first_ssrc() const { if (send_streams_.empty()) { return 0; } return send_streams_[0].first_ssrc(); } - virtual bool has_ssrcs() const { + bool has_ssrcs() const { if (send_streams_.empty()) { return false; } return send_streams_[0].has_ssrcs(); } - virtual void set_conference_mode(bool enable) { conference_mode_ = enable; } - virtual bool conference_mode() const { return conference_mode_; } + void set_conference_mode(bool enable) { conference_mode_ = enable; } + bool conference_mode() const { return conference_mode_; } // https://tools.ietf.org/html/rfc4566#section-5.7 // May be present at the media or session level of SDP. If present at both // levels, the media-level attribute overwrites the session-level one. - virtual void set_connection_address(const rtc::SocketAddress& address) { + void set_connection_address(const rtc::SocketAddress& address) { connection_address_ = address; } - virtual const rtc::SocketAddress& connection_address() const { + const rtc::SocketAddress& connection_address() const { return connection_address_; } // Determines if it's allowed to mix one- and two-byte rtp header extensions // within the same rtp stream. enum ExtmapAllowMixed { kNo, kSession, kMedia }; - virtual void set_extmap_allow_mixed_enum( - ExtmapAllowMixed new_extmap_allow_mixed) { + void set_extmap_allow_mixed_enum(ExtmapAllowMixed new_extmap_allow_mixed) { if (new_extmap_allow_mixed == kMedia && extmap_allow_mixed_enum_ == kSession) { // Do not downgrade from session level to media level. @@ -221,27 +213,24 @@ class MediaContentDescription { } extmap_allow_mixed_enum_ = new_extmap_allow_mixed; } - virtual ExtmapAllowMixed extmap_allow_mixed_enum() const { + ExtmapAllowMixed extmap_allow_mixed_enum() const { return extmap_allow_mixed_enum_; } - virtual bool extmap_allow_mixed() const { - return extmap_allow_mixed_enum_ != kNo; - } + bool extmap_allow_mixed() const { return extmap_allow_mixed_enum_ != kNo; } // Simulcast functionality. 
- virtual bool HasSimulcast() const { return !simulcast_.empty(); } - virtual SimulcastDescription& simulcast_description() { return simulcast_; } - virtual const SimulcastDescription& simulcast_description() const { + bool HasSimulcast() const { return !simulcast_.empty(); } + SimulcastDescription& simulcast_description() { return simulcast_; } + const SimulcastDescription& simulcast_description() const { return simulcast_; } - virtual void set_simulcast_description( - const SimulcastDescription& simulcast) { + void set_simulcast_description(const SimulcastDescription& simulcast) { simulcast_ = simulcast; } - virtual const std::vector& receive_rids() const { + const std::vector& receive_rids() const { return receive_rids_; } - virtual void set_receive_rids(const std::vector& rids) { + void set_receive_rids(const std::vector& rids) { receive_rids_ = rids; } @@ -283,10 +272,10 @@ class MediaContentDescriptionImpl : public MediaContentDescription { typedef C CodecType; // Codecs should be in preference order (most preferred codec first). - virtual const std::vector& codecs() const { return codecs_; } - virtual void set_codecs(const std::vector& codecs) { codecs_ = codecs; } + const std::vector& codecs() const { return codecs_; } + void set_codecs(const std::vector& codecs) { codecs_ = codecs; } bool has_codecs() const override { return !codecs_.empty(); } - virtual bool HasCodec(int id) { + bool HasCodec(int id) { bool found = false; for (typename std::vector::iterator iter = codecs_.begin(); iter != codecs_.end(); ++iter) { @@ -297,8 +286,8 @@ class MediaContentDescriptionImpl : public MediaContentDescription { } return found; } - virtual void AddCodec(const C& codec) { codecs_.push_back(codec); } - virtual void AddOrReplaceCodec(const C& codec) { + void AddCodec(const C& codec) { codecs_.push_back(codec); } + void AddOrReplaceCodec(const C& codec) { for (typename std::vector::iterator iter = codecs_.begin(); iter != codecs_.end(); ++iter) { if (iter->id == codec.id) { @@ -308,7 +297,7 @@ class MediaContentDescriptionImpl : public MediaContentDescription { } AddCodec(codec); } - virtual void AddCodecs(const std::vector& codecs) { + void AddCodecs(const std::vector& codecs) { typename std::vector::const_iterator codec; for (codec = codecs.begin(); codec != codecs.end(); ++codec) { AddCodec(*codec); diff --git a/third_party/libwebrtc/pc/test/mock_channel_interface.h b/third_party/libwebrtc/pc/test/mock_channel_interface.h index 97e873e72405..273e4a19f032 100644 --- a/third_party/libwebrtc/pc/test/mock_channel_interface.h +++ b/third_party/libwebrtc/pc/test/mock_channel_interface.h @@ -26,8 +26,24 @@ class MockChannelInterface : public cricket::ChannelInterface { public: MOCK_METHOD(cricket::MediaType, media_type, (), (const, override)); MOCK_METHOD(MediaChannel*, media_channel, (), (const, override)); - MOCK_METHOD(VoiceMediaChannel*, voice_media_channel, (), (const, override)); - MOCK_METHOD(VideoMediaChannel*, video_media_channel, (), (const, override)); + MOCK_METHOD(MediaChannel*, media_send_channel, (), (const, override)); + MOCK_METHOD(VoiceMediaChannel*, + voice_media_send_channel, + (), + (const, override)); + MOCK_METHOD(VideoMediaChannel*, + video_media_send_channel, + (), + (const, override)); + MOCK_METHOD(MediaChannel*, media_receive_channel, (), (const, override)); + MOCK_METHOD(VoiceMediaChannel*, + voice_media_receive_channel, + (), + (const, override)); + MOCK_METHOD(VideoMediaChannel*, + video_media_receive_channel, + (), + (const, override)); MOCK_METHOD(absl::string_view, 
transport_name, (), (const, override)); MOCK_METHOD(const std::string&, mid, (), (const, override)); MOCK_METHOD(void, Enable, (bool), (override)); diff --git a/third_party/libwebrtc/pc/test/mock_rtp_receiver_internal.h b/third_party/libwebrtc/pc/test/mock_rtp_receiver_internal.h index 779dcdcf086e..e2a81c0dd315 100644 --- a/third_party/libwebrtc/pc/test/mock_rtp_receiver_internal.h +++ b/third_party/libwebrtc/pc/test/mock_rtp_receiver_internal.h @@ -57,7 +57,10 @@ class MockRtpReceiverInternal : public RtpReceiverInternal { // RtpReceiverInternal methods. MOCK_METHOD(void, Stop, (), (override)); - MOCK_METHOD(void, SetMediaChannel, (cricket::MediaChannel*), (override)); + MOCK_METHOD(void, + SetMediaChannel, + (cricket::MediaReceiveChannelInterface*), + (override)); MOCK_METHOD(void, SetupMediaChannel, (uint32_t), (override)); MOCK_METHOD(void, SetupUnsignaledMediaChannel, (), (override)); MOCK_METHOD(uint32_t, ssrc, (), (const, override)); diff --git a/third_party/libwebrtc/pc/test/mock_rtp_sender_internal.h b/third_party/libwebrtc/pc/test/mock_rtp_sender_internal.h index 8b9e75a7fb1d..8ed0ede21baf 100644 --- a/third_party/libwebrtc/pc/test/mock_rtp_sender_internal.h +++ b/third_party/libwebrtc/pc/test/mock_rtp_sender_internal.h @@ -52,9 +52,13 @@ class MockRtpSenderInternal : public RtpSenderInternal { (), (const, override)); MOCK_METHOD(RTCError, SetParameters, (const RtpParameters&), (override)); - MOCK_METHOD(RTCError, + MOCK_METHOD(void, + SetParametersAsync, + (const RtpParameters&, SetParametersCallback), + (override)); + MOCK_METHOD(void, SetParametersInternal, - (const RtpParameters&), + (const RtpParameters&, SetParametersCallback, bool blocking), (override)); MOCK_METHOD(RTCError, SetParametersInternalWithAllLayers, @@ -87,7 +91,7 @@ class MockRtpSenderInternal : public RtpSenderInternal { (override)); // RtpSenderInternal methods. 
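SetParameters gains an asynchronous variant whose result is delivered through a callback instead of a return value. A hedged sketch of that callback shape, assuming webrtc::SetParametersCallback is a move-only invocable over webrtc::RTCError (as the mocks in this patch suggest):

webrtc::SetParametersCallback callback =
    [](webrtc::RTCError error) { RTC_DCHECK(error.ok()); };
// The callee signals completion by invoking the callback exactly once.
std::move(callback)(webrtc::RTCError::OK());
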
- MOCK_METHOD1(SetMediaChannel, void(cricket::MediaChannel*)); + MOCK_METHOD1(SetMediaChannel, void(cricket::MediaSendChannelInterface*)); MOCK_METHOD1(SetSsrc, void(uint32_t)); MOCK_METHOD1(set_stream_ids, void(const std::vector&)); MOCK_METHOD1(SetStreams, void(const std::vector&)); diff --git a/third_party/libwebrtc/pc/test/mock_voice_media_channel.h b/third_party/libwebrtc/pc/test/mock_voice_media_channel.h index 444ca5aed6ac..5a9b8802dd65 100644 --- a/third_party/libwebrtc/pc/test/mock_voice_media_channel.h +++ b/third_party/libwebrtc/pc/test/mock_voice_media_channel.h @@ -16,6 +16,7 @@ #include "api/call/audio_sink.h" #include "media/base/media_channel.h" +#include "media/base/media_channel_impl.h" #include "rtc_base/gunit.h" #include "test/gmock.h" #include "test/gtest.h" @@ -29,7 +30,10 @@ class MockVoiceMediaChannel : public VoiceMediaChannel { explicit MockVoiceMediaChannel(webrtc::TaskQueueBase* network_thread) : VoiceMediaChannel(network_thread) {} - MOCK_METHOD(void, SetInterface, (NetworkInterface * iface), (override)); + MOCK_METHOD(void, + SetInterface, + (MediaChannelNetworkInterface * iface), + (override)); MOCK_METHOD(void, OnPacketReceived, (rtc::CopyOnWriteBuffer packet, int64_t packet_time_us), @@ -64,14 +68,15 @@ class MockVoiceMediaChannel : public VoiceMediaChannel { (uint32_t ssrc, rtc::scoped_refptr frame_decryptor), (override)); - MOCK_METHOD(void, SetVideoCodecSwitchingEnabled, (bool enabled), (override)); MOCK_METHOD(webrtc::RtpParameters, GetRtpSendParameters, (uint32_t ssrc), (const, override)); MOCK_METHOD(webrtc::RTCError, SetRtpSendParameters, - (uint32_t ssrc, const webrtc::RtpParameters& parameters), + (uint32_t ssrc, + const webrtc::RtpParameters& parameters, + webrtc::SetParametersCallback callback), (override)); MOCK_METHOD( void, diff --git a/third_party/libwebrtc/pc/test/svc_e2e_tests.cc b/third_party/libwebrtc/pc/test/svc_e2e_tests.cc index 5d98f7313d18..dea076375823 100644 --- a/third_party/libwebrtc/pc/test/svc_e2e_tests.cc +++ b/third_party/libwebrtc/pc/test/svc_e2e_tests.cc @@ -13,6 +13,7 @@ #include #include "api/media_stream_interface.h" +#include "api/stats/rtcstats_objects.h" #include "api/test/create_network_emulation_manager.h" #include "api/test/create_peer_connection_quality_test_frame_generator.h" #include "api/test/create_peerconnection_quality_test_fixture.h" @@ -46,6 +47,7 @@ using ::cricket::kH264CodecName; using ::cricket::kVp8CodecName; using ::cricket::kVp9CodecName; using ::testing::Combine; +using ::testing::Optional; using ::testing::UnitTest; using ::testing::Values; using ::testing::ValuesIn; @@ -203,16 +205,32 @@ class SvcVideoQualityAnalyzer : public DefaultVideoQualityAnalyzer { input_image); } + void OnStatsReports( + absl::string_view pc_label, + const rtc::scoped_refptr& report) override { + // Extract the scalability mode reported in the stats. 
+ auto outbound_stats = report->GetStatsOfType(); + for (const auto& stat : outbound_stats) { + if (stat->scalability_mode.is_defined()) { + reported_scalability_mode_ = *stat->scalability_mode; + } + } + } + const SpatialTemporalLayerCounts& encoder_layers_seen() const { return encoder_layers_seen_; } const SpatialTemporalLayerCounts& decoder_layers_seen() const { return decoder_layers_seen_; } + const absl::optional reported_scalability_mode() const { + return reported_scalability_mode_; + } private: SpatialTemporalLayerCounts encoder_layers_seen_; SpatialTemporalLayerCounts decoder_layers_seen_; + absl::optional reported_scalability_mode_; }; MATCHER_P2(HasSpatialAndTemporalLayers, @@ -342,6 +360,8 @@ TEST_P(SvcTest, ScalabilityModeSupported) { SvcTestParameters().expected_spatial_layers, SvcTestParameters().expected_temporal_layers)); } + EXPECT_THAT(analyzer_ptr->reported_scalability_mode(), + Optional(SvcTestParameters().scalability_mode)); RTC_LOG(LS_INFO) << "Encoder layers seen: " << analyzer_ptr->encoder_layers_seen().size(); diff --git a/third_party/libwebrtc/pc/video_rtp_receiver.cc b/third_party/libwebrtc/pc/video_rtp_receiver.cc index 098ffde7cd1b..18dfc82a2e12 100644 --- a/third_party/libwebrtc/pc/video_rtp_receiver.cc +++ b/third_party/libwebrtc/pc/video_rtp_receiver.cc @@ -251,7 +251,8 @@ void VideoRtpReceiver::SetJitterBufferMinimumDelay( media_channel_->SetBaseMinimumPlayoutDelayMs(*ssrc_, delay_.GetMs()); } -void VideoRtpReceiver::SetMediaChannel(cricket::MediaChannel* media_channel) { +void VideoRtpReceiver::SetMediaChannel( + cricket::MediaReceiveChannelInterface* media_channel) { RTC_DCHECK_RUN_ON(worker_thread_); RTC_DCHECK(media_channel == nullptr || media_channel->media_type() == media_type()); @@ -259,7 +260,8 @@ void VideoRtpReceiver::SetMediaChannel(cricket::MediaChannel* media_channel) { SetMediaChannel_w(media_channel); } -void VideoRtpReceiver::SetMediaChannel_w(cricket::MediaChannel* media_channel) { +void VideoRtpReceiver::SetMediaChannel_w( + cricket::MediaReceiveChannelInterface* media_channel) { RTC_DCHECK_RUN_ON(worker_thread_); if (media_channel == media_channel_) return; @@ -274,7 +276,11 @@ void VideoRtpReceiver::SetMediaChannel_w(cricket::MediaChannel* media_channel) { SetEncodedSinkEnabled(false); } - media_channel_ = static_cast(media_channel); + if (media_channel) { + media_channel_ = media_channel->AsVideoReceiveChannel(); + } else { + media_channel_ = nullptr; + } if (media_channel_) { if (saved_generate_keyframe_) { @@ -310,8 +316,9 @@ std::vector VideoRtpReceiver::GetSources() const { return media_channel_->GetSources(*ssrc_); } -void VideoRtpReceiver::SetupMediaChannel(absl::optional ssrc, - cricket::MediaChannel* media_channel) { +void VideoRtpReceiver::SetupMediaChannel( + absl::optional ssrc, + cricket::MediaReceiveChannelInterface* media_channel) { RTC_DCHECK_RUN_ON(&signaling_thread_checker_); RTC_DCHECK(media_channel); MediaSourceInterface::SourceState state = source_->state(); diff --git a/third_party/libwebrtc/pc/video_rtp_receiver.h b/third_party/libwebrtc/pc/video_rtp_receiver.h index 8b1f3c4140c8..086246daaeec 100644 --- a/third_party/libwebrtc/pc/video_rtp_receiver.h +++ b/third_party/libwebrtc/pc/video_rtp_receiver.h @@ -102,7 +102,8 @@ class VideoRtpReceiver : public RtpReceiverInternal { void SetJitterBufferMinimumDelay( absl::optional delay_seconds) override; - void SetMediaChannel(cricket::MediaChannel* media_channel) override; + void SetMediaChannel( + cricket::MediaReceiveChannelInterface* media_channel) override; int 
AttachmentId() const override { return attachment_id_; } @@ -111,7 +112,7 @@ class VideoRtpReceiver : public RtpReceiverInternal { // Combines SetMediaChannel, SetupMediaChannel and // SetupUnsignaledMediaChannel. void SetupMediaChannel(absl::optional ssrc, - cricket::MediaChannel* media_channel); + cricket::MediaReceiveChannelInterface* media_channel); private: void RestartMediaChannel(absl::optional ssrc) @@ -121,7 +122,7 @@ class VideoRtpReceiver : public RtpReceiverInternal { RTC_RUN_ON(worker_thread_); void SetSink(rtc::VideoSinkInterface* sink) RTC_RUN_ON(worker_thread_); - void SetMediaChannel_w(cricket::MediaChannel* media_channel) + void SetMediaChannel_w(cricket::MediaReceiveChannelInterface* media_channel) RTC_RUN_ON(worker_thread_); // VideoRtpTrackSource::Callback @@ -148,8 +149,8 @@ class VideoRtpReceiver : public RtpReceiverInternal { rtc::Thread* const worker_thread_; const std::string id_; - cricket::VideoMediaChannel* media_channel_ RTC_GUARDED_BY(worker_thread_) = - nullptr; + cricket::VideoMediaReceiveChannelInterface* media_channel_ + RTC_GUARDED_BY(worker_thread_) = nullptr; absl::optional ssrc_ RTC_GUARDED_BY(worker_thread_); // `source_` is held here to be able to change the state of the source when // the VideoRtpReceiver is stopped. diff --git a/third_party/libwebrtc/pc/webrtc_sdp.cc b/third_party/libwebrtc/pc/webrtc_sdp.cc index 39b16901a1c8..69fa62ca37ff 100644 --- a/third_party/libwebrtc/pc/webrtc_sdp.cc +++ b/third_party/libwebrtc/pc/webrtc_sdp.cc @@ -1940,7 +1940,7 @@ void BuildRtpmap(const MediaContentDescription* media_desc, if (GetMinValue(maxptimes, &min_maxptime)) { AddAttributeLine(kCodecParamMaxPTime, min_maxptime, message); } - RTC_DCHECK(min_maxptime > max_minptime); + RTC_DCHECK_GE(min_maxptime, max_minptime); // Populate the ptime attribute with the smallest ptime or the largest // minptime, whichever is the largest, for all codecs under the same m-line. int ptime = INT_MAX; @@ -2333,40 +2333,57 @@ static bool ParseMsidAttribute(absl::string_view line, std::vector* stream_ids, std::string* track_id, SdpParseError* error) { - // https://datatracker.ietf.org/doc/draft-ietf-mmusic-msid/16/ - // a=msid: + // https://datatracker.ietf.org/doc/rfc8830/ + // a=msid: // msid-value = msid-id [ SP msid-appdata ] // msid-id = 1*64token-char ; see RFC 4566 // msid-appdata = 1*64token-char ; see RFC 4566 - std::string field1; - std::string new_stream_id; - std::string new_track_id; - if (!rtc::tokenize_first(line.substr(kLinePrefixLength), - kSdpDelimiterSpaceChar, &field1, &new_track_id)) { - const size_t expected_fields = 2; - return ParseFailedExpectFieldNum(line, expected_fields, error); + // Note that JSEP stipulates not sending msid-appdata so + // a=msid: + // is supported for backward compability reasons only. + std::vector fields; + size_t num_fields = rtc::tokenize(line.substr(kLinePrefixLength), + kSdpDelimiterSpaceChar, &fields); + if (num_fields < 1 || num_fields > 2) { + return ParseFailed(line, "Expected a stream ID and optionally a track ID", + error); } + if (num_fields == 1) { + if (line.back() == kSdpDelimiterSpaceChar) { + return ParseFailed(line, "Missing track ID in msid attribute.", error); + } + if (!track_id->empty()) { + fields.push_back(*track_id); + } else { + // Ending with an empty string track will cause a random track id + // to be generated later in the process. 
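For reference, the a=msid shapes the reworked parser accepts, matching the unit tests added further down (illustrative SDP lines, not part of the parser):

a=msid:stream_id track_id    (explicit stream ID and track ID)
a=msid:stream_id             (JSEP style; a random track ID is generated later)
a=msid:- track_id            ("-" marks a track that belongs to no MediaStream)
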
+ fields.push_back(""); + } + } + RTC_DCHECK_EQ(fields.size(), 2); - if (new_track_id.empty()) { - return ParseFailed(line, "Missing track ID in msid attribute.", error); - } // All track ids should be the same within an m section in a Unified Plan SDP. - if (!track_id->empty() && new_track_id.compare(*track_id) != 0) { + if (!track_id->empty() && track_id->compare(fields[1]) != 0) { return ParseFailed( line, "Two different track IDs in msid attribute in one m= section", error); } - *track_id = new_track_id; + *track_id = fields[1]; // msid: - if (!GetValue(field1, kAttributeMsid, &new_stream_id, error)) { + std::string new_stream_id; + if (!GetValue(fields[0], kAttributeMsid, &new_stream_id, error)) { return false; } if (new_stream_id.empty()) { return ParseFailed(line, "Missing stream ID in msid attribute.", error); } // The special value "-" indicates "no MediaStream". - if (new_stream_id.compare(kNoStreamMsid) != 0) { + if (new_stream_id.compare(kNoStreamMsid) != 0 && + !absl::c_any_of(*stream_ids, + [&new_stream_id](const std::string& existing_stream_id) { + return new_stream_id == existing_stream_id; + })) { stream_ids->push_back(new_stream_id); } return true; @@ -3330,6 +3347,10 @@ bool ParseContent(absl::string_view message, // still create a track. This isn't done for data media types because // StreamParams aren't used for SCTP streams, and RTP data channels don't // support unsignaled SSRCs. + // If track id was not specified, create a random one. + if (track_id.empty()) { + track_id = rtc::CreateRandomString(8); + } CreateTrackWithNoSsrcs(stream_ids, track_id, send_rids, &tracks); } diff --git a/third_party/libwebrtc/pc/webrtc_sdp_unittest.cc b/third_party/libwebrtc/pc/webrtc_sdp_unittest.cc index 7880af069380..9f1cfc9c96aa 100644 --- a/third_party/libwebrtc/pc/webrtc_sdp_unittest.cc +++ b/third_party/libwebrtc/pc/webrtc_sdp_unittest.cc @@ -4055,11 +4055,54 @@ TEST_F(WebRtcSdpTest, DeserializeInvalidPortInCandidateAttribute) { EXPECT_FALSE(SdpDeserialize(kSdpWithInvalidCandidatePort, &jdesc_output)); } +TEST_F(WebRtcSdpTest, DeserializeMsidAttributeWithStreamIdAndTrackId) { + std::string sdp = + "v=0\r\n" + "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "m=audio 9 RTP/SAVPF 111\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtpmap:111 opus/48000/2\r\n" + "a=msid:stream_id track_id\r\n"; + + JsepSessionDescription jdesc_output(kDummyType); + EXPECT_TRUE(SdpDeserialize(sdp, &jdesc_output)); + auto stream = jdesc_output.description() + ->contents()[0] + .media_description() + ->streams()[0]; + ASSERT_EQ(stream.stream_ids().size(), 1u); + EXPECT_EQ(stream.stream_ids()[0], "stream_id"); + EXPECT_EQ(stream.id, "track_id"); +} + +TEST_F(WebRtcSdpTest, DeserializeMsidAttributeWithEmptyStreamIdAndTrackId) { + std::string sdp = + "v=0\r\n" + "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "m=audio 9 RTP/SAVPF 111\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtpmap:111 opus/48000/2\r\n" + "a=msid:- track_id\r\n"; + + JsepSessionDescription jdesc_output(kDummyType); + EXPECT_TRUE(SdpDeserialize(sdp, &jdesc_output)); + auto stream = jdesc_output.description() + ->contents()[0] + .media_description() + ->streams()[0]; + ASSERT_EQ(stream.stream_ids().size(), 0u); + EXPECT_EQ(stream.id, "track_id"); +} + // Test that "a=msid" with a missing track ID is rejected and doesn't crash. 
// Regression test for: // https://bugs.chromium.org/p/chromium/issues/detail?id=686405 TEST_F(WebRtcSdpTest, DeserializeMsidAttributeWithMissingTrackId) { - static const char kSdpWithMissingTrackId[] = + std::string sdp = "v=0\r\n" "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n" "s=-\r\n" @@ -4070,11 +4113,226 @@ TEST_F(WebRtcSdpTest, DeserializeMsidAttributeWithMissingTrackId) { "a=msid:stream_id \r\n"; JsepSessionDescription jdesc_output(kDummyType); - EXPECT_FALSE(SdpDeserialize(kSdpWithMissingTrackId, &jdesc_output)); + EXPECT_FALSE(SdpDeserialize(sdp, &jdesc_output)); +} + +TEST_F(WebRtcSdpTest, DeserializeMsidAttributeWithoutColon) { + std::string sdp = + "v=0\r\n" + "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "m=audio 9 RTP/SAVPF 111\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtpmap:111 opus/48000/2\r\n" + "a=msid\r\n"; + + JsepSessionDescription jdesc_output(kDummyType); + EXPECT_FALSE(SdpDeserialize(sdp, &jdesc_output)); +} + +TEST_F(WebRtcSdpTest, DeserializeMsidAttributeWithoutAttributes) { + std::string sdp = + "v=0\r\n" + "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "m=audio 9 RTP/SAVPF 111\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtpmap:111 opus/48000/2\r\n" + "a=msid:\r\n"; + + JsepSessionDescription jdesc_output(kDummyType); + EXPECT_FALSE(SdpDeserialize(sdp, &jdesc_output)); +} + +TEST_F(WebRtcSdpTest, DeserializeMsidAttributeWithTooManySpaces) { + std::string sdp = + "v=0\r\n" + "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "m=audio 9 RTP/SAVPF 111\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtpmap:111 opus/48000/2\r\n" + "a=msid:stream_id track_id bogus\r\n"; + + JsepSessionDescription jdesc_output(kDummyType); + EXPECT_FALSE(SdpDeserialize(sdp, &jdesc_output)); +} + +TEST_F(WebRtcSdpTest, DeserializeMsidAttributeWithDifferentTrackIds) { + std::string sdp = + "v=0\r\n" + "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "m=audio 9 RTP/SAVPF 111\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtpmap:111 opus/48000/2\r\n" + "a=msid:stream_id track_id\r\n" + "a=msid:stream_id2 track_id2\r\n"; + + JsepSessionDescription jdesc_output(kDummyType); + EXPECT_FALSE(SdpDeserialize(sdp, &jdesc_output)); +} + +TEST_F(WebRtcSdpTest, DeserializeMsidAttributeWithoutAppData) { + std::string sdp = + "v=0\r\n" + "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "m=audio 9 RTP/SAVPF 111\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtpmap:111 opus/48000/2\r\n" + "a=msid:stream_id\r\n"; + + JsepSessionDescription jdesc_output(kDummyType); + EXPECT_TRUE(SdpDeserialize(sdp, &jdesc_output)); + auto stream = jdesc_output.description() + ->contents()[0] + .media_description() + ->streams()[0]; + ASSERT_EQ(stream.stream_ids().size(), 1u); + EXPECT_EQ(stream.stream_ids()[0], "stream_id"); + // Track id is randomly generated. 
+ EXPECT_NE(stream.id, ""); +} + +TEST_F(WebRtcSdpTest, DeserializeMsidAttributeWithoutAppDataTwoStreams) { + std::string sdp = + "v=0\r\n" + "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "m=audio 9 RTP/SAVPF 111\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtpmap:111 opus/48000/2\r\n" + "a=msid:stream_id\r\n" + "a=msid:stream_id2\r\n"; + + JsepSessionDescription jdesc_output(kDummyType); + EXPECT_TRUE(SdpDeserialize(sdp, &jdesc_output)); + auto stream = jdesc_output.description() + ->contents()[0] + .media_description() + ->streams()[0]; + ASSERT_EQ(stream.stream_ids().size(), 2u); + EXPECT_EQ(stream.stream_ids()[0], "stream_id"); + EXPECT_EQ(stream.stream_ids()[1], "stream_id2"); + // Track id is randomly generated. + EXPECT_NE(stream.id, ""); +} + +TEST_F(WebRtcSdpTest, DeserializeMsidAttributeWithoutAppDataDuplicate) { + std::string sdp = + "v=0\r\n" + "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "m=audio 9 RTP/SAVPF 111\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtpmap:111 opus/48000/2\r\n" + "a=msid:stream_id\r\n" + "a=msid:stream_id\r\n"; + + JsepSessionDescription jdesc_output(kDummyType); + // This is somewhat silly but accept it. Duplicates get filtered. + EXPECT_TRUE(SdpDeserialize(sdp, &jdesc_output)); + auto stream = jdesc_output.description() + ->contents()[0] + .media_description() + ->streams()[0]; + ASSERT_EQ(stream.stream_ids().size(), 1u); + EXPECT_EQ(stream.stream_ids()[0], "stream_id"); + // Track id is randomly generated. + EXPECT_NE(stream.id, ""); +} + +TEST_F(WebRtcSdpTest, DeserializeMsidAttributeWithoutAppDataMixed) { + std::string sdp = + "v=0\r\n" + "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "m=audio 9 RTP/SAVPF 111\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtpmap:111 opus/48000/2\r\n" + "a=msid:stream_id\r\n" + "a=msid:stream_id2 track_id\r\n"; + + JsepSessionDescription jdesc_output(kDummyType); + // Mixing the syntax like this is not a good idea but we accept it + // and the result is the second track_id. + EXPECT_TRUE(SdpDeserialize(sdp, &jdesc_output)); + auto stream = jdesc_output.description() + ->contents()[0] + .media_description() + ->streams()[0]; + ASSERT_EQ(stream.stream_ids().size(), 2u); + EXPECT_EQ(stream.stream_ids()[0], "stream_id"); + EXPECT_EQ(stream.stream_ids()[1], "stream_id2"); + + // Track id is taken from second line. + EXPECT_EQ(stream.id, "track_id"); +} + +TEST_F(WebRtcSdpTest, DeserializeMsidAttributeWithoutAppDataMixed2) { + std::string sdp = + "v=0\r\n" + "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "m=audio 9 RTP/SAVPF 111\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtpmap:111 opus/48000/2\r\n" + "a=msid:stream_id track_id\r\n" + "a=msid:stream_id2\r\n"; + + JsepSessionDescription jdesc_output(kDummyType); + // Mixing the syntax like this is not a good idea but we accept it + // and the result is the second track_id. + EXPECT_TRUE(SdpDeserialize(sdp, &jdesc_output)); + auto stream = jdesc_output.description() + ->contents()[0] + .media_description() + ->streams()[0]; + ASSERT_EQ(stream.stream_ids().size(), 2u); + EXPECT_EQ(stream.stream_ids()[0], "stream_id"); + EXPECT_EQ(stream.stream_ids()[1], "stream_id2"); + + // Track id is taken from first line. 
+ EXPECT_EQ(stream.id, "track_id"); +} + +TEST_F(WebRtcSdpTest, DeserializeMsidAttributeWithoutAppDataMixedNoStream) { + std::string sdp = + "v=0\r\n" + "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "m=audio 9 RTP/SAVPF 111\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtpmap:111 opus/48000/2\r\n" + "a=msid:stream_id\r\n" + "a=msid:- track_id\r\n"; + + JsepSessionDescription jdesc_output(kDummyType); + // This is somewhat undefined behavior but accept it and expect a single + // stream. + EXPECT_TRUE(SdpDeserialize(sdp, &jdesc_output)); + auto stream = jdesc_output.description() + ->contents()[0] + .media_description() + ->streams()[0]; + ASSERT_EQ(stream.stream_ids().size(), 1u); + EXPECT_EQ(stream.stream_ids()[0], "stream_id"); + EXPECT_EQ(stream.id, "track_id"); } TEST_F(WebRtcSdpTest, DeserializeMsidAttributeWithMissingStreamId) { - static const char kSdpWithMissingStreamId[] = + std::string sdp = "v=0\r\n" "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n" "s=-\r\n" @@ -4085,7 +4343,7 @@ TEST_F(WebRtcSdpTest, DeserializeMsidAttributeWithMissingStreamId) { "a=msid: track_id\r\n"; JsepSessionDescription jdesc_output(kDummyType); - EXPECT_FALSE(SdpDeserialize(kSdpWithMissingStreamId, &jdesc_output)); + EXPECT_FALSE(SdpDeserialize(sdp, &jdesc_output)); } // Tests that if both session-level address and media-level address exist, use diff --git a/third_party/libwebrtc/rtc_base/BUILD.gn b/third_party/libwebrtc/rtc_base/BUILD.gn index aa7c56584e8d..c3cec9653284 100644 --- a/third_party/libwebrtc/rtc_base/BUILD.gn +++ b/third_party/libwebrtc/rtc_base/BUILD.gn @@ -316,7 +316,10 @@ rtc_library("platform_thread_types") { "platform_thread_types.cc", "platform_thread_types.h", ] - deps = [ ":macromagic" ] + deps = [ + ":checks", + ":macromagic", + ] } rtc_source_set("refcount") { @@ -1745,9 +1748,14 @@ if (rtc_include_tests) { "numerics/percentile_filter_unittest.cc", "numerics/running_statistics_unittest.cc", "numerics/sequence_number_util_unittest.cc", + "numerics/sequence_numbers_conformance_test.cc", ] deps = [ ":rtc_numerics", + ":strong_alias", + ":timeutils", + "../modules:module_api_public", + "../net/dcsctp/common:sequence_numbers", "../test:test_main", "../test:test_support", ] diff --git a/third_party/libwebrtc/rtc_base/DEPS b/third_party/libwebrtc/rtc_base/DEPS index 3a77b5502a45..3882f5acb580 100644 --- a/third_party/libwebrtc/rtc_base/DEPS +++ b/third_party/libwebrtc/rtc_base/DEPS @@ -12,4 +12,7 @@ specific_include_rules = { "gunit\.h": [ "+testing/base/public/gunit.h" ], + "sequence_numbers_conformance_test\.cc": [ + "+net/dcsctp/common/sequence_numbers.h", + ], } diff --git a/third_party/libwebrtc/rtc_base/byte_order.h b/third_party/libwebrtc/rtc_base/byte_order.h index e28079d735e4..382511daeb15 100644 --- a/third_party/libwebrtc/rtc_base/byte_order.h +++ b/third_party/libwebrtc/rtc_base/byte_order.h @@ -13,6 +13,8 @@ #include +#include + #if defined(WEBRTC_POSIX) && !defined(__native_client__) #include #endif @@ -109,74 +111,69 @@ inline uint8_t Get8(const void* memory, size_t offset) { } inline void SetBE16(void* memory, uint16_t v) { - *static_cast(memory) = htobe16(v); + uint16_t val = htobe16(v); + memcpy(memory, &val, sizeof(val)); } inline void SetBE32(void* memory, uint32_t v) { - *static_cast(memory) = htobe32(v); + uint32_t val = htobe32(v); + memcpy(memory, &val, sizeof(val)); } inline void SetBE64(void* memory, uint64_t v) { -#ifdef WEBRTC_WIN - //Mozilla: because we support Win7, htonll is not visible to us - 
Set8(memory, 0, static_cast<uint8_t>(v >> 56));
-  Set8(memory, 1, static_cast<uint8_t>(v >> 48));
-  Set8(memory, 2, static_cast<uint8_t>(v >> 40));
-  Set8(memory, 3, static_cast<uint8_t>(v >> 32));
-  Set8(memory, 4, static_cast<uint8_t>(v >> 24));
-  Set8(memory, 5, static_cast<uint8_t>(v >> 16));
-  Set8(memory, 6, static_cast<uint8_t>(v >> 8));
-  Set8(memory, 7, static_cast<uint8_t>(v >> 0));
-#else
-  *static_cast<uint64_t*>(memory) = htobe64(v);
-#endif
+  uint64_t val = htobe64(v);
+  memcpy(memory, &val, sizeof(val));
 }

 inline uint16_t GetBE16(const void* memory) {
-  return be16toh(*static_cast<const uint16_t*>(memory));
+  uint16_t val;
+  memcpy(&val, memory, sizeof(val));
+  return be16toh(val);
 }

 inline uint32_t GetBE32(const void* memory) {
-  return be32toh(*static_cast<const uint32_t*>(memory));
+  uint32_t val;
+  memcpy(&val, memory, sizeof(val));
+  return be32toh(val);
 }

 inline uint64_t GetBE64(const void* memory) {
-#ifdef WEBRTC_WIN
-  return (static_cast<uint64_t>(Get8(memory, 0)) << 56) |
-         (static_cast<uint64_t>(Get8(memory, 1)) << 48) |
-         (static_cast<uint64_t>(Get8(memory, 2)) << 40) |
-         (static_cast<uint64_t>(Get8(memory, 3)) << 32) |
-         (static_cast<uint64_t>(Get8(memory, 4)) << 24) |
-         (static_cast<uint64_t>(Get8(memory, 5)) << 16) |
-         (static_cast<uint64_t>(Get8(memory, 6)) << 8) |
-         (static_cast<uint64_t>(Get8(memory, 7)) << 0);
-#else
-  return be64toh(*static_cast<const uint64_t*>(memory));
-#endif
+  uint64_t val;
+  memcpy(&val, memory, sizeof(val));
+  return be64toh(val);
 }

 inline void SetLE16(void* memory, uint16_t v) {
-  *static_cast<uint16_t*>(memory) = htole16(v);
+  uint16_t val = htole16(v);
+  memcpy(memory, &val, sizeof(val));
 }

 inline void SetLE32(void* memory, uint32_t v) {
-  *static_cast<uint32_t*>(memory) = htole32(v);
+  uint32_t val = htole32(v);
+  memcpy(memory, &val, sizeof(val));
 }

 inline void SetLE64(void* memory, uint64_t v) {
-  *static_cast<uint64_t*>(memory) = htole64(v);
+  uint64_t val = htole64(v);
+  memcpy(memory, &val, sizeof(val));
 }

 inline uint16_t GetLE16(const void* memory) {
-  return le16toh(*static_cast<const uint16_t*>(memory));
+  uint16_t val;
+  memcpy(&val, memory, sizeof(val));
+  return le16toh(val);
 }

 inline uint32_t GetLE32(const void* memory) {
-  return le32toh(*static_cast<const uint32_t*>(memory));
+  uint32_t val;
+  memcpy(&val, memory, sizeof(val));
+  return le32toh(val);
 }

 inline uint64_t GetLE64(const void* memory) {
-  return le64toh(*static_cast<const uint64_t*>(memory));
+  uint64_t val;
+  memcpy(&val, memory, sizeof(val));
+  return le64toh(val);
 }

 // Check if the current host is big endian.
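The rewritten byte-order helpers above go through memcpy rather than dereferencing a reinterpreted pointer, so reads and writes stay well-defined on buffers with arbitrary alignment. A small usage sketch (hypothetical buffer and function name, not from this patch):

#include "rtc_base/byte_order.h"
#include "rtc_base/checks.h"

void ByteOrderSketch() {
  uint8_t packet[8] = {0};
  // Offset 1 is neither 2- nor 4-byte aligned; the memcpy-based helpers are
  // still fine there, unlike the removed pointer casts.
  rtc::SetBE16(packet + 1, 0x1234);
  RTC_DCHECK_EQ(rtc::GetBE16(packet + 1), 0x1234);
  rtc::SetLE32(packet + 1, 0xAABBCCDDu);
  RTC_DCHECK_EQ(rtc::GetLE32(packet + 1), 0xAABBCCDDu);
}
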
@@ -197,13 +194,7 @@ inline uint32_t HostToNetwork32(uint32_t n) { } inline uint64_t HostToNetwork64(uint64_t n) { -#ifdef WEBRTC_WIN - uint64_t result; - SetBE64(&result, n); - return result; -#else return htobe64(n); -#endif } inline uint16_t NetworkToHost16(uint16_t n) { @@ -215,11 +206,7 @@ inline uint32_t NetworkToHost32(uint32_t n) { } inline uint64_t NetworkToHost64(uint64_t n) { -#ifdef WEBRTC_WIN - return GetBE64(&n); -#else return be64toh(n); -#endif } } // namespace rtc diff --git a/third_party/libwebrtc/rtc_base/experiments/encoder_info_settings.cc b/third_party/libwebrtc/rtc_base/experiments/encoder_info_settings.cc index 8af52d66461a..5f0bf2d7aca5 100644 --- a/third_party/libwebrtc/rtc_base/experiments/encoder_info_settings.cc +++ b/third_party/libwebrtc/rtc_base/experiments/encoder_info_settings.cc @@ -188,7 +188,7 @@ EncoderInfoSettings::EncoderInfoSettings(absl::string_view name) resolution_bitrate_limits_ = ToResolutionBitrateLimits(bitrate_limits.Get()); } -absl::optional EncoderInfoSettings::requested_resolution_alignment() +absl::optional EncoderInfoSettings::requested_resolution_alignment() const { if (requested_resolution_alignment_ && requested_resolution_alignment_.Value() < 1) { diff --git a/third_party/libwebrtc/rtc_base/experiments/encoder_info_settings.h b/third_party/libwebrtc/rtc_base/experiments/encoder_info_settings.h index d450697f47d8..f4227ed63192 100644 --- a/third_party/libwebrtc/rtc_base/experiments/encoder_info_settings.h +++ b/third_party/libwebrtc/rtc_base/experiments/encoder_info_settings.h @@ -33,7 +33,7 @@ class EncoderInfoSettings { int max_bitrate_bps = 0; // The maximum bitrate. }; - absl::optional requested_resolution_alignment() const; + absl::optional requested_resolution_alignment() const; bool apply_alignment_to_all_simulcast_layers() const { return apply_alignment_to_all_simulcast_layers_.Get(); } @@ -62,7 +62,7 @@ class EncoderInfoSettings { explicit EncoderInfoSettings(absl::string_view name); private: - FieldTrialOptional requested_resolution_alignment_; + FieldTrialOptional requested_resolution_alignment_; FieldTrialFlag apply_alignment_to_all_simulcast_layers_; std::vector resolution_bitrate_limits_; }; diff --git a/third_party/libwebrtc/rtc_base/experiments/encoder_info_settings_unittest.cc b/third_party/libwebrtc/rtc_base/experiments/encoder_info_settings_unittest.cc index aabb68718c37..929c7778212b 100644 --- a/third_party/libwebrtc/rtc_base/experiments/encoder_info_settings_unittest.cc +++ b/third_party/libwebrtc/rtc_base/experiments/encoder_info_settings_unittest.cc @@ -38,7 +38,7 @@ TEST(SimulcastEncoderAdapterSettingsTest, GetResolutionAlignment) { "requested_resolution_alignment:2/"); SimulcastEncoderAdapterEncoderInfoSettings settings; - EXPECT_EQ(2, settings.requested_resolution_alignment()); + EXPECT_EQ(2u, settings.requested_resolution_alignment()); EXPECT_FALSE(settings.apply_alignment_to_all_simulcast_layers()); EXPECT_TRUE(settings.resolution_bitrate_limits().empty()); } @@ -50,7 +50,7 @@ TEST(SimulcastEncoderAdapterSettingsTest, GetApplyAlignment) { "apply_alignment_to_all_simulcast_layers/"); SimulcastEncoderAdapterEncoderInfoSettings settings; - EXPECT_EQ(3, settings.requested_resolution_alignment()); + EXPECT_EQ(3u, settings.requested_resolution_alignment()); EXPECT_TRUE(settings.apply_alignment_to_all_simulcast_layers()); EXPECT_TRUE(settings.resolution_bitrate_limits().empty()); } @@ -94,9 +94,9 @@ TEST(EncoderSettingsTest, CommonSettingsUsedIfEncoderNameUnspecified) { 
"WebRTC-GetEncoderInfoOverride/requested_resolution_alignment:3/"); LibvpxVp8EncoderInfoSettings vp8_settings; - EXPECT_EQ(2, vp8_settings.requested_resolution_alignment()); + EXPECT_EQ(2u, vp8_settings.requested_resolution_alignment()); LibvpxVp9EncoderInfoSettings vp9_settings; - EXPECT_EQ(3, vp9_settings.requested_resolution_alignment()); + EXPECT_EQ(3u, vp9_settings.requested_resolution_alignment()); } } // namespace webrtc diff --git a/third_party/libwebrtc/rtc_base/experiments/field_trial_parser_unittest.cc b/third_party/libwebrtc/rtc_base/experiments/field_trial_parser_unittest.cc index ea423526ef3e..33d275a31c71 100644 --- a/third_party/libwebrtc/rtc_base/experiments/field_trial_parser_unittest.cc +++ b/third_party/libwebrtc/rtc_base/experiments/field_trial_parser_unittest.cc @@ -30,17 +30,15 @@ struct DummyExperiment { FieldTrialParameter hash = FieldTrialParameter("h", "a80"); - field_trial::ScopedGlobalFieldTrialsForTesting g{{kDummyExperiment}}; + field_trial::FieldTrialsAllowedInScopeForTesting k{{kDummyExperiment}}; + + DummyExperiment() + : DummyExperiment(field_trial::FindFullName(kDummyExperiment)) {} explicit DummyExperiment(absl::string_view field_trial) { ParseFieldTrial({&enabled, &factor, &retries, &size, &ping, &hash}, field_trial); } - DummyExperiment() { - std::string trial_string = field_trial::FindFullName(kDummyExperiment); - ParseFieldTrial({&enabled, &factor, &retries, &size, &ping, &hash}, - trial_string); - } }; enum class CustomEnum { diff --git a/third_party/libwebrtc/rtc_base/memory/fifo_buffer.cc b/third_party/libwebrtc/rtc_base/memory/fifo_buffer.cc index 116badd91578..c159bc979faa 100644 --- a/third_party/libwebrtc/rtc_base/memory/fifo_buffer.cc +++ b/third_party/libwebrtc/rtc_base/memory/fifo_buffer.cc @@ -49,23 +49,20 @@ StreamState FifoBuffer::GetState() const { return state_; } -StreamResult FifoBuffer::Read(void* buffer, - size_t bytes, - size_t* bytes_read, - int* error) { +StreamResult FifoBuffer::Read(rtc::ArrayView buffer, + size_t& bytes_read, + int& error) { webrtc::MutexLock lock(&mutex_); const bool was_writable = data_length_ < buffer_length_; size_t copy = 0; - StreamResult result = ReadLocked(buffer, bytes, ©); + StreamResult result = ReadLocked(buffer.data(), buffer.size(), ©); if (result == SR_SUCCESS) { // If read was successful then adjust the read position and number of // bytes buffered. read_position_ = (read_position_ + copy) % buffer_length_; data_length_ -= copy; - if (bytes_read) { - *bytes_read = copy; - } + bytes_read = copy; // if we were full before, and now we're not, post an event if (!was_writable && copy > 0) { @@ -75,23 +72,19 @@ StreamResult FifoBuffer::Read(void* buffer, return result; } -StreamResult FifoBuffer::Write(const void* buffer, - size_t bytes, - size_t* bytes_written, - int* error) { +StreamResult FifoBuffer::Write(rtc::ArrayView buffer, + size_t& bytes_written, + int& error) { webrtc::MutexLock lock(&mutex_); const bool was_readable = (data_length_ > 0); size_t copy = 0; - StreamResult result = WriteLocked(buffer, bytes, ©); + StreamResult result = WriteLocked(buffer.data(), buffer.size(), ©); if (result == SR_SUCCESS) { // If write was successful then adjust the number of readable bytes. 
data_length_ += copy; - if (bytes_written) { - *bytes_written = copy; - } - + bytes_written = copy; // if we didn't have any data to read before, and now we do, post an event if (!was_readable && copy > 0) { PostEvent(SE_READ, 0); diff --git a/third_party/libwebrtc/rtc_base/memory/fifo_buffer.h b/third_party/libwebrtc/rtc_base/memory/fifo_buffer.h index aa3164f09a3b..a225c688ac1b 100644 --- a/third_party/libwebrtc/rtc_base/memory/fifo_buffer.h +++ b/third_party/libwebrtc/rtc_base/memory/fifo_buffer.h @@ -37,14 +37,12 @@ class FifoBuffer final : public StreamInterface { // StreamInterface methods StreamState GetState() const override; - StreamResult Read(void* buffer, - size_t bytes, - size_t* bytes_read, - int* error) override; - StreamResult Write(const void* buffer, - size_t bytes, - size_t* bytes_written, - int* error) override; + StreamResult Read(rtc::ArrayView buffer, + size_t& bytes_read, + int& error) override; + StreamResult Write(rtc::ArrayView buffer, + size_t& bytes_written, + int& error) override; void Close() override; // Seek to a byte offset from the beginning of the stream. Returns false if diff --git a/third_party/libwebrtc/rtc_base/memory/fifo_buffer_unittest.cc b/third_party/libwebrtc/rtc_base/memory/fifo_buffer_unittest.cc index 0e44bf2095b1..27eb8d8b4586 100644 --- a/third_party/libwebrtc/rtc_base/memory/fifo_buffer_unittest.cc +++ b/third_party/libwebrtc/rtc_base/memory/fifo_buffer_unittest.cc @@ -19,8 +19,8 @@ namespace rtc { TEST(FifoBufferTest, TestAll) { rtc::AutoThread main_thread; const size_t kSize = 16; - const char in[kSize * 2 + 1] = "0123456789ABCDEFGHIJKLMNOPQRSTUV"; - char out[kSize * 2]; + const uint8_t in[kSize * 2 + 1] = "0123456789ABCDEFGHIJKLMNOPQRSTUV"; + uint8_t out[kSize * 2]; void* p; const void* q; size_t bytes; @@ -28,49 +28,58 @@ TEST(FifoBufferTest, TestAll) { // Test assumptions about base state EXPECT_EQ(SS_OPEN, buf.GetState()); - EXPECT_EQ(SR_BLOCK, buf.Read(out, kSize, &bytes, nullptr)); + int error; + EXPECT_EQ(SR_BLOCK, buf.Read(rtc::MakeArrayView(out, kSize), bytes, error)); EXPECT_TRUE(nullptr != buf.GetWriteBuffer(&bytes)); EXPECT_EQ(kSize, bytes); buf.ConsumeWriteBuffer(0); // Try a full write - EXPECT_EQ(SR_SUCCESS, buf.Write(in, kSize, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, buf.Write(rtc::MakeArrayView(in, kSize), bytes, error)); EXPECT_EQ(kSize, bytes); // Try a write that should block - EXPECT_EQ(SR_BLOCK, buf.Write(in, kSize, &bytes, nullptr)); + EXPECT_EQ(SR_BLOCK, buf.Write(rtc::MakeArrayView(in, kSize), bytes, error)); // Try a full read - EXPECT_EQ(SR_SUCCESS, buf.Read(out, kSize, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, buf.Read(rtc::MakeArrayView(out, kSize), bytes, error)); EXPECT_EQ(kSize, bytes); EXPECT_EQ(0, memcmp(in, out, kSize)); // Try a read that should block - EXPECT_EQ(SR_BLOCK, buf.Read(out, kSize, &bytes, nullptr)); + EXPECT_EQ(SR_BLOCK, buf.Read(rtc::MakeArrayView(out, kSize), bytes, error)); // Try a too-big write - EXPECT_EQ(SR_SUCCESS, buf.Write(in, kSize * 2, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, + buf.Write(rtc::MakeArrayView(in, kSize * 2), bytes, error)); EXPECT_EQ(bytes, kSize); // Try a too-big read - EXPECT_EQ(SR_SUCCESS, buf.Read(out, kSize * 2, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, + buf.Read(rtc::MakeArrayView(out, kSize * 2), bytes, error)); EXPECT_EQ(kSize, bytes); EXPECT_EQ(0, memcmp(in, out, kSize)); // Try some small writes and reads - EXPECT_EQ(SR_SUCCESS, buf.Write(in, kSize / 2, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, + buf.Write(rtc::MakeArrayView(in, kSize / 2), 
bytes, error)); EXPECT_EQ(kSize / 2, bytes); - EXPECT_EQ(SR_SUCCESS, buf.Read(out, kSize / 2, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, + buf.Read(rtc::MakeArrayView(out, kSize / 2), bytes, error)); EXPECT_EQ(kSize / 2, bytes); EXPECT_EQ(0, memcmp(in, out, kSize / 2)); - EXPECT_EQ(SR_SUCCESS, buf.Write(in, kSize / 2, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, + buf.Write(rtc::MakeArrayView(in, kSize / 2), bytes, error)); EXPECT_EQ(kSize / 2, bytes); - EXPECT_EQ(SR_SUCCESS, buf.Write(in, kSize / 2, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, + buf.Write(rtc::MakeArrayView(in, kSize / 2), bytes, error)); EXPECT_EQ(kSize / 2, bytes); - EXPECT_EQ(SR_SUCCESS, buf.Read(out, kSize / 2, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, + buf.Read(rtc::MakeArrayView(out, kSize / 2), bytes, error)); EXPECT_EQ(kSize / 2, bytes); EXPECT_EQ(0, memcmp(in, out, kSize / 2)); - EXPECT_EQ(SR_SUCCESS, buf.Read(out, kSize / 2, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, + buf.Read(rtc::MakeArrayView(out, kSize / 2), bytes, error)); EXPECT_EQ(kSize / 2, bytes); EXPECT_EQ(0, memcmp(in, out, kSize / 2)); @@ -82,22 +91,29 @@ TEST(FifoBufferTest, TestAll) { // XXXXWWWWWWWWXXXX 4567012345670123 // RRRRXXXXXXXXRRRR ....01234567.... // ....RRRRRRRR.... ................ - EXPECT_EQ(SR_SUCCESS, buf.Write(in, kSize * 3 / 4, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, + buf.Write(rtc::MakeArrayView(in, kSize * 3 / 4), bytes, error)); EXPECT_EQ(kSize * 3 / 4, bytes); - EXPECT_EQ(SR_SUCCESS, buf.Read(out, kSize / 2, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, + buf.Read(rtc::MakeArrayView(out, kSize / 2), bytes, error)); EXPECT_EQ(kSize / 2, bytes); EXPECT_EQ(0, memcmp(in, out, kSize / 2)); - EXPECT_EQ(SR_SUCCESS, buf.Write(in, kSize / 2, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, + buf.Write(rtc::MakeArrayView(in, kSize / 2), bytes, error)); EXPECT_EQ(kSize / 2, bytes); - EXPECT_EQ(SR_SUCCESS, buf.Read(out, kSize / 4, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, + buf.Read(rtc::MakeArrayView(out, kSize / 4), bytes, error)); EXPECT_EQ(kSize / 4, bytes); EXPECT_EQ(0, memcmp(in + kSize / 2, out, kSize / 4)); - EXPECT_EQ(SR_SUCCESS, buf.Write(in, kSize / 2, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, + buf.Write(rtc::MakeArrayView(in, kSize / 2), bytes, error)); EXPECT_EQ(kSize / 2, bytes); - EXPECT_EQ(SR_SUCCESS, buf.Read(out, kSize / 2, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, + buf.Read(rtc::MakeArrayView(out, kSize / 2), bytes, error)); EXPECT_EQ(kSize / 2, bytes); EXPECT_EQ(0, memcmp(in, out, kSize / 2)); - EXPECT_EQ(SR_SUCCESS, buf.Read(out, kSize / 2, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, + buf.Read(rtc::MakeArrayView(out, kSize / 2), bytes, error)); EXPECT_EQ(kSize / 2, bytes); EXPECT_EQ(0, memcmp(in, out, kSize / 2)); @@ -106,16 +122,16 @@ TEST(FifoBufferTest, TestAll) { buf.ConsumeWriteBuffer(0); // Try using GetReadData to do a full read - EXPECT_EQ(SR_SUCCESS, buf.Write(in, kSize, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, buf.Write(rtc::MakeArrayView(in, kSize), bytes, error)); q = buf.GetReadData(&bytes); EXPECT_TRUE(nullptr != q); EXPECT_EQ(kSize, bytes); EXPECT_EQ(0, memcmp(q, in, kSize)); buf.ConsumeReadData(kSize); - EXPECT_EQ(SR_BLOCK, buf.Read(out, kSize, &bytes, nullptr)); + EXPECT_EQ(SR_BLOCK, buf.Read(rtc::MakeArrayView(out, kSize), bytes, error)); // Try using GetReadData to do some small reads - EXPECT_EQ(SR_SUCCESS, buf.Write(in, kSize, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, buf.Write(rtc::MakeArrayView(in, kSize), bytes, error)); q = buf.GetReadData(&bytes); EXPECT_TRUE(nullptr != q); EXPECT_EQ(kSize, 
bytes); @@ -126,7 +142,7 @@ TEST(FifoBufferTest, TestAll) { EXPECT_EQ(kSize / 2, bytes); EXPECT_EQ(0, memcmp(q, in + kSize / 2, kSize / 2)); buf.ConsumeReadData(kSize / 2); - EXPECT_EQ(SR_BLOCK, buf.Read(out, kSize, &bytes, nullptr)); + EXPECT_EQ(SR_BLOCK, buf.Read(rtc::MakeArrayView(out, kSize), bytes, error)); // Try using GetReadData in a wraparound case // WWWWWWWWWWWWWWWW 0123456789ABCDEF @@ -134,9 +150,11 @@ TEST(FifoBufferTest, TestAll) { // WWWWWWWW....XXXX 01234567....CDEF // ............RRRR 01234567........ // RRRRRRRR........ ................ - EXPECT_EQ(SR_SUCCESS, buf.Write(in, kSize, &bytes, nullptr)); - EXPECT_EQ(SR_SUCCESS, buf.Read(out, kSize * 3 / 4, &bytes, nullptr)); - EXPECT_EQ(SR_SUCCESS, buf.Write(in, kSize / 2, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, buf.Write(rtc::MakeArrayView(in, kSize), bytes, error)); + EXPECT_EQ(SR_SUCCESS, + buf.Read(rtc::MakeArrayView(out, kSize * 3 / 4), bytes, error)); + EXPECT_EQ(SR_SUCCESS, + buf.Write(rtc::MakeArrayView(in, kSize / 2), bytes, error)); q = buf.GetReadData(&bytes); EXPECT_TRUE(nullptr != q); EXPECT_EQ(kSize / 4, bytes); @@ -158,7 +176,7 @@ TEST(FifoBufferTest, TestAll) { EXPECT_EQ(kSize, bytes); memcpy(p, in, kSize); buf.ConsumeWriteBuffer(kSize); - EXPECT_EQ(SR_SUCCESS, buf.Read(out, kSize, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, buf.Read(rtc::MakeArrayView(out, kSize), bytes, error)); EXPECT_EQ(kSize, bytes); EXPECT_EQ(0, memcmp(in, out, kSize)); @@ -173,7 +191,7 @@ TEST(FifoBufferTest, TestAll) { EXPECT_EQ(kSize / 2, bytes); memcpy(p, in + kSize / 2, kSize / 2); buf.ConsumeWriteBuffer(kSize / 2); - EXPECT_EQ(SR_SUCCESS, buf.Read(out, kSize, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, buf.Read(rtc::MakeArrayView(out, kSize), bytes, error)); EXPECT_EQ(kSize, bytes); EXPECT_EQ(0, memcmp(in, out, kSize)); @@ -183,8 +201,10 @@ TEST(FifoBufferTest, TestAll) { // ........XXXXWWWW ........89AB0123 // WWWW....XXXXXXXX 4567....89AB0123 // RRRR....RRRRRRRR ................ 
- EXPECT_EQ(SR_SUCCESS, buf.Write(in, kSize * 3 / 4, &bytes, nullptr)); - EXPECT_EQ(SR_SUCCESS, buf.Read(out, kSize / 2, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, + buf.Write(rtc::MakeArrayView(in, kSize * 3 / 4), bytes, error)); + EXPECT_EQ(SR_SUCCESS, + buf.Read(rtc::MakeArrayView(out, kSize / 2), bytes, error)); p = buf.GetWriteBuffer(&bytes); EXPECT_TRUE(nullptr != p); EXPECT_EQ(kSize / 4, bytes); @@ -195,22 +215,25 @@ TEST(FifoBufferTest, TestAll) { EXPECT_EQ(kSize / 2, bytes); memcpy(p, in + kSize / 4, kSize / 4); buf.ConsumeWriteBuffer(kSize / 4); - EXPECT_EQ(SR_SUCCESS, buf.Read(out, kSize * 3 / 4, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, + buf.Read(rtc::MakeArrayView(out, kSize * 3 / 4), bytes, error)); EXPECT_EQ(kSize * 3 / 4, bytes); EXPECT_EQ(0, memcmp(in + kSize / 2, out, kSize / 4)); EXPECT_EQ(0, memcmp(in, out + kSize / 4, kSize / 4)); // Check that the stream is now empty - EXPECT_EQ(SR_BLOCK, buf.Read(out, kSize, &bytes, nullptr)); + EXPECT_EQ(SR_BLOCK, buf.Read(rtc::MakeArrayView(out, kSize), bytes, error)); // Write to the stream, close it, read the remaining bytes - EXPECT_EQ(SR_SUCCESS, buf.Write(in, kSize / 2, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, + buf.Write(rtc::MakeArrayView(in, kSize / 2), bytes, error)); buf.Close(); EXPECT_EQ(SS_CLOSED, buf.GetState()); - EXPECT_EQ(SR_EOS, buf.Write(in, kSize / 2, &bytes, nullptr)); - EXPECT_EQ(SR_SUCCESS, buf.Read(out, kSize / 2, &bytes, nullptr)); + EXPECT_EQ(SR_EOS, buf.Write(rtc::MakeArrayView(in, kSize / 2), bytes, error)); + EXPECT_EQ(SR_SUCCESS, + buf.Read(rtc::MakeArrayView(out, kSize / 2), bytes, error)); EXPECT_EQ(0, memcmp(in, out, kSize / 2)); - EXPECT_EQ(SR_EOS, buf.Read(out, kSize / 2, &bytes, nullptr)); + EXPECT_EQ(SR_EOS, buf.Read(rtc::MakeArrayView(out, kSize / 2), bytes, error)); } TEST(FifoBufferTest, FullBufferCheck) { diff --git a/third_party/libwebrtc/rtc_base/memory_stream.cc b/third_party/libwebrtc/rtc_base/memory_stream.cc index cbd78ac14abb..8ceab7aa9b7a 100644 --- a/third_party/libwebrtc/rtc_base/memory_stream.cc +++ b/third_party/libwebrtc/rtc_base/memory_stream.cc @@ -23,38 +23,37 @@ StreamState MemoryStream::GetState() const { return SS_OPEN; } -StreamResult MemoryStream::Read(void* buffer, - size_t bytes, - size_t* bytes_read, - int* error) { +StreamResult MemoryStream::Read(rtc::ArrayView buffer, + size_t& bytes_read, + int& error) { if (seek_position_ >= data_length_) { return SR_EOS; } size_t available = data_length_ - seek_position_; - if (bytes > available) { + size_t bytes; + if (buffer.size() > available) { // Read partial buffer bytes = available; + } else { + bytes = buffer.size(); } - memcpy(buffer, &buffer_[seek_position_], bytes); + memcpy(buffer.data(), &buffer_[seek_position_], bytes); seek_position_ += bytes; - if (bytes_read) { - *bytes_read = bytes; - } + bytes_read = bytes; return SR_SUCCESS; } -StreamResult MemoryStream::Write(const void* buffer, - size_t bytes, - size_t* bytes_written, - int* error) { +StreamResult MemoryStream::Write(rtc::ArrayView buffer, + size_t& bytes_written, + int& error) { size_t available = buffer_length_ - seek_position_; if (0 == available) { // Increase buffer size to the larger of: // a) new position rounded up to next 256 bytes // b) double the previous length - size_t new_buffer_length = - std::max(((seek_position_ + bytes) | 0xFF) + 1, buffer_length_ * 2); - StreamResult result = DoReserve(new_buffer_length, error); + size_t new_buffer_length = std::max( + ((seek_position_ + buffer.size()) | 0xFF) + 1, buffer_length_ * 2); + StreamResult 
result = DoReserve(new_buffer_length, &error); if (SR_SUCCESS != result) { return result; } @@ -62,17 +61,16 @@ StreamResult MemoryStream::Write(const void* buffer, available = buffer_length_ - seek_position_; } + size_t bytes = buffer.size(); if (bytes > available) { bytes = available; } - memcpy(&buffer_[seek_position_], buffer, bytes); + memcpy(&buffer_[seek_position_], buffer.data(), bytes); seek_position_ += bytes; if (data_length_ < seek_position_) { data_length_ = seek_position_; } - if (bytes_written) { - *bytes_written = bytes; - } + bytes_written = bytes; return SR_SUCCESS; } diff --git a/third_party/libwebrtc/rtc_base/memory_stream.h b/third_party/libwebrtc/rtc_base/memory_stream.h index 7deb4b3006c3..07e07f0694e5 100644 --- a/third_party/libwebrtc/rtc_base/memory_stream.h +++ b/third_party/libwebrtc/rtc_base/memory_stream.h @@ -25,14 +25,12 @@ class MemoryStream final : public StreamInterface { ~MemoryStream() override; StreamState GetState() const override; - StreamResult Read(void* buffer, - size_t bytes, - size_t* bytes_read, - int* error) override; - StreamResult Write(const void* buffer, - size_t bytes, - size_t* bytes_written, - int* error) override; + StreamResult Read(rtc::ArrayView buffer, + size_t& bytes_read, + int& error) override; + StreamResult Write(rtc::ArrayView buffer, + size_t& bytes_written, + int& error) override; void Close() override; bool GetSize(size_t* size) const; bool ReserveSize(size_t size); diff --git a/third_party/libwebrtc/rtc_base/network.cc b/third_party/libwebrtc/rtc_base/network.cc index 5ff8d143d9f1..bbcf690915da 100644 --- a/third_party/libwebrtc/rtc_base/network.cc +++ b/third_party/libwebrtc/rtc_base/network.cc @@ -50,9 +50,11 @@ using ::webrtc::SafeTask; using ::webrtc::TimeDelta; // List of MAC addresses of known VPN (for windows). -constexpr uint8_t kVpns[2][6] = { - // Cisco AnyConnect. +constexpr uint8_t kVpns[3][6] = { + // Cisco AnyConnect SSL VPN Client. {0x0, 0x5, 0x9A, 0x3C, 0x7A, 0x0}, + // Cisco AnyConnect IPSEC VPN Client. + {0x0, 0x5, 0x9A, 0x3C, 0x78, 0x0}, // GlobalProtect Virtual Ethernet. 
{0x2, 0x50, 0x41, 0x0, 0x0, 0x1}, }; @@ -316,12 +318,22 @@ NetworkManagerBase::enumeration_permission() const { return enumeration_permission_; } +std::unique_ptr NetworkManagerBase::CreateNetwork( + absl::string_view name, + absl::string_view description, + const IPAddress& prefix, + int prefix_length, + AdapterType type) const { + return std::make_unique(name, description, prefix, prefix_length, + type, field_trials_.get()); +} + std::vector NetworkManagerBase::GetAnyAddressNetworks() { std::vector networks; if (!ipv4_any_address_network_) { const rtc::IPAddress ipv4_any_address(INADDR_ANY); - ipv4_any_address_network_ = std::make_unique( - "any", "any", ipv4_any_address, 0, ADAPTER_TYPE_ANY, field_trials_); + ipv4_any_address_network_ = + CreateNetwork("any", "any", ipv4_any_address, 0, ADAPTER_TYPE_ANY); ipv4_any_address_network_->set_default_local_address_provider(this); ipv4_any_address_network_->set_mdns_responder_provider(this); ipv4_any_address_network_->AddIP(ipv4_any_address); @@ -330,8 +342,8 @@ std::vector NetworkManagerBase::GetAnyAddressNetworks() { if (!ipv6_any_address_network_) { const rtc::IPAddress ipv6_any_address(in6addr_any); - ipv6_any_address_network_ = std::make_unique( - "any", "any", ipv6_any_address, 0, ADAPTER_TYPE_ANY, field_trials_); + ipv6_any_address_network_ = + CreateNetwork("any", "any", ipv6_any_address, 0, ADAPTER_TYPE_ANY); ipv6_any_address_network_->set_default_local_address_provider(this); ipv6_any_address_network_->set_mdns_responder_provider(this); ipv6_any_address_network_->AddIP(ipv6_any_address); @@ -531,14 +543,14 @@ bool NetworkManagerBase::IsVpnMacAddress( BasicNetworkManager::BasicNetworkManager( NetworkMonitorFactory* network_monitor_factory, SocketFactory* socket_factory, - const webrtc::FieldTrialsView* field_trials) - : field_trials_(field_trials), + const webrtc::FieldTrialsView* field_trials_view) + : NetworkManagerBase(field_trials_view), network_monitor_factory_(network_monitor_factory), socket_factory_(socket_factory), allow_mac_based_ipv6_( - field_trials_->IsEnabled("WebRTC-AllowMACBasedIPv6")), + field_trials()->IsEnabled("WebRTC-AllowMACBasedIPv6")), bind_using_ifname_( - !field_trials_->IsDisabled("WebRTC-BindUsingInterfaceName")) { + !field_trials()->IsDisabled("WebRTC-BindUsingInterfaceName")) { RTC_DCHECK(socket_factory_); } @@ -668,9 +680,8 @@ void BasicNetworkManager::ConvertIfAddrs( if_info.adapter_type = ADAPTER_TYPE_VPN; } - auto network = std::make_unique( - cursor->ifa_name, cursor->ifa_name, prefix, prefix_length, - if_info.adapter_type, field_trials_.get()); + auto network = CreateNetwork(cursor->ifa_name, cursor->ifa_name, prefix, + prefix_length, if_info.adapter_type); network->set_default_local_address_provider(this); network->set_scope_id(scope_id); network->AddIP(ip); @@ -855,12 +866,14 @@ bool BasicNetworkManager::CreateNetworks( reinterpret_cast( adapter_addrs->PhysicalAddress), adapter_addrs->PhysicalAddressLength))) { - underlying_type_for_vpn = adapter_type; + // With MAC-based detection we do not know the + // underlying adapter type. 
+ underlying_type_for_vpn = ADAPTER_TYPE_UNKNOWN; adapter_type = ADAPTER_TYPE_VPN; } - auto network = std::make_unique(name, description, prefix, - prefix_length, adapter_type); + auto network = CreateNetwork(name, description, prefix, prefix_length, + adapter_type); network->set_underlying_type_for_vpn(underlying_type_for_vpn); network->set_default_local_address_provider(this); network->set_mdns_responder_provider(this); @@ -965,7 +978,7 @@ void BasicNetworkManager::StartNetworkMonitor() { } if (!network_monitor_) { network_monitor_.reset( - network_monitor_factory_->CreateNetworkMonitor(*field_trials_)); + network_monitor_factory_->CreateNetworkMonitor(*field_trials())); if (!network_monitor_) { return; } diff --git a/third_party/libwebrtc/rtc_base/network.h b/third_party/libwebrtc/rtc_base/network.h index d82ddeed88d6..c7d73bff7ad2 100644 --- a/third_party/libwebrtc/rtc_base/network.h +++ b/third_party/libwebrtc/rtc_base/network.h @@ -231,9 +231,21 @@ class RTC_EXPORT NetworkManagerBase : public NetworkManager { // refactoring of the interface GetNetworks method. const std::vector& GetNetworksInternal() const { return networks_; } + std::unique_ptr CreateNetwork(absl::string_view name, + absl::string_view description, + const IPAddress& prefix, + int prefix_length, + AdapterType type) const; + + const webrtc::FieldTrialsView* field_trials() const { + return field_trials_.get(); + } + private: friend class NetworkTest; - const webrtc::FieldTrialsView* field_trials_ = nullptr; + webrtc::AlwaysValidPointer + field_trials_; EnumerationPermission enumeration_permission_; std::vector networks_; @@ -348,10 +360,7 @@ class RTC_EXPORT BasicNetworkManager : public NetworkManagerBase, Thread* thread_ = nullptr; bool sent_first_update_ = true; int start_count_ = 0; - // Chromium create BasicNetworkManager() w/o field trials. - webrtc::AlwaysValidPointer - field_trials_; + std::vector network_ignore_list_; NetworkMonitorFactory* const network_monitor_factory_; SocketFactory* const socket_factory_; diff --git a/third_party/libwebrtc/rtc_base/numerics/sequence_numbers_conformance_test.cc b/third_party/libwebrtc/rtc_base/numerics/sequence_numbers_conformance_test.cc new file mode 100644 index 000000000000..8c5bc62e56c8 --- /dev/null +++ b/third_party/libwebrtc/rtc_base/numerics/sequence_numbers_conformance_test.cc @@ -0,0 +1,202 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include +#include +#include + +#include "modules/include/module_common_types_public.h" +#include "net/dcsctp/common/sequence_numbers.h" +#include "rtc_base/numerics/sequence_number_util.h" +#include "rtc_base/strong_alias.h" +#include "rtc_base/time_utils.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { + +using ::testing::Test; + +using dcsctp::UnwrappedSequenceNumber; +using Wrapped = webrtc::StrongAlias; +using TestSequence = UnwrappedSequenceNumber; + +template +class UnwrapperHelper; + +template <> +class UnwrapperHelper { + public: + int64_t Unwrap(uint32_t val) { + TestSequence s = unwrapper_.Unwrap(Wrapped(val)); + // UnwrappedSequenceNumber starts counting at 2^32. 
+ constexpr int64_t kDcsctpUnwrapStart = int64_t{1} << 32; + return s.value() - kDcsctpUnwrapStart; + } + + private: + TestSequence::Unwrapper unwrapper_; +}; + +// MaxVal is the max of the wrapped space, ie MaxVal + 1 = 0 when wrapped. +template ::max()> +struct FixtureParams { + using Unwrapper = U; + static constexpr int64_t kMaxVal = MaxVal; +}; + +template +class UnwrapperConformanceFixture : public Test { + public: + static constexpr int64_t kMaxVal = F::kMaxVal; + static constexpr int64_t kMaxIncrease = kMaxVal / 2; + static constexpr int64_t kMaxBackwardsIncrease = kMaxVal - kMaxIncrease + 1; + + template + static constexpr bool UnwrapperIs() { + return std::is_same(); + } + + typename F::Unwrapper ref_unwrapper_; +}; + +TYPED_TEST_SUITE_P(UnwrapperConformanceFixture); + +TYPED_TEST_P(UnwrapperConformanceFixture, PositiveWrapAround) { + EXPECT_EQ(0, this->ref_unwrapper_.Unwrap(0)); + EXPECT_EQ(TestFixture::kMaxIncrease, + this->ref_unwrapper_.Unwrap(TestFixture::kMaxIncrease)); + EXPECT_EQ(2 * TestFixture::kMaxIncrease, + this->ref_unwrapper_.Unwrap(2 * TestFixture::kMaxIncrease)); + // Now unwrapping 0 should wrap around to be kMaxVal + 1. + EXPECT_EQ(TestFixture::kMaxVal + 1, this->ref_unwrapper_.Unwrap(0)); + EXPECT_EQ(TestFixture::kMaxVal + 1 + TestFixture::kMaxIncrease, + this->ref_unwrapper_.Unwrap(TestFixture::kMaxIncrease)); +} + +TYPED_TEST_P(UnwrapperConformanceFixture, NegativeUnwrap) { + using UnwrapperT = decltype(this->ref_unwrapper_); + // webrtc::TimestampUnwrapper known to not handle negative numbers. + // rtc::TimestampWrapAroundHandler does not wrap around correctly. + if constexpr (std::is_same() || + std::is_same()) { + return; + } + EXPECT_EQ(0, this->ref_unwrapper_.Unwrap(0)); + // Max backwards wrap is negative. + EXPECT_EQ(-TestFixture::kMaxIncrease, + this->ref_unwrapper_.Unwrap(this->kMaxBackwardsIncrease)); + // Increase to a larger negative number. + EXPECT_EQ(-2, this->ref_unwrapper_.Unwrap(TestFixture::kMaxVal - 1)); + // Increase back positive. + EXPECT_EQ(1, this->ref_unwrapper_.Unwrap(1)); +} + +TYPED_TEST_P(UnwrapperConformanceFixture, BackwardUnwrap) { + EXPECT_EQ(127, this->ref_unwrapper_.Unwrap(127)); + EXPECT_EQ(128, this->ref_unwrapper_.Unwrap(128)); + EXPECT_EQ(127, this->ref_unwrapper_.Unwrap(127)); +} + +TYPED_TEST_P(UnwrapperConformanceFixture, MultiplePositiveWrapArounds) { + using UnwrapperT = decltype(this->ref_unwrapper_); + // rtc::TimestampWrapAroundHandler does not wrap around correctly. + if constexpr (std::is_same()) { + return; + } + int64_t val = 0; + uint32_t wrapped_val = 0; + for (int i = 0; i < 16; ++i) { + EXPECT_EQ(val, this->ref_unwrapper_.Unwrap(wrapped_val)); + val += TestFixture::kMaxIncrease; + wrapped_val = + (wrapped_val + TestFixture::kMaxIncrease) % (TestFixture::kMaxVal + 1); + } +} + +TYPED_TEST_P(UnwrapperConformanceFixture, WrapBoundaries) { + EXPECT_EQ(0, this->ref_unwrapper_.Unwrap(0)); + EXPECT_EQ(TestFixture::kMaxIncrease, + this->ref_unwrapper_.Unwrap(TestFixture::kMaxIncrease)); + // Increases by more than TestFixture::kMaxIncrease which indicates a negative + // rollback. + EXPECT_EQ(0, this->ref_unwrapper_.Unwrap(0)); + EXPECT_EQ(10, this->ref_unwrapper_.Unwrap(10)); +} + +TYPED_TEST_P(UnwrapperConformanceFixture, MultipleNegativeWrapArounds) { + using UnwrapperT = decltype(this->ref_unwrapper_); + // webrtc::TimestampUnwrapper known to not handle negative numbers. + // webrtc::SequenceNumberUnwrapper can only wrap negative once. + // rtc::TimestampWrapAroundHandler does not wrap around correctly. 
+ if constexpr (std::is_same() || + std::is_same>() || + std::is_same()) { + return; + } + int64_t val = 0; + uint32_t wrapped_val = 0; + for (int i = 0; i < 16; ++i) { + EXPECT_EQ(val, this->ref_unwrapper_.Unwrap(wrapped_val)); + val -= TestFixture::kMaxIncrease; + wrapped_val = (wrapped_val + this->kMaxBackwardsIncrease) % + (TestFixture::kMaxVal + 1); + } +} + +REGISTER_TYPED_TEST_SUITE_P(UnwrapperConformanceFixture, + NegativeUnwrap, + PositiveWrapAround, + BackwardUnwrap, + WrapBoundaries, + MultiplePositiveWrapArounds, + MultipleNegativeWrapArounds); + +constexpr int64_t k15BitMax = (int64_t{1} << 15) - 1; +using UnwrapperTypes = ::testing::Types< + FixtureParams, + FixtureParams, + FixtureParams>, + FixtureParams>, + // SeqNumUnwrapper supports arbitrary limits. + FixtureParams, k15BitMax>>; + +class TestNames { + public: + template + static std::string GetName(int) { + if constexpr (std::is_same()) + return "TimestampWrapAroundHandler"; + if constexpr (std::is_same()) + return "TimestampUnwrapper"; + if constexpr (std::is_same>()) + return "SeqNumUnwrapper"; + if constexpr (std::is_same< + typename T::Unwrapper, + webrtc::SeqNumUnwrapper>()) + return "SeqNumUnwrapper15bit"; + if constexpr (std::is_same>()) + return "UnwrappedSequenceNumber"; + } +}; + +INSTANTIATE_TYPED_TEST_SUITE_P(UnwrapperConformanceTest, + UnwrapperConformanceFixture, + UnwrapperTypes, + TestNames); + +} // namespace +} // namespace webrtc diff --git a/third_party/libwebrtc/rtc_base/openssl_stream_adapter.cc b/third_party/libwebrtc/rtc_base/openssl_stream_adapter.cc index 5fa1a3c50266..9fd8c8f3953e 100644 --- a/third_party/libwebrtc/rtc_base/openssl_stream_adapter.cc +++ b/third_party/libwebrtc/rtc_base/openssl_stream_adapter.cc @@ -561,21 +561,6 @@ void OpenSSLStreamAdapter::SetInitialRetransmissionTimeout(int timeout_ms) { // // StreamInterface Implementation // -// Backwards compatible Write() method using deprecated API. -// Needed because deprecated API is still =0 in API definition. -StreamResult OpenSSLStreamAdapter::Write(const void* data, - size_t data_len, - size_t* written, - int* error) { - // TODO(bugs.webrtc.org/14632): Consider doing - // RTC_CHECK_NOTREACHED(); when downstream usage is eliminated. - size_t dummy_written; - int dummy_error; - return Write( - rtc::MakeArrayView(reinterpret_cast(data), data_len), - written ? *written : dummy_written, error ? *error : dummy_error); -} - StreamResult OpenSSLStreamAdapter::Write(rtc::ArrayView data, size_t& written, int& error) { @@ -637,19 +622,6 @@ StreamResult OpenSSLStreamAdapter::Write(rtc::ArrayView data, // not reached } -// Backwards compatible Read() method using deprecated API. -StreamResult OpenSSLStreamAdapter::Read(void* data, - size_t data_len, - size_t* read, - int* error) { - // TODO(bugs.webrtc.org/14632): Consider doing - // RTC_CHECK_NOTREACHED() when downstream usage is thought to be eliminated. - size_t dummy_read; - int dummy_error; - return Read(rtc::MakeArrayView(reinterpret_cast(data), data_len), - read ? *read : dummy_read, error ? 
*error : dummy_error); -} - StreamResult OpenSSLStreamAdapter::Read(rtc::ArrayView data, size_t& read, int& error) { diff --git a/third_party/libwebrtc/rtc_base/openssl_stream_adapter.h b/third_party/libwebrtc/rtc_base/openssl_stream_adapter.h index 3c94ecd2ced9..aee8d36aadf8 100644 --- a/third_party/libwebrtc/rtc_base/openssl_stream_adapter.h +++ b/third_party/libwebrtc/rtc_base/openssl_stream_adapter.h @@ -95,16 +95,9 @@ class OpenSSLStreamAdapter final : public SSLStreamAdapter { void SetMaxProtocolVersion(SSLProtocolVersion version) override; void SetInitialRetransmissionTimeout(int timeout_ms) override; - [[deprecated("bugs.webrtc.org/14632")]] StreamResult - Read(void* data, size_t data_len, size_t* read, int* error) override; StreamResult Read(rtc::ArrayView data, size_t& read, int& error) override; - [[deprecated("bugs.webrtc.org/14632")]] StreamResult Write( - const void* data, - size_t data_len, - size_t* written, - int* error) override; StreamResult Write(rtc::ArrayView data, size_t& written, int& error) override; diff --git a/third_party/libwebrtc/rtc_base/platform_thread_types.cc b/third_party/libwebrtc/rtc_base/platform_thread_types.cc index f5fc7b94ef90..c3c6955a7b27 100644 --- a/third_party/libwebrtc/rtc_base/platform_thread_types.cc +++ b/third_party/libwebrtc/rtc_base/platform_thread_types.cc @@ -25,6 +25,13 @@ typedef HRESULT(WINAPI* RTC_SetThreadDescription)(HANDLE hThread, PCWSTR lpThreadDescription); #endif +#if defined(WEBRTC_FUCHSIA) +#include +#include + +#include "rtc_base/checks.h" +#endif + namespace rtc { PlatformThreadId CurrentThreadId() { @@ -111,6 +118,10 @@ void SetCurrentThreadName(const char* name) { prctl(PR_SET_NAME, reinterpret_cast(name)); // NOLINT #elif defined(WEBRTC_MAC) || defined(WEBRTC_IOS) pthread_setname_np(name); +#elif defined(WEBRTC_FUCHSIA) + zx_status_t status = zx_object_set_property(zx_thread_self(), ZX_PROP_NAME, + name, strlen(name)); + RTC_DCHECK_EQ(status, ZX_OK); #endif } diff --git a/third_party/libwebrtc/rtc_base/platform_thread_types_gn/moz.build b/third_party/libwebrtc/rtc_base/platform_thread_types_gn/moz.build index 4d20e71a83df..0ff0b0a96f69 100644 --- a/third_party/libwebrtc/rtc_base/platform_thread_types_gn/moz.build +++ b/third_party/libwebrtc/rtc_base/platform_thread_types_gn/moz.build @@ -58,6 +58,10 @@ if CONFIG["OS_TARGET"] == "Android": DEFINES["__STDC_CONSTANT_MACROS"] = True DEFINES["__STDC_FORMAT_MACROS"] = True + OS_LIBS += [ + "log" + ] + if CONFIG["OS_TARGET"] == "Darwin": DEFINES["WEBRTC_ENABLE_AVX2"] = True diff --git a/third_party/libwebrtc/rtc_base/socket_stream.cc b/third_party/libwebrtc/rtc_base/socket_stream.cc index a526f0c0c899..5c993ea233ea 100644 --- a/third_party/libwebrtc/rtc_base/socket_stream.cc +++ b/third_party/libwebrtc/rtc_base/socket_stream.cc @@ -60,42 +60,36 @@ StreamState SocketStream::GetState() const { } } -StreamResult SocketStream::Read(void* buffer, - size_t buffer_len, - size_t* read, - int* error) { +StreamResult SocketStream::Read(rtc::ArrayView buffer, + size_t& read, + int& error) { RTC_DCHECK(socket_ != nullptr); - int result = socket_->Recv(buffer, buffer_len, nullptr); + int result = socket_->Recv(buffer.data(), buffer.size(), nullptr); if (result < 0) { if (socket_->IsBlocking()) return SR_BLOCK; - if (error) - *error = socket_->GetError(); + error = socket_->GetError(); return SR_ERROR; } - if ((result > 0) || (buffer_len == 0)) { - if (read) - *read = result; + if ((result > 0) || (buffer.size() == 0)) { + read = result; return SR_SUCCESS; } return SR_EOS; } 
-StreamResult SocketStream::Write(const void* data, - size_t data_len, - size_t* written, - int* error) { +StreamResult SocketStream::Write(rtc::ArrayView data, + size_t& written, + int& error) { RTC_DCHECK(socket_ != nullptr); - int result = socket_->Send(data, data_len); + int result = socket_->Send(data.data(), data.size()); if (result < 0) { if (socket_->IsBlocking()) return SR_BLOCK; - if (error) - *error = socket_->GetError(); + error = socket_->GetError(); return SR_ERROR; } - if (written) - *written = result; + written = result; return SR_SUCCESS; } diff --git a/third_party/libwebrtc/rtc_base/socket_stream.h b/third_party/libwebrtc/rtc_base/socket_stream.h index f678f805d7fb..492cc42e96ea 100644 --- a/third_party/libwebrtc/rtc_base/socket_stream.h +++ b/third_party/libwebrtc/rtc_base/socket_stream.h @@ -36,15 +36,13 @@ class SocketStream : public StreamInterface, public sigslot::has_slots<> { StreamState GetState() const override; - StreamResult Read(void* buffer, - size_t buffer_len, - size_t* read, - int* error) override; + StreamResult Read(rtc::ArrayView buffer, + size_t& read, + int& error) override; - StreamResult Write(const void* data, - size_t data_len, - size_t* written, - int* error) override; + StreamResult Write(rtc::ArrayView data, + size_t& written, + int& error) override; void Close() override; diff --git a/third_party/libwebrtc/rtc_base/ssl_stream_adapter_unittest.cc b/third_party/libwebrtc/rtc_base/ssl_stream_adapter_unittest.cc index f8a015dc04ed..fb909e7ea184 100644 --- a/third_party/libwebrtc/rtc_base/ssl_stream_adapter_unittest.cc +++ b/third_party/libwebrtc/rtc_base/ssl_stream_adapter_unittest.cc @@ -160,15 +160,12 @@ class SSLDummyStreamBase : public rtc::StreamInterface, rtc::StreamState GetState() const override { return rtc::SS_OPEN; } - rtc::StreamResult Read(void* buffer, - size_t buffer_len, - size_t* read, - int* error) override { + rtc::StreamResult Read(rtc::ArrayView buffer, + size_t& read, + int& error) override { rtc::StreamResult r; - r = in_->Read( - rtc::MakeArrayView(reinterpret_cast(buffer), buffer_len), - *read, *error); + r = in_->Read(buffer, read, error); if (r == rtc::SR_BLOCK) return rtc::SR_BLOCK; if (r == rtc::SR_EOS) @@ -255,12 +252,11 @@ class BufferQueueStream : public rtc::StreamInterface { rtc::StreamState GetState() const override { return rtc::SS_OPEN; } // Reading a buffer queue stream will either succeed or block. - rtc::StreamResult Read(void* buffer, - size_t buffer_len, - size_t* read, - int* error) override { + rtc::StreamResult Read(rtc::ArrayView buffer, + size_t& read, + int& error) override { const bool was_writable = buffer_.is_writable(); - if (!buffer_.ReadFront(buffer, buffer_len, read)) + if (!buffer_.ReadFront(buffer.data(), buffer.size(), &read)) return rtc::SR_BLOCK; if (!was_writable) @@ -270,12 +266,11 @@ class BufferQueueStream : public rtc::StreamInterface { } // Writing to a buffer queue stream will either succeed or block. 
- rtc::StreamResult Write(const void* data, - size_t data_len, - size_t* written, - int* error) override { + rtc::StreamResult Write(rtc::ArrayView data, + size_t& written, + int& error) override { const bool was_readable = buffer_.is_readable(); - if (!buffer_.WriteBack(data, data_len, written)) + if (!buffer_.WriteBack(data.data(), data.size(), &written)) return rtc::SR_BLOCK; if (!was_readable) @@ -815,8 +810,10 @@ class SSLStreamAdapterTestTLS send_stream_.ReserveSize(size); for (int i = 0; i < size; ++i) { - char ch = static_cast(i); - send_stream_.Write(&ch, 1, nullptr, nullptr); + uint8_t ch = static_cast(i); + size_t written; + int error; + send_stream_.Write(rtc::MakeArrayView(&ch, 1), written, error); } send_stream_.Rewind(); @@ -849,8 +846,8 @@ class SSLStreamAdapterTestTLS for (;;) { send_stream_.GetPosition(&position); - if (send_stream_.Read(block, sizeof(block), &tosend, nullptr) != - rtc::SR_EOS) { + int dummy_error; + if (send_stream_.Read(block, tosend, dummy_error) != rtc::SR_EOS) { int error; rv = client_ssl_->Write(rtc::MakeArrayView(block, tosend), sent, error); @@ -895,8 +892,9 @@ class SSLStreamAdapterTestTLS ASSERT_EQ(rtc::SR_SUCCESS, r); RTC_LOG(LS_VERBOSE) << "Read " << bread; - - recv_stream_.Write(buffer, bread, nullptr, nullptr); + size_t written; + int error; + recv_stream_.Write(rtc::MakeArrayView(buffer, bread), written, error); } } diff --git a/third_party/libwebrtc/rtc_base/stream.h b/third_party/libwebrtc/rtc_base/stream.h index fa928fc9ae92..e02349aed3de 100644 --- a/third_party/libwebrtc/rtc_base/stream.h +++ b/third_party/libwebrtc/rtc_base/stream.h @@ -70,46 +70,12 @@ class RTC_EXPORT StreamInterface { // SR_EOS: the end-of-stream has been reached, or the stream is in the // SS_CLOSED state. - // The deprecated method has a default implementation that may be - // overridden in subclasses, rather than being =0. - // This allows subclasses to delete the method. - // TODO(bugs.webrtc.org/14632): Remove when downstream is converted. - [[deprecated("Use ArrayView version")]] virtual StreamResult - Read(void* buffer, size_t buffer_len, size_t* read, int* error) { - RTC_CHECK_NOTREACHED(); - } - -#pragma clang diagnostic push -#pragma clang diagnostic ignored "-Wdeprecated-declarations" - // Preserve backwards compatibility using a default implementation - // because there are subclasses - // outside of the WebRTC codebase that need to be converted. - // - // TODO(bugs.webrtc.org/14632): Remove when downstream is converted. virtual StreamResult Read(rtc::ArrayView buffer, size_t& read, - int& error) { - return Read(buffer.data(), buffer.size(), &read, &error); - } -#pragma clang diagnostic pop - - // The deprecated method has a default implementation that may be - // overridden in subclasses, rather than being =0. - // This allows subclasses to delete the method. - // TODO(bugs.webrtc.org/14632): Remove when downstream is converted. - [[deprecated("Use ArrayView version")]] virtual StreamResult - Write(const void* data, size_t data_len, size_t* written, int* error) { - RTC_CHECK_NOTREACHED(); - } - -#pragma clang diagnostic push -#pragma clang diagnostic ignored "-Wdeprecated-declarations" + int& error) = 0; virtual StreamResult Write(rtc::ArrayView data, size_t& written, - int& error) { - return Write(data.data(), data.size(), &written, &error); - } -#pragma clang diagnostic pop + int& error) = 0; // Attempt to transition to the SS_CLOSED state. SE_CLOSE will not be // signalled as a result of this call. 
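[Editorial note, not part of the patch: the hunks above migrate StreamInterface::Read/Write and its implementations (FifoBuffer tests, MemoryStream, SocketStream, OpenSSLStreamAdapter) from the pointer-based signatures to rtc::ArrayView buffers with reference out-parameters, and rtc_base/stream.h now declares only the ArrayView overloads, as pure virtual. A minimal sketch of what a caller looks like after this change follows; DrainToVector and the include set are assumptions for illustration only.]

    #include <cstdint>
    #include <vector>

    #include "api/array_view.h"
    #include "rtc_base/stream.h"

    // Reads a stream to exhaustion using the new ArrayView-based API.
    std::vector<uint8_t> DrainToVector(rtc::StreamInterface& stream) {
      std::vector<uint8_t> out;
      uint8_t chunk[1024];
      while (true) {
        size_t read = 0;  // Out-params are now references, not nullable pointers.
        int error = 0;
        rtc::StreamResult r =
            stream.Read(rtc::MakeArrayView(chunk, sizeof(chunk)), read, error);
        if (r == rtc::SR_SUCCESS) {
          out.insert(out.end(), chunk, chunk + read);
          continue;
        }
        // SR_BLOCK, SR_EOS and SR_ERROR all end the loop in this sketch;
        // a real caller would inspect `error` on SR_ERROR.
        break;
      }
      return out;
    }

[Carrying the length in the ArrayView and making `read`/`error` references removes the null-pointer handling the old overloads needed, which is why the implementations above drop their `if (read) *read = ...` style checks.]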
diff --git a/third_party/libwebrtc/rtc_tools/BUILD.gn b/third_party/libwebrtc/rtc_tools/BUILD.gn index d219f598a9ba..60e39f22a281 100644 --- a/third_party/libwebrtc/rtc_tools/BUILD.gn +++ b/third_party/libwebrtc/rtc_tools/BUILD.gn @@ -14,14 +14,11 @@ group("rtc_tools") { # This target shall build all targets in tools/. testonly = true - deps = [ - ":frame_analyzer", - ":video_file_reader", - ] + deps = [ ":video_file_reader" ] if (!build_with_chromium) { deps += [ + ":frame_analyzer", ":psnr_ssim_analyzer", - ":rgba_to_i420_converter", ":video_quality_analysis", ] } @@ -29,10 +26,7 @@ group("rtc_tools") { deps += [ ":chart_proto" ] } if (!build_with_chromium && rtc_include_tests) { - deps += [ - ":tools_unittests", - ":yuv_to_ivf_converter", - ] + deps += [ ":tools_unittests" ] } if (rtc_include_tests && rtc_enable_protobuf) { deps += [ @@ -124,40 +118,6 @@ rtc_library("video_quality_analysis") { absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } -# Abseil dependencies are not moved to the absl_deps field deliberately. -# If build_with_chromium is true, the absl_deps replaces the dependencies with -# the "//third_party/abseil-cpp:absl" target. Which doesn't include absl/flags -# (and some others) because they cannot be used in Chromiums. Special exception -# for the "frame_analyzer" target in "third_party/abseil-cpp/absl.gni" allows -# it to be build in chromium. -rtc_executable("frame_analyzer") { - visibility = [ "*" ] - testonly = true - sources = [ "frame_analyzer/frame_analyzer.cc" ] - - deps = [ - ":video_file_reader", - ":video_file_writer", - ":video_quality_analysis", - "../api:make_ref_counted", - "../api:scoped_refptr", - "../api/test/metrics:chrome_perf_dashboard_metrics_exporter", - "../api/test/metrics:global_metrics_logger_and_exporter", - "../api/test/metrics:metrics_exporter", - "../api/test/metrics:stdout_metrics_exporter", - "../rtc_base:stringutils", - "//third_party/abseil-cpp/absl/flags:flag", - "//third_party/abseil-cpp/absl/flags:parse", - "//third_party/abseil-cpp/absl/strings", - ] - - if (build_with_chromium) { - # When building from Chromium, WebRTC's metrics and field trial - # implementations need to be replaced by the Chromium ones. - deps += [ "//third_party/webrtc_overrides:webrtc_component" ] - } -} - # TODO(bugs.webrtc.org/11474): Enable this on win if needed. For now it # is only required for Linux and Android. if (!build_with_chromium && !build_with_mozilla && !is_win && !is_ios) { @@ -182,6 +142,44 @@ if (!is_component_build) { # (and some others) because they cannot be used in Chromiums. Special exception # for the "frame_analyzer" target in "third_party/abseil-cpp/absl.gni" allows # it to be build in chromium. + rtc_executable("frame_analyzer") { + visibility = [ "*" ] + testonly = true + sources = [ "frame_analyzer/frame_analyzer.cc" ] + + deps = [ + ":video_file_reader", + ":video_file_writer", + ":video_quality_analysis", + "../api:make_ref_counted", + "../api:scoped_refptr", + "../api/test/metrics:chrome_perf_dashboard_metrics_exporter", + "../api/test/metrics:global_metrics_logger_and_exporter", + "../api/test/metrics:metrics_exporter", + "../api/test/metrics:stdout_metrics_exporter", + "../rtc_base:stringutils", + "//third_party/abseil-cpp/absl/flags:flag", + "//third_party/abseil-cpp/absl/flags:parse", + "//third_party/abseil-cpp/absl/strings", + ] + + if (build_with_chromium) { + # When building from Chromium, WebRTC's metrics and field trial + # implementations need to be replaced by the Chromium ones. 
+ deps += [ "//third_party/webrtc_overrides:webrtc_component" ] + } + } + + # This target can be built from Chromium but it doesn't support + # is_component_build=true because it depends on WebRTC testonly code + # which is not part of //third_party/webrtc_overrides:webrtc_component. + + # Abseil dependencies are not moved to the absl_deps field deliberately. + # If build_with_chromium is true, the absl_deps replaces the dependencies with + # the "//third_party/abseil-cpp:absl" target. Which doesn't include absl/flags + # (and some others) because they cannot be used in Chromiums. Special exception + # for the "rtp_generator" target in "third_party/abseil-cpp/absl.gni" allows + # it to be build in chromium. rtc_executable("rtp_generator") { visibility = [ "*" ] testonly = true @@ -241,7 +239,7 @@ if (!is_component_build) { # If build_with_chromium is true, the absl_deps replaces the dependencies with # the "//third_party/abseil-cpp:absl" target. Which doesn't include absl/flags # (and some others) because they cannot be used in Chromiums. Special exception - # for the "frame_analyzer" target in "third_party/abseil-cpp/absl.gni" allows + # for the "video_replay" target in "third_party/abseil-cpp/absl.gni" allows # it to be build in chromium. rtc_executable("video_replay") { visibility = [ "*" ] @@ -338,24 +336,6 @@ if (!build_with_chromium) { ] } - rtc_executable("rgba_to_i420_converter") { - visibility = [ "*" ] - testonly = true - sources = [ - "converter/converter.cc", - "converter/converter.h", - "converter/rgba_to_i420_converter.cc", - ] - - deps = [ - "../common_video", - "//third_party/abseil-cpp/absl/flags:flag", - "//third_party/abseil-cpp/absl/flags:parse", - "//third_party/abseil-cpp/absl/flags:usage", - "//third_party/libyuv", - ] - } - if (rtc_enable_protobuf) { proto_library("chart_proto") { visibility = [ "*" ] @@ -437,41 +417,6 @@ if (!build_with_chromium) { if (rtc_include_tests) { if (!build_with_chromium) { - rtc_executable("yuv_to_ivf_converter") { - visibility = [ "*" ] - testonly = true - sources = [ "converter/yuv_to_ivf_converter.cc" ] - deps = [ - "../api:create_frame_generator", - "../api:frame_generator_api", - "../api/task_queue:default_task_queue_factory", - "../api/video:encoded_image", - "../api/video:video_frame", - "../api/video_codecs:video_codecs_api", - "../media:rtc_media_base", - "../modules/rtp_rtcp:rtp_rtcp_format", - "../modules/video_coding:video_codec_interface", - "../modules/video_coding:video_coding_utility", - "../modules/video_coding:webrtc_h264", - "../modules/video_coding:webrtc_vp8", - "../modules/video_coding:webrtc_vp9", - "../rtc_base:checks", - "../rtc_base:criticalsection", - "../rtc_base:logging", - "../rtc_base:rtc_event", - "../rtc_base:rtc_task_queue", - "../rtc_base/synchronization:mutex", - "../rtc_base/system:file_wrapper", - "../test:video_test_common", - "../test:video_test_support", - "//third_party/abseil-cpp/absl/debugging:failure_signal_handler", - "//third_party/abseil-cpp/absl/debugging:symbolize", - "//third_party/abseil-cpp/absl/flags:flag", - "//third_party/abseil-cpp/absl/flags:parse", - "//third_party/abseil-cpp/absl/strings", - ] - } - if (rtc_enable_protobuf) { rtc_executable("event_log_visualizer") { # TODO(bugs.webrtc.org/14248): Remove once usage of std::tmpnam diff --git a/third_party/libwebrtc/rtc_tools/converter/converter.cc b/third_party/libwebrtc/rtc_tools/converter/converter.cc deleted file mode 100644 index ca5eb26703ea..000000000000 --- a/third_party/libwebrtc/rtc_tools/converter/converter.cc +++ 
/dev/null @@ -1,175 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -#include "rtc_tools/converter/converter.h" - -#include -#include - -#include -#include - -#include "third_party/libyuv/include/libyuv/compare.h" -#include "third_party/libyuv/include/libyuv/convert.h" - -#ifdef WIN32 -#define SEPARATOR '\\' -#define STAT _stat -#else -#define SEPARATOR '/' -#define STAT stat -#endif - -namespace webrtc { -namespace test { - -Converter::Converter(int width, int height) : width_(width), height_(height) {} - -bool Converter::ConvertRGBAToI420Video(std::string frames_dir, - std::string output_file_name, - bool delete_frames) { - FILE* output_file = fopen(output_file_name.c_str(), "wb"); - - // Open output file in append mode. - if (output_file == NULL) { - fprintf(stderr, "Couldn't open input file for reading: %s\n", - output_file_name.c_str()); - return false; - } - - int input_frame_size = InputFrameSize(); - uint8_t* rgba_buffer = new uint8_t[input_frame_size]; - int y_plane_size = YPlaneSize(); - uint8_t* dst_y = new uint8_t[y_plane_size]; - int u_plane_size = UPlaneSize(); - uint8_t* dst_u = new uint8_t[u_plane_size]; - int v_plane_size = VPlaneSize(); - uint8_t* dst_v = new uint8_t[v_plane_size]; - - int counter = 0; // Counter to form frame names. - bool success = false; // Is conversion successful. - - while (true) { - std::string file_name = FormFrameName(4, counter); - // Get full path file name. - std::string input_file_name = FindFullFileName(frames_dir, file_name); - - if (FileExists(input_file_name)) { - ++counter; // Update counter for the next round. - } else { - fprintf(stdout, "Reached end of frames list\n"); - break; - } - - // Read the RGBA frame into rgba_buffer. - ReadRGBAFrame(input_file_name.c_str(), input_frame_size, rgba_buffer); - - // Delete the input frame. - if (delete_frames) { - if (remove(input_file_name.c_str()) != 0) { - fprintf(stderr, "Cannot delete file %s\n", input_file_name.c_str()); - } - } - - // Convert to I420 frame. - libyuv::ABGRToI420(rgba_buffer, SrcStrideFrame(), dst_y, DstStrideY(), - dst_u, DstStrideU(), dst_v, DstStrideV(), width_, - height_); - - // Add the I420 frame to the YUV video file. 
- success = AddYUVToFile(dst_y, y_plane_size, dst_u, u_plane_size, dst_v, - v_plane_size, output_file); - - if (!success) { - fprintf(stderr, "LibYUV error during RGBA to I420 frame conversion\n"); - break; - } - } - - delete[] rgba_buffer; - delete[] dst_y; - delete[] dst_u; - delete[] dst_v; - - fclose(output_file); - - return success; -} - -bool Converter::AddYUVToFile(uint8_t* y_plane, - int y_plane_size, - uint8_t* u_plane, - int u_plane_size, - uint8_t* v_plane, - int v_plane_size, - FILE* output_file) { - bool success = AddYUVPlaneToFile(y_plane, y_plane_size, output_file) && - AddYUVPlaneToFile(u_plane, u_plane_size, output_file) && - AddYUVPlaneToFile(v_plane, v_plane_size, output_file); - return success; -} - -bool Converter::AddYUVPlaneToFile(uint8_t* yuv_plane, - int yuv_plane_size, - FILE* file) { - size_t bytes_written = fwrite(yuv_plane, 1, yuv_plane_size, file); - - if (bytes_written != static_cast(yuv_plane_size)) { - fprintf(stderr, - "Number of bytes written (%d) doesn't match size of y plane" - " (%d)\n", - static_cast(bytes_written), yuv_plane_size); - return false; - } - return true; -} - -bool Converter::ReadRGBAFrame(const char* input_file_name, - int input_frame_size, - unsigned char* buffer) { - FILE* input_file = fopen(input_file_name, "rb"); - if (input_file == NULL) { - fprintf(stderr, "Couldn't open input file for reading: %s\n", - input_file_name); - return false; - } - - size_t nbr_read = fread(buffer, 1, input_frame_size, input_file); - fclose(input_file); - - if (nbr_read != static_cast(input_frame_size)) { - fprintf(stderr, "Error reading from input file: %s\n", input_file_name); - return false; - } - - return true; -} - -std::string Converter::FindFullFileName(std::string dir_name, - std::string file_name) { - return dir_name + SEPARATOR + file_name; -} - -bool Converter::FileExists(std::string file_name_to_check) { - struct STAT file_info; - int result = STAT(file_name_to_check.c_str(), &file_info); - return (result == 0); -} - -std::string Converter::FormFrameName(int width, int number) { - std::stringstream tmp; - - // Zero-pad number to a string. - tmp << std::setfill('0') << std::setw(width) << number; - - return "frame_" + tmp.str(); -} - -} // namespace test -} // namespace webrtc diff --git a/third_party/libwebrtc/rtc_tools/converter/converter.h b/third_party/libwebrtc/rtc_tools/converter/converter.h deleted file mode 100644 index 976458cf8867..000000000000 --- a/third_party/libwebrtc/rtc_tools/converter/converter.h +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef RTC_TOOLS_CONVERTER_CONVERTER_H_ -#define RTC_TOOLS_CONVERTER_CONVERTER_H_ - -#include - -#include - -namespace webrtc { -namespace test { - -// Handles a conversion between a set of RGBA frames to a YUV (I420) video. -class Converter { - public: - Converter(int width, int height); - - // Converts RGBA to YUV video. If the delete_frames argument is true, the - // method will delete the input frames after conversion. 
- bool ConvertRGBAToI420Video(std::string frames_dir, - std::string output_file_name, - bool delete_frames); - - private: - int width_; // Width of the video (respectively of the RGBA frames). - int height_; // Height of the video (respectively of the RGBA frames). - - // Returns the size of the Y plane in bytes. - int YPlaneSize() const { return width_ * height_; } - - // Returns the size of the U plane in bytes. - int UPlaneSize() const { return ((width_ + 1) / 2) * ((height_) / 2); } - - // Returns the size of the V plane in bytes. - int VPlaneSize() const { return ((width_ + 1) / 2) * ((height_) / 2); } - - // Returns the number of bytes per row in the RGBA frame. - int SrcStrideFrame() const { return width_ * 4; } - - // Returns the number of bytes in the Y plane. - int DstStrideY() const { return width_; } - - // Returns the number of bytes in the U plane. - int DstStrideU() const { return (width_ + 1) / 2; } - - // Returns the number of bytes in the V plane. - int DstStrideV() const { return (width_ + 1) / 2; } - - // Returns the size in bytes of the input RGBA frames. - int InputFrameSize() const { return width_ * height_ * 4; } - - // Writes the Y, U and V (in this order) planes to the file, thus adding a - // raw YUV frame to the file. - bool AddYUVToFile(uint8_t* y_plane, - int y_plane_size, - uint8_t* u_plane, - int u_plane_size, - uint8_t* v_plane, - int v_plane_size, - FILE* output_file); - - // Adds the Y, U or V plane to the file. - bool AddYUVPlaneToFile(uint8_t* yuv_plane, int yuv_plane_size, FILE* file); - - // Reads a RGBA frame from input_file_name with input_frame_size size in bytes - // into the buffer. - bool ReadRGBAFrame(const char* input_file_name, - int input_frame_size, - unsigned char* buffer); - - // Finds the full path name of the file - concatenates the directory and file - // names. - std::string FindFullFileName(std::string dir_name, std::string file_name); - - // Checks if a file exists. - bool FileExists(std::string file_name_to_check); - - // Returns the name of the file in the form frame_, where is - // 4 zero padded (i.e. frame_0000, frame_0001, etc.). - std::string FormFrameName(int width, int number); -}; - -} // namespace test -} // namespace webrtc - -#endif // RTC_TOOLS_CONVERTER_CONVERTER_H_ diff --git a/third_party/libwebrtc/rtc_tools/converter/rgba_to_i420_converter.cc b/third_party/libwebrtc/rtc_tools/converter/rgba_to_i420_converter.cc deleted file mode 100644 index 6e186ae8fe70..000000000000 --- a/third_party/libwebrtc/rtc_tools/converter/rgba_to_i420_converter.cc +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include -#include - -#include - -#include "absl/flags/flag.h" -#include "absl/flags/parse.h" -#include "absl/flags/usage.h" -#include "rtc_tools/converter/converter.h" - -ABSL_FLAG(int, width, -1, "Width in pixels of the frames in the input file"); -ABSL_FLAG(int, height, -1, "Height in pixels of the frames in the input file"); -ABSL_FLAG(std::string, - frames_dir, - ".", - "The path to the directory where the frames reside"); -ABSL_FLAG(std::string, - output_file, - "output.yuv", - " The output file to which frames are written"); -ABSL_FLAG(bool, - delete_frames, - false, - " Whether or not to delete the input frames after the conversion"); - -/* - * A command-line tool based on libyuv to convert a set of RGBA files to a YUV - * video. - * Usage: - * rgba_to_i420_converter --frames_dir= - * --output_file= --width= - * --height= - */ -int main(int argc, char* argv[]) { - absl::SetProgramUsageMessage( - "Converts RGBA raw image files to I420 frames " - "for YUV.\n" - "Example usage:\n" - "./rgba_to_i420_converter --frames_dir=. " - "--output_file=output.yuv --width=320 " - "--height=240\n" - "IMPORTANT: If you pass the --delete_frames " - "command line parameter, the tool will delete " - "the input frames after conversion.\n"); - absl::ParseCommandLine(argc, argv); - - int width = absl::GetFlag(FLAGS_width); - int height = absl::GetFlag(FLAGS_height); - - if (width <= 0 || height <= 0) { - fprintf(stderr, "Error: width or height cannot be <= 0!\n"); - return -1; - } - - bool del_frames = absl::GetFlag(FLAGS_delete_frames); - - webrtc::test::Converter converter(width, height); - bool success = converter.ConvertRGBAToI420Video( - absl::GetFlag(FLAGS_frames_dir), absl::GetFlag(FLAGS_output_file), - del_frames); - - if (success) { - fprintf(stdout, "Successful conversion of RGBA frames to YUV video!\n"); - return 0; - } else { - fprintf(stdout, "Unsuccessful conversion of RGBA frames to YUV video!\n"); - return -1; - } -} diff --git a/third_party/libwebrtc/rtc_tools/converter/yuv_to_ivf_converter.cc b/third_party/libwebrtc/rtc_tools/converter/yuv_to_ivf_converter.cc deleted file mode 100644 index 7c2c2ad1e0cc..000000000000 --- a/third_party/libwebrtc/rtc_tools/converter/yuv_to_ivf_converter.cc +++ /dev/null @@ -1,285 +0,0 @@ -/* - * Copyright 2019 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include -#include - -#include "absl/debugging/failure_signal_handler.h" -#include "absl/debugging/symbolize.h" -#include "absl/flags/flag.h" -#include "absl/flags/parse.h" -#include "absl/strings/match.h" -#include "api/task_queue/default_task_queue_factory.h" -#include "api/test/create_frame_generator.h" -#include "api/test/frame_generator_interface.h" -#include "api/video/encoded_image.h" -#include "api/video/video_codec_type.h" -#include "api/video_codecs/video_codec.h" -#include "api/video_codecs/video_encoder.h" -#include "media/base/media_constants.h" -#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" -#include "modules/video_coding/codecs/vp8/include/vp8.h" -#include "modules/video_coding/codecs/vp9/include/vp9.h" -#include "modules/video_coding/include/video_error_codes.h" -#include "modules/video_coding/utility/ivf_file_writer.h" -#include "rtc_base/checks.h" -#include "rtc_base/event.h" -#include "rtc_base/logging.h" -#include "rtc_base/synchronization/mutex.h" -#include "rtc_base/system/file_wrapper.h" -#include "rtc_base/task_queue.h" -#include "test/testsupport/frame_reader.h" -#include "test/video_codec_settings.h" - -#if defined(WEBRTC_USE_H264) -#include "modules/video_coding/codecs/h264/include/h264.h" -#endif - -ABSL_FLAG(std::string, input, "", "Input YUV file to convert to IVF"); -ABSL_FLAG(int, width, 0, "Input frame width"); -ABSL_FLAG(int, height, 0, "Input frame height"); -ABSL_FLAG(std::string, codec, cricket::kVp8CodecName, "Codec to use"); -ABSL_FLAG(std::string, output, "", "Output IVF file"); - -namespace webrtc { -namespace test { -namespace { - -constexpr int kMaxFramerate = 30; -// We use very big value here to ensure that codec won't hit any limits. -constexpr uint32_t kBitrateBps = 100000000; -constexpr int kKeyFrameIntervalMs = 30000; -constexpr TimeDelta kMaxFrameEncodeWaitTimeout = TimeDelta::Seconds(2); -constexpr int kFrameLogInterval = 100; -static const VideoEncoder::Capabilities kCapabilities(false); - -class IvfFileWriterEncodedCallback : public EncodedImageCallback { - public: - IvfFileWriterEncodedCallback(const std::string& file_name, - VideoCodecType video_codec_type, - int expected_frames_count) - : file_writer_( - IvfFileWriter::Wrap(FileWrapper::OpenWriteOnly(file_name), 0)), - video_codec_type_(video_codec_type), - expected_frames_count_(expected_frames_count) { - RTC_CHECK(file_writer_.get()); - } - ~IvfFileWriterEncodedCallback() { RTC_CHECK(file_writer_->Close()); } - - Result OnEncodedImage(const EncodedImage& encoded_image, - const CodecSpecificInfo* codec_specific_info) override { - RTC_CHECK(file_writer_->WriteFrame(encoded_image, video_codec_type_)); - - MutexLock lock(&lock_); - received_frames_count_++; - RTC_CHECK_LE(received_frames_count_, expected_frames_count_); - if (received_frames_count_ % kFrameLogInterval == 0) { - RTC_LOG(LS_INFO) << received_frames_count_ << " out of " - << expected_frames_count_ << " frames written"; - } - next_frame_written_.Set(); - return Result(Result::Error::OK); - } - - void WaitNextFrameWritten(TimeDelta timeout) { - RTC_CHECK(next_frame_written_.Wait(timeout)); - next_frame_written_.Reset(); - } - - private: - std::unique_ptr file_writer_; - const VideoCodecType video_codec_type_; - const int expected_frames_count_; - - Mutex lock_; - int received_frames_count_ RTC_GUARDED_BY(lock_) = 0; - rtc::Event next_frame_written_; -}; - -class Encoder { - public: - Encoder(int width, - int height, - int frames_count, - const std::string& output_file_name, - VideoCodecType video_codec_type, 
- std::unique_ptr video_encoder) - : video_encoder_(std::move(video_encoder)), - task_queue_(CreateDefaultTaskQueueFactory()->CreateTaskQueue( - "Encoder", - TaskQueueFactory::Priority::HIGH)) { - ivf_writer_callback_ = std::make_unique( - output_file_name, video_codec_type, frames_count); - - task_queue_.PostTask([width, height, video_codec_type, this]() { - VideoCodec codec_settings; - CodecSettings(video_codec_type, &codec_settings); - codec_settings.width = width; - codec_settings.height = height; - codec_settings.maxFramerate = kMaxFramerate; - codec_settings.startBitrate = kBitrateBps; - codec_settings.minBitrate = kBitrateBps; - codec_settings.maxBitrate = kBitrateBps; - codec_settings.SetFrameDropEnabled(false); - switch (video_codec_type) { - case VideoCodecType::kVideoCodecVP8: { - VideoCodecVP8* vp8_settings = codec_settings.VP8(); - vp8_settings->keyFrameInterval = kKeyFrameIntervalMs; - vp8_settings->denoisingOn = false; - } break; - case VideoCodecType::kVideoCodecVP9: { - VideoCodecVP9* vp9_settings = codec_settings.VP9(); - vp9_settings->denoisingOn = false; - vp9_settings->keyFrameInterval = kKeyFrameIntervalMs; - vp9_settings->automaticResizeOn = false; - } break; - case VideoCodecType::kVideoCodecH264: { - VideoCodecH264* h264_settings = codec_settings.H264(); - h264_settings->keyFrameInterval = kKeyFrameIntervalMs; - } break; - default: - RTC_CHECK(false) << "Unsupported codec type"; - } - VideoBitrateAllocation bitrate_allocation; - bitrate_allocation.SetBitrate(0, 0, kBitrateBps); - - video_encoder_->RegisterEncodeCompleteCallback( - ivf_writer_callback_.get()); - RTC_CHECK_EQ( - WEBRTC_VIDEO_CODEC_OK, - video_encoder_->InitEncode( - &codec_settings, - VideoEncoder::Settings(kCapabilities, /*number_of_cores=*/4, - /*max_payload_size=*/0))); - video_encoder_->SetRates(VideoEncoder::RateControlParameters( - bitrate_allocation, - static_cast(codec_settings.maxFramerate))); - }); - } - - void Encode(const VideoFrame& frame) { - task_queue_.PostTask([frame, this]() { - RTC_CHECK_EQ(WEBRTC_VIDEO_CODEC_OK, - video_encoder_->Encode(frame, nullptr)); - }); - } - - void WaitNextFrameWritten(TimeDelta timeout) { - ivf_writer_callback_->WaitNextFrameWritten(timeout); - } - - private: - std::unique_ptr video_encoder_; - std::unique_ptr ivf_writer_callback_; - - rtc::TaskQueue task_queue_; -}; - -int GetFrameCount(std::string yuv_file_name, int width, int height) { - std::unique_ptr yuv_reader = - std::make_unique(std::move(yuv_file_name), width, - height); - RTC_CHECK(yuv_reader->Init()); - int frames_count = yuv_reader->NumberOfFrames(); - yuv_reader->Close(); - return frames_count; -} - -VideoFrame BuildFrame(FrameGeneratorInterface::VideoFrameData frame_data, - uint32_t rtp_timestamp) { - return VideoFrame::Builder() - .set_video_frame_buffer(frame_data.buffer) - .set_update_rect(frame_data.update_rect) - .set_timestamp_rtp(rtp_timestamp) - .build(); -} - -void WriteVideoFile(std::string input_file_name, - int width, - int height, - std::string output_file_name, - VideoCodecType video_codec_type, - std::unique_ptr video_encoder) { - int frames_count = GetFrameCount(input_file_name, width, height); - - std::unique_ptr frame_generator = - CreateFromYuvFileFrameGenerator({input_file_name}, width, height, - /*frame_repeat_count=*/1); - - Encoder encoder(width, height, frames_count, output_file_name, - video_codec_type, std::move(video_encoder)); - - uint32_t last_frame_timestamp = 0; - - for (int i = 0; i < frames_count; ++i) { - const uint32_t timestamp = - last_frame_timestamp + 
kVideoPayloadTypeFrequency / kMaxFramerate; - VideoFrame frame = BuildFrame(frame_generator->NextFrame(), timestamp); - - last_frame_timestamp = timestamp; - - encoder.Encode(frame); - encoder.WaitNextFrameWritten(kMaxFrameEncodeWaitTimeout); - - if ((i + 1) % kFrameLogInterval == 0) { - RTC_LOG(LS_INFO) << i + 1 << " out of " << frames_count - << " frames are sent for encoding"; - } - } - RTC_LOG(LS_INFO) << "All " << frames_count << " frame are sent for encoding"; -} - -} // namespace -} // namespace test -} // namespace webrtc - -int main(int argc, char* argv[]) { - // Initialize the symbolizer to get a human-readable stack trace. - absl::InitializeSymbolizer(argv[0]); - - absl::FailureSignalHandlerOptions options; - absl::InstallFailureSignalHandler(options); - - absl::ParseCommandLine(argc, argv); - - std::string codec_name = absl::GetFlag(FLAGS_codec); - std::string input_file_name = absl::GetFlag(FLAGS_input); - std::string output_file_name = absl::GetFlag(FLAGS_output); - int width = absl::GetFlag(FLAGS_width); - int height = absl::GetFlag(FLAGS_height); - RTC_CHECK_NE(input_file_name, "") << "--input is required"; - RTC_CHECK_NE(output_file_name, "") << "--output is required"; - RTC_CHECK_GT(width, 0) << "width must be greater then 0"; - RTC_CHECK_GT(height, 0) << "height must be greater then 0"; - if (absl::EqualsIgnoreCase(codec_name, cricket::kVp8CodecName)) { - webrtc::test::WriteVideoFile( - input_file_name, width, height, output_file_name, - webrtc::VideoCodecType::kVideoCodecVP8, webrtc::VP8Encoder::Create()); - return 0; - } - if (absl::EqualsIgnoreCase(codec_name, cricket::kVp9CodecName)) { - webrtc::test::WriteVideoFile( - input_file_name, width, height, output_file_name, - webrtc::VideoCodecType::kVideoCodecVP9, webrtc::VP9Encoder::Create()); - return 0; - } -#if defined(WEBRTC_USE_H264) - if (absl::EqualsIgnoreCase(codec_name, cricket::kH264CodecName)) { - webrtc::test::WriteVideoFile( - input_file_name, width, height, output_file_name, - webrtc::VideoCodecType::kVideoCodecH264, - webrtc::H264Encoder::Create( - cricket::VideoCodec(cricket::kH264CodecName))); - return 0; - } -#endif - RTC_CHECK(false) << "Unsupported codec: " << codec_name; - return 1; -} diff --git a/third_party/libwebrtc/sdk/BUILD.gn b/third_party/libwebrtc/sdk/BUILD.gn index 3d847842c4e8..deeee1408056 100644 --- a/third_party/libwebrtc/sdk/BUILD.gn +++ b/third_party/libwebrtc/sdk/BUILD.gn @@ -1067,11 +1067,13 @@ if (is_ios || is_mac) { ":videorendereradapter_objc", ":videosource_objc", ":videotoolbox_objc", + "../api:dtmf_sender_interface", "../api:libjingle_peerconnection_api", "../api:media_stream_interface", "../api:rtc_event_log_output_file", "../api:rtc_stats_api", "../api:rtp_parameters", + "../api:rtp_sender_interface", "../api:scoped_refptr", "../api/audio_codecs:audio_codecs_api", "../api/audio_codecs:builtin_audio_decoder_factory", diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCFieldTrials.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCFieldTrials.h index 1f290d8a66c0..3e8fcc807564 100644 --- a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCFieldTrials.h +++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCFieldTrials.h @@ -13,9 +13,7 @@ #import "RTCMacros.h" /** The only valid value for the following if set is kRTCFieldTrialEnabledValue. 
*/ -RTC_EXTERN NSString * const kRTCFieldTrialAudioForceNoTWCCKey; -RTC_EXTERN NSString * const kRTCFieldTrialAudioForceABWENoTWCCKey; -RTC_EXTERN NSString * const kRTCFieldTrialSendSideBweWithOverheadKey; +RTC_EXTERN NSString *const kRTCFieldTrialAudioForceABWENoTWCCKey; RTC_EXTERN NSString * const kRTCFieldTrialFlexFec03AdvertisedKey; RTC_EXTERN NSString * const kRTCFieldTrialFlexFec03Key; RTC_EXTERN NSString * const kRTCFieldTrialH264HighProfileKey; diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCFieldTrials.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCFieldTrials.mm index 852aeeec8468..193da9e4f75a 100644 --- a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCFieldTrials.mm +++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCFieldTrials.mm @@ -16,9 +16,7 @@ #include "system_wrappers/include/field_trial.h" -NSString * const kRTCFieldTrialAudioForceNoTWCCKey = @"WebRTC-Audio-ForceNoTWCC"; -NSString * const kRTCFieldTrialAudioForceABWENoTWCCKey = @"WebRTC-Audio-ABWENoTWCC"; -NSString * const kRTCFieldTrialSendSideBweWithOverheadKey = @"WebRTC-SendSideBwe-WithOverhead"; +NSString *const kRTCFieldTrialAudioForceABWENoTWCCKey = @"WebRTC-Audio-ABWENoTWCC"; NSString * const kRTCFieldTrialFlexFec03AdvertisedKey = @"WebRTC-FlexFEC-03-Advertised"; NSString * const kRTCFieldTrialFlexFec03Key = @"WebRTC-FlexFEC-03"; NSString * const kRTCFieldTrialH264HighProfileKey = @"WebRTC-H264HighProfile"; diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCLegacyStatsReport+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCLegacyStatsReport+Private.h index faa796282153..7374b2b72fd1 100644 --- a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCLegacyStatsReport+Private.h +++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCLegacyStatsReport+Private.h @@ -10,7 +10,7 @@ #import "RTCLegacyStatsReport.h" -#include "api/stats_types.h" +#include "api/legacy_stats_types.h" NS_ASSUME_NONNULL_BEGIN diff --git a/third_party/libwebrtc/stats/rtcstats_objects.cc b/third_party/libwebrtc/stats/rtcstats_objects.cc index 7c20b87562b8..734f16273c2d 100644 --- a/third_party/libwebrtc/stats/rtcstats_objects.cc +++ b/third_party/libwebrtc/stats/rtcstats_objects.cc @@ -678,7 +678,8 @@ WEBRTC_RTCSTATS_IMPL( &nack_count, &qp_sum, &active, - &power_efficient_encoder) + &power_efficient_encoder, + &scalability_mode) // clang-format on RTCOutboundRTPStreamStats::RTCOutboundRTPStreamStats(const std::string& id, @@ -719,7 +720,8 @@ RTCOutboundRTPStreamStats::RTCOutboundRTPStreamStats(std::string&& id, nack_count("nackCount"), qp_sum("qpSum"), active("active"), - power_efficient_encoder("powerEfficientEncoder") {} + power_efficient_encoder("powerEfficientEncoder"), + scalability_mode("scalabilityMode") {} RTCOutboundRTPStreamStats::RTCOutboundRTPStreamStats( const RTCOutboundRTPStreamStats& other) = default; diff --git a/third_party/libwebrtc/system_wrappers/include/field_trial.h b/third_party/libwebrtc/system_wrappers/include/field_trial.h index ffbd864a6a36..8d0ad258c111 100644 --- a/third_party/libwebrtc/system_wrappers/include/field_trial.h +++ b/third_party/libwebrtc/system_wrappers/include/field_trial.h @@ -98,11 +98,16 @@ bool FieldTrialsStringIsValid(absl::string_view trials_string); std::string MergeFieldTrialsStrings(absl::string_view first, absl::string_view second); -// RAII type that ensures global state is consistent between tests. 
-class ScopedGlobalFieldTrialsForTesting { +// This helper allows temporarily "registering" a field trial within the current +// scope. This is only useful for tests that use the global field trial string; +// otherwise you can use `webrtc::FieldTrialsRegistry`. +// +// If you want to isolate changes to the global field trial string itself within +// the current scope, you should use `webrtc::test::ScopedFieldTrials`. +class FieldTrialsAllowedInScopeForTesting { public: - explicit ScopedGlobalFieldTrialsForTesting(flat_set keys); - ~ScopedGlobalFieldTrialsForTesting(); + explicit FieldTrialsAllowedInScopeForTesting(flat_set keys); + ~FieldTrialsAllowedInScopeForTesting(); }; } // namespace field_trial diff --git a/third_party/libwebrtc/system_wrappers/source/field_trial.cc b/third_party/libwebrtc/system_wrappers/source/field_trial.cc index bdf84bd6266c..8f15b4eb7a60 100644 --- a/third_party/libwebrtc/system_wrappers/source/field_trial.cc +++ b/third_party/libwebrtc/system_wrappers/source/field_trial.cc @@ -168,12 +168,12 @@ const char* GetFieldTrialString() { return trials_init_string; } -ScopedGlobalFieldTrialsForTesting::ScopedGlobalFieldTrialsForTesting( +FieldTrialsAllowedInScopeForTesting::FieldTrialsAllowedInScopeForTesting( flat_set keys) { TestKeys() = std::move(keys); } -ScopedGlobalFieldTrialsForTesting::~ScopedGlobalFieldTrialsForTesting() { +FieldTrialsAllowedInScopeForTesting::~FieldTrialsAllowedInScopeForTesting() { TestKeys().clear(); } diff --git a/third_party/libwebrtc/test/BUILD.gn b/third_party/libwebrtc/test/BUILD.gn index 84deb3ed8479..9dc86074748d 100644 --- a/third_party/libwebrtc/test/BUILD.gn +++ b/third_party/libwebrtc/test/BUILD.gn @@ -338,6 +338,7 @@ if (is_ios) { "../api/test/metrics:chrome_perf_dashboard_metrics_exporter", "../api/test/metrics:global_metrics_logger_and_exporter", "../api/test/metrics:metrics_exporter", + "../api/test/metrics:metrics_set_proto_file_exporter", "../api/test/metrics:print_result_proxy_metrics_exporter", "../api/test/metrics:stdout_metrics_exporter", "../sdk:helpers_objc", @@ -450,6 +451,7 @@ rtc_library("video_test_support") { "../api:sequence_checker", "../api/test/video:video_frame_writer", "../api/video:encoded_image", + "../api/video:resolution", "../api/video:video_frame", "../api/video_codecs:video_codecs_api", "../common_video", @@ -526,6 +528,7 @@ if (rtc_include_tests && !build_with_chromium) { "../api/test/metrics:chrome_perf_dashboard_metrics_exporter", "../api/test/metrics:global_metrics_logger_and_exporter", "../api/test/metrics:metrics_exporter", + "../api/test/metrics:metrics_set_proto_file_exporter", "../api/test/metrics:print_result_proxy_metrics_exporter", "../api/test/metrics:stdout_metrics_exporter", "../rtc_base", @@ -661,6 +664,7 @@ if (rtc_include_tests && !build_with_chromium) { "../rtc_base/synchronization:mutex", "../rtc_base/system:file_wrapper", "pc/e2e:e2e_unittests", + "pc/e2e/analyzer/video:video_analyzer_unittests", "peer_scenario/tests", "scenario:scenario_unittests", "time_controller:time_controller", @@ -1096,6 +1100,7 @@ if (is_mac) { "OpenGL.framework", "CoreVideo.framework", ] + defines = [ "GL_SILENCE_DEPRECATION" ] } } @@ -1131,6 +1136,9 @@ rtc_library("test_renderer_generic") { "gl/gl_renderer.h", ] } + if (is_mac) { + defines = [ "GL_SILENCE_DEPRECATION" ] + } if ((is_linux || is_chromeos) && rtc_use_x11) { sources += [ diff --git a/third_party/libwebrtc/test/OWNERS b/third_party/libwebrtc/test/OWNERS index 9938fac84680..a1bd81224456 100644 --- a/third_party/libwebrtc/test/OWNERS +++ 
b/third_party/libwebrtc/test/OWNERS @@ -3,3 +3,5 @@ srte@webrtc.org stefan@webrtc.org titovartem@webrtc.org landrey@webrtc.org +mbonadei@webrtc.org +jleconte@webrtc.org diff --git a/third_party/libwebrtc/test/call_test.cc b/third_party/libwebrtc/test/call_test.cc index 7e7c9bb67470..156b8a7f9ebe 100644 --- a/third_party/libwebrtc/test/call_test.cc +++ b/third_party/libwebrtc/test/call_test.cc @@ -591,8 +591,11 @@ void CallTest::Start() { } void CallTest::StartVideoStreams() { - for (VideoSendStream* video_send_stream : video_send_streams_) - video_send_stream->Start(); + for (size_t i = 0; i < video_send_streams_.size(); ++i) { + std::vector active_rtp_streams( + video_send_configs_[i].rtp.ssrcs.size(), true); + video_send_streams_[i]->StartPerRtpStream(active_rtp_streams); + } for (VideoReceiveStreamInterface* video_recv_stream : video_receive_streams_) video_recv_stream->Start(); } diff --git a/third_party/libwebrtc/test/fuzzers/BUILD.gn b/third_party/libwebrtc/test/fuzzers/BUILD.gn index 9896e075631d..fd67372506ad 100644 --- a/third_party/libwebrtc/test/fuzzers/BUILD.gn +++ b/third_party/libwebrtc/test/fuzzers/BUILD.gn @@ -269,22 +269,6 @@ webrtc_fuzzer_test("audio_decoder_ilbc_fuzzer") { ] } -webrtc_fuzzer_test("audio_decoder_isac_fuzzer") { - sources = [ "audio_decoder_isac_fuzzer.cc" ] - deps = [ - ":audio_decoder_fuzzer", - "../../modules/audio_coding:isac", - ] -} - -webrtc_fuzzer_test("audio_decoder_isacfix_fuzzer") { - sources = [ "audio_decoder_isacfix_fuzzer.cc" ] - deps = [ - ":audio_decoder_fuzzer", - "../../modules/audio_coding:isac_fix", - ] -} - webrtc_fuzzer_test("audio_decoder_opus_fuzzer") { sources = [ "audio_decoder_opus_fuzzer.cc" ] deps = [ @@ -350,24 +334,6 @@ webrtc_fuzzer_test("audio_encoder_opus_fuzzer") { ] } -webrtc_fuzzer_test("audio_encoder_isac_fixed_fuzzer") { - sources = [ "audio_encoder_isac_fixed_fuzzer.cc" ] - deps = [ - ":audio_encoder_fuzzer", - "../../api/audio_codecs/isac:audio_encoder_isac_fix", - "../../rtc_base:checks", - ] -} - -webrtc_fuzzer_test("audio_encoder_isac_float_fuzzer") { - sources = [ "audio_encoder_isac_float_fuzzer.cc" ] - deps = [ - ":audio_encoder_fuzzer", - "../../api/audio_codecs/isac:audio_encoder_isac_float", - "../../rtc_base:checks", - ] -} - webrtc_fuzzer_test("turn_unwrap_fuzzer") { sources = [ "turn_unwrap_fuzzer.cc" ] deps = [ diff --git a/third_party/libwebrtc/test/fuzzers/audio_decoder_isac_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/audio_decoder_isac_fuzzer.cc deleted file mode 100644 index 96fa75d6ce25..000000000000 --- a/third_party/libwebrtc/test/fuzzers/audio_decoder_isac_fuzzer.cc +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_coding/codecs/isac/main/include/audio_decoder_isac.h" -#include "test/fuzzers/audio_decoder_fuzzer.h" - -namespace webrtc { -void FuzzOneInput(const uint8_t* data, size_t size) { - if (size > 20000) { - return; - } - const int sample_rate_hz = size % 2 == 0 ? 16000 : 32000; // 16 or 32 kHz. - static const size_t kAllocatedOuputSizeSamples = 32000 / 10; // 100 ms. 
- int16_t output[kAllocatedOuputSizeSamples]; - AudioDecoderIsacFloatImpl::Config c; - c.sample_rate_hz = sample_rate_hz; - AudioDecoderIsacFloatImpl dec(c); - FuzzAudioDecoder(DecoderFunctionType::kNormalDecode, data, size, &dec, - sample_rate_hz, sizeof(output), output); -} -} // namespace webrtc diff --git a/third_party/libwebrtc/test/fuzzers/audio_decoder_isacfix_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/audio_decoder_isacfix_fuzzer.cc deleted file mode 100644 index 08aa69feb428..000000000000 --- a/third_party/libwebrtc/test/fuzzers/audio_decoder_isacfix_fuzzer.cc +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_coding/codecs/isac/fix/include/audio_decoder_isacfix.h" -#include "test/fuzzers/audio_decoder_fuzzer.h" - -namespace webrtc { -void FuzzOneInput(const uint8_t* data, size_t size) { - if (size > 20000) { - return; - } - static const int kSampleRateHz = 16000; - static const size_t kAllocatedOuputSizeSamples = 16000 / 10; // 100 ms. - int16_t output[kAllocatedOuputSizeSamples]; - AudioDecoderIsacFixImpl::Config c; - c.sample_rate_hz = kSampleRateHz; - AudioDecoderIsacFixImpl dec(c); - FuzzAudioDecoder(DecoderFunctionType::kNormalDecode, data, size, &dec, - kSampleRateHz, sizeof(output), output); -} -} // namespace webrtc diff --git a/third_party/libwebrtc/test/fuzzers/audio_encoder_isac_fixed_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/audio_encoder_isac_fixed_fuzzer.cc deleted file mode 100644 index 5357dc1b3ece..000000000000 --- a/third_party/libwebrtc/test/fuzzers/audio_encoder_isac_fixed_fuzzer.cc +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "api/audio_codecs/isac/audio_encoder_isac_fix.h" -#include "rtc_base/checks.h" -#include "test/fuzzers/audio_encoder_fuzzer.h" - -namespace webrtc { - -void FuzzOneInput(const uint8_t* data, size_t size) { - AudioEncoderIsacFix::Config config; - RTC_CHECK(config.IsOk()); - constexpr int kPayloadType = 100; - FuzzAudioEncoder( - /*data_view=*/{data, size}, - /*encoder=*/AudioEncoderIsacFix::MakeAudioEncoder(config, kPayloadType)); -} - -} // namespace webrtc diff --git a/third_party/libwebrtc/test/fuzzers/audio_encoder_isac_float_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/audio_encoder_isac_float_fuzzer.cc deleted file mode 100644 index f9e2e0206d24..000000000000 --- a/third_party/libwebrtc/test/fuzzers/audio_encoder_isac_float_fuzzer.cc +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "api/audio_codecs/isac/audio_encoder_isac_float.h" -#include "rtc_base/checks.h" -#include "test/fuzzers/audio_encoder_fuzzer.h" - -namespace webrtc { - -void FuzzOneInput(const uint8_t* data, size_t size) { - AudioEncoderIsacFloat::Config config; - config.sample_rate_hz = 16000; - RTC_CHECK(config.IsOk()); - constexpr int kPayloadType = 100; - FuzzAudioEncoder(/*data_view=*/{data, size}, - /*encoder=*/AudioEncoderIsacFloat::MakeAudioEncoder( - config, kPayloadType)); -} - -} // namespace webrtc diff --git a/third_party/libwebrtc/test/fuzzers/audio_processing_sample_rate_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/audio_processing_sample_rate_fuzzer.cc index 825303d31a3f..ca3946988cbd 100644 --- a/third_party/libwebrtc/test/fuzzers/audio_processing_sample_rate_fuzzer.cc +++ b/third_party/libwebrtc/test/fuzzers/audio_processing_sample_rate_fuzzer.cc @@ -13,8 +13,6 @@ #include #include -#include "api/audio/audio_frame.h" -#include "modules/audio_processing/include/audio_frame_proxies.h" #include "modules/audio_processing/include/audio_processing.h" #include "modules/audio_processing/test/audio_processing_builder_for_testing.h" #include "rtc_base/checks.h" @@ -23,13 +21,14 @@ namespace webrtc { namespace { constexpr int kMaxNumChannels = 2; -constexpr int kMaxSamplesPerChannel = - AudioFrame::kMaxDataSizeSamples / kMaxNumChannels; +// APM supported max rate is 384000 Hz, using a limit slightly above lets the +// fuzzer exercise the handling of too high rates. +constexpr int kMaxSampleRateHz = 400000; +constexpr int kMaxSamplesPerChannel = kMaxSampleRateHz / 100; void GenerateFloatFrame(test::FuzzDataHelper& fuzz_data, int input_rate, int num_channels, - bool is_capture, float* const* float_frames) { const int samples_per_input_channel = AudioProcessing::GetFrameSize(input_rate); @@ -45,20 +44,16 @@ void GenerateFloatFrame(test::FuzzDataHelper& fuzz_data, void GenerateFixedFrame(test::FuzzDataHelper& fuzz_data, int input_rate, int num_channels, - AudioFrame& fixed_frame) { + int16_t* fixed_frames) { const int samples_per_input_channel = AudioProcessing::GetFrameSize(input_rate); - fixed_frame.samples_per_channel_ = samples_per_input_channel; - fixed_frame.sample_rate_hz_ = input_rate; - fixed_frame.num_channels_ = num_channels; - RTC_DCHECK_LE(samples_per_input_channel * num_channels, - AudioFrame::kMaxDataSizeSamples); + RTC_DCHECK_LE(samples_per_input_channel, kMaxSamplesPerChannel); // Write interleaved samples. for (int ch = 0; ch < num_channels; ++ch) { const int16_t channel_value = fuzz_data.ReadOrDefaultValue(0); for (int i = ch; i < samples_per_input_channel * num_channels; i += num_channels) { - fixed_frame.mutable_data()[i] = channel_value; + fixed_frames[i] = channel_value; } } } @@ -103,7 +98,7 @@ void FuzzOneInput(const uint8_t* data, size_t size) { .Create(); RTC_DCHECK(apm); - AudioFrame fixed_frame; + std::array fixed_frame; std::array, kMaxNumChannels> float_frames; std::array float_frame_ptrs; @@ -112,12 +107,6 @@ void FuzzOneInput(const uint8_t* data, size_t size) { } float* const* ptr_to_float_frames = &float_frame_ptrs[0]; - // These are all the sample rates logged by UMA metric - // WebAudio.AudioContext.HardwareSampleRate. - constexpr int kSampleRatesHz[] = {8000, 11025, 16000, 22050, 24000, - 32000, 44100, 46875, 48000, 88200, - 96000, 176400, 192000, 352800, 384000}; - // Choose whether to fuzz the float or int16_t interfaces of APM. 
const bool is_float = fuzz_data.ReadOrDefaultValue(true); @@ -126,18 +115,19 @@ void FuzzOneInput(const uint8_t* data, size_t size) { // iteration. while (fuzz_data.CanReadBytes(1)) { // Decide input/output rate for this iteration. - const int input_rate = fuzz_data.SelectOneOf(kSampleRatesHz); - const int output_rate = fuzz_data.SelectOneOf(kSampleRatesHz); + const int input_rate = static_cast( + fuzz_data.ReadOrDefaultValue(8000) % kMaxSampleRateHz); + const int output_rate = static_cast( + fuzz_data.ReadOrDefaultValue(8000) % kMaxSampleRateHz); const int num_channels = fuzz_data.ReadOrDefaultValue(true) ? 2 : 1; // Since render and capture calls have slightly different reinitialization // procedures, we let the fuzzer choose the order. const bool is_capture = fuzz_data.ReadOrDefaultValue(true); - // Fill the arrays with audio samples from the data. int apm_return_code = AudioProcessing::Error::kNoError; if (is_float) { - GenerateFloatFrame(fuzz_data, input_rate, num_channels, is_capture, + GenerateFloatFrame(fuzz_data, input_rate, num_channels, ptr_to_float_frames); if (is_capture) { @@ -149,20 +139,23 @@ void FuzzOneInput(const uint8_t* data, size_t size) { ptr_to_float_frames, StreamConfig(input_rate, num_channels), StreamConfig(output_rate, num_channels), ptr_to_float_frames); } - RTC_DCHECK_EQ(apm_return_code, AudioProcessing::kNoError); } else { - GenerateFixedFrame(fuzz_data, input_rate, num_channels, fixed_frame); + GenerateFixedFrame(fuzz_data, input_rate, num_channels, + fixed_frame.data()); if (is_capture) { - apm_return_code = ProcessAudioFrame(apm.get(), &fixed_frame); + apm_return_code = apm->ProcessStream( + fixed_frame.data(), StreamConfig(input_rate, num_channels), + StreamConfig(output_rate, num_channels), fixed_frame.data()); } else { - apm_return_code = ProcessReverseAudioFrame(apm.get(), &fixed_frame); + apm_return_code = apm->ProcessReverseStream( + fixed_frame.data(), StreamConfig(input_rate, num_channels), + StreamConfig(output_rate, num_channels), fixed_frame.data()); } - // The AudioFrame interface does not allow non-native sample rates, but it - // should not crash. - RTC_DCHECK(apm_return_code == AudioProcessing::kNoError || - apm_return_code == AudioProcessing::kBadSampleRateError); } + // APM may flag an error on unsupported audio formats, but should not crash. + RTC_DCHECK(apm_return_code == AudioProcessing::kNoError || + apm_return_code == AudioProcessing::kBadSampleRateError); } } diff --git a/third_party/libwebrtc/test/ios/test_support.h b/third_party/libwebrtc/test/ios/test_support.h index 2699923e0920..5ac731393fa8 100644 --- a/third_party/libwebrtc/test/ios/test_support.h +++ b/third_party/libwebrtc/test/ios/test_support.h @@ -27,6 +27,7 @@ void InitTestSuite(int (*test_suite)(void), char* argv[], bool save_chartjson_result, bool export_perf_results_new_api, + std::string webrtc_test_metrics_output_path, absl::optional> metrics_to_plot); // Returns true if unittests should be run by the XCTest runnner. 
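Note on the test_support.h hunk above (not part of the patch): the new `webrtc_test_metrics_output_path` parameter of `InitTestSuite` is consumed by the iOS runner in the next file, which writes metrics through `MetricsSetProtoFileExporter` into the app's Documents directory; the runner therefore requires the value to be a bare file name with no directory separators. A minimal standalone C++ sketch of that constraint, using a hypothetical helper name that is not part of the patch:

    #include <iostream>
    #include <string>

    // Hypothetical illustration only: the iOS runner prepends the app's
    // Documents directory itself, so the flag value must not contain '/'.
    bool IsBareMetricsFileName(const std::string& name) {
      return !name.empty() && name.find('/') == std::string::npos;
    }

    int main() {
      std::cout << IsBareMetricsFileName("webrtc_metrics.pb") << "\n";       // 1: accepted
      std::cout << IsBareMetricsFileName("/tmp/webrtc_metrics.pb") << "\n";  // 0: rejected
      return 0;
    }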
diff --git a/third_party/libwebrtc/test/ios/test_support.mm b/third_party/libwebrtc/test/ios/test_support.mm index 1c7968237510..d3c9ee0c744a 100644 --- a/third_party/libwebrtc/test/ios/test_support.mm +++ b/third_party/libwebrtc/test/ios/test_support.mm @@ -13,6 +13,7 @@ #include "api/test/metrics/chrome_perf_dashboard_metrics_exporter.h" #include "api/test/metrics/global_metrics_logger_and_exporter.h" #include "api/test/metrics/metrics_exporter.h" +#include "api/test/metrics/metrics_set_proto_file_exporter.h" #include "api/test/metrics/print_result_proxy_metrics_exporter.h" #include "api/test/metrics/stdout_metrics_exporter.h" #include "test/ios/coverage_util_ios.h" @@ -44,6 +45,7 @@ static int g_argc; static char **g_argv; static bool g_write_perf_output; static bool g_export_perf_results_new_api; +static std::string g_webrtc_test_metrics_output_path; static absl::optional g_is_xctest; static absl::optional> g_metrics_to_plot; @@ -95,14 +97,14 @@ static absl::optional> g_metrics_to_plot; int exitStatus = g_test_suite(); + NSArray *outputDirectories = + NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES); std::vector> exporters; if (g_export_perf_results_new_api) { exporters.push_back(std::make_unique()); if (g_write_perf_output) { // Stores data into a proto file under the app's document directory. NSString *fileName = @"perftest-output.pb"; - NSArray *outputDirectories = - NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES); if ([outputDirectories count] != 0) { NSString *outputPath = [outputDirectories[0] stringByAppendingPathComponent:fileName]; @@ -110,6 +112,18 @@ static absl::optional> g_metrics_to_plot; [NSString stdStringForString:outputPath])); } } + if (!g_webrtc_test_metrics_output_path.empty()) { + RTC_CHECK_EQ(g_webrtc_test_metrics_output_path.find('/'), std::string::npos) + << "On iOS, --webrtc_test_metrics_output_path must only be a file name."; + if ([outputDirectories count] != 0) { + NSString *fileName = [NSString stringWithCString:g_webrtc_test_metrics_output_path.c_str() + encoding:[NSString defaultCStringEncoding]]; + NSString *outputPath = [outputDirectories[0] stringByAppendingPathComponent:fileName]; + exporters.push_back(std::make_unique( + webrtc::test::MetricsSetProtoFileExporter::Options( + [NSString stdStringForString:outputPath]))); + } + } } else { exporters.push_back(std::make_unique()); } @@ -118,8 +132,6 @@ static absl::optional> g_metrics_to_plot; if (g_write_perf_output) { // Stores data into a proto file under the app's document directory. 
NSString *fileName = @"perftest-output.pb"; - NSArray *outputDirectories = - NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES); if ([outputDirectories count] != 0) { NSString *outputPath = [outputDirectories[0] stringByAppendingPathComponent:fileName]; @@ -167,12 +179,14 @@ void InitTestSuite(int (*test_suite)(void), char *argv[], bool write_perf_output, bool export_perf_results_new_api, + std::string webrtc_test_metrics_output_path, absl::optional> metrics_to_plot) { g_test_suite = test_suite; g_argc = argc; g_argv = argv; g_write_perf_output = write_perf_output; g_export_perf_results_new_api = export_perf_results_new_api; + g_webrtc_test_metrics_output_path = webrtc_test_metrics_output_path; g_metrics_to_plot = std::move(metrics_to_plot); } diff --git a/third_party/libwebrtc/test/network/BUILD.gn b/third_party/libwebrtc/test/network/BUILD.gn index 71cf2d79f3c2..379f6048cdb2 100644 --- a/third_party/libwebrtc/test/network/BUILD.gn +++ b/third_party/libwebrtc/test/network/BUILD.gn @@ -76,6 +76,7 @@ rtc_library("emulated_network") { "../../rtc_base:threading", "../../rtc_base/memory:always_valid_pointer", "../../rtc_base/synchronization:mutex", + "../../rtc_base/system:no_unique_address", "../../rtc_base/task_utils:repeating_task", "../../system_wrappers", "../../test:scoped_key_value_config", diff --git a/third_party/libwebrtc/test/network/cross_traffic_unittest.cc b/third_party/libwebrtc/test/network/cross_traffic_unittest.cc index 0c1bb46302c2..36aff67bb263 100644 --- a/third_party/libwebrtc/test/network/cross_traffic_unittest.cc +++ b/third_party/libwebrtc/test/network/cross_traffic_unittest.cc @@ -53,6 +53,7 @@ struct TrafficCounterFixture { /*id=*/1, rtc::IPAddress(kTestIpAddress), EmulatedEndpointConfig(), + EmulatedNetworkStatsGatheringMode::kDefault, }, /*is_enabled=*/true, &task_queue_, &clock}; }; @@ -124,7 +125,8 @@ TEST(CrossTrafficTest, RandomWalkCrossTraffic) { } TEST(TcpMessageRouteTest, DeliveredOnLossyNetwork) { - NetworkEmulationManagerImpl net(TimeMode::kSimulated); + NetworkEmulationManagerImpl net(TimeMode::kSimulated, + EmulatedNetworkStatsGatheringMode::kDefault); BuiltInNetworkBehaviorConfig send; // 800 kbps means that the 100 kB message would be delivered in ca 1 second // under ideal conditions and no overhead. 
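The test updates above track the API change that runs through the rest of this patch: the stats-gathering mode is now chosen once in the `NetworkEmulationManagerImpl` constructor (and propagated to the endpoints and network nodes it creates) instead of being read from each `EmulatedEndpointConfig`. A minimal usage sketch, mirroring the calls visible in these tests and assuming the usual webrtc test headers and namespaces; `kDebug` is the mode that enables the per-packet samplers added later in network_emulation.cc:

    // Sketch only: one manager-wide choice of stats detail for all endpoints
    // and emulated network nodes created from this manager.
    webrtc::test::NetworkEmulationManagerImpl net(
        webrtc::TimeMode::kSimulated,
        webrtc::EmulatedNetworkStatsGatheringMode::kDebug);
    webrtc::EmulatedNetworkNode* node = net.CreateEmulatedNode(
        std::make_unique<webrtc::SimulatedNetwork>(
            webrtc::BuiltInNetworkBehaviorConfig()));
    // Node-level packet-transport-time stats can then be queried through the
    // new GetStats() overload for EmulatedNetworkNode introduced by this patch.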
diff --git a/third_party/libwebrtc/test/network/emulated_network_manager.cc b/third_party/libwebrtc/test/network/emulated_network_manager.cc index a540b9d720e8..fa4037e5dbbb 100644 --- a/third_party/libwebrtc/test/network/emulated_network_manager.cc +++ b/third_party/libwebrtc/test/network/emulated_network_manager.cc @@ -84,15 +84,6 @@ void EmulatedNetworkManager::StopUpdating() { } } -void EmulatedNetworkManager::GetStats( - std::function)> stats_callback) - const { - task_queue_->PostTask([stats_callback, this]() { - stats_callback(std::make_unique( - endpoints_container_->GetStats())); - }); -} - void EmulatedNetworkManager::GetStats( std::function stats_callback) const { task_queue_->PostTask([stats_callback, this]() { diff --git a/third_party/libwebrtc/test/network/emulated_network_manager.h b/third_party/libwebrtc/test/network/emulated_network_manager.h index a53cf47ff39f..fb4ee1ee8560 100644 --- a/third_party/libwebrtc/test/network/emulated_network_manager.h +++ b/third_party/libwebrtc/test/network/emulated_network_manager.h @@ -58,8 +58,6 @@ class EmulatedNetworkManager : public rtc::NetworkManagerBase, std::vector endpoints() const override { return endpoints_container_->GetEndpoints(); } - void GetStats(std::function)> - stats_callback) const override; void GetStats( std::function stats_callback) const override; diff --git a/third_party/libwebrtc/test/network/feedback_generator.cc b/third_party/libwebrtc/test/network/feedback_generator.cc index 68fbcc02a5fe..e339fd87b0db 100644 --- a/third_party/libwebrtc/test/network/feedback_generator.cc +++ b/third_party/libwebrtc/test/network/feedback_generator.cc @@ -18,7 +18,7 @@ namespace webrtc { FeedbackGeneratorImpl::FeedbackGeneratorImpl( FeedbackGeneratorImpl::Config config) : conf_(config), - net_(TimeMode::kSimulated), + net_(TimeMode::kSimulated, EmulatedNetworkStatsGatheringMode::kDefault), send_link_{new SimulatedNetwork(conf_.send_link)}, ret_link_{new SimulatedNetwork(conf_.return_link)}, route_(this, diff --git a/third_party/libwebrtc/test/network/network_emulation.cc b/third_party/libwebrtc/test/network/network_emulation.cc index 0ceb9ab08f24..f1c9ca80dd33 100644 --- a/third_party/libwebrtc/test/network/network_emulation.cc +++ b/third_party/libwebrtc/test/network/network_emulation.cc @@ -17,8 +17,11 @@ #include "absl/types/optional.h" #include "api/numerics/samples_stats_counter.h" +#include "api/sequence_checker.h" #include "api/test/network_emulation/network_emulation_interfaces.h" +#include "api/test/network_emulation_manager.h" #include "api/units/data_size.h" +#include "api/units/time_delta.h" #include "rtc_base/logging.h" namespace webrtc { @@ -26,8 +29,9 @@ namespace { EmulatedNetworkOutgoingStats GetOverallOutgoingStats( const std::map& - outgoing_stats) { - EmulatedNetworkOutgoingStatsBuilder builder; + outgoing_stats, + EmulatedNetworkStatsGatheringMode mode) { + EmulatedNetworkOutgoingStatsBuilder builder(mode); for (const auto& entry : outgoing_stats) { builder.AddOutgoingStats(entry.second); } @@ -36,8 +40,9 @@ EmulatedNetworkOutgoingStats GetOverallOutgoingStats( EmulatedNetworkIncomingStats GetOverallIncomingStats( const std::map& - incoming_stats) { - EmulatedNetworkIncomingStatsBuilder builder; + incoming_stats, + EmulatedNetworkStatsGatheringMode mode) { + EmulatedNetworkIncomingStatsBuilder builder(mode); for (const auto& entry : incoming_stats) { builder.AddIncomingStats(entry.second); } @@ -46,14 +51,14 @@ EmulatedNetworkIncomingStats GetOverallIncomingStats( } // namespace 
-EmulatedNetworkOutgoingStatsBuilder::EmulatedNetworkOutgoingStatsBuilder() { +EmulatedNetworkOutgoingStatsBuilder::EmulatedNetworkOutgoingStatsBuilder( + EmulatedNetworkStatsGatheringMode stats_gathering_mode) + : stats_gathering_mode_(stats_gathering_mode) { sequence_checker_.Detach(); } -void EmulatedNetworkOutgoingStatsBuilder::OnPacketSent( - Timestamp sent_time, - DataSize packet_size, - EmulatedEndpointConfig::StatsGatheringMode mode) { +void EmulatedNetworkOutgoingStatsBuilder::OnPacketSent(Timestamp sent_time, + DataSize packet_size) { RTC_DCHECK_RUN_ON(&sequence_checker_); RTC_CHECK_GE(packet_size, DataSize::Zero()); if (stats_.first_packet_sent_time.IsInfinite()) { @@ -63,7 +68,7 @@ void EmulatedNetworkOutgoingStatsBuilder::OnPacketSent( stats_.last_packet_sent_time = sent_time; stats_.packets_sent++; stats_.bytes_sent += packet_size; - if (mode == EmulatedEndpointConfig::StatsGatheringMode::kDebug) { + if (stats_gathering_mode_ == EmulatedNetworkStatsGatheringMode::kDebug) { stats_.sent_packets_size.AddSample(packet_size.bytes()); } } @@ -89,25 +94,25 @@ EmulatedNetworkOutgoingStats EmulatedNetworkOutgoingStatsBuilder::Build() return stats_; } -EmulatedNetworkIncomingStatsBuilder::EmulatedNetworkIncomingStatsBuilder() { +EmulatedNetworkIncomingStatsBuilder::EmulatedNetworkIncomingStatsBuilder( + EmulatedNetworkStatsGatheringMode stats_gathering_mode) + : stats_gathering_mode_(stats_gathering_mode) { sequence_checker_.Detach(); } void EmulatedNetworkIncomingStatsBuilder::OnPacketDropped( - DataSize packet_size, - EmulatedEndpointConfig::StatsGatheringMode mode) { + DataSize packet_size) { RTC_DCHECK_RUN_ON(&sequence_checker_); stats_.packets_discarded_no_receiver++; stats_.bytes_discarded_no_receiver += packet_size; - if (mode == EmulatedEndpointConfig::StatsGatheringMode::kDebug) { + if (stats_gathering_mode_ == EmulatedNetworkStatsGatheringMode::kDebug) { stats_.packets_discarded_no_receiver_size.AddSample(packet_size.bytes()); } } void EmulatedNetworkIncomingStatsBuilder::OnPacketReceived( Timestamp received_time, - DataSize packet_size, - EmulatedEndpointConfig::StatsGatheringMode mode) { + DataSize packet_size) { RTC_DCHECK_RUN_ON(&sequence_checker_); RTC_CHECK_GE(packet_size, DataSize::Zero()); if (stats_.first_packet_received_time.IsInfinite()) { @@ -117,7 +122,7 @@ void EmulatedNetworkIncomingStatsBuilder::OnPacketReceived( stats_.last_packet_received_time = received_time; stats_.packets_received++; stats_.bytes_received += packet_size; - if (mode == EmulatedEndpointConfig::StatsGatheringMode::kDebug) { + if (stats_gathering_mode_ == EmulatedNetworkStatsGatheringMode::kDebug) { stats_.received_packets_size.AddSample(packet_size.bytes()); } } @@ -147,46 +152,69 @@ EmulatedNetworkIncomingStats EmulatedNetworkIncomingStatsBuilder::Build() return stats_; } -EmulatedNetworkStatsBuilder::EmulatedNetworkStatsBuilder() { +EmulatedNetworkStatsBuilder::EmulatedNetworkStatsBuilder( + EmulatedNetworkStatsGatheringMode stats_gathering_mode) + : stats_gathering_mode_(stats_gathering_mode) { sequence_checker_.Detach(); } EmulatedNetworkStatsBuilder::EmulatedNetworkStatsBuilder( - rtc::IPAddress local_ip) { + rtc::IPAddress local_ip, + EmulatedNetworkStatsGatheringMode stats_gathering_mode) + : stats_gathering_mode_(stats_gathering_mode) { local_addresses_.push_back(local_ip); sequence_checker_.Detach(); } -void EmulatedNetworkStatsBuilder::OnPacketSent( - Timestamp queued_time, - Timestamp sent_time, - rtc::IPAddress destination_ip, - DataSize packet_size, - 
EmulatedEndpointConfig::StatsGatheringMode mode) { +void EmulatedNetworkStatsBuilder::OnPacketSent(Timestamp queued_time, + Timestamp sent_time, + rtc::IPAddress destination_ip, + DataSize packet_size) { RTC_DCHECK_RUN_ON(&sequence_checker_); - if (mode == EmulatedEndpointConfig::StatsGatheringMode::kDebug) { + if (stats_gathering_mode_ == EmulatedNetworkStatsGatheringMode::kDebug) { sent_packets_queue_wait_time_us_.AddSample((sent_time - queued_time).us()); } - outgoing_stats_per_destination_[destination_ip].OnPacketSent( - sent_time, packet_size, mode); + auto it = outgoing_stats_per_destination_.find(destination_ip); + if (it == outgoing_stats_per_destination_.end()) { + outgoing_stats_per_destination_ + .emplace(destination_ip, + std::make_unique( + stats_gathering_mode_)) + .first->second->OnPacketSent(sent_time, packet_size); + } else { + it->second->OnPacketSent(sent_time, packet_size); + } } -void EmulatedNetworkStatsBuilder::OnPacketDropped( - rtc::IPAddress source_ip, - DataSize packet_size, - EmulatedEndpointConfig::StatsGatheringMode mode) { +void EmulatedNetworkStatsBuilder::OnPacketDropped(rtc::IPAddress source_ip, + DataSize packet_size) { RTC_DCHECK_RUN_ON(&sequence_checker_); - incoming_stats_per_source_[source_ip].OnPacketDropped(packet_size, mode); + auto it = incoming_stats_per_source_.find(source_ip); + if (it == incoming_stats_per_source_.end()) { + incoming_stats_per_source_ + .emplace(source_ip, + std::make_unique( + stats_gathering_mode_)) + .first->second->OnPacketDropped(packet_size); + } else { + it->second->OnPacketDropped(packet_size); + } } -void EmulatedNetworkStatsBuilder::OnPacketReceived( - Timestamp received_time, - rtc::IPAddress source_ip, - DataSize packet_size, - EmulatedEndpointConfig::StatsGatheringMode mode) { +void EmulatedNetworkStatsBuilder::OnPacketReceived(Timestamp received_time, + rtc::IPAddress source_ip, + DataSize packet_size) { RTC_DCHECK_RUN_ON(&sequence_checker_); - incoming_stats_per_source_[source_ip].OnPacketReceived(received_time, - packet_size, mode); + auto it = incoming_stats_per_source_.find(source_ip); + if (it == incoming_stats_per_source_.end()) { + incoming_stats_per_source_ + .emplace(source_ip, + std::make_unique( + stats_gathering_mode_)) + .first->second->OnPacketReceived(received_time, packet_size); + } else { + it->second->OnPacketReceived(received_time, packet_size); + } } void EmulatedNetworkStatsBuilder::AddEmulatedNetworkStats( @@ -203,12 +231,30 @@ void EmulatedNetworkStatsBuilder::AddEmulatedNetworkStats( // Add outgoing stats from other endpoints to the builder. for (const auto& entry : stats.outgoing_stats_per_destination) { - outgoing_stats_per_destination_[entry.first].AddOutgoingStats(entry.second); + auto it = outgoing_stats_per_destination_.find(entry.first); + if (it == outgoing_stats_per_destination_.end()) { + outgoing_stats_per_destination_ + .emplace(entry.first, + std::make_unique( + stats_gathering_mode_)) + .first->second->AddOutgoingStats(entry.second); + } else { + it->second->AddOutgoingStats(entry.second); + } } // Add incoming stats from other endpoints to the builder. 
for (const auto& entry : stats.incoming_stats_per_source) { - incoming_stats_per_source_[entry.first].AddIncomingStats(entry.second); + auto it = incoming_stats_per_source_.find(entry.first); + if (it == incoming_stats_per_source_.end()) { + incoming_stats_per_source_ + .emplace(entry.first, + std::make_unique( + stats_gathering_mode_)) + .first->second->AddIncomingStats(entry.second); + } else { + it->second->AddIncomingStats(entry.second); + } } } @@ -216,21 +262,53 @@ EmulatedNetworkStats EmulatedNetworkStatsBuilder::Build() const { RTC_DCHECK_RUN_ON(&sequence_checker_); std::map outgoing_stats; for (const auto& entry : outgoing_stats_per_destination_) { - outgoing_stats.emplace(entry.first, entry.second.Build()); + outgoing_stats.emplace(entry.first, entry.second->Build()); } std::map incoming_stats; for (const auto& entry : incoming_stats_per_source_) { - incoming_stats.emplace(entry.first, entry.second.Build()); + incoming_stats.emplace(entry.first, entry.second->Build()); } return EmulatedNetworkStats{ .local_addresses = local_addresses_, - .overall_outgoing_stats = GetOverallOutgoingStats(outgoing_stats), - .overall_incoming_stats = GetOverallIncomingStats(incoming_stats), + .overall_outgoing_stats = + GetOverallOutgoingStats(outgoing_stats, stats_gathering_mode_), + .overall_incoming_stats = + GetOverallIncomingStats(incoming_stats, stats_gathering_mode_), .outgoing_stats_per_destination = std::move(outgoing_stats), .incoming_stats_per_source = std::move(incoming_stats), .sent_packets_queue_wait_time_us = sent_packets_queue_wait_time_us_}; } +EmulatedNetworkNodeStatsBuilder::EmulatedNetworkNodeStatsBuilder( + EmulatedNetworkStatsGatheringMode stats_gathering_mode) + : stats_gathering_mode_(stats_gathering_mode) { + sequence_checker_.Detach(); +} + +void EmulatedNetworkNodeStatsBuilder::AddPacketTransportTime( + TimeDelta time, + size_t packet_size) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + if (stats_gathering_mode_ == EmulatedNetworkStatsGatheringMode::kDebug) { + stats_.packet_transport_time.AddSample(time.ms()); + stats_.size_to_packet_transport_time.AddSample(packet_size / + time.ms()); + } +} + +void EmulatedNetworkNodeStatsBuilder::AddEmulatedNetworkNodeStats( + const EmulatedNetworkNodeStats& stats) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + stats_.packet_transport_time.AddSamples(stats.packet_transport_time); + stats_.size_to_packet_transport_time.AddSamples( + stats.size_to_packet_transport_time); +} + +EmulatedNetworkNodeStats EmulatedNetworkNodeStatsBuilder::Build() const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + return stats_; +} + void LinkEmulation::OnPacketReceived(EmulatedIpPacket packet) { task_queue_->PostTask([this, packet = std::move(packet)]() mutable { RTC_DCHECK_RUN_ON(task_queue_); @@ -239,7 +317,10 @@ void LinkEmulation::OnPacketReceived(EmulatedIpPacket packet) { bool sent = network_behavior_->EnqueuePacket(PacketInFlightInfo( packet.ip_packet_size(), packet.arrival_time.us(), packet_id)); if (sent) { - packets_.emplace_back(StoredPacket{packet_id, std::move(packet), false}); + packets_.emplace_back(StoredPacket{.id = packet_id, + .sent_time = clock_->CurrentTime(), + .packet = std::move(packet), + .removed = false}); } if (process_task_.Running()) return; @@ -268,6 +349,11 @@ void LinkEmulation::OnPacketReceived(EmulatedIpPacket packet) { }); } +EmulatedNetworkNodeStats LinkEmulation::stats() const { + RTC_DCHECK_RUN_ON(task_queue_); + return stats_builder_.Build(); +} + void LinkEmulation::Process(Timestamp at_time) { std::vector delivery_infos = 
network_behavior_->DequeueDeliverablePackets(at_time.us()); @@ -282,6 +368,9 @@ void LinkEmulation::Process(Timestamp at_time) { RTC_CHECK(packet); RTC_DCHECK(!packet->removed); packet->removed = true; + stats_builder_.AddPacketTransportTime( + clock_->CurrentTime() - packet->sent_time, + packet->packet.ip_packet_size()); if (delivery_info.receive_time_us != PacketDeliveryInfo::kNotReceived) { packet->packet.arrival_time = @@ -371,14 +460,23 @@ void NetworkRouterNode::SetFilter( EmulatedNetworkNode::EmulatedNetworkNode( Clock* clock, rtc::TaskQueue* task_queue, - std::unique_ptr network_behavior) + std::unique_ptr network_behavior, + EmulatedNetworkStatsGatheringMode stats_gathering_mode) : router_(task_queue), - link_(clock, task_queue, std::move(network_behavior), &router_) {} + link_(clock, + task_queue, + std::move(network_behavior), + &router_, + stats_gathering_mode) {} void EmulatedNetworkNode::OnPacketReceived(EmulatedIpPacket packet) { link_.OnPacketReceived(std::move(packet)); } +EmulatedNetworkNodeStats EmulatedNetworkNode::stats() const { + return link_.stats(); +} + void EmulatedNetworkNode::CreateRoute( const rtc::IPAddress& receiver_ip, std::vector nodes, @@ -397,12 +495,14 @@ void EmulatedNetworkNode::ClearRoute(const rtc::IPAddress& receiver_ip, EmulatedNetworkNode::~EmulatedNetworkNode() = default; -EmulatedEndpointImpl::Options::Options(uint64_t id, - const rtc::IPAddress& ip, - const EmulatedEndpointConfig& config) +EmulatedEndpointImpl::Options::Options( + uint64_t id, + const rtc::IPAddress& ip, + const EmulatedEndpointConfig& config, + EmulatedNetworkStatsGatheringMode stats_gathering_mode) : id(id), ip(ip), - stats_gathering_mode(config.stats_gathering_mode), + stats_gathering_mode(stats_gathering_mode), type(config.type), allow_send_packet_with_different_source_ip( config.allow_send_packet_with_different_source_ip), @@ -420,7 +520,7 @@ EmulatedEndpointImpl::EmulatedEndpointImpl(const Options& options, task_queue_(task_queue), router_(task_queue_), next_port_(kFirstEphemeralPort), - stats_builder_(options_.ip) { + stats_builder_(options_.ip, options_.stats_gathering_mode) { constexpr int kIPv4NetworkPrefixLength = 24; constexpr int kIPv6NetworkPrefixLength = 64; @@ -459,8 +559,7 @@ void EmulatedEndpointImpl::SendPacket(const rtc::SocketAddress& from, RTC_DCHECK_RUN_ON(task_queue_); stats_builder_.OnPacketSent(packet.arrival_time, clock_->CurrentTime(), packet.to.ipaddr(), - DataSize::Bytes(packet.ip_packet_size()), - options_.stats_gathering_mode); + DataSize::Bytes(packet.ip_packet_size())); if (packet.to.ipaddr() == options_.ip) { OnPacketReceived(std::move(packet)); @@ -566,8 +665,7 @@ void EmulatedEndpointImpl::OnPacketReceived(EmulatedIpPacket packet) { } MutexLock lock(&receiver_lock_); stats_builder_.OnPacketReceived(clock_->CurrentTime(), packet.from.ipaddr(), - DataSize::Bytes(packet.ip_packet_size()), - options_.stats_gathering_mode); + DataSize::Bytes(packet.ip_packet_size())); auto it = port_to_receiver_.find(packet.to.port()); if (it == port_to_receiver_.end()) { if (default_receiver_.has_value()) { @@ -582,8 +680,7 @@ void EmulatedEndpointImpl::OnPacketReceived(EmulatedIpPacket packet) { << " on port " << packet.to.port() << ". 
Packet source: " << packet.from.ToString(); stats_builder_.OnPacketDropped(packet.from.ipaddr(), - DataSize::Bytes(packet.ip_packet_size()), - options_.stats_gathering_mode); + DataSize::Bytes(packet.ip_packet_size())); return; } // Endpoint holds lock during packet processing to ensure that a call to @@ -618,10 +715,6 @@ EmulatedNetworkStats EmulatedEndpointImpl::stats() const { return stats_builder_.Build(); } -EndpointsContainer::EndpointsContainer( - const std::vector& endpoints) - : endpoints_(endpoints) {} - EmulatedEndpointImpl* EndpointsContainer::LookupByLocalAddress( const rtc::IPAddress& local_ip) const { for (auto* endpoint : endpoints_) { @@ -633,6 +726,11 @@ EmulatedEndpointImpl* EndpointsContainer::LookupByLocalAddress( RTC_CHECK(false) << "No network found for address" << local_ip.ToString(); } +EndpointsContainer::EndpointsContainer( + const std::vector& endpoints, + EmulatedNetworkStatsGatheringMode stats_gathering_mode) + : endpoints_(endpoints), stats_gathering_mode_(stats_gathering_mode) {} + bool EndpointsContainer::HasEndpoint(EmulatedEndpointImpl* endpoint) const { for (auto* e : endpoints_) { if (e->GetId() == endpoint->GetId()) { @@ -659,7 +757,7 @@ std::vector EndpointsContainer::GetEndpoints() const { } EmulatedNetworkStats EndpointsContainer::GetStats() const { - EmulatedNetworkStatsBuilder stats_builder; + EmulatedNetworkStatsBuilder stats_builder(stats_gathering_mode_); for (auto* endpoint : endpoints_) { stats_builder.AddEmulatedNetworkStats(endpoint->stats()); } diff --git a/third_party/libwebrtc/test/network/network_emulation.h b/third_party/libwebrtc/test/network/network_emulation.h index 4c99d85d24bd..dffabafa7cce 100644 --- a/third_party/libwebrtc/test/network/network_emulation.h +++ b/third_party/libwebrtc/test/network/network_emulation.h @@ -23,14 +23,17 @@ #include "api/array_view.h" #include "api/numerics/samples_stats_counter.h" #include "api/sequence_checker.h" +#include "api/test/network_emulation/network_emulation_interfaces.h" #include "api/test/network_emulation_manager.h" #include "api/test/simulated_network.h" +#include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/network.h" #include "rtc_base/network_constants.h" #include "rtc_base/socket_address.h" #include "rtc_base/synchronization/mutex.h" +#include "rtc_base/system/no_unique_address.h" #include "rtc_base/task_queue_for_test.h" #include "rtc_base/task_utils/repeating_task.h" #include "rtc_base/thread_annotations.h" @@ -42,19 +45,19 @@ namespace webrtc { // single thread. It may be created on another thread. class EmulatedNetworkOutgoingStatsBuilder { public: - EmulatedNetworkOutgoingStatsBuilder(); + explicit EmulatedNetworkOutgoingStatsBuilder( + EmulatedNetworkStatsGatheringMode stats_gathering_mode); - void OnPacketSent(Timestamp sent_time, - DataSize packet_size, - EmulatedEndpointConfig::StatsGatheringMode mode); + void OnPacketSent(Timestamp sent_time, DataSize packet_size); void AddOutgoingStats(const EmulatedNetworkOutgoingStats& stats); EmulatedNetworkOutgoingStats Build() const; private: - RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_; + const EmulatedNetworkStatsGatheringMode stats_gathering_mode_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_; EmulatedNetworkOutgoingStats stats_ RTC_GUARDED_BY(sequence_checker_); }; @@ -62,14 +65,12 @@ class EmulatedNetworkOutgoingStatsBuilder { // single thread. It may be created on another thread. 
class EmulatedNetworkIncomingStatsBuilder { public: - EmulatedNetworkIncomingStatsBuilder(); + explicit EmulatedNetworkIncomingStatsBuilder( + EmulatedNetworkStatsGatheringMode stats_gathering_mode); - void OnPacketDropped(DataSize packet_size, - EmulatedEndpointConfig::StatsGatheringMode mode); + void OnPacketDropped(DataSize packet_size); - void OnPacketReceived(Timestamp received_time, - DataSize packet_size, - EmulatedEndpointConfig::StatsGatheringMode mode); + void OnPacketReceived(Timestamp received_time, DataSize packet_size); // Adds stats collected from another endpoints to the builder. void AddIncomingStats(const EmulatedNetworkIncomingStats& stats); @@ -77,8 +78,9 @@ class EmulatedNetworkIncomingStatsBuilder { EmulatedNetworkIncomingStats Build() const; private: - RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_; + const EmulatedNetworkStatsGatheringMode stats_gathering_mode_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_; EmulatedNetworkIncomingStats stats_ RTC_GUARDED_BY(sequence_checker_); }; @@ -86,55 +88,80 @@ class EmulatedNetworkIncomingStatsBuilder { // thread. It may be created on another thread. class EmulatedNetworkStatsBuilder { public: - EmulatedNetworkStatsBuilder(); - explicit EmulatedNetworkStatsBuilder(rtc::IPAddress local_ip); + explicit EmulatedNetworkStatsBuilder( + EmulatedNetworkStatsGatheringMode stats_gathering_mode); + explicit EmulatedNetworkStatsBuilder( + rtc::IPAddress local_ip, + EmulatedNetworkStatsGatheringMode stats_gathering_mode); void OnPacketSent(Timestamp queued_time, Timestamp sent_time, rtc::IPAddress destination_ip, - DataSize packet_size, - EmulatedEndpointConfig::StatsGatheringMode mode); + DataSize packet_size); - void OnPacketDropped(rtc::IPAddress source_ip, - DataSize packet_size, - EmulatedEndpointConfig::StatsGatheringMode mode); + void OnPacketDropped(rtc::IPAddress source_ip, DataSize packet_size); void OnPacketReceived(Timestamp received_time, rtc::IPAddress source_ip, - DataSize packet_size, - EmulatedEndpointConfig::StatsGatheringMode mode); + DataSize packet_size); void AddEmulatedNetworkStats(const EmulatedNetworkStats& stats); EmulatedNetworkStats Build() const; private: - RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_; + const EmulatedNetworkStatsGatheringMode stats_gathering_mode_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_; std::vector local_addresses_ RTC_GUARDED_BY(sequence_checker_); SamplesStatsCounter sent_packets_queue_wait_time_us_; - std::map + std::map> outgoing_stats_per_destination_ RTC_GUARDED_BY(sequence_checker_); - std::map + std::map> incoming_stats_per_source_ RTC_GUARDED_BY(sequence_checker_); }; +// All methods of EmulatedNetworkNodeStatsBuilder have to be used on a +// single thread. It may be created on another thread. 
+class EmulatedNetworkNodeStatsBuilder { + public: + explicit EmulatedNetworkNodeStatsBuilder( + EmulatedNetworkStatsGatheringMode stats_gathering_mode); + + void AddPacketTransportTime(TimeDelta time, size_t packet_size); + + void AddEmulatedNetworkNodeStats(const EmulatedNetworkNodeStats& stats); + + EmulatedNetworkNodeStats Build() const; + + private: + const EmulatedNetworkStatsGatheringMode stats_gathering_mode_; + + RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_; + EmulatedNetworkNodeStats stats_ RTC_GUARDED_BY(sequence_checker_); +}; + class LinkEmulation : public EmulatedNetworkReceiverInterface { public: LinkEmulation(Clock* clock, rtc::TaskQueue* task_queue, std::unique_ptr network_behavior, - EmulatedNetworkReceiverInterface* receiver) + EmulatedNetworkReceiverInterface* receiver, + EmulatedNetworkStatsGatheringMode stats_gathering_mode) : clock_(clock), task_queue_(task_queue), network_behavior_(std::move(network_behavior)), - receiver_(receiver) {} + receiver_(receiver), + stats_builder_(stats_gathering_mode) {} void OnPacketReceived(EmulatedIpPacket packet) override; + EmulatedNetworkNodeStats stats() const; + private: struct StoredPacket { uint64_t id; + Timestamp sent_time; EmulatedIpPacket packet; bool removed; }; @@ -145,9 +172,12 @@ class LinkEmulation : public EmulatedNetworkReceiverInterface { const std::unique_ptr network_behavior_ RTC_GUARDED_BY(task_queue_); EmulatedNetworkReceiverInterface* const receiver_; + RepeatingTaskHandle process_task_ RTC_GUARDED_BY(task_queue_); std::deque packets_ RTC_GUARDED_BY(task_queue_); uint64_t next_packet_id_ RTC_GUARDED_BY(task_queue_) = 1; + + EmulatedNetworkNodeStatsBuilder stats_builder_ RTC_GUARDED_BY(task_queue_); }; // Represents a component responsible for routing packets based on their IP @@ -195,7 +225,8 @@ class EmulatedNetworkNode : public EmulatedNetworkReceiverInterface { EmulatedNetworkNode( Clock* clock, rtc::TaskQueue* task_queue, - std::unique_ptr network_behavior); + std::unique_ptr network_behavior, + EmulatedNetworkStatsGatheringMode stats_gathering_mode); ~EmulatedNetworkNode() override; EmulatedNetworkNode(const EmulatedNetworkNode&) = delete; @@ -205,6 +236,7 @@ class EmulatedNetworkNode : public EmulatedNetworkReceiverInterface { LinkEmulation* link() { return &link_; } NetworkRouterNode* router() { return &router_; } + EmulatedNetworkNodeStats stats() const; // Creates a route for the given receiver_ip over all the given nodes to the // given receiver. @@ -228,13 +260,14 @@ class EmulatedEndpointImpl : public EmulatedEndpoint { struct Options { Options(uint64_t id, const rtc::IPAddress& ip, - const EmulatedEndpointConfig& config); + const EmulatedEndpointConfig& config, + EmulatedNetworkStatsGatheringMode stats_gathering_mode); // TODO(titovartem) check if we can remove id. uint64_t id; // Endpoint local IP address. rtc::IPAddress ip; - EmulatedEndpointConfig::StatsGatheringMode stats_gathering_mode; + EmulatedNetworkStatsGatheringMode stats_gathering_mode; rtc::AdapterType type; // Allow endpoint to send packets specifying source IP address different to // the current endpoint IP address. If false endpoint will crash if attempt @@ -343,8 +376,8 @@ class EmulatedRoute { // This object is immutable and so thread safe. 
class EndpointsContainer { public: - explicit EndpointsContainer( - const std::vector& endpoints); + EndpointsContainer(const std::vector& endpoints, + EmulatedNetworkStatsGatheringMode stats_gathering_mode); EmulatedEndpointImpl* LookupByLocalAddress( const rtc::IPAddress& local_ip) const; @@ -357,6 +390,7 @@ class EndpointsContainer { private: const std::vector endpoints_; + const EmulatedNetworkStatsGatheringMode stats_gathering_mode_; }; template diff --git a/third_party/libwebrtc/test/network/network_emulation_manager.cc b/third_party/libwebrtc/test/network/network_emulation_manager.cc index 5b342490b4ca..97c0bc1ba8db 100644 --- a/third_party/libwebrtc/test/network/network_emulation_manager.cc +++ b/third_party/libwebrtc/test/network/network_emulation_manager.cc @@ -16,7 +16,6 @@ #include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "call/simulated_network.h" -#include "rtc_base/fake_network.h" #include "test/network/emulated_turn_server.h" #include "test/network/traffic_route.h" #include "test/time_controller/real_time_controller.h" @@ -45,8 +44,11 @@ std::unique_ptr CreateTimeController(TimeMode mode) { } } // namespace -NetworkEmulationManagerImpl::NetworkEmulationManagerImpl(TimeMode mode) +NetworkEmulationManagerImpl::NetworkEmulationManagerImpl( + TimeMode mode, + EmulatedNetworkStatsGatheringMode stats_gathering_mode) : time_mode_(mode), + stats_gathering_mode_(stats_gathering_mode), time_controller_(CreateTimeController(mode)), clock_(time_controller_->GetClock()), next_node_id_(1), @@ -74,7 +76,7 @@ EmulatedNetworkNode* NetworkEmulationManagerImpl::CreateEmulatedNode( EmulatedNetworkNode* NetworkEmulationManagerImpl::CreateEmulatedNode( std::unique_ptr network_behavior) { auto node = std::make_unique( - clock_, &task_queue_, std::move(network_behavior)); + clock_, &task_queue_, std::move(network_behavior), stats_gathering_mode_); EmulatedNetworkNode* out = node.get(); task_queue_.PostTask([this, node = std::move(node)]() mutable { network_nodes_.push_back(std::move(node)); @@ -107,7 +109,8 @@ EmulatedEndpointImpl* NetworkEmulationManagerImpl::CreateEndpoint( bool res = used_ip_addresses_.insert(*ip).second; RTC_CHECK(res) << "IP=" << ip->ToString() << " already in use"; auto node = std::make_unique( - EmulatedEndpointImpl::Options(next_node_id_++, *ip, config), + EmulatedEndpointImpl::Options(next_node_id_++, *ip, config, + stats_gathering_mode_), config.start_as_enabled, &task_queue_, clock_); EmulatedEndpointImpl* out = node.get(); endpoints_.push_back(std::move(node)); @@ -279,8 +282,8 @@ NetworkEmulationManagerImpl::CreateEmulatedNetworkManagerInterface( for (EmulatedEndpoint* endpoint : endpoints) { endpoint_impls.push_back(static_cast(endpoint)); } - auto endpoints_container = - std::make_unique(endpoint_impls); + auto endpoints_container = std::make_unique( + endpoint_impls, stats_gathering_mode_); auto network_manager = std::make_unique( time_controller_.get(), &task_queue_, endpoints_container.get()); for (auto* endpoint : endpoints) { @@ -300,28 +303,12 @@ NetworkEmulationManagerImpl::CreateEmulatedNetworkManagerInterface( return out; } -void NetworkEmulationManagerImpl::GetStats( - rtc::ArrayView endpoints, - std::function)> stats_callback) { - task_queue_.PostTask([endpoints, stats_callback]() { - EmulatedNetworkStatsBuilder stats_builder; - for (auto* endpoint : endpoints) { - // It's safe to cast here because EmulatedEndpointImpl can be the only - // implementation of EmulatedEndpoint, because only it has access to - // EmulatedEndpoint 
constructor. - auto endpoint_impl = static_cast(endpoint); - stats_builder.AddEmulatedNetworkStats(endpoint_impl->stats()); - } - stats_callback( - std::make_unique(stats_builder.Build())); - }); -} - void NetworkEmulationManagerImpl::GetStats( rtc::ArrayView endpoints, std::function stats_callback) { - task_queue_.PostTask([endpoints, stats_callback]() { - EmulatedNetworkStatsBuilder stats_builder; + task_queue_.PostTask([endpoints, stats_callback, + stats_gathering_mode = stats_gathering_mode_]() { + EmulatedNetworkStatsBuilder stats_builder(stats_gathering_mode); for (auto* endpoint : endpoints) { // It's safe to cast here because EmulatedEndpointImpl can be the only // implementation of EmulatedEndpoint, because only it has access to @@ -333,6 +320,19 @@ void NetworkEmulationManagerImpl::GetStats( }); } +void NetworkEmulationManagerImpl::GetStats( + rtc::ArrayView nodes, + std::function stats_callback) { + task_queue_.PostTask( + [nodes, stats_callback, stats_gathering_mode = stats_gathering_mode_]() { + EmulatedNetworkNodeStatsBuilder stats_builder(stats_gathering_mode); + for (auto* node : nodes) { + stats_builder.AddEmulatedNetworkNodeStats(node->stats()); + } + stats_callback(stats_builder.Build()); + }); +} + absl::optional NetworkEmulationManagerImpl::GetNextIPv4Address() { uint32_t addresses_count = kMaxIPv4Address - kMinIPv4Address; diff --git a/third_party/libwebrtc/test/network/network_emulation_manager.h b/third_party/libwebrtc/test/network/network_emulation_manager.h index 163137ab31f3..29debca693db 100644 --- a/third_party/libwebrtc/test/network/network_emulation_manager.h +++ b/third_party/libwebrtc/test/network/network_emulation_manager.h @@ -23,16 +23,12 @@ #include "api/test/time_controller.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" -#include "rtc_base/logging.h" -#include "rtc_base/network.h" #include "rtc_base/task_queue_for_test.h" #include "rtc_base/task_utils/repeating_task.h" -#include "rtc_base/thread.h" #include "system_wrappers/include/clock.h" #include "test/network/cross_traffic.h" #include "test/network/emulated_network_manager.h" #include "test/network/emulated_turn_server.h" -#include "test/network/fake_network_socket_server.h" #include "test/network/network_emulation.h" namespace webrtc { @@ -40,7 +36,9 @@ namespace test { class NetworkEmulationManagerImpl : public NetworkEmulationManager { public: - explicit NetworkEmulationManagerImpl(TimeMode mode); + NetworkEmulationManagerImpl( + TimeMode mode, + EmulatedNetworkStatsGatheringMode stats_gathering_mode); ~NetworkEmulationManagerImpl(); EmulatedNetworkNode* CreateEmulatedNode(BuiltInNetworkBehaviorConfig config, @@ -81,13 +79,14 @@ class NetworkEmulationManagerImpl : public NetworkEmulationManager { EmulatedNetworkManagerInterface* CreateEmulatedNetworkManagerInterface( const std::vector& endpoints) override; - void GetStats(rtc::ArrayView endpoints, - std::function)> - stats_callback) override; void GetStats( rtc::ArrayView endpoints, std::function stats_callback) override; + void GetStats( + rtc::ArrayView nodes, + std::function stats_callback) override; + TimeController* time_controller() override { return time_controller_.get(); } TimeMode time_mode() const override { return time_mode_; } @@ -104,6 +103,7 @@ class NetworkEmulationManagerImpl : public NetworkEmulationManager { absl::optional GetNextIPv4Address(); const TimeMode time_mode_; + const EmulatedNetworkStatsGatheringMode stats_gathering_mode_; const std::unique_ptr time_controller_; Clock* const clock_; int 
next_node_id_; diff --git a/third_party/libwebrtc/test/network/network_emulation_pc_unittest.cc b/third_party/libwebrtc/test/network/network_emulation_pc_unittest.cc index 0519dd816d93..51a45a8234b7 100644 --- a/third_party/libwebrtc/test/network/network_emulation_pc_unittest.cc +++ b/third_party/libwebrtc/test/network/network_emulation_pc_unittest.cc @@ -118,7 +118,8 @@ TEST(NetworkEmulationManagerPCTest, Run) { signaling_thread->Start(); // Setup emulated network - NetworkEmulationManagerImpl emulation(TimeMode::kRealTime); + NetworkEmulationManagerImpl emulation( + TimeMode::kRealTime, EmulatedNetworkStatsGatheringMode::kDefault); EmulatedNetworkNode* alice_node = emulation.CreateEmulatedNode( std::make_unique(BuiltInNetworkBehaviorConfig())); @@ -209,7 +210,8 @@ TEST(NetworkEmulationManagerPCTest, RunTURN) { signaling_thread->Start(); // Setup emulated network - NetworkEmulationManagerImpl emulation(TimeMode::kRealTime); + NetworkEmulationManagerImpl emulation( + TimeMode::kRealTime, EmulatedNetworkStatsGatheringMode::kDefault); EmulatedNetworkNode* alice_node = emulation.CreateEmulatedNode( std::make_unique(BuiltInNetworkBehaviorConfig())); diff --git a/third_party/libwebrtc/test/network/network_emulation_unittest.cc b/third_party/libwebrtc/test/network/network_emulation_unittest.cc index 49ba88d62c0c..2e67a5a00a4f 100644 --- a/third_party/libwebrtc/test/network/network_emulation_unittest.cc +++ b/third_party/libwebrtc/test/network/network_emulation_unittest.cc @@ -142,7 +142,8 @@ class NetworkEmulationManagerThreeNodesRoutingTest : public ::testing::Test { MockReceiver r_e1_e3_; MockReceiver r_e3_e1_; - NetworkEmulationManagerImpl emulation_{TimeMode::kRealTime}; + NetworkEmulationManagerImpl emulation_{ + TimeMode::kRealTime, EmulatedNetworkStatsGatheringMode::kDefault}; EmulatedEndpoint* e1_; EmulatedEndpoint* e2_; EmulatedEndpoint* e3_; @@ -159,7 +160,8 @@ EmulatedNetworkNode* CreateEmulatedNodeWithDefaultBuiltInConfig( using ::testing::_; TEST(NetworkEmulationManagerTest, GeneratedIpv4AddressDoesNotCollide) { - NetworkEmulationManagerImpl network_manager(TimeMode::kRealTime); + NetworkEmulationManagerImpl network_manager( + TimeMode::kRealTime, EmulatedNetworkStatsGatheringMode::kDefault); std::set ips; EmulatedEndpointConfig config; config.generated_ip_family = EmulatedEndpointConfig::IpAddressFamily::kIpv4; @@ -172,7 +174,8 @@ TEST(NetworkEmulationManagerTest, GeneratedIpv4AddressDoesNotCollide) { } TEST(NetworkEmulationManagerTest, GeneratedIpv6AddressDoesNotCollide) { - NetworkEmulationManagerImpl network_manager(TimeMode::kRealTime); + NetworkEmulationManagerImpl network_manager( + TimeMode::kRealTime, EmulatedNetworkStatsGatheringMode::kDefault); std::set ips; EmulatedEndpointConfig config; config.generated_ip_family = EmulatedEndpointConfig::IpAddressFamily::kIpv6; @@ -185,7 +188,8 @@ TEST(NetworkEmulationManagerTest, GeneratedIpv6AddressDoesNotCollide) { } TEST(NetworkEmulationManagerTest, Run) { - NetworkEmulationManagerImpl network_manager(TimeMode::kRealTime); + NetworkEmulationManagerImpl network_manager( + TimeMode::kRealTime, EmulatedNetworkStatsGatheringMode::kDefault); EmulatedNetworkNode* alice_node = network_manager.CreateEmulatedNode( std::make_unique(BuiltInNetworkBehaviorConfig())); @@ -251,19 +255,19 @@ TEST(NetworkEmulationManagerTest, Run) { const int64_t single_packet_size = data.size() + kOverheadIpv4Udp; std::atomic received_stats_count{0}; - nt1->GetStats([&](std::unique_ptr st) { - EXPECT_EQ(st->PacketsSent(), 2000l); - 
EXPECT_EQ(st->BytesSent().bytes(), single_packet_size * 2000l); - EXPECT_THAT(st->local_addresses, + nt1->GetStats([&](EmulatedNetworkStats st) { + EXPECT_EQ(st.PacketsSent(), 2000l); + EXPECT_EQ(st.BytesSent().bytes(), single_packet_size * 2000l); + EXPECT_THAT(st.local_addresses, ElementsAreArray({alice_endpoint->GetPeerLocalAddress()})); - EXPECT_EQ(st->PacketsReceived(), 2000l); - EXPECT_EQ(st->BytesReceived().bytes(), single_packet_size * 2000l); - EXPECT_EQ(st->PacketsDiscardedNoReceiver(), 0l); - EXPECT_EQ(st->BytesDiscardedNoReceiver().bytes(), 0l); + EXPECT_EQ(st.PacketsReceived(), 2000l); + EXPECT_EQ(st.BytesReceived().bytes(), single_packet_size * 2000l); + EXPECT_EQ(st.PacketsDiscardedNoReceiver(), 0l); + EXPECT_EQ(st.BytesDiscardedNoReceiver().bytes(), 0l); rtc::IPAddress bob_ip = bob_endpoint->GetPeerLocalAddress(); std::map source_st = - st->incoming_stats_per_source; + st.incoming_stats_per_source; ASSERT_EQ(source_st.size(), 1lu); EXPECT_EQ(source_st.at(bob_ip).packets_received, 2000l); EXPECT_EQ(source_st.at(bob_ip).bytes_received.bytes(), @@ -272,17 +276,17 @@ TEST(NetworkEmulationManagerTest, Run) { EXPECT_EQ(source_st.at(bob_ip).bytes_discarded_no_receiver.bytes(), 0l); std::map dest_st = - st->outgoing_stats_per_destination; + st.outgoing_stats_per_destination; ASSERT_EQ(dest_st.size(), 1lu); EXPECT_EQ(dest_st.at(bob_ip).packets_sent, 2000l); EXPECT_EQ(dest_st.at(bob_ip).bytes_sent.bytes(), single_packet_size * 2000l); // No debug stats are collected by default. - EXPECT_TRUE(st->SentPacketsSizeCounter().IsEmpty()); - EXPECT_TRUE(st->sent_packets_queue_wait_time_us.IsEmpty()); - EXPECT_TRUE(st->ReceivedPacketsSizeCounter().IsEmpty()); - EXPECT_TRUE(st->PacketsDiscardedNoReceiverSizeCounter().IsEmpty()); + EXPECT_TRUE(st.SentPacketsSizeCounter().IsEmpty()); + EXPECT_TRUE(st.sent_packets_queue_wait_time_us.IsEmpty()); + EXPECT_TRUE(st.ReceivedPacketsSizeCounter().IsEmpty()); + EXPECT_TRUE(st.PacketsDiscardedNoReceiverSizeCounter().IsEmpty()); EXPECT_TRUE(dest_st.at(bob_ip).sent_packets_size.IsEmpty()); EXPECT_TRUE(source_st.at(bob_ip).received_packets_size.IsEmpty()); EXPECT_TRUE( @@ -290,22 +294,22 @@ TEST(NetworkEmulationManagerTest, Run) { received_stats_count++; }); - nt2->GetStats([&](std::unique_ptr st) { - EXPECT_EQ(st->PacketsSent(), 2000l); - EXPECT_EQ(st->BytesSent().bytes(), single_packet_size * 2000l); - EXPECT_THAT(st->local_addresses, + nt2->GetStats([&](EmulatedNetworkStats st) { + EXPECT_EQ(st.PacketsSent(), 2000l); + EXPECT_EQ(st.BytesSent().bytes(), single_packet_size * 2000l); + EXPECT_THAT(st.local_addresses, ElementsAreArray({bob_endpoint->GetPeerLocalAddress()})); - EXPECT_EQ(st->PacketsReceived(), 2000l); - EXPECT_EQ(st->BytesReceived().bytes(), single_packet_size * 2000l); - EXPECT_EQ(st->PacketsDiscardedNoReceiver(), 0l); - EXPECT_EQ(st->BytesDiscardedNoReceiver().bytes(), 0l); - EXPECT_GT(st->FirstReceivedPacketSize(), DataSize::Zero()); - EXPECT_TRUE(st->FirstPacketReceivedTime().IsFinite()); - EXPECT_TRUE(st->LastPacketReceivedTime().IsFinite()); + EXPECT_EQ(st.PacketsReceived(), 2000l); + EXPECT_EQ(st.BytesReceived().bytes(), single_packet_size * 2000l); + EXPECT_EQ(st.PacketsDiscardedNoReceiver(), 0l); + EXPECT_EQ(st.BytesDiscardedNoReceiver().bytes(), 0l); + EXPECT_GT(st.FirstReceivedPacketSize(), DataSize::Zero()); + EXPECT_TRUE(st.FirstPacketReceivedTime().IsFinite()); + EXPECT_TRUE(st.LastPacketReceivedTime().IsFinite()); rtc::IPAddress alice_ip = alice_endpoint->GetPeerLocalAddress(); std::map source_st = - 
st->incoming_stats_per_source; + st.incoming_stats_per_source; ASSERT_EQ(source_st.size(), 1lu); EXPECT_EQ(source_st.at(alice_ip).packets_received, 2000l); EXPECT_EQ(source_st.at(alice_ip).bytes_received.bytes(), @@ -314,17 +318,17 @@ TEST(NetworkEmulationManagerTest, Run) { EXPECT_EQ(source_st.at(alice_ip).bytes_discarded_no_receiver.bytes(), 0l); std::map dest_st = - st->outgoing_stats_per_destination; + st.outgoing_stats_per_destination; ASSERT_EQ(dest_st.size(), 1lu); EXPECT_EQ(dest_st.at(alice_ip).packets_sent, 2000l); EXPECT_EQ(dest_st.at(alice_ip).bytes_sent.bytes(), single_packet_size * 2000l); // No debug stats are collected by default. - EXPECT_TRUE(st->SentPacketsSizeCounter().IsEmpty()); - EXPECT_TRUE(st->sent_packets_queue_wait_time_us.IsEmpty()); - EXPECT_TRUE(st->ReceivedPacketsSizeCounter().IsEmpty()); - EXPECT_TRUE(st->PacketsDiscardedNoReceiverSizeCounter().IsEmpty()); + EXPECT_TRUE(st.SentPacketsSizeCounter().IsEmpty()); + EXPECT_TRUE(st.sent_packets_queue_wait_time_us.IsEmpty()); + EXPECT_TRUE(st.ReceivedPacketsSizeCounter().IsEmpty()); + EXPECT_TRUE(st.PacketsDiscardedNoReceiverSizeCounter().IsEmpty()); EXPECT_TRUE(dest_st.at(alice_ip).sent_packets_size.IsEmpty()); EXPECT_TRUE(source_st.at(alice_ip).received_packets_size.IsEmpty()); EXPECT_TRUE( @@ -338,17 +342,15 @@ TEST(NetworkEmulationManagerTest, Run) { } TEST(NetworkEmulationManagerTest, DebugStatsCollectedInDebugMode) { - NetworkEmulationManagerImpl network_manager(TimeMode::kSimulated); + NetworkEmulationManagerImpl network_manager( + TimeMode::kSimulated, EmulatedNetworkStatsGatheringMode::kDebug); EmulatedNetworkNode* alice_node = network_manager.CreateEmulatedNode( std::make_unique(BuiltInNetworkBehaviorConfig())); EmulatedNetworkNode* bob_node = network_manager.CreateEmulatedNode( std::make_unique(BuiltInNetworkBehaviorConfig())); - EmulatedEndpointConfig debug_config; - debug_config.stats_gathering_mode = - EmulatedEndpointConfig::StatsGatheringMode::kDebug; EmulatedEndpoint* alice_endpoint = - network_manager.CreateEndpoint(debug_config); + network_manager.CreateEndpoint(EmulatedEndpointConfig()); EmulatedEndpoint* bob_endpoint = network_manager.CreateEndpoint(EmulatedEndpointConfig()); network_manager.CreateRoute(alice_endpoint, {alice_node}, bob_endpoint); @@ -407,23 +409,22 @@ TEST(NetworkEmulationManagerTest, DebugStatsCollectedInDebugMode) { const int64_t single_packet_size = data.size() + kOverheadIpv4Udp; std::atomic received_stats_count{0}; - nt1->GetStats([&](std::unique_ptr st) { + nt1->GetStats([&](EmulatedNetworkStats st) { rtc::IPAddress bob_ip = bob_endpoint->GetPeerLocalAddress(); std::map source_st = - st->incoming_stats_per_source; + st.incoming_stats_per_source; ASSERT_EQ(source_st.size(), 1lu); std::map dest_st = - st->outgoing_stats_per_destination; + st.outgoing_stats_per_destination; ASSERT_EQ(dest_st.size(), 1lu); // No debug stats are collected by default. 
- EXPECT_EQ(st->SentPacketsSizeCounter().NumSamples(), 2000l); - EXPECT_EQ(st->ReceivedPacketsSizeCounter().GetAverage(), - single_packet_size); - EXPECT_EQ(st->sent_packets_queue_wait_time_us.NumSamples(), 2000l); - EXPECT_LT(st->sent_packets_queue_wait_time_us.GetMax(), 1); - EXPECT_TRUE(st->PacketsDiscardedNoReceiverSizeCounter().IsEmpty()); + EXPECT_EQ(st.SentPacketsSizeCounter().NumSamples(), 2000l); + EXPECT_EQ(st.ReceivedPacketsSizeCounter().GetAverage(), single_packet_size); + EXPECT_EQ(st.sent_packets_queue_wait_time_us.NumSamples(), 2000l); + EXPECT_LT(st.sent_packets_queue_wait_time_us.GetMax(), 1); + EXPECT_TRUE(st.PacketsDiscardedNoReceiverSizeCounter().IsEmpty()); EXPECT_EQ(dest_st.at(bob_ip).sent_packets_size.NumSamples(), 2000l); EXPECT_EQ(dest_st.at(bob_ip).sent_packets_size.GetAverage(), single_packet_size); @@ -441,7 +442,8 @@ TEST(NetworkEmulationManagerTest, DebugStatsCollectedInDebugMode) { } TEST(NetworkEmulationManagerTest, ThroughputStats) { - NetworkEmulationManagerImpl network_manager(TimeMode::kRealTime); + NetworkEmulationManagerImpl network_manager( + TimeMode::kRealTime, EmulatedNetworkStatsGatheringMode::kDefault); EmulatedNetworkNode* alice_node = network_manager.CreateEmulatedNode( std::make_unique(BuiltInNetworkBehaviorConfig())); @@ -501,14 +503,14 @@ TEST(NetworkEmulationManagerTest, ThroughputStats) { } std::atomic received_stats_count{0}; - nt1->GetStats([&](std::unique_ptr st) { - EXPECT_EQ(st->PacketsSent(), kNumPacketsSent); - EXPECT_EQ(st->BytesSent().bytes(), kSinglePacketSize * kNumPacketsSent); + nt1->GetStats([&](EmulatedNetworkStats st) { + EXPECT_EQ(st.PacketsSent(), kNumPacketsSent); + EXPECT_EQ(st.BytesSent().bytes(), kSinglePacketSize * kNumPacketsSent); const double tolerance = 0.95; // Accept 5% tolerance for timing. 
- EXPECT_GE(st->LastPacketSentTime() - st->FirstPacketSentTime(), + EXPECT_GE(st.LastPacketSentTime() - st.FirstPacketSentTime(), (kNumPacketsSent - 1) * kDelay * tolerance); - EXPECT_GT(st->AverageSendRate().bps(), 0); + EXPECT_GT(st.AverageSendRate().bps(), 0); received_stats_count++; }); @@ -571,7 +573,8 @@ TEST_F(NetworkEmulationManagerThreeNodesRoutingTest, } TEST(NetworkEmulationManagerTest, EndpointLoopback) { - NetworkEmulationManagerImpl network_manager(TimeMode::kSimulated); + NetworkEmulationManagerImpl network_manager( + TimeMode::kSimulated, EmulatedNetworkStatsGatheringMode::kDefault); auto endpoint = network_manager.CreateEndpoint(EmulatedEndpointConfig()); MockReceiver receiver; @@ -587,7 +590,8 @@ TEST(NetworkEmulationManagerTest, EndpointLoopback) { TEST(NetworkEmulationManagerTest, EndpointCanSendWithDifferentSourceIp) { constexpr uint32_t kEndpointIp = 0xC0A80011; // 192.168.0.17 constexpr uint32_t kSourceIp = 0xC0A80012; // 192.168.0.18 - NetworkEmulationManagerImpl network_manager(TimeMode::kSimulated); + NetworkEmulationManagerImpl network_manager( + TimeMode::kSimulated, EmulatedNetworkStatsGatheringMode::kDefault); EmulatedEndpointConfig endpoint_config; endpoint_config.ip = rtc::IPAddress(kEndpointIp); endpoint_config.allow_send_packet_with_different_source_ip = true; @@ -607,7 +611,8 @@ TEST(NetworkEmulationManagerTest, EndpointCanReceiveWithDifferentDestIpThroughDefaultRoute) { constexpr uint32_t kDestEndpointIp = 0xC0A80011; // 192.168.0.17 constexpr uint32_t kDestIp = 0xC0A80012; // 192.168.0.18 - NetworkEmulationManagerImpl network_manager(TimeMode::kSimulated); + NetworkEmulationManagerImpl network_manager( + TimeMode::kSimulated, EmulatedNetworkStatsGatheringMode::kDefault); auto sender_endpoint = network_manager.CreateEndpoint(EmulatedEndpointConfig()); EmulatedEndpointConfig endpoint_config; @@ -630,7 +635,8 @@ TEST(NetworkEmulationManagerTest, } TEST(NetworkEmulationManagerTURNTest, GetIceServerConfig) { - NetworkEmulationManagerImpl network_manager(TimeMode::kRealTime); + NetworkEmulationManagerImpl network_manager( + TimeMode::kRealTime, EmulatedNetworkStatsGatheringMode::kDefault); auto turn = network_manager.CreateTURNServer(EmulatedTURNServerConfig()); EXPECT_GT(turn->GetIceServerConfig().username.size(), 0u); @@ -641,7 +647,8 @@ TEST(NetworkEmulationManagerTURNTest, GetIceServerConfig) { } TEST(NetworkEmulationManagerTURNTest, ClientTraffic) { - NetworkEmulationManagerImpl emulation(TimeMode::kSimulated); + NetworkEmulationManagerImpl emulation( + TimeMode::kSimulated, EmulatedNetworkStatsGatheringMode::kDefault); auto* ep = emulation.CreateEndpoint(EmulatedEndpointConfig()); auto* turn = emulation.CreateTURNServer(EmulatedTURNServerConfig()); auto* node = CreateEmulatedNodeWithDefaultBuiltInConfig(&emulation); diff --git a/third_party/libwebrtc/test/pc/e2e/BUILD.gn b/third_party/libwebrtc/test/pc/e2e/BUILD.gn index 807599961b08..2b9a69afc5c6 100644 --- a/third_party/libwebrtc/test/pc/e2e/BUILD.gn +++ b/third_party/libwebrtc/test/pc/e2e/BUILD.gn @@ -8,40 +8,20 @@ import("../../../webrtc.gni") -rtc_library("video_dumping") { +rtc_library("metric_metadata_keys") { testonly = true - sources = [ - "analyzer/video/video_dumping.cc", - "analyzer/video/video_dumping.h", - ] - deps = [ - "../..:video_test_support", - "../../../api/test/video:video_frame_writer", - "../../../api/video:video_frame", - "../../../rtc_base:checks", - "../../../rtc_base:logging", - "../../../system_wrappers", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] + 
sources = [ "metric_metadata_keys.h" ] } if (!build_with_chromium) { group("e2e") { testonly = true - deps = [ - ":encoded_image_data_injector_api", - ":example_video_quality_analyzer", - ":quality_analyzing_video_decoder", - ":quality_analyzing_video_encoder", - ":single_process_encoded_image_data_injector", - ":video_frame_tracking_id_injector", - ] + deps = [ ":metric_metadata_keys" ] if (rtc_include_tests) { deps += [ ":peerconnection_quality_test", ":test_peer", - ":video_quality_analyzer_injection_helper", ] } } @@ -51,246 +31,17 @@ if (!build_with_chromium) { testonly = true deps = [ - ":analyzing_video_sink_test", - ":analyzing_video_sinks_helper_test", - ":default_video_quality_analyzer_frames_comparator_test", - ":default_video_quality_analyzer_metric_names_test", - ":default_video_quality_analyzer_stream_state_test", - ":default_video_quality_analyzer_test", - ":multi_reader_queue_test", - ":names_collection_test", ":peer_connection_e2e_smoke_test", ":peer_connection_quality_test_metric_names_test", ":peer_connection_quality_test_test", - ":simulcast_dummy_buffer_helper_test", - ":single_process_encoded_image_data_injector_unittest", + ":stats_based_network_quality_metrics_reporter_test", ":stats_poller_test", - ":video_dumping_test", - ":video_frame_tracking_id_injector_unittest", ] } } - rtc_library("encoded_image_data_injector_api") { - visibility = [ "*" ] - testonly = true - sources = [ "analyzer/video/encoded_image_data_injector.h" ] - - deps = [ "../../../api/video:encoded_image" ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] - } - - rtc_library("single_process_encoded_image_data_injector") { - visibility = [ "*" ] - testonly = true - sources = [ - "analyzer/video/single_process_encoded_image_data_injector.cc", - "analyzer/video/single_process_encoded_image_data_injector.h", - ] - - deps = [ - ":encoded_image_data_injector_api", - "../../../api/video:encoded_image", - "../../../rtc_base:checks", - "../../../rtc_base:criticalsection", - "../../../rtc_base/synchronization:mutex", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/memory" ] - } - - rtc_library("video_frame_tracking_id_injector") { - visibility = [ "*" ] - testonly = true - sources = [ - "analyzer/video/video_frame_tracking_id_injector.cc", - "analyzer/video/video_frame_tracking_id_injector.h", - ] - - deps = [ - ":encoded_image_data_injector_api", - "../../../api/video:encoded_image", - "../../../rtc_base:checks", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/memory" ] - } - - rtc_library("simulcast_dummy_buffer_helper") { - visibility = [ "*" ] - testonly = true - sources = [ - "analyzer/video/simulcast_dummy_buffer_helper.cc", - "analyzer/video/simulcast_dummy_buffer_helper.h", - ] - deps = [ "../../../api/video:video_frame" ] - } - - rtc_library("simulcast_dummy_buffer_helper_test") { - testonly = true - sources = [ "analyzer/video/simulcast_dummy_buffer_helper_test.cc" ] - deps = [ - ":simulcast_dummy_buffer_helper", - "../..:test_support", - "../../../api/video:video_frame", - "../../../rtc_base:random", - ] - } - - rtc_library("quality_analyzing_video_decoder") { - visibility = [ "*" ] - testonly = true - sources = [ - "analyzer/video/quality_analyzing_video_decoder.cc", - "analyzer/video/quality_analyzing_video_decoder.h", - ] - deps = [ - ":encoded_image_data_injector_api", - ":simulcast_dummy_buffer_helper", - "../../../api:video_quality_analyzer_api", - "../../../api/video:encoded_image", - "../../../api/video:video_frame", - "../../../api/video:video_rtp_headers", - 
"../../../api/video_codecs:video_codecs_api", - "../../../modules/video_coding:video_codec_interface", - "../../../rtc_base:criticalsection", - "../../../rtc_base:logging", - "../../../rtc_base/synchronization:mutex", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] - } - - rtc_library("quality_analyzing_video_encoder") { - visibility = [ "*" ] - testonly = true - sources = [ - "analyzer/video/quality_analyzing_video_encoder.cc", - "analyzer/video/quality_analyzing_video_encoder.h", - ] - deps = [ - ":encoded_image_data_injector_api", - "../../../api:video_quality_analyzer_api", - "../../../api/test/pclf:media_configuration", - "../../../api/video:encoded_image", - "../../../api/video:video_frame", - "../../../api/video:video_rtp_headers", - "../../../api/video_codecs:video_codecs_api", - "../../../modules/video_coding:video_codec_interface", - "../../../modules/video_coding/svc:scalability_mode_util", - "../../../rtc_base:criticalsection", - "../../../rtc_base:logging", - "../../../rtc_base/synchronization:mutex", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] - } - if (rtc_include_tests) { - rtc_library("video_dumping_test") { - testonly = true - sources = [ "analyzer/video/video_dumping_test.cc" ] - deps = [ - ":video_dumping", - "../..:fileutils", - "../..:test_support", - "../..:video_test_support", - "../../../api:scoped_refptr", - "../../../api/video:video_frame", - "../../../rtc_base:random", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] - } - - rtc_library("analyzing_video_sinks_helper") { - testonly = true - sources = [ - "analyzer/video/analyzing_video_sinks_helper.cc", - "analyzer/video/analyzing_video_sinks_helper.h", - ] - deps = [ - "../../../api/test/pclf:media_configuration", - "../../../api/test/video:video_frame_writer", - "../../../rtc_base:macromagic", - "../../../rtc_base/synchronization:mutex", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] - } - - rtc_library("analyzing_video_sink") { - testonly = true - sources = [ - "analyzer/video/analyzing_video_sink.cc", - "analyzer/video/analyzing_video_sink.h", - ] - deps = [ - ":analyzing_video_sinks_helper", - ":simulcast_dummy_buffer_helper", - ":video_dumping", - "../..:fixed_fps_video_frame_writer_adapter", - "../..:test_renderer", - "../../../api:video_quality_analyzer_api", - "../../../api/numerics", - "../../../api/test/pclf:media_configuration", - "../../../api/test/video:video_frame_writer", - "../../../api/units:timestamp", - "../../../api/video:video_frame", - "../../../rtc_base:checks", - "../../../rtc_base:logging", - "../../../rtc_base:macromagic", - "../../../rtc_base/synchronization:mutex", - "../../../system_wrappers", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/memory:memory", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] - } - - rtc_library("video_quality_analyzer_injection_helper") { - visibility = [ "*" ] - testonly = true - sources = [ - "analyzer/video/video_quality_analyzer_injection_helper.cc", - "analyzer/video/video_quality_analyzer_injection_helper.h", - ] - deps = [ - ":analyzing_video_sink", - ":analyzing_video_sinks_helper", - ":encoded_image_data_injector_api", - ":quality_analyzing_video_decoder", - ":quality_analyzing_video_encoder", - ":simulcast_dummy_buffer_helper", - ":video_dumping", - "../..:fixed_fps_video_frame_writer_adapter", - "../..:test_renderer", 
- "../../../api:array_view", - "../../../api:stats_observer_interface", - "../../../api:video_quality_analyzer_api", - "../../../api/test/pclf:media_configuration", - "../../../api/video:video_frame", - "../../../api/video:video_rtp_headers", - "../../../api/video_codecs:video_codecs_api", - "../../../rtc_base:checks", - "../../../rtc_base:criticalsection", - "../../../rtc_base:logging", - "../../../rtc_base:stringutils", - "../../../rtc_base/synchronization:mutex", - "../../../system_wrappers", - "../../../test:video_test_common", - "../../../test:video_test_support", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings", - ] - } - rtc_library("echo_emulation") { - visibility = [ "*" ] testonly = true sources = [ "echo/echo_emulation.cc", @@ -304,7 +55,6 @@ if (!build_with_chromium) { } rtc_library("test_peer") { - visibility = [ "*" ] testonly = true sources = [ "test_peer.cc", @@ -335,7 +85,6 @@ if (!build_with_chromium) { } rtc_library("test_peer_factory") { - visibility = [ "*" ] testonly = true sources = [ "test_peer_factory.cc", @@ -343,9 +92,7 @@ if (!build_with_chromium) { ] deps = [ ":echo_emulation", - ":quality_analyzing_video_encoder", ":test_peer", - ":video_quality_analyzer_injection_helper", "../..:copy_to_file_audio_capturer", "../../../api:create_time_controller", "../../../api:time_controller", @@ -364,6 +111,8 @@ if (!build_with_chromium) { "../../../p2p:rtc_p2p", "../../../rtc_base:rtc_task_queue", "../../../rtc_base:threading", + "analyzer/video:quality_analyzing_video_encoder", + "analyzer/video:video_quality_analyzer_injection_helper", ] absl_deps = [ "//third_party/abseil-cpp/absl/memory", @@ -372,7 +121,6 @@ if (!build_with_chromium) { } rtc_library("media_helper") { - visibility = [ "*" ] testonly = true sources = [ "media/media_helper.cc", @@ -381,7 +129,6 @@ if (!build_with_chromium) { ] deps = [ ":test_peer", - ":video_quality_analyzer_injection_helper", "../..:fileutils", "../..:platform_video_capturer", "../..:video_test_common", @@ -393,20 +140,11 @@ if (!build_with_chromium) { "../../../api/video:video_frame", "../../../pc:session_description", "../../../pc:video_track_source", + "analyzer/video:video_quality_analyzer_injection_helper", ] absl_deps = [ "//third_party/abseil-cpp/absl/types:variant" ] } - rtc_library("peer_configurer") { - visibility = [ "*" ] - testonly = true - sources = [ "peer_configurer.h" ] - deps = [ - ":peer_params_preprocessor", - "../../../api/test/pclf:peer_configurer", - ] - } - rtc_library("peer_params_preprocessor") { visibility = [ "*" ] testonly = true @@ -428,7 +166,6 @@ if (!build_with_chromium) { } rtc_library("test_activities_executor") { - visibility = [ "*" ] testonly = true sources = [ "test_activities_executor.cc", @@ -453,7 +190,6 @@ if (!build_with_chromium) { } rtc_library("peerconnection_quality_test") { - visibility = [ "*" ] testonly = true sources = [ @@ -464,19 +200,14 @@ if (!build_with_chromium) { ":analyzer_helper", ":cross_media_metrics_reporter", ":default_audio_quality_analyzer", - ":default_video_quality_analyzer", ":media_helper", ":metric_metadata_keys", ":peer_params_preprocessor", ":sdp_changer", - ":single_process_encoded_image_data_injector", ":stats_poller", ":test_activities_executor", ":test_peer", ":test_peer_factory", - ":video_frame_tracking_id_injector", - ":video_quality_analyzer_injection_helper", - ":video_quality_metrics_reporter", "../..:field_trial", "../..:fileutils", "../..:perf_test", @@ -509,73 +240,15 @@ if (!build_with_chromium) 
{ "../../../rtc_base/synchronization:mutex", "../../../system_wrappers", "../../../system_wrappers:field_trial", + "analyzer/video:default_video_quality_analyzer", + "analyzer/video:single_process_encoded_image_data_injector", + "analyzer/video:video_frame_tracking_id_injector", + "analyzer/video:video_quality_analyzer_injection_helper", + "analyzer/video:video_quality_metrics_reporter", ] absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } - rtc_library("single_process_encoded_image_data_injector_unittest") { - testonly = true - sources = [ - "analyzer/video/single_process_encoded_image_data_injector_unittest.cc", - ] - deps = [ - ":single_process_encoded_image_data_injector", - "../../../api/video:encoded_image", - "../../../rtc_base:buffer", - "../../../test:test_support", - ] - } - - rtc_library("video_frame_tracking_id_injector_unittest") { - testonly = true - sources = - [ "analyzer/video/video_frame_tracking_id_injector_unittest.cc" ] - deps = [ - ":video_frame_tracking_id_injector", - "../../../api/video:encoded_image", - "../../../rtc_base:buffer", - "../../../test:test_support", - ] - } - - rtc_library("analyzing_video_sinks_helper_test") { - testonly = true - sources = [ "analyzer/video/analyzing_video_sinks_helper_test.cc" ] - deps = [ - ":analyzing_video_sinks_helper", - "../..:test_support", - "../../../api/test/pclf:media_configuration", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] - } - - rtc_library("analyzing_video_sink_test") { - testonly = true - sources = [ "analyzer/video/analyzing_video_sink_test.cc" ] - deps = [ - ":analyzing_video_sink", - ":example_video_quality_analyzer", - "../..:fileutils", - "../..:test_support", - "../..:video_test_support", - "../../../api:create_frame_generator", - "../../../api:frame_generator_api", - "../../../api:scoped_refptr", - "../../../api/test/pclf:media_configuration", - "../../../api/units:time_delta", - "../../../api/units:timestamp", - "../../../api/video:video_frame", - "../../../common_video", - "../../../rtc_base:timeutils", - "../../../system_wrappers", - "../../time_controller", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] - } - peer_connection_e2e_smoke_test_resources = [ "../../../resources/pc_quality_smoke_test_alice_source.wav", "../../../resources/pc_quality_smoke_test_bob_source.wav", @@ -594,8 +267,6 @@ if (!build_with_chromium) { sources = [ "peer_connection_e2e_smoke_test.cc" ] deps = [ ":default_audio_quality_analyzer", - ":default_video_quality_analyzer", - ":default_video_quality_analyzer_shared", ":network_quality_metrics_reporter", ":stats_based_network_quality_metrics_reporter", "../../../api:callfactory_api", @@ -630,6 +301,8 @@ if (!build_with_chromium) { "../../../test:field_trial", "../../../test:fileutils", "../../../test:test_support", + "analyzer/video:default_video_quality_analyzer", + "analyzer/video:default_video_quality_analyzer_shared", ] data = peer_connection_e2e_smoke_test_resources if (is_mac || is_ios) { @@ -661,6 +334,32 @@ if (!build_with_chromium) { ] } + rtc_library("stats_based_network_quality_metrics_reporter_test") { + testonly = true + sources = [ "stats_based_network_quality_metrics_reporter_test.cc" ] + deps = [ + ":metric_metadata_keys", + ":peerconnection_quality_test", + ":stats_based_network_quality_metrics_reporter", + "../..:test_support", + "../../../api:array_view", + "../../../api:create_network_emulation_manager", + 
"../../../api:create_peer_connection_quality_test_frame_generator", + "../../../api:network_emulation_manager_api", + "../../../api:peer_connection_quality_test_fixture_api", + "../../../api/test/metrics:metrics_logger", + "../../../api/test/metrics:stdout_metrics_exporter", + "../../../api/test/pclf:media_configuration", + "../../../api/test/pclf:media_quality_test_params", + "../../../api/test/pclf:peer_configurer", + "../../../api/units:time_delta", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] + } + rtc_library("peer_connection_quality_test_test") { testonly = true sources = [ "peer_connection_quality_test_test.cc" ] @@ -675,20 +374,19 @@ if (!build_with_chromium) { "../../../api/test/metrics:global_metrics_logger_and_exporter", "../../../api/test/pclf:media_configuration", "../../../api/test/pclf:media_quality_test_params", + "../../../api/test/pclf:peer_configurer", "../../../api/units:time_delta", "../../../rtc_base:timeutils", ] } rtc_library("stats_provider") { - visibility = [ "*" ] testonly = true sources = [ "stats_provider.h" ] deps = [ "../../../api:rtc_stats_api" ] } rtc_library("stats_poller") { - visibility = [ "*" ] testonly = true sources = [ "stats_poller.cc", @@ -715,103 +413,9 @@ if (!build_with_chromium) { "../../../api:rtc_stats_api", ] } - - rtc_library("default_video_quality_analyzer_test") { - testonly = true - sources = [ "analyzer/video/default_video_quality_analyzer_test.cc" ] - deps = [ - ":default_video_quality_analyzer", - ":default_video_quality_analyzer_shared", - "../..:test_support", - "../../../api:create_frame_generator", - "../../../api:rtp_packet_info", - "../../../api/test/metrics:global_metrics_logger_and_exporter", - "../../../api/video:encoded_image", - "../../../api/video:video_frame", - "../../../common_video", - "../../../modules/rtp_rtcp:rtp_rtcp_format", - "../../../rtc_base:stringutils", - "../../../rtc_tools:video_quality_analysis", - "../../../system_wrappers", - ] - } - - rtc_library("default_video_quality_analyzer_metric_names_test") { - testonly = true - sources = [ - "analyzer/video/default_video_quality_analyzer_metric_names_test.cc", - ] - deps = [ - ":default_video_quality_analyzer", - ":default_video_quality_analyzer_shared", - "../..:test_support", - "../../../api:create_frame_generator", - "../../../api:rtp_packet_info", - "../../../api/test/metrics:metric", - "../../../api/test/metrics:metrics_logger", - "../../../api/test/metrics:stdout_metrics_exporter", - "../../../api/video:encoded_image", - "../../../api/video:video_frame", - "../../../common_video", - "../../../rtc_base:stringutils", - "../../../rtc_tools:video_quality_analysis", - "../../../system_wrappers", - ] - } - - rtc_library("default_video_quality_analyzer_frames_comparator_test") { - testonly = true - sources = [ "analyzer/video/default_video_quality_analyzer_frames_comparator_test.cc" ] - deps = [ - ":default_video_quality_analyzer_internal", - ":default_video_quality_analyzer_shared", - "../..:test_support", - "../../../api:create_frame_generator", - "../../../api/units:timestamp", - "../../../rtc_base:stringutils", - "../../../system_wrappers", - ] - } - - rtc_library("names_collection_test") { - testonly = true - sources = [ "analyzer/video/names_collection_test.cc" ] - deps = [ - ":default_video_quality_analyzer_internal", - "../..:test_support", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings:strings", - "//third_party/abseil-cpp/absl/types:optional", - ] - } - - 
rtc_library("multi_reader_queue_test") { - testonly = true - sources = [ "analyzer/video/multi_reader_queue_test.cc" ] - deps = [ - ":multi_reader_queue", - "../../../test:test_support", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] - } - - rtc_library("default_video_quality_analyzer_stream_state_test") { - testonly = true - sources = [ - "analyzer/video/default_video_quality_analyzer_stream_state_test.cc", - ] - deps = [ - ":default_video_quality_analyzer_internal", - "../../../api/units:timestamp", - "../../../test:test_support", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] - } } rtc_library("analyzer_helper") { - visibility = [ "*" ] sources = [ "analyzer_helper.cc", "analyzer_helper.h", @@ -828,7 +432,6 @@ if (!build_with_chromium) { } rtc_library("default_audio_quality_analyzer") { - visibility = [ "*" ] testonly = true sources = [ "analyzer/audio/default_audio_quality_analyzer.cc", @@ -856,168 +459,7 @@ if (!build_with_chromium) { absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } - rtc_library("example_video_quality_analyzer") { - visibility = [ "*" ] - testonly = true - sources = [ - "analyzer/video/example_video_quality_analyzer.cc", - "analyzer/video/example_video_quality_analyzer.h", - ] - - deps = [ - "../../../api:array_view", - "../../../api:video_quality_analyzer_api", - "../../../api/video:encoded_image", - "../../../api/video:video_frame", - "../../../api/video:video_rtp_headers", - "../../../rtc_base:criticalsection", - "../../../rtc_base:logging", - "../../../rtc_base/synchronization:mutex", - ] - } - - rtc_library("video_quality_metrics_reporter") { - visibility = [ "*" ] - - testonly = true - sources = [ - "analyzer/video/video_quality_metrics_reporter.cc", - "analyzer/video/video_quality_metrics_reporter.h", - ] - deps = [ - ":metric_metadata_keys", - "../..:perf_test", - "../../../api:peer_connection_quality_test_fixture_api", - "../../../api:rtc_stats_api", - "../../../api:track_id_stream_info_map", - "../../../api/numerics", - "../../../api/test/metrics:metric", - "../../../api/test/metrics:metrics_logger", - "../../../api/units:data_rate", - "../../../api/units:data_size", - "../../../api/units:time_delta", - "../../../api/units:timestamp", - "../../../rtc_base:checks", - "../../../rtc_base:criticalsection", - "../../../rtc_base:rtc_numerics", - "../../../rtc_base/synchronization:mutex", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] - } - - rtc_library("metric_metadata_keys") { - visibility = [ "*" ] - testonly = true - sources = [ "metric_metadata_keys.h" ] - } - - rtc_library("default_video_quality_analyzer") { - visibility = [ "*" ] - - testonly = true - sources = [ - "analyzer/video/default_video_quality_analyzer.cc", - "analyzer/video/default_video_quality_analyzer.h", - ] - - deps = [ - ":default_video_quality_analyzer_internal", - ":default_video_quality_analyzer_shared", - ":metric_metadata_keys", - "../../../api:array_view", - "../../../api:video_quality_analyzer_api", - "../../../api/numerics", - "../../../api/test/metrics:metric", - "../../../api/test/metrics:metrics_logger", - "../../../api/units:data_size", - "../../../api/units:time_delta", - "../../../api/units:timestamp", - "../../../api/video:encoded_image", - "../../../api/video:video_frame", - "../../../rtc_base:checks", - "../../../rtc_base:logging", - "../../../rtc_base:macromagic", - "../../../rtc_base:stringutils", - "../../../rtc_base/synchronization:mutex", - "../../../system_wrappers", - ] - absl_deps = [ 
"//third_party/abseil-cpp/absl/types:optional" ] - } - - # This target contains implementation details of DefaultVideoQualityAnalyzer, - # so headers exported by it shouldn't be used in other places. - rtc_library("default_video_quality_analyzer_internal") { - visibility = [ - ":default_video_quality_analyzer", - ":default_video_quality_analyzer_frames_comparator_test", - ":default_video_quality_analyzer_stream_state_test", - ":names_collection_test", - ] - - testonly = true - sources = [ - "analyzer/video/default_video_quality_analyzer_cpu_measurer.cc", - "analyzer/video/default_video_quality_analyzer_cpu_measurer.h", - "analyzer/video/default_video_quality_analyzer_frame_in_flight.cc", - "analyzer/video/default_video_quality_analyzer_frame_in_flight.h", - "analyzer/video/default_video_quality_analyzer_frames_comparator.cc", - "analyzer/video/default_video_quality_analyzer_frames_comparator.h", - "analyzer/video/default_video_quality_analyzer_internal_shared_objects.cc", - "analyzer/video/default_video_quality_analyzer_internal_shared_objects.h", - "analyzer/video/default_video_quality_analyzer_stream_state.cc", - "analyzer/video/default_video_quality_analyzer_stream_state.h", - "analyzer/video/names_collection.cc", - "analyzer/video/names_collection.h", - ] - - deps = [ - ":default_video_quality_analyzer_shared", - ":metric_metadata_keys", - ":multi_reader_queue", - "../../../api:array_view", - "../../../api:scoped_refptr", - "../../../api/numerics:numerics", - "../../../api/units:data_size", - "../../../api/units:timestamp", - "../../../api/video:video_frame", - "../../../api/video:video_frame_type", - "../../../common_video", - "../../../rtc_base:checks", - "../../../rtc_base:platform_thread", - "../../../rtc_base:rtc_base_tests_utils", - "../../../rtc_base:rtc_event", - "../../../rtc_base:stringutils", - "../../../rtc_base:timeutils", - "../../../rtc_base/synchronization:mutex", - "../../../rtc_tools:video_quality_analysis", - "../../../system_wrappers:system_wrappers", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings:strings", - "//third_party/abseil-cpp/absl/types:optional", - ] - } - - rtc_library("default_video_quality_analyzer_shared") { - visibility = [ "*" ] - - testonly = true - sources = [ - "analyzer/video/default_video_quality_analyzer_shared_objects.cc", - "analyzer/video/default_video_quality_analyzer_shared_objects.h", - ] - - deps = [ - "../../../api/numerics:numerics", - "../../../api/units:timestamp", - "../../../rtc_base:checks", - "../../../rtc_base:stringutils", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] - } - rtc_library("network_quality_metrics_reporter") { - visibility = [ "*" ] testonly = true sources = [ "network_quality_metrics_reporter.cc", @@ -1042,7 +484,6 @@ if (!build_with_chromium) { } rtc_library("stats_based_network_quality_metrics_reporter") { - visibility = [ "*" ] testonly = true sources = [ "stats_based_network_quality_metrics_reporter.cc", @@ -1056,6 +497,7 @@ if (!build_with_chromium) { "../../../api:peer_connection_quality_test_fixture_api", "../../../api:rtc_stats_api", "../../../api:scoped_refptr", + "../../../api:sequence_checker", "../../../api/numerics", "../../../api/test/metrics:metric", "../../../api/test/metrics:metrics_logger", @@ -1069,13 +511,13 @@ if (!build_with_chromium) { "../../../rtc_base:rtc_event", "../../../rtc_base:stringutils", "../../../rtc_base/synchronization:mutex", + "../../../rtc_base/system:no_unique_address", "../../../system_wrappers:field_trial", ] absl_deps = [ 
"//third_party/abseil-cpp/absl/strings" ] } rtc_library("cross_media_metrics_reporter") { - visibility = [ "*" ] testonly = true sources = [ "cross_media_metrics_reporter.cc", @@ -1106,7 +548,6 @@ if (!build_with_chromium) { } rtc_library("sdp_changer") { - visibility = [ "*" ] testonly = true sources = [ "sdp/sdp_changer.cc", @@ -1130,12 +571,4 @@ if (!build_with_chromium) { "//third_party/abseil-cpp/absl/types:optional", ] } - - rtc_library("multi_reader_queue") { - visibility = [ "*" ] - testonly = true - sources = [ "analyzer/video/multi_reader_queue.h" ] - deps = [ "../../../rtc_base:checks" ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] - } } diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.cc b/third_party/libwebrtc/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.cc index 189eaca14a02..98d0c533c264 100644 --- a/third_party/libwebrtc/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.cc +++ b/third_party/libwebrtc/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.cc @@ -131,10 +131,12 @@ void DefaultAudioQualityAnalyzer::Stop() { for (auto& item : streams_stats_) { const TrackIdStreamInfoMap::StreamInfo& stream_info = stream_info_[item.first]; + // TODO(bugs.webrtc.org/14757): Remove kExperimentalTestNameMetadataKey. std::map metric_metadata{ {MetricMetadataKey::kAudioStreamMetadataKey, item.first}, {MetricMetadataKey::kPeerMetadataKey, stream_info.receiver_peer}, - {MetricMetadataKey::kReceiverMetadataKey, stream_info.receiver_peer}}; + {MetricMetadataKey::kReceiverMetadataKey, stream_info.receiver_peer}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, test_case_name_}}; metrics_logger_->LogMetric("expand_rate", GetTestCaseName(item.first), item.second.expand_rate, Unit::kUnitless, diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/BUILD.gn b/third_party/libwebrtc/test/pc/e2e/analyzer/video/BUILD.gn new file mode 100644 index 000000000000..cbb4c078f397 --- /dev/null +++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/BUILD.gn @@ -0,0 +1,573 @@ +# Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. 
+ +import("../../../../../webrtc.gni") + +if (!build_with_chromium) { + group("video_analyzer") { + testonly = true + + deps = [ + ":analyzing_video_sinks_helper", + ":default_video_quality_analyzer_internal", + ":encoded_image_data_injector_api", + ":example_video_quality_analyzer", + ":multi_reader_queue", + ":quality_analyzing_video_decoder", + ":quality_analyzing_video_encoder", + ":simulcast_dummy_buffer_helper", + ":single_process_encoded_image_data_injector", + ":video_dumping", + ":video_frame_tracking_id_injector", + ":video_quality_metrics_reporter", + ] + if (rtc_include_tests) { + deps += [ + ":analyzing_video_sink", + ":video_quality_analyzer_injection_helper", + ] + } + } + + if (rtc_include_tests) { + group("video_analyzer_unittests") { + testonly = true + + deps = [ + ":analyzing_video_sink_test", + ":analyzing_video_sinks_helper_test", + ":default_video_quality_analyzer_frames_comparator_test", + ":default_video_quality_analyzer_metric_names_test", + ":default_video_quality_analyzer_stream_state_test", + ":default_video_quality_analyzer_test", + ":multi_reader_queue_test", + ":names_collection_test", + ":simulcast_dummy_buffer_helper_test", + ":single_process_encoded_image_data_injector_unittest", + ":video_dumping_test", + ":video_frame_tracking_id_injector_unittest", + ] + } + } +} + +rtc_library("video_dumping") { + testonly = true + sources = [ + "video_dumping.cc", + "video_dumping.h", + ] + deps = [ + "../../../..:video_test_support", + "../../../../../api/test/video:video_frame_writer", + "../../../../../api/video:video_frame", + "../../../../../rtc_base:logging", + "../../../../../system_wrappers", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] +} + +rtc_library("encoded_image_data_injector_api") { + testonly = true + sources = [ "encoded_image_data_injector.h" ] + + deps = [ "../../../../../api/video:encoded_image" ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] +} + +rtc_library("single_process_encoded_image_data_injector") { + testonly = true + sources = [ + "single_process_encoded_image_data_injector.cc", + "single_process_encoded_image_data_injector.h", + ] + + deps = [ + ":encoded_image_data_injector_api", + "../../../../../api/video:encoded_image", + "../../../../../rtc_base:checks", + "../../../../../rtc_base/synchronization:mutex", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/memory" ] +} + +rtc_library("video_frame_tracking_id_injector") { + testonly = true + sources = [ + "video_frame_tracking_id_injector.cc", + "video_frame_tracking_id_injector.h", + ] + + deps = [ + ":encoded_image_data_injector_api", + "../../../../../api/video:encoded_image", + "../../../../../rtc_base:checks", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/memory" ] +} + +rtc_library("simulcast_dummy_buffer_helper") { + testonly = true + sources = [ + "simulcast_dummy_buffer_helper.cc", + "simulcast_dummy_buffer_helper.h", + ] + deps = [ "../../../../../api/video:video_frame" ] +} + +rtc_library("quality_analyzing_video_decoder") { + testonly = true + sources = [ + "quality_analyzing_video_decoder.cc", + "quality_analyzing_video_decoder.h", + ] + deps = [ + ":encoded_image_data_injector_api", + ":simulcast_dummy_buffer_helper", + "../../../../../api:video_quality_analyzer_api", + "../../../../../api/video:encoded_image", + "../../../../../api/video:video_frame", + "../../../../../api/video_codecs:video_codecs_api", + "../../../../../modules/video_coding:video_codec_interface", + "../../../../../rtc_base:logging", + 
"../../../../../rtc_base/synchronization:mutex", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +rtc_library("quality_analyzing_video_encoder") { + testonly = true + sources = [ + "quality_analyzing_video_encoder.cc", + "quality_analyzing_video_encoder.h", + ] + deps = [ + ":encoded_image_data_injector_api", + "../../../../../api:video_quality_analyzer_api", + "../../../../../api/test/pclf:media_configuration", + "../../../../../api/video:video_frame", + "../../../../../api/video_codecs:video_codecs_api", + "../../../../../modules/video_coding:video_codec_interface", + "../../../../../modules/video_coding/svc:scalability_mode_util", + "../../../../../rtc_base:logging", + "../../../../../rtc_base/synchronization:mutex", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] +} + +rtc_library("analyzing_video_sinks_helper") { + testonly = true + sources = [ + "analyzing_video_sinks_helper.cc", + "analyzing_video_sinks_helper.h", + ] + deps = [ + "../../../../../api/test/pclf:media_configuration", + "../../../../../api/test/video:video_frame_writer", + "../../../../../rtc_base:macromagic", + "../../../../../rtc_base/synchronization:mutex", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +rtc_library("example_video_quality_analyzer") { + testonly = true + sources = [ + "example_video_quality_analyzer.cc", + "example_video_quality_analyzer.h", + ] + + deps = [ + "../../../../../api:array_view", + "../../../../../api:video_quality_analyzer_api", + "../../../../../api/video:encoded_image", + "../../../../../api/video:video_frame", + "../../../../../rtc_base:logging", + "../../../../../rtc_base/synchronization:mutex", + ] +} + +# This target contains implementation details of DefaultVideoQualityAnalyzer, +# so headers exported by it shouldn't be used in other places. 
+rtc_library("default_video_quality_analyzer_internal") { + visibility = [ + ":default_video_quality_analyzer", + ":default_video_quality_analyzer_frames_comparator_test", + ":default_video_quality_analyzer_stream_state_test", + ":names_collection_test", + ":video_analyzer", + ] + + testonly = true + sources = [ + "default_video_quality_analyzer_cpu_measurer.cc", + "default_video_quality_analyzer_cpu_measurer.h", + "default_video_quality_analyzer_frame_in_flight.cc", + "default_video_quality_analyzer_frame_in_flight.h", + "default_video_quality_analyzer_frames_comparator.cc", + "default_video_quality_analyzer_frames_comparator.h", + "default_video_quality_analyzer_internal_shared_objects.cc", + "default_video_quality_analyzer_internal_shared_objects.h", + "default_video_quality_analyzer_stream_state.cc", + "default_video_quality_analyzer_stream_state.h", + "names_collection.cc", + "names_collection.h", + ] + + deps = [ + ":default_video_quality_analyzer_shared", + ":multi_reader_queue", + "../..:metric_metadata_keys", + "../../../../../api:array_view", + "../../../../../api:scoped_refptr", + "../../../../../api/numerics", + "../../../../../api/units:data_size", + "../../../../../api/units:timestamp", + "../../../../../api/video:video_frame", + "../../../../../api/video:video_frame_type", + "../../../../../common_video", + "../../../../../rtc_base:checks", + "../../../../../rtc_base:platform_thread", + "../../../../../rtc_base:rtc_base_tests_utils", + "../../../../../rtc_base:rtc_event", + "../../../../../rtc_base:stringutils", + "../../../../../rtc_base:timeutils", + "../../../../../rtc_base/synchronization:mutex", + "../../../../../rtc_tools:video_quality_analysis", + "../../../../../system_wrappers", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings:strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +rtc_library("multi_reader_queue") { + testonly = true + sources = [ "multi_reader_queue.h" ] + deps = [ "../../../../../rtc_base:checks" ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] +} + +rtc_library("video_quality_metrics_reporter") { + testonly = true + sources = [ + "video_quality_metrics_reporter.cc", + "video_quality_metrics_reporter.h", + ] + deps = [ + "../..:metric_metadata_keys", + "../../../../../api:peer_connection_quality_test_fixture_api", + "../../../../../api:rtc_stats_api", + "../../../../../api:track_id_stream_info_map", + "../../../../../api/numerics", + "../../../../../api/test/metrics:metric", + "../../../../../api/test/metrics:metrics_logger", + "../../../../../api/units:data_rate", + "../../../../../api/units:data_size", + "../../../../../api/units:time_delta", + "../../../../../api/units:timestamp", + "../../../../../rtc_base:checks", + "../../../../../rtc_base/synchronization:mutex", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] +} + +rtc_library("default_video_quality_analyzer") { + testonly = true + sources = [ + "default_video_quality_analyzer.cc", + "default_video_quality_analyzer.h", + ] + + deps = [ + ":default_video_quality_analyzer_internal", + ":default_video_quality_analyzer_shared", + "../..:metric_metadata_keys", + "../../../../../api:array_view", + "../../../../../api:video_quality_analyzer_api", + "../../../../../api/numerics", + "../../../../../api/test/metrics:metric", + "../../../../../api/test/metrics:metrics_logger", + "../../../../../api/units:data_size", + "../../../../../api/units:time_delta", + "../../../../../api/units:timestamp", + "../../../../../api/video:encoded_image", + 
"../../../../../api/video:video_frame", + "../../../../../rtc_base:checks", + "../../../../../rtc_base:logging", + "../../../../../rtc_base:macromagic", + "../../../../../rtc_base:stringutils", + "../../../../../rtc_base/synchronization:mutex", + "../../../../../system_wrappers", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] +} + +rtc_library("default_video_quality_analyzer_shared") { + testonly = true + sources = [ + "default_video_quality_analyzer_shared_objects.cc", + "default_video_quality_analyzer_shared_objects.h", + ] + + deps = [ + "../../../../../api/numerics", + "../../../../../api/units:timestamp", + "../../../../../rtc_base:checks", + "../../../../../rtc_base:stringutils", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] +} + +rtc_library("analyzing_video_sink") { + testonly = true + sources = [ + "analyzing_video_sink.cc", + "analyzing_video_sink.h", + ] + deps = [ + ":analyzing_video_sinks_helper", + ":simulcast_dummy_buffer_helper", + ":video_dumping", + "../../../..:fixed_fps_video_frame_writer_adapter", + "../../../..:test_renderer", + "../../../../../api:video_quality_analyzer_api", + "../../../../../api/numerics", + "../../../../../api/test/pclf:media_configuration", + "../../../../../api/test/video:video_frame_writer", + "../../../../../api/units:timestamp", + "../../../../../api/video:video_frame", + "../../../../../rtc_base:checks", + "../../../../../rtc_base:logging", + "../../../../../rtc_base:macromagic", + "../../../../../rtc_base/synchronization:mutex", + "../../../../../system_wrappers", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/memory:memory", + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +rtc_library("video_quality_analyzer_injection_helper") { + testonly = true + sources = [ + "video_quality_analyzer_injection_helper.cc", + "video_quality_analyzer_injection_helper.h", + ] + deps = [ + ":analyzing_video_sink", + ":analyzing_video_sinks_helper", + ":encoded_image_data_injector_api", + ":quality_analyzing_video_decoder", + ":quality_analyzing_video_encoder", + ":simulcast_dummy_buffer_helper", + ":video_dumping", + "../../../..:fixed_fps_video_frame_writer_adapter", + "../../../..:test_renderer", + "../../../..:video_test_common", + "../../../..:video_test_support", + "../../../../../api:array_view", + "../../../../../api:stats_observer_interface", + "../../../../../api:video_quality_analyzer_api", + "../../../../../api/test/pclf:media_configuration", + "../../../../../api/video:video_frame", + "../../../../../api/video_codecs:video_codecs_api", + "../../../../../rtc_base:checks", + "../../../../../rtc_base:logging", + "../../../../../rtc_base:stringutils", + "../../../../../rtc_base/synchronization:mutex", + "../../../../../system_wrappers", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/memory", + "//third_party/abseil-cpp/absl/strings", + ] +} + +if (rtc_include_tests) { + rtc_library("simulcast_dummy_buffer_helper_test") { + testonly = true + sources = [ "simulcast_dummy_buffer_helper_test.cc" ] + deps = [ + ":simulcast_dummy_buffer_helper", + "../../../..:test_support", + "../../../../../api/video:video_frame", + "../../../../../rtc_base:random", + ] + } + + rtc_library("analyzing_video_sink_test") { + testonly = true + sources = [ "analyzing_video_sink_test.cc" ] + deps = [ + ":analyzing_video_sink", + ":example_video_quality_analyzer", + "../../../..:fileutils", + "../../../..:test_support", + "../../../..:video_test_support", + 
"../../../../../api:create_frame_generator", + "../../../../../api:frame_generator_api", + "../../../../../api:scoped_refptr", + "../../../../../api/test/pclf:media_configuration", + "../../../../../api/units:time_delta", + "../../../../../api/units:timestamp", + "../../../../../api/video:video_frame", + "../../../../../common_video", + "../../../../../rtc_base:timeutils", + "../../../../../system_wrappers", + "../../../../time_controller", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] + } + + rtc_library("analyzing_video_sinks_helper_test") { + testonly = true + sources = [ "analyzing_video_sinks_helper_test.cc" ] + deps = [ + ":analyzing_video_sinks_helper", + "../../../..:test_support", + "../../../../../api/test/pclf:media_configuration", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] + } + + rtc_library("default_video_quality_analyzer_frames_comparator_test") { + testonly = true + sources = [ "default_video_quality_analyzer_frames_comparator_test.cc" ] + deps = [ + ":default_video_quality_analyzer_internal", + ":default_video_quality_analyzer_shared", + "../../../..:test_support", + "../../../../../api:create_frame_generator", + "../../../../../api/units:timestamp", + "../../../../../rtc_base:stringutils", + "../../../../../system_wrappers", + ] + } + + rtc_library("names_collection_test") { + testonly = true + sources = [ "names_collection_test.cc" ] + deps = [ + ":default_video_quality_analyzer_internal", + "../../../..:test_support", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings:strings", + "//third_party/abseil-cpp/absl/types:optional", + ] + } + + rtc_library("multi_reader_queue_test") { + testonly = true + sources = [ "multi_reader_queue_test.cc" ] + deps = [ + ":multi_reader_queue", + "../../../..:test_support", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] + } + + rtc_library("default_video_quality_analyzer_stream_state_test") { + testonly = true + sources = [ "default_video_quality_analyzer_stream_state_test.cc" ] + deps = [ + ":default_video_quality_analyzer_internal", + "../../../..:test_support", + "../../../../../api/units:timestamp", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] + } + + rtc_library("default_video_quality_analyzer_test") { + testonly = true + sources = [ "default_video_quality_analyzer_test.cc" ] + deps = [ + ":default_video_quality_analyzer", + ":default_video_quality_analyzer_shared", + "../../../..:test_support", + "../../../../../api:create_frame_generator", + "../../../../../api:rtp_packet_info", + "../../../../../api/test/metrics:global_metrics_logger_and_exporter", + "../../../../../api/video:encoded_image", + "../../../../../api/video:video_frame", + "../../../../../common_video", + "../../../../../rtc_base:stringutils", + "../../../../../rtc_tools:video_quality_analysis", + "../../../../../system_wrappers", + ] + } + + rtc_library("default_video_quality_analyzer_metric_names_test") { + testonly = true + sources = [ "default_video_quality_analyzer_metric_names_test.cc" ] + deps = [ + ":default_video_quality_analyzer", + "../../../..:test_support", + "../../../../../api:create_frame_generator", + "../../../../../api:rtp_packet_info", + "../../../../../api/test/metrics:metric", + "../../../../../api/test/metrics:metrics_logger", + "../../../../../api/test/metrics:stdout_metrics_exporter", + "../../../../../api/video:encoded_image", + "../../../../../api/video:video_frame", + 
"../../../../../common_video", + "../../../../../rtc_tools:video_quality_analysis", + "../../../../../system_wrappers", + ] + } + + rtc_library("video_dumping_test") { + testonly = true + sources = [ "video_dumping_test.cc" ] + deps = [ + ":video_dumping", + "../../../..:fileutils", + "../../../..:test_support", + "../../../..:video_test_support", + "../../../../../api:scoped_refptr", + "../../../../../api/video:video_frame", + "../../../../../rtc_base:random", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] + } + + rtc_library("single_process_encoded_image_data_injector_unittest") { + testonly = true + sources = [ "single_process_encoded_image_data_injector_unittest.cc" ] + deps = [ + ":single_process_encoded_image_data_injector", + "../../../..:test_support", + "../../../../../api/video:encoded_image", + "../../../../../rtc_base:buffer", + ] + } + + rtc_library("video_frame_tracking_id_injector_unittest") { + testonly = true + sources = [ "video_frame_tracking_id_injector_unittest.cc" ] + deps = [ + ":video_frame_tracking_id_injector", + "../../../..:test_support", + "../../../../../api/video:encoded_image", + "../../../../../rtc_base:buffer", + ] + } +} diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/analyzing_video_sink_test.cc b/third_party/libwebrtc/test/pc/e2e/analyzer/video/analyzing_video_sink_test.cc index fe88bc0bd7de..6cd89551ea9e 100644 --- a/third_party/libwebrtc/test/pc/e2e/analyzer/video/analyzing_video_sink_test.cc +++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/analyzing_video_sink_test.cc @@ -157,13 +157,10 @@ TEST_F(AnalyzingVideoSinkTest, VideoFramesAreDumpedCorrectly) { EXPECT_THAT(analyzer.frames_rendered(), Eq(static_cast(1))); - test::Y4mFrameReaderImpl frame_reader( - test::JoinFilename(test_directory_, "alice_video_bob_640x360_30.y4m"), - /*width=*/640, - /*height=*/360); - ASSERT_TRUE(frame_reader.Init()); - EXPECT_THAT(frame_reader.NumberOfFrames(), Eq(1)); - rtc::scoped_refptr actual_frame = frame_reader.ReadFrame(); + auto frame_reader = test::CreateY4mFrameReader( + test::JoinFilename(test_directory_, "alice_video_bob_640x360_30.y4m")); + EXPECT_THAT(frame_reader->num_frames(), Eq(1)); + rtc::scoped_refptr actual_frame = frame_reader->PullFrame(); rtc::scoped_refptr expected_frame = frame.video_frame_buffer()->ToI420(); double psnr = I420PSNR(*expected_frame, *actual_frame); @@ -200,13 +197,10 @@ TEST_F(AnalyzingVideoSinkTest, EXPECT_THAT(analyzer.frames_rendered(), Eq(static_cast(1))); - test::Y4mFrameReaderImpl frame_reader( - test::JoinFilename(test_directory_, "alice_video_bob_320x240_30.y4m"), - /*width=*/320, - /*height=*/240); - ASSERT_TRUE(frame_reader.Init()); - EXPECT_THAT(frame_reader.NumberOfFrames(), Eq(1)); - rtc::scoped_refptr actual_frame = frame_reader.ReadFrame(); + auto frame_reader = test::CreateY4mFrameReader( + test::JoinFilename(test_directory_, "alice_video_bob_320x240_30.y4m")); + EXPECT_THAT(frame_reader->num_frames(), Eq(1)); + rtc::scoped_refptr actual_frame = frame_reader->PullFrame(); rtc::scoped_refptr expected_frame = frame.video_frame_buffer()->ToI420(); double psnr = I420PSNR(*expected_frame, *actual_frame); @@ -245,13 +239,10 @@ TEST_F(AnalyzingVideoSinkTest, EXPECT_THAT(analyzer.frames_rendered(), Eq(static_cast(1))); - test::Y4mFrameReaderImpl frame_reader( - test::JoinFilename(test_directory_, "alice_video_bob_320x240_30.y4m"), - /*width=*/320, - /*height=*/240); - ASSERT_TRUE(frame_reader.Init()); - EXPECT_THAT(frame_reader.NumberOfFrames(), Eq(1)); - rtc::scoped_refptr 
actual_frame = frame_reader.ReadFrame(); + auto frame_reader = test::CreateY4mFrameReader( + test::JoinFilename(test_directory_, "alice_video_bob_320x240_30.y4m")); + EXPECT_THAT(frame_reader->num_frames(), Eq(1)); + rtc::scoped_refptr actual_frame = frame_reader->PullFrame(); rtc::scoped_refptr expected_frame = frame.video_frame_buffer()->ToI420(); double psnr = I420PSNR(*expected_frame, *actual_frame); @@ -301,13 +292,10 @@ TEST_F(AnalyzingVideoSinkTest, EXPECT_THAT(analyzer.frames_rendered(), Eq(static_cast(2))); { - test::Y4mFrameReaderImpl frame_reader( - test::JoinFilename(test_directory_, "alice_video_bob_1280x720_30.y4m"), - /*width=*/1280, - /*height=*/720); - ASSERT_TRUE(frame_reader.Init()); - EXPECT_THAT(frame_reader.NumberOfFrames(), Eq(1)); - rtc::scoped_refptr actual_frame = frame_reader.ReadFrame(); + auto frame_reader = test::CreateY4mFrameReader( + test::JoinFilename(test_directory_, "alice_video_bob_1280x720_30.y4m")); + EXPECT_THAT(frame_reader->num_frames(), Eq(1)); + rtc::scoped_refptr actual_frame = frame_reader->PullFrame(); rtc::scoped_refptr expected_frame = frame_before.video_frame_buffer()->ToI420(); double psnr = I420PSNR(*expected_frame, *actual_frame); @@ -317,13 +305,10 @@ TEST_F(AnalyzingVideoSinkTest, EXPECT_DOUBLE_EQ(psnr, 48); } { - test::Y4mFrameReaderImpl frame_reader( - test::JoinFilename(test_directory_, "alice_video_bob_640x360_30.y4m"), - /*width=*/640, - /*height=*/360); - ASSERT_TRUE(frame_reader.Init()); - EXPECT_THAT(frame_reader.NumberOfFrames(), Eq(1)); - rtc::scoped_refptr actual_frame = frame_reader.ReadFrame(); + auto frame_reader = test::CreateY4mFrameReader( + test::JoinFilename(test_directory_, "alice_video_bob_640x360_30.y4m")); + EXPECT_THAT(frame_reader->num_frames(), Eq(1)); + rtc::scoped_refptr actual_frame = frame_reader->PullFrame(); rtc::scoped_refptr expected_frame = frame_after.video_frame_buffer()->ToI420(); double psnr = I420PSNR(*expected_frame, *actual_frame); @@ -374,21 +359,18 @@ TEST_F(AnalyzingVideoSinkTest, EXPECT_THAT(analyzer.frames_rendered(), Eq(static_cast(2))); { - test::Y4mFrameReaderImpl frame_reader( - test::JoinFilename(test_directory_, "alice_video_bob_640x360_30.y4m"), - /*width=*/640, - /*height=*/360); - ASSERT_TRUE(frame_reader.Init()); - EXPECT_THAT(frame_reader.NumberOfFrames(), Eq(2)); + auto frame_reader = test::CreateY4mFrameReader( + test::JoinFilename(test_directory_, "alice_video_bob_640x360_30.y4m")); + EXPECT_THAT(frame_reader->num_frames(), Eq(2)); // Read the first frame. - rtc::scoped_refptr actual_frame = frame_reader.ReadFrame(); + rtc::scoped_refptr actual_frame = frame_reader->PullFrame(); rtc::scoped_refptr expected_frame = frame_before.video_frame_buffer()->ToI420(); // Frames should be equal. EXPECT_DOUBLE_EQ(I420SSIM(*expected_frame, *actual_frame), 1.00); EXPECT_DOUBLE_EQ(I420PSNR(*expected_frame, *actual_frame), 48); // Read the second frame. - actual_frame = frame_reader.ReadFrame(); + actual_frame = frame_reader->PullFrame(); expected_frame = frame_after.video_frame_buffer()->ToI420(); // Frames should be equal. 
EXPECT_DOUBLE_EQ(I420SSIM(*expected_frame, *actual_frame), 1.00); @@ -426,14 +408,11 @@ TEST_F(AnalyzingVideoSinkTest, SmallDiviationsInAspectRationAreAllowed) { EXPECT_THAT(analyzer.frames_rendered(), Eq(static_cast(1))); { - test::Y4mFrameReaderImpl frame_reader( - test::JoinFilename(test_directory_, "alice_video_bob_480x270_30.y4m"), - /*width=*/480, - /*height=*/270); - ASSERT_TRUE(frame_reader.Init()); - EXPECT_THAT(frame_reader.NumberOfFrames(), Eq(1)); + auto frame_reader = test::CreateY4mFrameReader( + test::JoinFilename(test_directory_, "alice_video_bob_480x270_30.y4m")); + EXPECT_THAT(frame_reader->num_frames(), Eq(1)); // Read the first frame. - rtc::scoped_refptr actual_frame = frame_reader.ReadFrame(); + rtc::scoped_refptr actual_frame = frame_reader->PullFrame(); rtc::scoped_refptr expected_frame = frame.video_frame_buffer()->ToI420(); // Actual frame is upscaled version of the expected. But because rendered @@ -524,14 +503,11 @@ TEST_F(AnalyzingVideoSinkTest, EXPECT_THAT(analyzer.frames_rendered(), Eq(static_cast(2))); - test::Y4mFrameReaderImpl frame_reader( - test::JoinFilename(test_directory_, "alice_video_bob_320x240_10.y4m"), - /*width=*/320, - /*height=*/240); - ASSERT_TRUE(frame_reader.Init()); - EXPECT_THAT(frame_reader.NumberOfFrames(), Eq(11)); + auto frame_reader = test::CreateY4mFrameReader( + test::JoinFilename(test_directory_, "alice_video_bob_320x240_10.y4m")); + EXPECT_THAT(frame_reader->num_frames(), Eq(11)); for (int i = 0; i < 10; ++i) { - rtc::scoped_refptr actual_frame = frame_reader.ReadFrame(); + rtc::scoped_refptr actual_frame = frame_reader->PullFrame(); rtc::scoped_refptr expected_frame = frame1.video_frame_buffer()->ToI420(); double psnr = I420PSNR(*expected_frame, *actual_frame); @@ -540,7 +516,7 @@ TEST_F(AnalyzingVideoSinkTest, EXPECT_DOUBLE_EQ(ssim, 1.00); EXPECT_DOUBLE_EQ(psnr, 48); } - rtc::scoped_refptr actual_frame = frame_reader.ReadFrame(); + rtc::scoped_refptr actual_frame = frame_reader->PullFrame(); rtc::scoped_refptr expected_frame = frame2.video_frame_buffer()->ToI420(); double psnr = I420PSNR(*expected_frame, *actual_frame); diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer.cc b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer.cc index 54f1ac75b23d..15caa87ad4da 100644 --- a/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer.cc +++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer.cc @@ -347,9 +347,9 @@ void DefaultVideoQualityAnalyzer::OnFrameEncoded( used_encoder.last_frame_id = frame_id; used_encoder.switched_on_at = now; used_encoder.switched_from_at = now; - frame_in_flight.OnFrameEncoded(now, encoded_image._frameType, - DataSize::Bytes(encoded_image.size()), - stats.target_encode_bitrate, used_encoder); + frame_in_flight.OnFrameEncoded( + now, encoded_image._frameType, DataSize::Bytes(encoded_image.size()), + stats.target_encode_bitrate, stats.qp, used_encoder); if (options_.report_infra_metrics) { analyzer_stats_.on_frame_encoded_processing_time_ms.AddSample( @@ -936,9 +936,11 @@ void DefaultVideoQualityAnalyzer::ReportResults() { ReportResults(item.first, item.second, stream_frame_counters_.at(item.first)); } - metrics_logger_->LogSingleValueMetric("cpu_usage_%", test_label_, - GetCpuUsagePercent(), Unit::kUnitless, - ImprovementDirection::kSmallerIsBetter); + // TODO(bugs.webrtc.org/14757): Remove kExperimentalTestNameMetadataKey. 
+ metrics_logger_->LogSingleValueMetric( + "cpu_usage_%", test_label_, GetCpuUsagePercent(), Unit::kUnitless, + ImprovementDirection::kSmallerIsBetter, + {{MetricMetadataKey::kExperimentalTestNameMetadataKey, test_label_}}); LogFrameCounters("Global", frame_counters_); if (!unknown_sender_frame_counters_.empty()) { RTC_LOG(LS_INFO) << "Received frame counters with unknown frame id:"; @@ -1030,11 +1032,13 @@ void DefaultVideoQualityAnalyzer::ReportResults( const FrameCounters& frame_counters) { TimeDelta test_duration = Now() - start_time_; std::string test_case_name = GetTestCaseName(ToMetricName(key)); + // TODO(bugs.webrtc.org/14757): Remove kExperimentalTestNameMetadataKey. std::map metric_metadata{ {MetricMetadataKey::kPeerMetadataKey, peers_->name(key.sender)}, {MetricMetadataKey::kVideoStreamMetadataKey, streams_.name(key.stream)}, {MetricMetadataKey::kSenderMetadataKey, peers_->name(key.sender)}, - {MetricMetadataKey::kReceiverMetadataKey, peers_->name(key.receiver)}}; + {MetricMetadataKey::kReceiverMetadataKey, peers_->name(key.receiver)}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, test_label_}}; double sum_squared_interframe_delays_secs = 0; Timestamp video_start_time = Timestamp::PlusInfinity(); @@ -1132,6 +1136,9 @@ void DefaultVideoQualityAnalyzer::ReportResults( "target_encode_bitrate", test_case_name, stats.target_encode_bitrate / 1000, Unit::kKilobitsPerSecond, ImprovementDirection::kNeitherIsBetter, metric_metadata); + metrics_logger_->LogMetric("qp", test_case_name, stats.qp, Unit::kUnitless, + ImprovementDirection::kSmallerIsBetter, + metric_metadata); metrics_logger_->LogSingleValueMetric( "actual_encode_bitrate", test_case_name, static_cast(stats.total_encoded_images_payload) / diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.cc b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.cc index 6b24dab94609..24f829e08907 100644 --- a/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.cc +++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.cc @@ -88,11 +88,14 @@ void FrameInFlight::OnFrameEncoded(webrtc::Timestamp time, VideoFrameType frame_type, DataSize encoded_image_size, uint32_t target_encode_bitrate, + int qp, StreamCodecInfo used_encoder) { encoded_time_ = time; frame_type_ = frame_type; encoded_image_size_ = encoded_image_size; target_encode_bitrate_ += target_encode_bitrate; + qp_values_.AddSample(SamplesStatsCounter::StatsSample{ + .value = static_cast(qp), .time = time}); // Update used encoder info. 
If simulcast/SVC is used, this method can // be called multiple times, in such case we should preserve the value // of `used_encoder_.switched_on_at` from the first invocation as the @@ -182,6 +185,7 @@ FrameStats FrameInFlight::GetStatsForPeer(size_t peer) const { stats.encoded_frame_type = frame_type_; stats.encoded_image_size = encoded_image_size_; stats.used_encoder = used_encoder_; + stats.qp_values = qp_values_; absl::optional receiver_stats = MaybeGetValue(receiver_stats_, peer); diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.h b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.h index f1db78f7af8e..7ee910effeed 100644 --- a/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.h +++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.h @@ -17,6 +17,7 @@ #include #include "absl/types/optional.h" +#include "api/numerics/samples_stats_counter.h" #include "api/units/data_size.h" #include "api/units/timestamp.h" #include "api/video/video_frame.h" @@ -87,6 +88,7 @@ class FrameInFlight { VideoFrameType frame_type, DataSize encoded_image_size, uint32_t target_encode_bitrate, + int qp, StreamCodecInfo used_encoder); bool HasEncodedTime() const { return encoded_time_.IsFinite(); } @@ -152,6 +154,7 @@ class FrameInFlight { VideoFrameType frame_type_ = VideoFrameType::kEmptyFrame; DataSize encoded_image_size_ = DataSize::Bytes(0); uint32_t target_encode_bitrate_ = 0; + SamplesStatsCounter qp_values_; // Can be not set if frame was dropped by encoder. absl::optional used_encoder_ = absl::nullopt; // Map from the receiver peer's index to frame stats for that peer. 
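The FrameInFlight change above records one QP sample per EncodedImage, so a single captured frame that is encoded into several simulcast/SVC layers contributes several timed samples to the same counter. A minimal sketch of that accumulation pattern, using only the SamplesStatsCounter API that already appears in this patch; it is illustrative, not part of the patch, assumes it is compiled inside the libwebrtc tree, and the helper name and values are made up:

#include "api/numerics/samples_stats_counter.h"
#include "api/units/timestamp.h"

namespace {

// Illustrative only: mirrors how FrameInFlight::OnFrameEncoded above adds one
// QP sample per encoded image, so a frame encoded into multiple simulcast/SVC
// layers ends up with multiple timed samples in the same counter.
void AddQpSample(webrtc::SamplesStatsCounter& qp_values,
                 int qp,
                 webrtc::Timestamp encode_time) {
  qp_values.AddSample(webrtc::SamplesStatsCounter::StatsSample{
      .value = static_cast<double>(qp), .time = encode_time});
}

}  // namespace

Downstream, the frames comparator (next file in this patch) copies these timed samples via GetTimedSamples() into StreamStats::qp with per-frame metadata, which is what the new "qp" metric reported by DefaultVideoQualityAnalyzer is built from.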
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator.cc b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator.cc index e8c8d7ad1816..77418b7e5d43 100644 --- a/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator.cc +++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator.cc @@ -126,16 +126,6 @@ FrameComparison ValidateFrameComparison(FrameComparison comparison) { RTC_DCHECK(comparison.frame_stats.decoded_frame_height.has_value()) << "Dropped frame comparison has to have decoded_frame_height when " << "decode_end_time is set"; - } else { - RTC_DCHECK(!comparison.frame_stats.received_time.IsFinite()) - << "Dropped frame comparison can't have received_time when " - << "decode_end_time is not set and there were no decoder failures"; - RTC_DCHECK(!comparison.frame_stats.decode_start_time.IsFinite()) - << "Dropped frame comparison can't have decode_start_time when " - << "decode_end_time is not set and there were no decoder failures"; - RTC_DCHECK(!comparison.frame_stats.used_decoder.has_value()) - << "Dropped frame comparison can't have used_decoder when " - << "decode_end_time is not set and there were no decoder failures"; } RTC_DCHECK(!comparison.frame_stats.rendered_time.IsFinite()) << "Dropped frame comparison can't have rendered_time"; @@ -448,8 +438,7 @@ void DefaultVideoQualityAnalyzerFramesComparator::ProcessComparison( FrameDropPhase dropped_phase; if (frame_stats.decode_end_time.IsFinite()) { dropped_phase = FrameDropPhase::kAfterDecoder; - } else if (frame_stats.decode_start_time.IsFinite() && - frame_stats.decoder_failed) { + } else if (frame_stats.decode_start_time.IsFinite()) { dropped_phase = FrameDropPhase::kByDecoder; } else if (frame_stats.encoded_time.IsFinite()) { dropped_phase = FrameDropPhase::kTransport; @@ -470,6 +459,11 @@ void DefaultVideoQualityAnalyzerFramesComparator::ProcessComparison( frame_stats.encoded_image_size.bytes(); stats->target_encode_bitrate.AddSample(StatsSample( frame_stats.target_encode_bitrate, frame_stats.encoded_time, metadata)); + for (SamplesStatsCounter::StatsSample qp : + frame_stats.qp_values.GetTimedSamples()) { + qp.metadata = metadata; + stats->qp.AddSample(std::move(qp)); + } // Stats sliced on encoded frame type. 
if (frame_stats.encoded_frame_type == VideoFrameType::kVideoFrameKey) { diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator_test.cc b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator_test.cc index c58d7d96bbfc..2cfb0c3d7977 100644 --- a/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator_test.cc +++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator_test.cc @@ -108,6 +108,16 @@ FrameStats ShiftStatsOn(const FrameStats& stats, TimeDelta delta) { return frame_stats; } +SamplesStatsCounter StatsCounter( + const std::vector>& samples) { + SamplesStatsCounter counter; + for (const std::pair& sample : samples) { + counter.AddSample(SamplesStatsCounter::StatsSample{.value = sample.first, + .time = sample.second}); + } + return counter; +} + double GetFirstOrDie(const SamplesStatsCounter& counter) { EXPECT_FALSE(counter.IsEmpty()) << "Counter has to be not empty"; return counter.GetSamples()[0]; @@ -377,6 +387,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, expectEmpty(stats.time_between_freezes_ms); expectEmpty(stats.resolution_of_decoded_frame); expectEmpty(stats.target_encode_bitrate); + expectEmpty(stats.qp); expectEmpty(stats.recv_key_frame_size_bytes); expectEmpty(stats.recv_delta_frame_size_bytes); EXPECT_EQ(stats.total_encoded_images_payload, 0); @@ -436,6 +447,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, expectEmpty(stats.time_between_freezes_ms); expectEmpty(stats.resolution_of_decoded_frame); expectEmpty(stats.target_encode_bitrate); + expectEmpty(stats.qp); expectEmpty(stats.recv_key_frame_size_bytes); expectEmpty(stats.recv_delta_frame_size_bytes); EXPECT_EQ(stats.total_encoded_images_payload, 0); @@ -476,6 +488,8 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, frame_stats.encoded_frame_type = VideoFrameType::kVideoFrameKey; frame_stats.encoded_image_size = DataSize::Bytes(1000); frame_stats.target_encode_bitrate = 2000; + frame_stats.qp_values = StatsCounter( + /*samples=*/{{5, Timestamp::Seconds(1)}, {5, Timestamp::Seconds(2)}}); comparator.Start(/*max_threads_count=*/1); comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2, @@ -503,6 +517,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, expectEmpty(stats.time_between_freezes_ms); expectEmpty(stats.resolution_of_decoded_frame); EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.target_encode_bitrate), 2000.0); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.qp), 5.0); expectEmpty(stats.recv_key_frame_size_bytes); expectEmpty(stats.recv_delta_frame_size_bytes); EXPECT_EQ(stats.total_encoded_images_payload, 1000); @@ -544,6 +559,8 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, frame_stats.encoded_frame_type = VideoFrameType::kVideoFrameDelta; frame_stats.encoded_image_size = DataSize::Bytes(1000); frame_stats.target_encode_bitrate = 2000; + frame_stats.qp_values = StatsCounter( + /*samples=*/{{5, Timestamp::Seconds(1)}, {5, Timestamp::Seconds(2)}}); comparator.Start(/*max_threads_count=*/1); comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2, @@ -571,6 +588,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, expectEmpty(stats.time_between_freezes_ms); expectEmpty(stats.resolution_of_decoded_frame); EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.target_encode_bitrate), 2000.0); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.qp), 5.0); expectEmpty(stats.recv_key_frame_size_bytes); 
expectEmpty(stats.recv_delta_frame_size_bytes); EXPECT_EQ(stats.total_encoded_images_payload, 1000); @@ -612,6 +630,8 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, frame_stats.encoded_frame_type = VideoFrameType::kVideoFrameKey; frame_stats.encoded_image_size = DataSize::Bytes(1000); frame_stats.target_encode_bitrate = 2000; + frame_stats.qp_values = StatsCounter( + /*samples=*/{{5, Timestamp::Seconds(1)}, {5, Timestamp::Seconds(2)}}); // Frame pre decoded frame_stats.pre_decoded_frame_type = VideoFrameType::kVideoFrameKey; frame_stats.pre_decoded_image_size = DataSize::Bytes(500); @@ -644,6 +664,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, expectEmpty(stats.time_between_freezes_ms); expectEmpty(stats.resolution_of_decoded_frame); EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.target_encode_bitrate), 2000.0); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.qp), 5.0); EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.recv_key_frame_size_bytes), 500.0); expectEmpty(stats.recv_delta_frame_size_bytes); EXPECT_EQ(stats.total_encoded_images_payload, 1000); @@ -685,6 +706,8 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, frame_stats.encoded_frame_type = VideoFrameType::kVideoFrameKey; frame_stats.encoded_image_size = DataSize::Bytes(1000); frame_stats.target_encode_bitrate = 2000; + frame_stats.qp_values = StatsCounter( + /*samples=*/{{5, Timestamp::Seconds(1)}, {5, Timestamp::Seconds(2)}}); // Frame pre decoded frame_stats.pre_decoded_frame_type = VideoFrameType::kVideoFrameKey; frame_stats.pre_decoded_image_size = DataSize::Bytes(500); @@ -724,6 +747,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, expectEmpty(stats.time_between_freezes_ms); EXPECT_GE(GetFirstOrDie(stats.resolution_of_decoded_frame), 200 * 100.0); EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.target_encode_bitrate), 2000.0); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.qp), 5.0); EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.recv_key_frame_size_bytes), 500.0); expectEmpty(stats.recv_delta_frame_size_bytes); EXPECT_EQ(stats.total_encoded_images_payload, 1000); @@ -766,6 +790,8 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, frame_stats.encoded_frame_type = VideoFrameType::kVideoFrameKey; frame_stats.encoded_image_size = DataSize::Bytes(1000); frame_stats.target_encode_bitrate = 2000; + frame_stats.qp_values = StatsCounter( + /*samples=*/{{5, Timestamp::Seconds(1)}, {5, Timestamp::Seconds(2)}}); // Frame pre decoded frame_stats.pre_decoded_frame_type = VideoFrameType::kVideoFrameKey; frame_stats.pre_decoded_image_size = DataSize::Bytes(500); @@ -802,6 +828,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, expectEmpty(stats.time_between_freezes_ms); expectEmpty(stats.resolution_of_decoded_frame); EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.target_encode_bitrate), 2000.0); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.qp), 5.0); EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.recv_key_frame_size_bytes), 500.0); expectEmpty(stats.recv_delta_frame_size_bytes); EXPECT_EQ(stats.total_encoded_images_payload, 1000); @@ -864,6 +891,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, expectEmpty(stats.time_between_freezes_ms); expectEmpty(stats.resolution_of_decoded_frame); expectEmpty(stats.target_encode_bitrate); + expectEmpty(stats.qp); expectEmpty(stats.recv_key_frame_size_bytes); expectEmpty(stats.recv_delta_frame_size_bytes); EXPECT_EQ(stats.total_encoded_images_payload, 0); @@ -923,6 +951,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, expectEmpty(stats.time_between_freezes_ms); expectEmpty(stats.resolution_of_decoded_frame); 
expectEmpty(stats.target_encode_bitrate); + expectEmpty(stats.qp); expectEmpty(stats.recv_key_frame_size_bytes); expectEmpty(stats.recv_delta_frame_size_bytes); EXPECT_EQ(stats.total_encoded_images_payload, 0); @@ -963,6 +992,8 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, frame_stats.encoded_frame_type = VideoFrameType::kVideoFrameKey; frame_stats.encoded_image_size = DataSize::Bytes(1000); frame_stats.target_encode_bitrate = 2000; + frame_stats.qp_values = StatsCounter( + /*samples=*/{{5, Timestamp::Seconds(1)}, {5, Timestamp::Seconds(2)}}); comparator.Start(/*max_threads_count=*/1); comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2, @@ -990,6 +1021,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, expectEmpty(stats.time_between_freezes_ms); expectEmpty(stats.resolution_of_decoded_frame); EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.target_encode_bitrate), 2000.0); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.qp), 5.0); expectEmpty(stats.recv_key_frame_size_bytes); expectEmpty(stats.recv_delta_frame_size_bytes); EXPECT_EQ(stats.total_encoded_images_payload, 1000); @@ -1031,6 +1063,82 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, frame_stats.encoded_frame_type = VideoFrameType::kVideoFrameDelta; frame_stats.encoded_image_size = DataSize::Bytes(1000); frame_stats.target_encode_bitrate = 2000; + frame_stats.qp_values = StatsCounter( + /*samples=*/{{5, Timestamp::Seconds(1)}, {5, Timestamp::Seconds(2)}}); + + comparator.Start(/*max_threads_count=*/1); + comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2, + captured_time, captured_time); + comparator.AddComparison(stats_key, + /*captured=*/absl::nullopt, + /*rendered=*/absl::nullopt, + FrameComparisonType::kDroppedFrame, frame_stats); + comparator.Stop(/*last_rendered_frame_times=*/{}); + + EXPECT_EQ(comparator.stream_stats().size(), 1lu); + StreamStats stats = comparator.stream_stats().at(stats_key); + EXPECT_EQ(stats.stream_started_time, captured_time); + expectEmpty(stats.psnr); + expectEmpty(stats.ssim); + expectEmpty(stats.transport_time_ms); + expectEmpty(stats.total_delay_incl_transport_ms); + expectEmpty(stats.time_between_rendered_frames_ms); + expectEmpty(stats.encode_frame_rate); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.encode_time_ms), 10.0); + expectEmpty(stats.decode_time_ms); + expectEmpty(stats.receive_to_render_time_ms); + expectEmpty(stats.skipped_between_rendered); + expectEmpty(stats.freeze_time_ms); + expectEmpty(stats.time_between_freezes_ms); + expectEmpty(stats.resolution_of_decoded_frame); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.target_encode_bitrate), 2000.0); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.qp), 5.0); + expectEmpty(stats.recv_key_frame_size_bytes); + expectEmpty(stats.recv_delta_frame_size_bytes); + EXPECT_EQ(stats.total_encoded_images_payload, 1000); + EXPECT_EQ(stats.num_send_key_frames, 0); + EXPECT_EQ(stats.num_recv_key_frames, 0); + EXPECT_THAT(stats.dropped_by_phase, Eq(std::map{ + {FrameDropPhase::kBeforeEncoder, 0}, + {FrameDropPhase::kByEncoder, 0}, + {FrameDropPhase::kTransport, 1}, + {FrameDropPhase::kByDecoder, 0}, + {FrameDropPhase::kAfterDecoder, 0}})); + EXPECT_EQ(stats.encoders, + std::vector{*frame_stats.used_encoder}); + EXPECT_THAT(stats.decoders, IsEmpty()); +} + +TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, + PreDecodedDroppedKeyFrameAccountedInStats) { + DefaultVideoQualityAnalyzerCpuMeasurer cpu_measurer; + DefaultVideoQualityAnalyzerFramesComparator comparator( + Clock::GetRealTimeClock(), cpu_measurer, + 
DefaultVideoQualityAnalyzerOptions()); + + Timestamp captured_time = Clock::GetRealTimeClock()->CurrentTime(); + uint16_t frame_id = 1; + size_t stream = 0; + size_t sender = 0; + size_t receiver = 1; + InternalStatsKey stats_key(stream, sender, receiver); + + // Frame captured + FrameStats frame_stats(/*frame_id=*/1, captured_time); + // Frame pre encoded + frame_stats.pre_encode_time = captured_time + TimeDelta::Millis(10); + // Frame encoded + frame_stats.encoded_time = captured_time + TimeDelta::Millis(20); + frame_stats.used_encoder = + Vp8CodecForOneFrame(frame_id, frame_stats.encoded_time); + frame_stats.encoded_frame_type = VideoFrameType::kVideoFrameKey; + frame_stats.encoded_image_size = DataSize::Bytes(1000); + frame_stats.target_encode_bitrate = 2000; + // Frame pre decoded + frame_stats.pre_decoded_frame_type = VideoFrameType::kVideoFrameKey; + frame_stats.pre_decoded_image_size = DataSize::Bytes(500); + frame_stats.received_time = captured_time + TimeDelta::Millis(30); + frame_stats.decode_start_time = captured_time + TimeDelta::Millis(40); comparator.Start(/*max_threads_count=*/1); comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2, @@ -1061,22 +1169,19 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, expectEmpty(stats.recv_key_frame_size_bytes); expectEmpty(stats.recv_delta_frame_size_bytes); EXPECT_EQ(stats.total_encoded_images_payload, 1000); - EXPECT_EQ(stats.num_send_key_frames, 0); + EXPECT_EQ(stats.num_send_key_frames, 1); EXPECT_EQ(stats.num_recv_key_frames, 0); EXPECT_THAT(stats.dropped_by_phase, Eq(std::map{ {FrameDropPhase::kBeforeEncoder, 0}, {FrameDropPhase::kByEncoder, 0}, - {FrameDropPhase::kTransport, 1}, - {FrameDropPhase::kByDecoder, 0}, + {FrameDropPhase::kTransport, 0}, + {FrameDropPhase::kByDecoder, 1}, {FrameDropPhase::kAfterDecoder, 0}})); EXPECT_EQ(stats.encoders, std::vector{*frame_stats.used_encoder}); EXPECT_THAT(stats.decoders, IsEmpty()); } -// TODO(titovartem): add test that just pre decoded frame can't be received as -// dropped one because decoder always returns either decoded frame or error. 
- TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, DecodedDroppedKeyFrameAccountedInStats) { // We don't really drop frames after decoder, so it's a bit unclear what is @@ -1105,6 +1210,8 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, frame_stats.encoded_frame_type = VideoFrameType::kVideoFrameKey; frame_stats.encoded_image_size = DataSize::Bytes(1000); frame_stats.target_encode_bitrate = 2000; + frame_stats.qp_values = StatsCounter( + /*samples=*/{{5, Timestamp::Seconds(1)}, {5, Timestamp::Seconds(2)}}); // Frame pre decoded frame_stats.pre_decoded_frame_type = VideoFrameType::kVideoFrameKey; frame_stats.pre_decoded_image_size = DataSize::Bytes(500); @@ -1143,6 +1250,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, expectEmpty(stats.time_between_freezes_ms); expectEmpty(stats.resolution_of_decoded_frame); EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.target_encode_bitrate), 2000.0); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.qp), 5.0); expectEmpty(stats.recv_key_frame_size_bytes); expectEmpty(stats.recv_delta_frame_size_bytes); EXPECT_EQ(stats.total_encoded_images_payload, 1000); @@ -1185,6 +1293,8 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, frame_stats.encoded_frame_type = VideoFrameType::kVideoFrameKey; frame_stats.encoded_image_size = DataSize::Bytes(1000); frame_stats.target_encode_bitrate = 2000; + frame_stats.qp_values = StatsCounter( + /*samples=*/{{5, Timestamp::Seconds(1)}, {5, Timestamp::Seconds(2)}}); // Frame pre decoded frame_stats.pre_decoded_frame_type = VideoFrameType::kVideoFrameKey; frame_stats.pre_decoded_image_size = DataSize::Bytes(500); @@ -1221,6 +1331,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, expectEmpty(stats.time_between_freezes_ms); expectEmpty(stats.resolution_of_decoded_frame); EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.target_encode_bitrate), 2000.0); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.qp), 5.0); EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.recv_key_frame_size_bytes), 500.0); expectEmpty(stats.recv_delta_frame_size_bytes); EXPECT_EQ(stats.total_encoded_images_payload, 1000); @@ -1267,6 +1378,8 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, frame_stats.encoded_frame_type = VideoFrameType::kVideoFrameKey; frame_stats.encoded_image_size = DataSize::Bytes(1000); frame_stats.target_encode_bitrate = 2000; + frame_stats.qp_values = StatsCounter( + /*samples=*/{{5, Timestamp::Seconds(1)}, {5, Timestamp::Seconds(2)}}); // Frame pre decoded frame_stats.pre_decoded_frame_type = VideoFrameType::kVideoFrameKey; frame_stats.pre_decoded_image_size = DataSize::Bytes(500); @@ -1307,6 +1420,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, expectEmpty(stats.time_between_freezes_ms); EXPECT_GE(GetFirstOrDie(stats.resolution_of_decoded_frame), 200 * 100.0); EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.target_encode_bitrate), 2000.0); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.qp), 5.0); EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.recv_key_frame_size_bytes), 500.0); expectEmpty(stats.recv_delta_frame_size_bytes); EXPECT_EQ(stats.total_encoded_images_payload, 1000); @@ -1350,6 +1464,8 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, AllStatsHaveMetadataSet) { frame_stats.encoded_frame_type = VideoFrameType::kVideoFrameKey; frame_stats.encoded_image_size = DataSize::Bytes(1000); frame_stats.target_encode_bitrate = 2000; + frame_stats.qp_values = StatsCounter( + /*samples=*/{{5, Timestamp::Seconds(1)}, {5, Timestamp::Seconds(2)}}); // Frame pre decoded frame_stats.pre_decoded_frame_type = VideoFrameType::kVideoFrameKey; 
frame_stats.pre_decoded_image_size = DataSize::Bytes(500); @@ -1386,6 +1502,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, AllStatsHaveMetadataSet) { AssertFirstMetadataHasField(stats.resolution_of_decoded_frame, "frame_id", "1"); AssertFirstMetadataHasField(stats.target_encode_bitrate, "frame_id", "1"); + AssertFirstMetadataHasField(stats.qp, "frame_id", "1"); AssertFirstMetadataHasField(stats.recv_key_frame_size_bytes, "frame_id", "1"); expectEmpty(stats.recv_delta_frame_size_bytes); diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_internal_shared_objects.h b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_internal_shared_objects.h index 6429392f65a9..3e65e2b888db 100644 --- a/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_internal_shared_objects.h +++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_internal_shared_objects.h @@ -14,8 +14,10 @@ #include #include #include +#include #include "absl/types/optional.h" +#include "api/numerics/samples_stats_counter.h" #include "api/units/data_size.h" #include "api/units/timestamp.h" #include "api/video/video_frame.h" @@ -62,6 +64,10 @@ struct FrameStats { VideoFrameType pre_decoded_frame_type = VideoFrameType::kEmptyFrame; DataSize pre_decoded_image_size = DataSize::Bytes(0); uint32_t target_encode_bitrate = 0; + // There can be multiple qp values for single video frame when simulcast + // or SVC is used. In such case multiple EncodedImage's are created by encoder + // and each of it will have its own qp value. + SamplesStatsCounter qp_values; absl::optional decoded_frame_width = absl::nullopt; absl::optional decoded_frame_height = absl::nullopt; diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_metric_names_test.cc b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_metric_names_test.cc index 6b9761220235..106daac58490 100644 --- a/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_metric_names_test.cc +++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_metric_names_test.cc @@ -277,6 +277,11 @@ TEST(DefaultVideoQualityAnalyzerMetricNamesTest, MetricNamesForP2PAreCorrect) { .name = "target_encode_bitrate", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter}, + MetricValidationInfo{ + .test_case = "test_case/alice_video", + .name = "qp", + .unit = Unit::kUnitless, + .improvement_direction = ImprovementDirection::kSmallerIsBetter}, MetricValidationInfo{ .test_case = "test_case/alice_video", .name = "actual_encode_bitrate", @@ -446,6 +451,11 @@ TEST(DefaultVideoQualityAnalyzerMetricNamesTest, .name = "target_encode_bitrate", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter}, + MetricValidationInfo{ + .test_case = "test_case/alice_video_alice_bob", + .name = "qp", + .unit = Unit::kUnitless, + .improvement_direction = ImprovementDirection::kSmallerIsBetter}, MetricValidationInfo{ .test_case = "test_case/alice_video_alice_bob", .name = "actual_encode_bitrate", @@ -583,6 +593,11 @@ TEST(DefaultVideoQualityAnalyzerMetricNamesTest, .name = "target_encode_bitrate", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter}, + MetricValidationInfo{ + .test_case = "test_case/alice_video_alice_charlie", + .name = "qp", + .unit = 
Unit::kUnitless, + .improvement_direction = ImprovementDirection::kSmallerIsBetter}, MetricValidationInfo{ .test_case = "test_case/alice_video_alice_charlie", .name = "actual_encode_bitrate", @@ -656,10 +671,10 @@ TEST(DefaultVideoQualityAnalyzerMetricNamesTest, std::vector metrics = ToTestCases(metrics_logger.GetCollectedMetrics()); - EXPECT_THAT(metrics, SizeIs(55)); - EXPECT_THAT(metrics, Contains("test_case/alice_video_alice_bob").Times(27)); + EXPECT_THAT(metrics, SizeIs(57)); + EXPECT_THAT(metrics, Contains("test_case/alice_video_alice_bob").Times(28)); EXPECT_THAT(metrics, - Contains("test_case/alice_video_alice_charlie").Times(27)); + Contains("test_case/alice_video_alice_charlie").Times(28)); EXPECT_THAT(metrics, Contains("test_case").Times(1)); } diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.h b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.h index f419089a01aa..a71dad71c17e 100644 --- a/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.h +++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.h @@ -145,6 +145,7 @@ struct StreamStats { SamplesStatsCounter time_between_freezes_ms; SamplesStatsCounter resolution_of_decoded_frame; SamplesStatsCounter target_encode_bitrate; + SamplesStatsCounter qp; int64_t total_encoded_images_payload = 0; // Counters on which phase how many frames were dropped. diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_test.cc b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_test.cc index 7d39238f8a70..fc970e1ea2c2 100644 --- a/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_test.cc +++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_test.cc @@ -2123,6 +2123,45 @@ TEST(DefaultVideoQualityAnalyzerTest, InfraMetricsNotCollectedByDefault) { EXPECT_EQ(stats.on_decoder_error_processing_time_ms.NumSamples(), 0); } +TEST(DefaultVideoQualityAnalyzerTest, + FrameDroppedByDecoderIsAccountedCorrectly) { + std::unique_ptr frame_generator = + test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, + /*type=*/absl::nullopt, + /*num_squares=*/absl::nullopt); + + DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest(); + options.report_infra_metrics = false; + DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(), + test::GetGlobalMetricsLogger(), options); + analyzer.Start("test_case", std::vector{"alice", "bob"}, + kAnalyzerMaxThreadsCount); + + VideoFrame to_be_dropped_frame = + NextFrame(frame_generator.get(), /*timestamp_us=*/1); + uint16_t frame_id = + analyzer.OnFrameCaptured("alice", "alice_video", to_be_dropped_frame); + to_be_dropped_frame.set_id(frame_id); + analyzer.OnFramePreEncode("alice", to_be_dropped_frame); + analyzer.OnFrameEncoded("alice", to_be_dropped_frame.id(), + FakeEncode(to_be_dropped_frame), + VideoQualityAnalyzerInterface::EncoderStats(), false); + VideoFrame received_to_be_dropped_frame = DeepCopy(to_be_dropped_frame); + analyzer.OnFramePreDecode("bob", received_to_be_dropped_frame.id(), + FakeEncode(received_to_be_dropped_frame)); + PassFramesThroughAnalyzer(analyzer, "alice", "alice_video", {"bob"}, + /*frames_count=*/1, *frame_generator); + + // Give analyzer some time to process frames on async thread. 
The computations + // have to be fast (heavy metrics are disabled!), so if doesn't fit 100ms it + // means we have an issue! + SleepMs(100); + analyzer.Stop(); + + StreamStats stats = analyzer.GetStats().at(StatsKey("alice_video", "bob")); + ASSERT_EQ(stats.dropped_by_phase[FrameDropPhase::kByDecoder], 1); +} + class DefaultVideoQualityAnalyzerTimeBetweenFreezesTest : public TestWithParam {}; diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.cc b/third_party/libwebrtc/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.cc index 98c12f665cca..7f742972cb0a 100644 --- a/third_party/libwebrtc/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.cc +++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.cc @@ -290,6 +290,7 @@ EncodedImageCallback::Result QualityAnalyzingVideoEncoder::OnEncodedImage( VideoQualityAnalyzerInterface::EncoderStats stats; stats.encoder_name = codec_name; stats.target_encode_bitrate = target_encode_bitrate; + stats.qp = encoded_image.qp_; analyzer_->OnFrameEncoded(peer_name_, frame_id, encoded_image, stats, discard); diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/video_dumping_test.cc b/third_party/libwebrtc/test/pc/e2e/analyzer/video/video_dumping_test.cc index a7c95107ab78..5dd402151654 100644 --- a/third_party/libwebrtc/test/pc/e2e/analyzer/video/video_dumping_test.cc +++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/video_dumping_test.cc @@ -136,12 +136,10 @@ TEST_F(CreateVideoFrameWithIdsWriterTest, VideoIsWritenWithFrameIds) { ASSERT_TRUE(writer->WriteFrame(frame2)); writer->Close(); - test::Y4mFrameReaderImpl frame_reader(video_filename_, /*width=*/2, - /*height=*/2); - ASSERT_TRUE(frame_reader.Init()); - EXPECT_THAT(frame_reader.NumberOfFrames(), Eq(2)); - AssertFramesEqual(frame_reader.ReadFrame(), frame1.video_frame_buffer()); - AssertFramesEqual(frame_reader.ReadFrame(), frame2.video_frame_buffer()); + auto frame_reader = test::CreateY4mFrameReader(video_filename_); + EXPECT_THAT(frame_reader->num_frames(), Eq(2)); + AssertFramesEqual(frame_reader->PullFrame(), frame1.video_frame_buffer()); + AssertFramesEqual(frame_reader->PullFrame(), frame2.video_frame_buffer()); AssertFrameIdsAre(ids_filename_, {"1", "2"}); } @@ -163,12 +161,10 @@ TEST_F(VideoWriterTest, AllFramesAreWrittenWithSamplingModulo1) { frame_writer.Close(); } - test::Y4mFrameReaderImpl frame_reader(video_filename_, /*width=*/2, - /*height=*/2); - ASSERT_TRUE(frame_reader.Init()); - EXPECT_THAT(frame_reader.NumberOfFrames(), Eq(2)); - AssertFramesEqual(frame_reader.ReadFrame(), frame1.video_frame_buffer()); - AssertFramesEqual(frame_reader.ReadFrame(), frame2.video_frame_buffer()); + auto frame_reader = test::CreateY4mFrameReader(video_filename_); + EXPECT_THAT(frame_reader->num_frames(), Eq(2)); + AssertFramesEqual(frame_reader->PullFrame(), frame1.video_frame_buffer()); + AssertFramesEqual(frame_reader->PullFrame(), frame2.video_frame_buffer()); } TEST_F(VideoWriterTest, OnlyEvery2ndFramesIsWrittenWithSamplingModulo2) { @@ -189,12 +185,10 @@ TEST_F(VideoWriterTest, OnlyEvery2ndFramesIsWrittenWithSamplingModulo2) { frame_writer.Close(); } - test::Y4mFrameReaderImpl frame_reader(video_filename_, /*width=*/2, - /*height=*/2); - ASSERT_TRUE(frame_reader.Init()); - EXPECT_THAT(frame_reader.NumberOfFrames(), Eq(2)); - AssertFramesEqual(frame_reader.ReadFrame(), frame1.video_frame_buffer()); - AssertFramesEqual(frame_reader.ReadFrame(), frame3.video_frame_buffer()); + auto frame_reader = 
test::CreateY4mFrameReader(video_filename_); + EXPECT_THAT(frame_reader->num_frames(), Eq(2)); + AssertFramesEqual(frame_reader->PullFrame(), frame1.video_frame_buffer()); + AssertFramesEqual(frame_reader->PullFrame(), frame3.video_frame_buffer()); } } // namespace diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.cc b/third_party/libwebrtc/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.cc index 4bdc60168ba7..87c11886cce2 100644 --- a/third_party/libwebrtc/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.cc +++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.cc @@ -37,6 +37,7 @@ namespace webrtc { namespace webrtc_pc_e2e { namespace { +using webrtc::webrtc_pc_e2e::VideoConfig; using EmulatedSFUConfigMap = ::webrtc::webrtc_pc_e2e::QualityAnalyzingVideoEncoder::EmulatedSFUConfigMap; diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h b/third_party/libwebrtc/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h index 6407890bb206..8000edadb1e3 100644 --- a/third_party/libwebrtc/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h +++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h @@ -43,8 +43,6 @@ namespace webrtc_pc_e2e { // VideoQualityAnalyzerInterface into PeerConnection pipeline. class VideoQualityAnalyzerInjectionHelper : public StatsObserverInterface { public: - using VideoConfig = ::webrtc::webrtc_pc_e2e::VideoConfig; - VideoQualityAnalyzerInjectionHelper( Clock* clock, std::unique_ptr analyzer, @@ -71,7 +69,7 @@ class VideoQualityAnalyzerInjectionHelper : public StatsObserverInterface { // `input_dump_file_name`, video will be written into that file. std::unique_ptr CreateFramePreprocessor(absl::string_view peer_name, - const VideoConfig& config); + const webrtc::webrtc_pc_e2e::VideoConfig& config); // Creates sink, that will allow video quality analyzer to get access to // the rendered frames. If corresponding video track has // `output_dump_file_name` in its VideoConfig, which was used for @@ -160,7 +158,7 @@ class VideoQualityAnalyzerInjectionHelper : public StatsObserverInterface { Mutex mutex_; int peers_count_ RTC_GUARDED_BY(mutex_); // Map from stream label to the video config. - std::map known_video_configs_ + std::map known_video_configs_ RTC_GUARDED_BY(mutex_); std::map>>> diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/video_quality_metrics_reporter.cc b/third_party/libwebrtc/test/pc/e2e/analyzer/video/video_quality_metrics_reporter.cc index 085f1b3cf60a..fa7f5b1c1224 100644 --- a/third_party/libwebrtc/test/pc/e2e/analyzer/video/video_quality_metrics_reporter.cc +++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/video_quality_metrics_reporter.cc @@ -136,8 +136,10 @@ void VideoQualityMetricsReporter::ReportVideoBweResults( const std::string& peer_name, const VideoBweStats& video_bwe_stats) { std::string test_case_name = GetTestCaseName(peer_name); + // TODO(bugs.webrtc.org/14757): Remove kExperimentalTestNameMetadataKey. 
std::map metric_metadata{ - {MetricMetadataKey::kPeerMetadataKey, peer_name}}; + {MetricMetadataKey::kPeerMetadataKey, peer_name}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, test_case_name_}}; metrics_logger_->LogMetric( "available_send_bandwidth", test_case_name, diff --git a/third_party/libwebrtc/test/pc/e2e/cross_media_metrics_reporter.cc b/third_party/libwebrtc/test/pc/e2e/cross_media_metrics_reporter.cc index d09135d18244..b2c91089c8b9 100644 --- a/third_party/libwebrtc/test/pc/e2e/cross_media_metrics_reporter.cc +++ b/third_party/libwebrtc/test/pc/e2e/cross_media_metrics_reporter.cc @@ -114,6 +114,7 @@ void CrossMediaMetricsReporter::StopAndReportResults() { MutexLock lock(&mutex_); for (const auto& pair : stats_info_) { const std::string& sync_group = pair.first; + // TODO(bugs.webrtc.org/14757): Remove kExperimentalTestNameMetadataKey. std::map audio_metric_metadata{ {MetricMetadataKey::kPeerSyncGroupMetadataKey, sync_group}, {MetricMetadataKey::kAudioStreamMetadataKey, @@ -121,7 +122,8 @@ void CrossMediaMetricsReporter::StopAndReportResults() { {MetricMetadataKey::kPeerMetadataKey, pair.second.audio_stream_info.receiver_peer}, {MetricMetadataKey::kReceiverMetadataKey, - pair.second.audio_stream_info.receiver_peer}}; + pair.second.audio_stream_info.receiver_peer}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, test_case_name_}}; metrics_logger_->LogMetric( "audio_ahead_ms", GetTestCaseName(pair.second.audio_stream_info.stream_label, sync_group), @@ -129,6 +131,7 @@ void CrossMediaMetricsReporter::StopAndReportResults() { webrtc::test::ImprovementDirection::kSmallerIsBetter, std::move(audio_metric_metadata)); + // TODO(bugs.webrtc.org/14757): Remove kExperimentalTestNameMetadataKey. std::map video_metric_metadata{ {MetricMetadataKey::kPeerSyncGroupMetadataKey, sync_group}, {MetricMetadataKey::kAudioStreamMetadataKey, @@ -136,7 +139,8 @@ void CrossMediaMetricsReporter::StopAndReportResults() { {MetricMetadataKey::kPeerMetadataKey, pair.second.video_stream_info.receiver_peer}, {MetricMetadataKey::kReceiverMetadataKey, - pair.second.video_stream_info.receiver_peer}}; + pair.second.video_stream_info.receiver_peer}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, test_case_name_}}; metrics_logger_->LogMetric( "video_ahead_ms", GetTestCaseName(pair.second.video_stream_info.stream_label, sync_group), diff --git a/third_party/libwebrtc/test/pc/e2e/metric_metadata_keys.h b/third_party/libwebrtc/test/pc/e2e/metric_metadata_keys.h index 9a77c3196336..2fee0cbcb051 100644 --- a/third_party/libwebrtc/test/pc/e2e/metric_metadata_keys.h +++ b/third_party/libwebrtc/test/pc/e2e/metric_metadata_keys.h @@ -10,6 +10,8 @@ #ifndef TEST_PC_E2E_METRIC_METADATA_KEYS_H_ #define TEST_PC_E2E_METRIC_METADATA_KEYS_H_ +#include + namespace webrtc { namespace webrtc_pc_e2e { @@ -28,6 +30,13 @@ class MetricMetadataKey { static constexpr char kVideoStreamMetadataKey[] = "video_stream"; // Represents name of the sync group to which stream belongs. static constexpr char kPeerSyncGroupMetadataKey[] = "peer_sync_group"; + // Represents the test name (without any peer and stream data appended to it + // as it currently happens with the webrtc.test_metrics.Metric.test_case + // field). This metadata is temporary and it will be removed once this + // information is moved to webrtc.test_metrics.Metric.test_case. + // TODO(bugs.webrtc.org/14757): Remove kExperimentalTestNameMetadataKey. 
+ static constexpr char kExperimentalTestNameMetadataKey[] = + "experimental_test_name"; private: MetricMetadataKey() = default; diff --git a/third_party/libwebrtc/test/pc/e2e/network_quality_metrics_reporter.cc b/third_party/libwebrtc/test/pc/e2e/network_quality_metrics_reporter.cc index 10d16956e937..0bb28f0847b6 100644 --- a/third_party/libwebrtc/test/pc/e2e/network_quality_metrics_reporter.cc +++ b/third_party/libwebrtc/test/pc/e2e/network_quality_metrics_reporter.cc @@ -110,8 +110,8 @@ EmulatedNetworkStats NetworkQualityMetricsReporter::PopulateStats( EmulatedNetworkManagerInterface* network) { rtc::Event wait; EmulatedNetworkStats stats; - network->GetStats([&](std::unique_ptr s) { - stats = *s; + network->GetStats([&](EmulatedNetworkStats s) { + stats = std::move(s); wait.Set(); }); bool stats_received = wait.Wait(kStatsWaitTimeout); diff --git a/third_party/libwebrtc/test/pc/e2e/peer_configurer.h b/third_party/libwebrtc/test/pc/e2e/peer_configurer.h deleted file mode 100644 index d4b2d2f12c5c..000000000000 --- a/third_party/libwebrtc/test/pc/e2e/peer_configurer.h +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -#ifndef TEST_PC_E2E_PEER_CONFIGURER_H_ -#define TEST_PC_E2E_PEER_CONFIGURER_H_ - -#include "api/test/pclf/peer_configurer.h" -#include "test/pc/e2e/peer_params_preprocessor.h" - -namespace webrtc { -namespace webrtc_pc_e2e { - -using PeerConfigurerImpl = ::webrtc::webrtc_pc_e2e::PeerConfigurer; - -} // namespace webrtc_pc_e2e -} // namespace webrtc - -#endif // TEST_PC_E2E_PEER_CONFIGURER_H_ diff --git a/third_party/libwebrtc/test/pc/e2e/peer_connection_quality_test.cc b/third_party/libwebrtc/test/pc/e2e/peer_connection_quality_test.cc index fe4114c4ed92..83613118f9c1 100644 --- a/third_party/libwebrtc/test/pc/e2e/peer_connection_quality_test.cc +++ b/third_party/libwebrtc/test/pc/e2e/peer_connection_quality_test.cc @@ -195,16 +195,6 @@ void PeerConnectionE2EQualityTest::AddQualityMetricsReporter( quality_metrics_reporters_.push_back(std::move(quality_metrics_reporter)); } -PeerConnectionE2EQualityTest::PeerHandle* PeerConnectionE2EQualityTest::AddPeer( - const PeerNetworkDependencies& network_dependencies, - rtc::FunctionView configurer) { - peer_configurations_.push_back( - std::make_unique(network_dependencies)); - configurer(peer_configurations_.back().get()); - peer_handles_.push_back(PeerHandleImpl()); - return &peer_handles_.back(); -} - PeerConnectionE2EQualityTest::PeerHandle* PeerConnectionE2EQualityTest::AddPeer( std::unique_ptr configurer) { peer_configurations_.push_back(std::move(configurer)); @@ -751,14 +741,18 @@ void PeerConnectionE2EQualityTest::TearDownCall() { } void PeerConnectionE2EQualityTest::ReportGeneralTestResults() { + // TODO(bugs.webrtc.org/14757): Remove kExperimentalTestNameMetadataKey. 
metrics_logger_->LogSingleValueMetric( *alice_->params().name + "_connected", test_case_name_, alice_connected_, Unit::kUnitless, ImprovementDirection::kBiggerIsBetter, - {{MetricMetadataKey::kPeerMetadataKey, *alice_->params().name}}); + {{MetricMetadataKey::kPeerMetadataKey, *alice_->params().name}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, test_case_name_}}); + // TODO(bugs.webrtc.org/14757): Remove kExperimentalTestNameMetadataKey. metrics_logger_->LogSingleValueMetric( *bob_->params().name + "_connected", test_case_name_, bob_connected_, Unit::kUnitless, ImprovementDirection::kBiggerIsBetter, - {{MetricMetadataKey::kPeerMetadataKey, *bob_->params().name}}); + {{MetricMetadataKey::kPeerMetadataKey, *bob_->params().name}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, test_case_name_}}); } Timestamp PeerConnectionE2EQualityTest::Now() const { diff --git a/third_party/libwebrtc/test/pc/e2e/peer_connection_quality_test.h b/third_party/libwebrtc/test/pc/e2e/peer_connection_quality_test.h index e077673f090b..6cbf232874b9 100644 --- a/third_party/libwebrtc/test/pc/e2e/peer_connection_quality_test.h +++ b/third_party/libwebrtc/test/pc/e2e/peer_connection_quality_test.h @@ -71,9 +71,6 @@ class PeerConnectionE2EQualityTest void AddQualityMetricsReporter(std::unique_ptr quality_metrics_reporter) override; - PeerHandle* AddPeer( - const PeerNetworkDependencies& network_dependencies, - rtc::FunctionView configurer) override; PeerHandle* AddPeer(std::unique_ptr configurer) override; void Run(RunParams run_params) override; diff --git a/third_party/libwebrtc/test/pc/e2e/peer_connection_quality_test_metric_names_test.cc b/third_party/libwebrtc/test/pc/e2e/peer_connection_quality_test_metric_names_test.cc index cf359448b98f..d5f46f3ccc09 100644 --- a/third_party/libwebrtc/test/pc/e2e/peer_connection_quality_test_metric_names_test.cc +++ b/third_party/libwebrtc/test/pc/e2e/peer_connection_quality_test_metric_names_test.cc @@ -157,13 +157,17 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, .name = "alice_connected", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kBiggerIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case", .name = "bob_connected", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kBiggerIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, // Metrics from DefaultAudioQualityAnalyzer MetricValidationInfo{ @@ -174,7 +178,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey, "alice_audio"}, {MetricMetadataKey::kPeerMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_audio", .name = "accelerate_rate", @@ -183,7 +189,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey, "alice_audio"}, {MetricMetadataKey::kPeerMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + 
{MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_audio", .name = "preemptive_rate", @@ -192,7 +200,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey, "alice_audio"}, {MetricMetadataKey::kPeerMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_audio", .name = "speech_expand_rate", @@ -201,7 +211,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey, "alice_audio"}, {MetricMetadataKey::kPeerMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_audio", .name = "average_jitter_buffer_delay_ms", @@ -210,7 +222,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey, "alice_audio"}, {MetricMetadataKey::kPeerMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_audio", .name = "preferred_buffer_size_ms", @@ -219,7 +233,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey, "alice_audio"}, {MetricMetadataKey::kPeerMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_audio", .name = "expand_rate", @@ -228,7 +244,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey, "bob_audio"}, {MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_audio", .name = "accelerate_rate", @@ -237,7 +255,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey, "bob_audio"}, {MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_audio", .name = "preemptive_rate", @@ -246,7 +266,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey, "bob_audio"}, {MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_audio", .name = "speech_expand_rate", @@ -255,7 +277,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey, "bob_audio"}, 
{MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_audio", .name = "average_jitter_buffer_delay_ms", @@ -264,7 +288,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey, "bob_audio"}, {MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_audio", .name = "preferred_buffer_size_ms", @@ -273,7 +299,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey, "bob_audio"}, {MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, // Metrics from DefaultVideoQualityAnalyzer MetricValidationInfo{ @@ -285,7 +313,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, {MetricMetadataKey::kSenderMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_video", .name = "ssim", @@ -295,7 +325,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, {MetricMetadataKey::kSenderMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_video", .name = "transport_time", @@ -305,7 +337,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, {MetricMetadataKey::kSenderMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_video", .name = "total_delay_incl_transport", @@ -315,7 +349,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, {MetricMetadataKey::kSenderMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_video", .name = "time_between_rendered_frames", @@ -325,7 +361,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, {MetricMetadataKey::kSenderMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_video", .name = "harmonic_framerate", @@ -335,7 +373,9 @@ 
TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, {MetricMetadataKey::kSenderMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_video", .name = "encode_frame_rate", @@ -345,7 +385,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, {MetricMetadataKey::kSenderMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_video", .name = "encode_time", @@ -355,7 +397,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, {MetricMetadataKey::kSenderMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_video", .name = "time_between_freezes", @@ -365,7 +409,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, {MetricMetadataKey::kSenderMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_video", .name = "freeze_time_ms", @@ -375,7 +421,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, {MetricMetadataKey::kSenderMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_video", .name = "pixels_per_frame", @@ -385,7 +433,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, {MetricMetadataKey::kSenderMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_video", .name = "min_psnr_dB", @@ -395,7 +445,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, {MetricMetadataKey::kSenderMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_video", .name = "decode_time", @@ -405,7 +457,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, {MetricMetadataKey::kSenderMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_video", .name = 
"receive_to_render_time", @@ -415,7 +469,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, {MetricMetadataKey::kSenderMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_video", .name = "dropped_frames", @@ -425,7 +481,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, {MetricMetadataKey::kSenderMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_video", .name = "frames_in_flight", @@ -435,7 +493,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, {MetricMetadataKey::kSenderMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_video", .name = "rendered_frames", @@ -445,7 +505,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, {MetricMetadataKey::kSenderMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_video", .name = "max_skipped", @@ -455,7 +517,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, {MetricMetadataKey::kSenderMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_video", .name = "target_encode_bitrate", @@ -465,7 +529,21 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, {MetricMetadataKey::kSenderMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, + MetricValidationInfo{ + .test_case = "test_case/alice_video", + .name = "qp", + .unit = Unit::kUnitless, + .improvement_direction = ImprovementDirection::kSmallerIsBetter, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, + {MetricMetadataKey::kVideoStreamMetadataKey, + "alice_video"}, + {MetricMetadataKey::kSenderMetadataKey, "alice"}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_video", .name = "actual_encode_bitrate", @@ -475,7 +553,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, {MetricMetadataKey::kSenderMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + 
"test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_video", .name = "psnr_dB", @@ -485,7 +565,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "bob_video"}, {MetricMetadataKey::kSenderMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_video", .name = "ssim", @@ -495,7 +577,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "bob_video"}, {MetricMetadataKey::kSenderMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_video", .name = "transport_time", @@ -505,7 +589,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "bob_video"}, {MetricMetadataKey::kSenderMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_video", .name = "total_delay_incl_transport", @@ -515,7 +601,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "bob_video"}, {MetricMetadataKey::kSenderMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_video", .name = "time_between_rendered_frames", @@ -525,7 +613,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "bob_video"}, {MetricMetadataKey::kSenderMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_video", .name = "harmonic_framerate", @@ -535,7 +625,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "bob_video"}, {MetricMetadataKey::kSenderMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_video", .name = "encode_frame_rate", @@ -545,7 +637,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "bob_video"}, {MetricMetadataKey::kSenderMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_video", .name = "encode_time", @@ -555,7 +649,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "bob_video"}, {MetricMetadataKey::kSenderMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + 
{MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_video", .name = "time_between_freezes", @@ -565,7 +661,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "bob_video"}, {MetricMetadataKey::kSenderMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_video", .name = "freeze_time_ms", @@ -575,7 +673,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "bob_video"}, {MetricMetadataKey::kSenderMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_video", .name = "pixels_per_frame", @@ -585,7 +685,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "bob_video"}, {MetricMetadataKey::kSenderMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_video", .name = "min_psnr_dB", @@ -595,7 +697,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "bob_video"}, {MetricMetadataKey::kSenderMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_video", .name = "decode_time", @@ -605,7 +709,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "bob_video"}, {MetricMetadataKey::kSenderMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_video", .name = "receive_to_render_time", @@ -615,7 +721,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "bob_video"}, {MetricMetadataKey::kSenderMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_video", .name = "dropped_frames", @@ -625,7 +733,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "bob_video"}, {MetricMetadataKey::kSenderMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_video", .name = "frames_in_flight", @@ -635,7 +745,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "bob_video"}, {MetricMetadataKey::kSenderMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, + 
{MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_video", .name = "rendered_frames", @@ -645,7 +757,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "bob_video"}, {MetricMetadataKey::kSenderMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_video", .name = "max_skipped", @@ -655,7 +769,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "bob_video"}, {MetricMetadataKey::kSenderMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_video", .name = "target_encode_bitrate", @@ -665,7 +781,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "bob_video"}, {MetricMetadataKey::kSenderMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_video", .name = "actual_encode_bitrate", @@ -675,13 +793,28 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "bob_video"}, {MetricMetadataKey::kSenderMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, + MetricValidationInfo{ + .test_case = "test_case/bob_video", + .name = "qp", + .unit = Unit::kUnitless, + .improvement_direction = ImprovementDirection::kSmallerIsBetter, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, + {MetricMetadataKey::kVideoStreamMetadataKey, + "bob_video"}, + {MetricMetadataKey::kSenderMetadataKey, "bob"}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case", .name = "cpu_usage_%", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kSmallerIsBetter, - .metadata = {}}, + .metadata = {{MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, // Metrics from StatsBasedNetworkQualityMetricsReporter MetricValidationInfo{ @@ -689,133 +822,177 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, .name = "bytes_discarded_no_receiver", .unit = Unit::kBytes, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice", .name = "packets_discarded_no_receiver", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ 
.test_case = "test_case/alice", .name = "payload_bytes_received", .unit = Unit::kBytes, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice", .name = "payload_bytes_sent", .unit = Unit::kBytes, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice", .name = "bytes_sent", .unit = Unit::kBytes, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice", .name = "packets_sent", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice", .name = "average_send_rate", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice", .name = "bytes_received", .unit = Unit::kBytes, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice", .name = "packets_received", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice", .name = "average_receive_rate", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice", .name = "sent_packets_loss", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob", .name = "bytes_discarded_no_receiver", .unit = Unit::kBytes, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = 
{{MetricMetadataKey::kPeerMetadataKey, "bob"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob", .name = "packets_discarded_no_receiver", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob", .name = "payload_bytes_received", .unit = Unit::kBytes, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob", .name = "payload_bytes_sent", .unit = Unit::kBytes, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob", .name = "bytes_sent", .unit = Unit::kBytes, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob", .name = "packets_sent", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob", .name = "average_send_rate", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob", .name = "bytes_received", .unit = Unit::kBytes, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob", .name = "packets_received", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob", .name = "average_receive_rate", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = 
"test_case/bob", .name = "sent_packets_loss", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, // Metrics from VideoQualityMetricsReporter MetricValidationInfo{ @@ -823,37 +1000,49 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, .name = "available_send_bandwidth", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice", .name = "transmission_bitrate", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice", .name = "retransmission_bitrate", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob", .name = "available_send_bandwidth", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob", .name = "transmission_bitrate", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob", .name = "retransmission_bitrate", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, // Metrics from CrossMediaMetricsReporter MetricValidationInfo{ @@ -865,7 +1054,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {{MetricMetadataKey::kAudioStreamMetadataKey, "alice_audio"}, {MetricMetadataKey::kPeerMetadataKey, "bob"}, {MetricMetadataKey::kPeerSyncGroupMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_alice_video", .name = "video_ahead_ms", @@ -875,7 +1066,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {{MetricMetadataKey::kAudioStreamMetadataKey, "alice_video"}, {MetricMetadataKey::kPeerMetadataKey, "bob"}, {MetricMetadataKey::kPeerSyncGroupMetadataKey, "alice"}, - 
{MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_bob_audio", .name = "audio_ahead_ms", @@ -885,7 +1078,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {{MetricMetadataKey::kAudioStreamMetadataKey, "bob_audio"}, {MetricMetadataKey::kPeerMetadataKey, "alice"}, {MetricMetadataKey::kPeerSyncGroupMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_bob_video", .name = "video_ahead_ms", @@ -895,7 +1090,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kAudioStreamMetadataKey, "bob_video"}, {MetricMetadataKey::kPeerMetadataKey, "alice"}, {MetricMetadataKey::kPeerSyncGroupMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}}})); + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}})); } } // namespace diff --git a/third_party/libwebrtc/test/pc/e2e/peer_connection_quality_test_test.cc b/third_party/libwebrtc/test/pc/e2e/peer_connection_quality_test_test.cc index f39b4f5421ec..066fe7d8eec0 100644 --- a/third_party/libwebrtc/test/pc/e2e/peer_connection_quality_test_test.cc +++ b/third_party/libwebrtc/test/pc/e2e/peer_connection_quality_test_test.cc @@ -20,6 +20,7 @@ #include "api/test/network_emulation_manager.h" #include "api/test/pclf/media_configuration.h" #include "api/test/pclf/media_quality_test_params.h" +#include "api/test/pclf/peer_configurer.h" #include "api/test/peerconnection_quality_test_fixture.h" #include "api/units/time_delta.h" #include "rtc_base/time_utils.h" @@ -35,8 +36,7 @@ namespace { using ::testing::Eq; using ::testing::Test; -using PeerConfigurer = ::webrtc::webrtc_pc_e2e:: - PeerConnectionE2EQualityTestFixture::PeerConfigurer; +using ::webrtc::webrtc_pc_e2e::PeerConfigurer; // Remove files and directories in a directory non-recursively. 
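With the callback-based AddPeer() overload removed, tests construct a webrtc::webrtc_pc_e2e::PeerConfigurer themselves and hand it to the fixture. A minimal sketch, modeled on the AddDefaultAudioVideoPeer() helper added further down; the peer name, stream labels and resolution are placeholders:

#include <memory>
#include <string>

#include "api/test/pclf/media_configuration.h"
#include "api/test/pclf/media_quality_test_params.h"
#include "api/test/pclf/peer_configurer.h"
#include "api/test/peerconnection_quality_test_fixture.h"

namespace webrtc {
namespace webrtc_pc_e2e {

// Illustrative only: configures one audio and one video stream for a peer.
void AddSimplePeer(PeerConnectionE2EQualityTestFixture& fixture,
                   const PeerNetworkDependencies& network_dependencies) {
  auto peer = std::make_unique<PeerConfigurer>(network_dependencies);
  peer->SetName("alice");                            // Placeholder name.
  peer->SetAudioConfig(AudioConfig{"alice_audio"});  // Placeholder label.
  peer->AddVideoConfig(VideoConfig("alice_video", /*width=*/320,
                                   /*height=*/180, /*fps=*/15));
  fixture.AddPeer(std::move(peer));
}

}  // namespace webrtc_pc_e2e
}  // namespace webrtc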
void CleanDir(absl::string_view dir, size_t expected_output_files_count) { @@ -127,12 +127,9 @@ TEST_F(PeerConnectionE2EQualityTestTest, OutputVideoIsDumpedWhenRequested) { fixture.Run(RunParams(TimeDelta::Seconds(2))); - test::Y4mFrameReaderImpl frame_reader( - test::JoinFilename(test_directory_, "alice_video_bob_320x180_15.y4m"), - /*width=*/320, - /*height=*/180); - ASSERT_TRUE(frame_reader.Init()); - EXPECT_THAT(frame_reader.NumberOfFrames(), Eq(31)); // 2 seconds 15 fps + 1 + auto frame_reader = test::CreateY4mFrameReader( + test::JoinFilename(test_directory_, "alice_video_bob_320x180_15.y4m")); + EXPECT_THAT(frame_reader->num_frames(), Eq(31)); // 2 seconds 15 fps + 1 ExpectOutputFilesCount(1); } diff --git a/third_party/libwebrtc/test/pc/e2e/stats_based_network_quality_metrics_reporter.cc b/third_party/libwebrtc/test/pc/e2e/stats_based_network_quality_metrics_reporter.cc index 063d142be923..65dca5b518a5 100644 --- a/third_party/libwebrtc/test/pc/e2e/stats_based_network_quality_metrics_reporter.cc +++ b/third_party/libwebrtc/test/pc/e2e/stats_based_network_quality_metrics_reporter.cc @@ -22,6 +22,7 @@ #include "absl/strings/string_view.h" #include "api/array_view.h" #include "api/scoped_refptr.h" +#include "api/sequence_checker.h" #include "api/stats/rtc_stats.h" #include "api/stats/rtcstats_objects.h" #include "api/test/metrics/metric.h" @@ -34,6 +35,7 @@ #include "rtc_base/ip_address.h" #include "rtc_base/strings/string_builder.h" #include "rtc_base/synchronization/mutex.h" +#include "rtc_base/system/no_unique_address.h" #include "system_wrappers/include/field_trial.h" #include "test/pc/e2e/metric_metadata_keys.h" @@ -44,6 +46,9 @@ namespace { using ::webrtc::test::ImprovementDirection; using ::webrtc::test::Unit; +using NetworkLayerStats = + StatsBasedNetworkQualityMetricsReporter::NetworkLayerStats; + constexpr TimeDelta kStatsWaitTimeout = TimeDelta::Seconds(1); // Field trial which controls whether to report standard-compliant bytes @@ -55,11 +60,10 @@ EmulatedNetworkStats PopulateStats(std::vector endpoints, NetworkEmulationManager* network_emulation) { rtc::Event stats_loaded; EmulatedNetworkStats stats; - network_emulation->GetStats(endpoints, - [&](std::unique_ptr s) { - stats = *s; - stats_loaded.Set(); - }); + network_emulation->GetStats(endpoints, [&](EmulatedNetworkStats s) { + stats = std::move(s); + stats_loaded.Set(); + }); bool stats_received = stats_loaded.Wait(kStatsWaitTimeout); RTC_CHECK(stats_received); return stats; @@ -79,6 +83,83 @@ std::map PopulateIpToPeer( return out; } +// Accumulates emulated network stats being executed on the network thread. +// When all stats are collected stores it in thread safe variable. +class EmulatedNetworkStatsAccumulator { + public: + // `expected_stats_count` - the number of calls to + // AddEndpointStats/AddUplinkStats/AddDownlinkStats the accumulator is going + // to wait. If called more than expected, the program will crash. + explicit EmulatedNetworkStatsAccumulator(size_t expected_stats_count) + : not_collected_stats_count_(expected_stats_count) { + RTC_DCHECK_GE(not_collected_stats_count_, 0); + if (not_collected_stats_count_ == 0) { + all_stats_collected_.Set(); + } + sequence_checker_.Detach(); + } + + // Has to be executed on network thread. + void AddEndpointStats(std::string peer_name, EmulatedNetworkStats stats) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + n_stats_[peer_name].endpoints_stats = std::move(stats); + DecrementNotCollectedStatsCount(); + } + + // Has to be executed on network thread. 
+ void AddUplinkStats(std::string peer_name, EmulatedNetworkNodeStats stats) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + n_stats_[peer_name].uplink_stats = std::move(stats); + DecrementNotCollectedStatsCount(); + } + + // Has to be executed on network thread. + void AddDownlinkStats(std::string peer_name, EmulatedNetworkNodeStats stats) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + n_stats_[peer_name].downlink_stats = std::move(stats); + DecrementNotCollectedStatsCount(); + } + + // Can be executed on any thread. + // Returns true if count down was completed and false if timeout elapsed + // before. + bool Wait(TimeDelta timeout) { return all_stats_collected_.Wait(timeout); } + + // Can be called once. Returns all collected stats by moving underlying + // object. + std::map ReleaseStats() { + RTC_DCHECK(!stats_released_); + stats_released_ = true; + MutexLock lock(&mutex_); + return std::move(stats_); + } + + private: + void DecrementNotCollectedStatsCount() { + RTC_DCHECK_RUN_ON(&sequence_checker_); + RTC_CHECK_GT(not_collected_stats_count_, 0) + << "All stats are already collected"; + not_collected_stats_count_--; + if (not_collected_stats_count_ == 0) { + MutexLock lock(&mutex_); + stats_ = std::move(n_stats_); + all_stats_collected_.Set(); + } + } + + RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_; + size_t not_collected_stats_count_ RTC_GUARDED_BY(sequence_checker_); + // Collected on the network thread. Moved into `stats_` after all stats are + // collected. + std::map n_stats_ + RTC_GUARDED_BY(sequence_checker_); + + rtc::Event all_stats_collected_; + Mutex mutex_; + std::map stats_ RTC_GUARDED_BY(mutex_); + bool stats_released_ = false; +}; + } // namespace StatsBasedNetworkQualityMetricsReporter:: @@ -114,11 +195,15 @@ void StatsBasedNetworkQualityMetricsReporter::NetworkLayerStatsCollector:: void StatsBasedNetworkQualityMetricsReporter::NetworkLayerStatsCollector:: AddPeer(absl::string_view peer_name, - std::vector endpoints) { + std::vector endpoints, + std::vector uplink, + std::vector downlink) { MutexLock lock(&mutex_); // When new peer is added not in the constructor, don't check if it has empty // stats, because their endpoint could be used for traffic before. 
peer_endpoints_.emplace(peer_name, std::move(endpoints)); + peer_uplinks_.emplace(peer_name, std::move(uplink)); + peer_downlinks_.emplace(peer_name, std::move(downlink)); for (const EmulatedEndpoint* const endpoint : endpoints) { RTC_CHECK(ip_to_peer_.find(endpoint->GetPeerLocalAddress()) == ip_to_peer_.end()) @@ -127,19 +212,43 @@ void StatsBasedNetworkQualityMetricsReporter::NetworkLayerStatsCollector:: } } -std::map +std::map StatsBasedNetworkQualityMetricsReporter::NetworkLayerStatsCollector:: GetStats() { MutexLock lock(&mutex_); - std::map peer_to_stats; + EmulatedNetworkStatsAccumulator stats_accumulator( + peer_endpoints_.size() + peer_uplinks_.size() + peer_downlinks_.size()); + for (const auto& entry : peer_endpoints_) { + network_emulation_->GetStats( + entry.second, [&stats_accumulator, + peer = entry.first](EmulatedNetworkStats s) mutable { + stats_accumulator.AddEndpointStats(std::move(peer), std::move(s)); + }); + } + for (const auto& entry : peer_uplinks_) { + network_emulation_->GetStats( + entry.second, [&stats_accumulator, + peer = entry.first](EmulatedNetworkNodeStats s) mutable { + stats_accumulator.AddUplinkStats(std::move(peer), std::move(s)); + }); + } + for (const auto& entry : peer_downlinks_) { + network_emulation_->GetStats( + entry.second, [&stats_accumulator, + peer = entry.first](EmulatedNetworkNodeStats s) mutable { + stats_accumulator.AddDownlinkStats(std::move(peer), std::move(s)); + }); + } + bool stats_collected = stats_accumulator.Wait(kStatsWaitTimeout); + RTC_CHECK(stats_collected); + std::map peer_to_stats = + stats_accumulator.ReleaseStats(); std::map> sender_to_receivers; for (const auto& entry : peer_endpoints_) { - NetworkLayerStats stats; - stats.stats = PopulateStats(entry.second, network_emulation_); const std::string& peer_name = entry.first; + const NetworkLayerStats& stats = peer_to_stats[peer_name]; for (const auto& income_stats_entry : - stats.stats.incoming_stats_per_source) { + stats.endpoints_stats.incoming_stats_per_source) { const rtc::IPAddress& source_ip = income_stats_entry.first; auto it = ip_to_peer_.find(source_ip); if (it == ip_to_peer_.end()) { @@ -148,7 +257,6 @@ StatsBasedNetworkQualityMetricsReporter::NetworkLayerStatsCollector:: } sender_to_receivers[it->second].push_back(peer_name); } - peer_to_stats.emplace(peer_name, std::move(stats)); } for (auto& entry : peer_to_stats) { const std::vector& receivers = @@ -162,7 +270,17 @@ StatsBasedNetworkQualityMetricsReporter::NetworkLayerStatsCollector:: void StatsBasedNetworkQualityMetricsReporter::AddPeer( absl::string_view peer_name, std::vector endpoints) { - collector_.AddPeer(peer_name, std::move(endpoints)); + collector_.AddPeer(peer_name, std::move(endpoints), /*uplink=*/{}, + /*downlink=*/{}); +} + +void StatsBasedNetworkQualityMetricsReporter::AddPeer( + absl::string_view peer_name, + std::vector endpoints, + std::vector uplink, + std::vector downlink) { + collector_.AddPeer(peer_name, std::move(endpoints), std::move(uplink), + std::move(downlink)); } void StatsBasedNetworkQualityMetricsReporter::Start( @@ -252,16 +370,18 @@ void StatsBasedNetworkQualityMetricsReporter::ReportStats( const NetworkLayerStats& network_layer_stats, int64_t packet_loss, const Timestamp& end_time) { + // TODO(bugs.webrtc.org/14757): Remove kExperimentalTestNameMetadataKey. 
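In the rewritten GetStats() above, the collector fans out one asynchronous stats request per peer for endpoints, uplink nodes and downlink nodes, funnels the results into the accumulator on the network thread, and then does a single bounded wait. A condensed sketch of that pattern, abbreviated from the loops shown above rather than introducing new behaviour:

// Inside NetworkLayerStatsCollector::GetStats(), condensed:
EmulatedNetworkStatsAccumulator stats_accumulator(
    peer_endpoints_.size() + peer_uplinks_.size() + peer_downlinks_.size());
for (const auto& entry : peer_endpoints_) {
  network_emulation_->GetStats(
      entry.second,
      [&stats_accumulator, peer = entry.first](EmulatedNetworkStats s) mutable {
        stats_accumulator.AddEndpointStats(std::move(peer), std::move(s));
      });
}
// ...analogous loops over peer_uplinks_ and peer_downlinks_ call
// AddUplinkStats() / AddDownlinkStats() with EmulatedNetworkNodeStats...
RTC_CHECK(stats_accumulator.Wait(kStatsWaitTimeout));
std::map<std::string, NetworkLayerStats> peer_to_stats =
    stats_accumulator.ReleaseStats();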
std::map metric_metadata{ - {MetricMetadataKey::kPeerMetadataKey, pc_label}}; + {MetricMetadataKey::kPeerMetadataKey, pc_label}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, test_case_name_}}; metrics_logger_->LogSingleValueMetric( "bytes_discarded_no_receiver", GetTestCaseName(pc_label), - network_layer_stats.stats.overall_incoming_stats + network_layer_stats.endpoints_stats.overall_incoming_stats .bytes_discarded_no_receiver.bytes(), Unit::kBytes, ImprovementDirection::kNeitherIsBetter, metric_metadata); metrics_logger_->LogSingleValueMetric( "packets_discarded_no_receiver", GetTestCaseName(pc_label), - network_layer_stats.stats.overall_incoming_stats + network_layer_stats.endpoints_stats.overall_incoming_stats .packets_discarded_no_receiver, Unit::kUnitless, ImprovementDirection::kNeitherIsBetter, metric_metadata); @@ -313,55 +433,62 @@ void StatsBasedNetworkQualityMetricsReporter::LogNetworkLayerStats( const std::string& peer_name, const NetworkLayerStats& stats) const { DataRate average_send_rate = - stats.stats.overall_outgoing_stats.packets_sent >= 2 - ? stats.stats.overall_outgoing_stats.AverageSendRate() + stats.endpoints_stats.overall_outgoing_stats.packets_sent >= 2 + ? stats.endpoints_stats.overall_outgoing_stats.AverageSendRate() : DataRate::Zero(); DataRate average_receive_rate = - stats.stats.overall_incoming_stats.packets_received >= 2 - ? stats.stats.overall_incoming_stats.AverageReceiveRate() + stats.endpoints_stats.overall_incoming_stats.packets_received >= 2 + ? stats.endpoints_stats.overall_incoming_stats.AverageReceiveRate() : DataRate::Zero(); + // TODO(bugs.webrtc.org/14757): Remove kExperimentalTestNameMetadataKey. std::map metric_metadata{ - {MetricMetadataKey::kPeerMetadataKey, peer_name}}; + {MetricMetadataKey::kPeerMetadataKey, peer_name}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, test_case_name_}}; rtc::StringBuilder log; log << "Raw network layer statistic for [" << peer_name << "]:\n" << "Local IPs:\n"; - for (size_t i = 0; i < stats.stats.local_addresses.size(); ++i) { - log << " " << stats.stats.local_addresses[i].ToString() << "\n"; + for (size_t i = 0; i < stats.endpoints_stats.local_addresses.size(); ++i) { + log << " " << stats.endpoints_stats.local_addresses[i].ToString() << "\n"; } - if (!stats.stats.overall_outgoing_stats.sent_packets_size.IsEmpty()) { - metrics_logger_->LogMetric( - "sent_packets_size", GetTestCaseName(peer_name), - stats.stats.overall_outgoing_stats.sent_packets_size, Unit::kBytes, - ImprovementDirection::kNeitherIsBetter, metric_metadata); - } - if (!stats.stats.overall_incoming_stats.received_packets_size.IsEmpty()) { - metrics_logger_->LogMetric( - "received_packets_size", GetTestCaseName(peer_name), - stats.stats.overall_incoming_stats.received_packets_size, Unit::kBytes, - ImprovementDirection::kNeitherIsBetter, metric_metadata); - } - if (!stats.stats.overall_incoming_stats.packets_discarded_no_receiver_size + if (!stats.endpoints_stats.overall_outgoing_stats.sent_packets_size .IsEmpty()) { metrics_logger_->LogMetric( - "packets_discarded_no_receiver_size", GetTestCaseName(peer_name), - stats.stats.overall_incoming_stats.packets_discarded_no_receiver_size, + "sent_packets_size", GetTestCaseName(peer_name), + stats.endpoints_stats.overall_outgoing_stats.sent_packets_size, Unit::kBytes, ImprovementDirection::kNeitherIsBetter, metric_metadata); } - if (!stats.stats.sent_packets_queue_wait_time_us.IsEmpty()) { + if (!stats.endpoints_stats.overall_incoming_stats.received_packets_size + .IsEmpty()) { + 
metrics_logger_->LogMetric( + "received_packets_size", GetTestCaseName(peer_name), + stats.endpoints_stats.overall_incoming_stats.received_packets_size, + Unit::kBytes, ImprovementDirection::kNeitherIsBetter, metric_metadata); + } + if (!stats.endpoints_stats.overall_incoming_stats + .packets_discarded_no_receiver_size.IsEmpty()) { + metrics_logger_->LogMetric( + "packets_discarded_no_receiver_size", GetTestCaseName(peer_name), + stats.endpoints_stats.overall_incoming_stats + .packets_discarded_no_receiver_size, + Unit::kBytes, ImprovementDirection::kNeitherIsBetter, metric_metadata); + } + if (!stats.endpoints_stats.sent_packets_queue_wait_time_us.IsEmpty()) { metrics_logger_->LogMetric( "sent_packets_queue_wait_time_us", GetTestCaseName(peer_name), - stats.stats.sent_packets_queue_wait_time_us, Unit::kUnitless, + stats.endpoints_stats.sent_packets_queue_wait_time_us, Unit::kUnitless, ImprovementDirection::kNeitherIsBetter, metric_metadata); } log << "Send statistic:\n" - << " packets: " << stats.stats.overall_outgoing_stats.packets_sent - << " bytes: " << stats.stats.overall_outgoing_stats.bytes_sent.bytes() + << " packets: " + << stats.endpoints_stats.overall_outgoing_stats.packets_sent << " bytes: " + << stats.endpoints_stats.overall_outgoing_stats.bytes_sent.bytes() << " avg_rate (bytes/sec): " << average_send_rate.bytes_per_sec() << " avg_rate (bps): " << average_send_rate.bps() << "\n" << "Send statistic per destination:\n"; - for (const auto& entry : stats.stats.outgoing_stats_per_destination) { + for (const auto& entry : + stats.endpoints_stats.outgoing_stats_per_destination) { DataRate source_average_send_rate = entry.second.packets_sent >= 2 ? entry.second.AverageSendRate() : DataRate::Zero(); @@ -379,14 +506,38 @@ void StatsBasedNetworkQualityMetricsReporter::LogNetworkLayerStats( } } + if (!stats.uplink_stats.packet_transport_time.IsEmpty()) { + log << "[Debug stats] packet_transport_time=(" + << stats.uplink_stats.packet_transport_time.GetAverage() << ", " + << stats.uplink_stats.packet_transport_time.GetStandardDeviation() + << ")\n"; + metrics_logger_->LogMetric( + "uplink_packet_transport_time", GetTestCaseName(peer_name), + stats.uplink_stats.packet_transport_time, Unit::kMilliseconds, + ImprovementDirection::kNeitherIsBetter, metric_metadata); + } + if (!stats.uplink_stats.size_to_packet_transport_time.IsEmpty()) { + log << "[Debug stats] size_to_packet_transport_time=(" + << stats.uplink_stats.size_to_packet_transport_time.GetAverage() << ", " + << stats.uplink_stats.size_to_packet_transport_time + .GetStandardDeviation() + << ")\n"; + metrics_logger_->LogMetric( + "uplink_size_to_packet_transport_time", GetTestCaseName(peer_name), + stats.uplink_stats.size_to_packet_transport_time, Unit::kUnitless, + ImprovementDirection::kNeitherIsBetter, metric_metadata); + } + log << "Receive statistic:\n" - << " packets: " << stats.stats.overall_incoming_stats.packets_received - << " bytes: " << stats.stats.overall_incoming_stats.bytes_received.bytes() + << " packets: " + << stats.endpoints_stats.overall_incoming_stats.packets_received + << " bytes: " + << stats.endpoints_stats.overall_incoming_stats.bytes_received.bytes() << " avg_rate (bytes/sec): " << average_receive_rate.bytes_per_sec() << " avg_rate (bps): " << average_receive_rate.bps() << "\n" << "Receive statistic per source:\n"; - for (const auto& entry : stats.stats.incoming_stats_per_source) { + for (const auto& entry : stats.endpoints_stats.incoming_stats_per_source) { DataRate source_average_receive_rate = 
entry.second.packets_received >= 2 ? entry.second.AverageReceiveRate() : DataRate::Zero(); @@ -411,6 +562,28 @@ void StatsBasedNetworkQualityMetricsReporter::LogNetworkLayerStats( ImprovementDirection::kNeitherIsBetter, metric_metadata); } } + if (!stats.downlink_stats.packet_transport_time.IsEmpty()) { + log << "[Debug stats] packet_transport_time=(" + << stats.downlink_stats.packet_transport_time.GetAverage() << ", " + << stats.downlink_stats.packet_transport_time.GetStandardDeviation() + << ")\n"; + metrics_logger_->LogMetric( + "downlink_packet_transport_time", GetTestCaseName(peer_name), + stats.downlink_stats.packet_transport_time, Unit::kMilliseconds, + ImprovementDirection::kNeitherIsBetter, metric_metadata); + } + if (!stats.downlink_stats.size_to_packet_transport_time.IsEmpty()) { + log << "[Debug stats] size_to_packet_transport_time=(" + << stats.downlink_stats.size_to_packet_transport_time.GetAverage() + << ", " + << stats.downlink_stats.size_to_packet_transport_time + .GetStandardDeviation() + << ")\n"; + metrics_logger_->LogMetric( + "downlink_size_to_packet_transport_time", GetTestCaseName(peer_name), + stats.downlink_stats.size_to_packet_transport_time, Unit::kUnitless, + ImprovementDirection::kNeitherIsBetter, metric_metadata); + } RTC_LOG(LS_INFO) << log.str(); } diff --git a/third_party/libwebrtc/test/pc/e2e/stats_based_network_quality_metrics_reporter.h b/third_party/libwebrtc/test/pc/e2e/stats_based_network_quality_metrics_reporter.h index 8516c40f5c68..60daf40c8cc4 100644 --- a/third_party/libwebrtc/test/pc/e2e/stats_based_network_quality_metrics_reporter.h +++ b/third_party/libwebrtc/test/pc/e2e/stats_based_network_quality_metrics_reporter.h @@ -37,6 +37,14 @@ namespace webrtc_pc_e2e { class StatsBasedNetworkQualityMetricsReporter : public PeerConnectionE2EQualityTestFixture::QualityMetricsReporter { public: + // Emulated network layer stats for single peer. + struct NetworkLayerStats { + EmulatedNetworkStats endpoints_stats; + EmulatedNetworkNodeStats uplink_stats; + EmulatedNetworkNodeStats downlink_stats; + std::set receivers; + }; + // `networks` map peer name to network to report network layer stability stats // and to log network layer metrics. StatsBasedNetworkQualityMetricsReporter( @@ -47,6 +55,10 @@ class StatsBasedNetworkQualityMetricsReporter void AddPeer(absl::string_view peer_name, std::vector endpoints); + void AddPeer(absl::string_view peer_name, + std::vector endpoints, + std::vector uplink, + std::vector downlink); // Network stats must be empty when this method will be invoked. 
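The new AddPeer() overload declared in the header above lets a test associate uplink and downlink emulated nodes with a peer, so LogNetworkLayerStats() can emit the per-link packet-transport-time metrics. A condensed sketch of the wiring, taken from the DebugStatsAreCollected test added below; the reporter, links, networks and fixture are assumed to be set up as in that test:

// `network_stats_reporter` is a constructed
// std::unique_ptr<StatsBasedNetworkQualityMetricsReporter>; alice_link and
// bob_link are the EmulatedNetworkNode* of the two emulated routes.
network_stats_reporter->AddPeer("alice", alice_network->endpoints(),
                                /*uplink=*/{alice_link},
                                /*downlink=*/{bob_link});
network_stats_reporter->AddPeer("bob", bob_network->endpoints(),
                                /*uplink=*/{bob_link},
                                /*downlink=*/{alice_link});
fixture.AddQualityMetricsReporter(std::move(network_stats_reporter));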
void Start(absl::string_view test_case_name, @@ -71,11 +83,6 @@ class StatsBasedNetworkQualityMetricsReporter int64_t packets_sent = 0; }; - struct NetworkLayerStats { - EmulatedNetworkStats stats; - std::set receivers; - }; - class NetworkLayerStatsCollector { public: NetworkLayerStatsCollector( @@ -85,7 +92,9 @@ class StatsBasedNetworkQualityMetricsReporter void Start(); void AddPeer(absl::string_view peer_name, - std::vector endpoints); + std::vector endpoints, + std::vector uplink, + std::vector downlink); std::map GetStats(); @@ -93,6 +102,10 @@ class StatsBasedNetworkQualityMetricsReporter Mutex mutex_; std::map> peer_endpoints_ RTC_GUARDED_BY(mutex_); + std::map> peer_uplinks_ + RTC_GUARDED_BY(mutex_); + std::map> peer_downlinks_ + RTC_GUARDED_BY(mutex_); std::map ip_to_peer_ RTC_GUARDED_BY(mutex_); NetworkEmulationManager* const network_emulation_; }; diff --git a/third_party/libwebrtc/test/pc/e2e/stats_based_network_quality_metrics_reporter_test.cc b/third_party/libwebrtc/test/pc/e2e/stats_based_network_quality_metrics_reporter_test.cc new file mode 100644 index 000000000000..be5514948285 --- /dev/null +++ b/third_party/libwebrtc/test/pc/e2e/stats_based_network_quality_metrics_reporter_test.cc @@ -0,0 +1,150 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "test/pc/e2e/stats_based_network_quality_metrics_reporter.h" + +#include +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "api/test/create_network_emulation_manager.h" +#include "api/test/create_peer_connection_quality_test_frame_generator.h" +#include "api/test/metrics/metrics_logger.h" +#include "api/test/metrics/stdout_metrics_exporter.h" +#include "api/test/network_emulation_manager.h" +#include "api/test/pclf/media_configuration.h" +#include "api/test/pclf/media_quality_test_params.h" +#include "api/test/pclf/peer_configurer.h" +#include "api/test/peerconnection_quality_test_fixture.h" +#include "api/units/time_delta.h" +#include "test/gmock.h" +#include "test/gtest.h" +#include "test/pc/e2e/metric_metadata_keys.h" +#include "test/pc/e2e/peer_connection_quality_test.h" + +namespace webrtc { +namespace webrtc_pc_e2e { +namespace { + +using ::testing::UnorderedElementsAre; + +using ::webrtc::test::DefaultMetricsLogger; +using ::webrtc::test::ImprovementDirection; +using ::webrtc::test::Metric; +using ::webrtc::test::Unit; +using ::webrtc::webrtc_pc_e2e::PeerConfigurer; + +// Adds a peer with some audio and video (the client should not care about +// details about audio and video configs). 
+void AddDefaultAudioVideoPeer( + absl::string_view peer_name, + absl::string_view audio_stream_label, + absl::string_view video_stream_label, + const PeerNetworkDependencies& network_dependencies, + PeerConnectionE2EQualityTestFixture& fixture) { + AudioConfig audio{std::string(audio_stream_label)}; + audio.sync_group = std::string(peer_name); + VideoConfig video(std::string(video_stream_label), 320, 180, 15); + video.sync_group = std::string(peer_name); + auto peer = std::make_unique(network_dependencies); + peer->SetName(peer_name); + peer->SetAudioConfig(std::move(audio)); + peer->AddVideoConfig(std::move(video)); + peer->SetVideoCodecs({VideoCodecConfig(cricket::kVp8CodecName)}); + fixture.AddPeer(std::move(peer)); +} + +absl::optional FindMeetricByName(absl::string_view name, + rtc::ArrayView metrics) { + for (const Metric& metric : metrics) { + if (metric.name == name) { + return metric; + } + } + return absl::nullopt; +} + +TEST(StatsBasedNetworkQualityMetricsReporterTest, DebugStatsAreCollected) { + std::unique_ptr network_emulation = + CreateNetworkEmulationManager(TimeMode::kSimulated, + EmulatedNetworkStatsGatheringMode::kDebug); + DefaultMetricsLogger metrics_logger( + network_emulation->time_controller()->GetClock()); + PeerConnectionE2EQualityTest fixture( + "test_case", *network_emulation->time_controller(), + /*audio_quality_analyzer=*/nullptr, /*video_quality_analyzer=*/nullptr, + &metrics_logger); + + EmulatedEndpoint* alice_endpoint = + network_emulation->CreateEndpoint(EmulatedEndpointConfig()); + EmulatedEndpoint* bob_endpoint = + network_emulation->CreateEndpoint(EmulatedEndpointConfig()); + + EmulatedNetworkNode* alice_link = network_emulation->CreateEmulatedNode( + BuiltInNetworkBehaviorConfig{.link_capacity_kbps = 500}); + network_emulation->CreateRoute(alice_endpoint, {alice_link}, bob_endpoint); + EmulatedNetworkNode* bob_link = network_emulation->CreateEmulatedNode( + BuiltInNetworkBehaviorConfig{.link_capacity_kbps = 500}); + network_emulation->CreateRoute(bob_endpoint, {bob_link}, alice_endpoint); + + EmulatedNetworkManagerInterface* alice_network = + network_emulation->CreateEmulatedNetworkManagerInterface( + {alice_endpoint}); + EmulatedNetworkManagerInterface* bob_network = + network_emulation->CreateEmulatedNetworkManagerInterface({bob_endpoint}); + + AddDefaultAudioVideoPeer("alice", "alice_audio", "alice_video", + alice_network->network_dependencies(), fixture); + AddDefaultAudioVideoPeer("bob", "bob_audio", "bob_video", + bob_network->network_dependencies(), fixture); + + auto network_stats_reporter = + std::make_unique( + /*peer_endpoints=*/std::map>{}, + network_emulation.get(), &metrics_logger); + network_stats_reporter->AddPeer("alice", alice_network->endpoints(), + /*uplink=*/{alice_link}, + /*downlink=*/{bob_link}); + network_stats_reporter->AddPeer("bob", bob_network->endpoints(), + /*uplink=*/{bob_link}, + /*downlink=*/{alice_link}); + fixture.AddQualityMetricsReporter(std::move(network_stats_reporter)); + + fixture.Run(RunParams(TimeDelta::Seconds(4))); + + std::vector metrics = metrics_logger.GetCollectedMetrics(); + absl::optional uplink_packet_transport_time = + FindMeetricByName("uplink_packet_transport_time", metrics); + ASSERT_TRUE(uplink_packet_transport_time.has_value()); + ASSERT_FALSE(uplink_packet_transport_time->time_series.samples.empty()); + absl::optional uplink_size_to_packet_transport_time = + FindMeetricByName("uplink_size_to_packet_transport_time", metrics); + ASSERT_TRUE(uplink_size_to_packet_transport_time.has_value()); + 
ASSERT_FALSE( + uplink_size_to_packet_transport_time->time_series.samples.empty()); + absl::optional downlink_packet_transport_time = + FindMeetricByName("downlink_packet_transport_time", metrics); + ASSERT_TRUE(downlink_packet_transport_time.has_value()); + ASSERT_FALSE(downlink_packet_transport_time->time_series.samples.empty()); + absl::optional downlink_size_to_packet_transport_time = + FindMeetricByName("downlink_size_to_packet_transport_time", metrics); + ASSERT_TRUE(downlink_size_to_packet_transport_time.has_value()); + ASSERT_FALSE( + downlink_size_to_packet_transport_time->time_series.samples.empty()); +} + +} // namespace +} // namespace webrtc_pc_e2e +} // namespace webrtc diff --git a/third_party/libwebrtc/test/peer_scenario/peer_scenario.cc b/third_party/libwebrtc/test/peer_scenario/peer_scenario.cc index ea959c943afa..485e33f67fb1 100644 --- a/third_party/libwebrtc/test/peer_scenario/peer_scenario.cc +++ b/third_party/libwebrtc/test/peer_scenario/peer_scenario.cc @@ -55,7 +55,7 @@ PeerScenario::PeerScenario( std::unique_ptr log_writer_manager, TimeMode mode) : log_writer_manager_(std::move(log_writer_manager)), - net_(mode), + net_(mode, EmulatedNetworkStatsGatheringMode::kDefault), signaling_thread_(net_.time_controller()->GetMainThread()) {} PeerScenarioClient* PeerScenario::CreateClient( diff --git a/third_party/libwebrtc/test/scenario/audio_stream.cc b/third_party/libwebrtc/test/scenario/audio_stream.cc index ea170bc17cb7..3c94d7911f03 100644 --- a/third_party/libwebrtc/test/scenario/audio_stream.cc +++ b/third_party/libwebrtc/test/scenario/audio_stream.cc @@ -93,9 +93,10 @@ SendAudioStream::SendAudioStream( RTC_DCHECK_LE(config.source.channels, 2); send_config.encoder_factory = encoder_factory; - if (config.encoder.fixed_rate) + bool use_fixed_rate = !config.encoder.min_rate && !config.encoder.max_rate; + if (use_fixed_rate) send_config.send_codec_spec->target_bitrate_bps = - config.encoder.fixed_rate->bps(); + config.encoder.fixed_rate.bps(); if (!config.adapt.binary_proto.empty()) { send_config.audio_network_adaptor_config = config.adapt.binary_proto; } else if (config.network_adaptation) { @@ -106,9 +107,9 @@ SendAudioStream::SendAudioStream( config.stream.in_bandwidth_estimation) { DataRate min_rate = DataRate::Infinity(); DataRate max_rate = DataRate::Infinity(); - if (config.encoder.fixed_rate) { - min_rate = *config.encoder.fixed_rate; - max_rate = *config.encoder.fixed_rate; + if (use_fixed_rate) { + min_rate = config.encoder.fixed_rate; + max_rate = config.encoder.fixed_rate; } else { min_rate = *config.encoder.min_rate; max_rate = *config.encoder.max_rate; @@ -129,10 +130,8 @@ SendAudioStream::SendAudioStream( sender_->SendTask([&] { send_stream_ = sender_->call_->CreateAudioSendStream(send_config); - if (field_trial::IsEnabled("WebRTC-SendSideBwe-WithOverhead")) { - sender->call_->OnAudioTransportOverheadChanged( - sender_->transport_->packet_overhead().bytes()); - } + sender->call_->OnAudioTransportOverheadChanged( + sender_->transport_->packet_overhead().bytes()); }); } diff --git a/third_party/libwebrtc/test/scenario/call_client.cc b/third_party/libwebrtc/test/scenario/call_client.cc index c9babc7b79df..46f593898d9a 100644 --- a/third_party/libwebrtc/test/scenario/call_client.cc +++ b/third_party/libwebrtc/test/scenario/call_client.cc @@ -70,6 +70,7 @@ Call* CreateCall(TimeController* time_controller, call_config.task_queue_factory = time_controller->GetTaskQueueFactory(); call_config.network_controller_factory = network_controller_factory; 
call_config.audio_state = audio_state; + call_config.pacer_burst_interval = config.pacer_burst_interval; call_config.trials = config.field_trials; Clock* clock = time_controller->GetClock(); return Call::Create(call_config, clock, diff --git a/third_party/libwebrtc/test/scenario/probing_test.cc b/third_party/libwebrtc/test/scenario/probing_test.cc index 74b68fc0445f..86653ced9b9a 100644 --- a/third_party/libwebrtc/test/scenario/probing_test.cc +++ b/third_party/libwebrtc/test/scenario/probing_test.cc @@ -39,7 +39,7 @@ TEST(ProbingTest, MidCallProbingRampupTriggeredByUpdatedBitrateConstraints) { const DataRate kStartRate = DataRate::KilobitsPerSec(300); const DataRate kConstrainedRate = DataRate::KilobitsPerSec(100); - const DataRate kHighRate = DataRate::KilobitsPerSec(2500); + const DataRate kHighRate = DataRate::KilobitsPerSec(1500); VideoStreamConfig video_config; video_config.encoder.codec = diff --git a/third_party/libwebrtc/test/scenario/scenario.cc b/third_party/libwebrtc/test/scenario/scenario.cc index 4f0fb3159bd3..795276ee06bd 100644 --- a/third_party/libwebrtc/test/scenario/scenario.cc +++ b/third_party/libwebrtc/test/scenario/scenario.cc @@ -65,7 +65,8 @@ Scenario::Scenario( std::unique_ptr log_writer_factory, bool real_time) : log_writer_factory_(std::move(log_writer_factory)), - network_manager_(real_time ? TimeMode::kRealTime : TimeMode::kSimulated), + network_manager_(real_time ? TimeMode::kRealTime : TimeMode::kSimulated, + EmulatedNetworkStatsGatheringMode::kDefault), clock_(network_manager_.time_controller()->GetClock()), audio_decoder_factory_(CreateBuiltinAudioDecoderFactory()), audio_encoder_factory_(CreateBuiltinAudioEncoderFactory()), diff --git a/third_party/libwebrtc/test/scenario/scenario_config.h b/third_party/libwebrtc/test/scenario/scenario_config.h index be0d0b3589cc..9ce99401d7ad 100644 --- a/third_party/libwebrtc/test/scenario/scenario_config.h +++ b/third_party/libwebrtc/test/scenario/scenario_config.h @@ -53,6 +53,10 @@ struct TransportControllerConfig { struct CallClientConfig { TransportControllerConfig transport; + // Allows the pacer to send out multiple packets in a burst. + // The number of bites that can be sent in one burst is pacer_burst_interval * + // current bwe. 40ms is the default Chrome setting. + TimeDelta pacer_burst_interval = TimeDelta::Millis(40); const FieldTrialsView* field_trials = nullptr; }; @@ -194,7 +198,8 @@ struct AudioStreamConfig { ~Encoder(); bool allocate_bitrate = false; bool enable_dtx = false; - absl::optional fixed_rate; + DataRate fixed_rate = DataRate::KilobitsPerSec(32); + // Overrides fixed rate. absl::optional min_rate; absl::optional max_rate; TimeDelta initial_frame_length = TimeDelta::Millis(20); @@ -203,8 +208,8 @@ struct AudioStreamConfig { Stream(); Stream(const Stream&); ~Stream(); - bool abs_send_time = false; - bool in_bandwidth_estimation = false; + bool abs_send_time = true; + bool in_bandwidth_estimation = true; } stream; struct Rendering { std::string sync_group; diff --git a/third_party/libwebrtc/test/scenario/stats_collection_unittest.cc b/third_party/libwebrtc/test/scenario/stats_collection_unittest.cc index 3db1100a2af3..9f46f1007340 100644 --- a/third_party/libwebrtc/test/scenario/stats_collection_unittest.cc +++ b/third_party/libwebrtc/test/scenario/stats_collection_unittest.cc @@ -91,7 +91,7 @@ TEST(ScenarioAnalyzerTest, PsnrIsLowWhenNetworkIsBad) { // might change due to changes in configuration and encoder etc. 
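For the pacer_burst_interval field added to CallClientConfig above, the per-burst budget is the configured interval multiplied by the current bandwidth estimate, as the comment states. A worked example (not from the patch) with the 40 ms default:

    bits_per_burst = pacer_burst_interval * current_bwe
                   = 0.040 s * 1 Mbit/s
                   = 40 000 bits, i.e. about 5 000 bytes per burst.
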
EXPECT_NEAR(analyzer.stats().psnr_with_freeze.Mean(), 20, 10); EXPECT_NEAR(stats.call.stats().target_rate.Mean().kbps(), 75, 50); - EXPECT_NEAR(stats.video_send.stats().media_bitrate.Mean().kbps(), 100, 50); + EXPECT_NEAR(stats.video_send.stats().media_bitrate.Mean().kbps(), 70, 30); EXPECT_NEAR(stats.video_receive.stats().resolution.Mean(), 180, 10); EXPECT_NEAR(stats.audio_receive.stats().jitter_buffer.Mean().ms(), 250, 200); } diff --git a/third_party/libwebrtc/test/scenario/video_stream.cc b/third_party/libwebrtc/test/scenario/video_stream.cc index ad352f9ab91f..96ced83b0474 100644 --- a/third_party/libwebrtc/test/scenario/video_stream.cc +++ b/third_party/libwebrtc/test/scenario/video_stream.cc @@ -480,7 +480,7 @@ void SendVideoStream::UpdateActiveLayers(std::vector active_layers) { MutexLock lock(&mutex_); if (config_.encoder.codec == VideoStreamConfig::Encoder::Codec::kVideoCodecVP8) { - send_stream_->UpdateActiveSimulcastLayers(active_layers); + send_stream_->StartPerRtpStream(active_layers); } VideoEncoderConfig encoder_config = CreateVideoEncoderConfig(config_); RTC_CHECK_EQ(encoder_config.simulcast_layers.size(), active_layers.size()); diff --git a/third_party/libwebrtc/test/test_flags.cc b/third_party/libwebrtc/test/test_flags.cc index a0becc2ab710..a0fff747fe3e 100644 --- a/third_party/libwebrtc/test/test_flags.cc +++ b/third_party/libwebrtc/test/test_flags.cc @@ -42,4 +42,10 @@ ABSL_FLAG(std::string, "", "Path where the test perf metrics should be stored using " "api/test/metrics/metric.proto proto format. File will contain " - "MetricsSet as a root proto"); + "MetricsSet as a root proto. On iOS, this MUST be a file name " + "and the file will be stored under NSDocumentDirectory."); + +ABSL_FLAG(bool, + export_perf_results_new_api, + false, + "Tells to initialize new API for exporting performance metrics"); diff --git a/third_party/libwebrtc/test/test_flags.h b/third_party/libwebrtc/test/test_flags.h index 6ca30b22f037..30f918fc7d39 100644 --- a/third_party/libwebrtc/test/test_flags.h +++ b/third_party/libwebrtc/test/test_flags.h @@ -19,5 +19,6 @@ ABSL_DECLARE_FLAG(std::string, force_fieldtrials); ABSL_DECLARE_FLAG(std::vector, plot); ABSL_DECLARE_FLAG(std::string, isolated_script_test_perf_output); ABSL_DECLARE_FLAG(std::string, webrtc_test_metrics_output_path); +ABSL_DECLARE_FLAG(bool, export_perf_results_new_api); #endif // TEST_TEST_FLAGS_H_ diff --git a/third_party/libwebrtc/test/test_main.cc b/third_party/libwebrtc/test/test_main.cc index 27c3d8c134cb..d811fd0e6d48 100644 --- a/third_party/libwebrtc/test/test_main.cc +++ b/third_party/libwebrtc/test/test_main.cc @@ -9,6 +9,9 @@ */ #include +#include +#include +#include #include "absl/debugging/failure_signal_handler.h" #include "absl/debugging/symbolize.h" @@ -16,11 +19,44 @@ #include "test/gmock.h" #include "test/test_main_lib.h" +namespace { + +std::vector ReplaceDashesWithUnderscores(int argc, char* argv[]) { + std::vector args(argv, argv + argc); + for (std::string& arg : args) { + // Only replace arguments that starts with a dash. + if (!arg.empty() && arg[0] == '-') { + // Don't replace the 2 first characters. + auto begin = arg.begin() + 2; + // Replace dashes on the left of '=' or on all the arg if no '=' is found. 
+ auto end = std::find(arg.begin(), arg.end(), '='); + std::replace(begin, end, '-', '_'); + } + } + return args; +} + +std::vector VectorOfStringsToVectorOfPointers( + std::vector& input) { + std::vector output(input.size()); + for (size_t i = 0; i < input.size(); ++i) { + output[i] = &(input[i][0]); + } + return output; +} + +} // namespace + int main(int argc, char* argv[]) { // Initialize the symbolizer to get a human-readable stack trace absl::InitializeSymbolizer(argv[0]); testing::InitGoogleMock(&argc, argv); - absl::ParseCommandLine(argc, argv); + // Before parsing the arguments with the absl flag library, any internal '-' + // characters will be converted to '_' characters to make sure the string is a + // valid attribute name. + std::vector new_argv = ReplaceDashesWithUnderscores(argc, argv); + std::vector raw_new_argv = VectorOfStringsToVectorOfPointers(new_argv); + absl::ParseCommandLine(argc, &raw_new_argv[0]); // This absl handler use unsupported features/instructions on Fuchsia #if !defined(WEBRTC_FUCHSIA) diff --git a/third_party/libwebrtc/test/test_main_lib.cc b/third_party/libwebrtc/test/test_main_lib.cc index 9968adf32e43..4c80315ac5e2 100644 --- a/third_party/libwebrtc/test/test_main_lib.cc +++ b/third_party/libwebrtc/test/test_main_lib.cc @@ -22,6 +22,7 @@ #include "api/test/metrics/chrome_perf_dashboard_metrics_exporter.h" #include "api/test/metrics/global_metrics_logger_and_exporter.h" #include "api/test/metrics/metrics_exporter.h" +#include "api/test/metrics/metrics_set_proto_file_exporter.h" #include "api/test/metrics/print_result_proxy_metrics_exporter.h" #include "api/test/metrics/stdout_metrics_exporter.h" #include "rtc_base/checks.h" @@ -48,11 +49,19 @@ ABSL_FLAG(std::string, NSTreatUnknownArgumentsAsOpen, "", - "Intentionally ignored flag intended for iOS simulator."); + "Intentionally ignored flag intended for iOS test runner."); ABSL_FLAG(std::string, ApplePersistenceIgnoreState, "", - "Intentionally ignored flag intended for iOS simulator."); + "Intentionally ignored flag intended for iOS test runner."); +ABSL_FLAG(bool, + enable_run_ios_unittests_with_xctest, + false, + "Intentionally ignored flag intended for iOS test runner."); +ABSL_FLAG(bool, + write_compiled_tests_json_to_writable_path, + false, + "Intentionally ignored flag intended for iOS test runner."); // This is the cousin of isolated_script_test_perf_output, but we can't dictate // where to write on iOS so the semantics of this flag are a bit different. @@ -64,6 +73,12 @@ ABSL_FLAG( "described by histogram.proto in " "https://chromium.googlesource.com/catapult/."); +#elif defined(WEBRTC_FUCHSIA) +ABSL_FLAG(std::string, use_vulkan, "", "Intentionally ignored flag."); +#else +// TODO(bugs.webrtc.org/8115): Remove workaround when fixed. +ABSL_FLAG(bool, no_sandbox, false, "Intentionally ignored flag."); +ABSL_FLAG(bool, test_launcher_bot_mode, false, "Intentionally ignored flag."); #endif ABSL_FLAG(std::string, @@ -71,11 +86,6 @@ ABSL_FLAG(std::string, "", "Path to output an empty JSON file which Chromium infra requires."); -ABSL_FLAG(bool, - export_perf_results_new_api, - false, - "Tells to initialize new API for exporting performance metrics"); - ABSL_FLAG(bool, logs, true, "print logs to stderr"); ABSL_FLAG(bool, verbose, false, "verbose logs to stderr"); @@ -85,6 +95,17 @@ ABSL_FLAG(std::string, "Path to collect trace events (json file) for chrome://tracing. 
" "If not set, events aren't captured."); +ABSL_FLAG(std::string, + test_launcher_shard_index, + "", + "Index of the test shard to run, from 0 to " + "the value specified with --test_launcher_total_shards."); + +ABSL_FLAG(std::string, + test_launcher_total_shards, + "", + "Total number of shards."); + namespace webrtc { namespace { @@ -112,6 +133,19 @@ class TestMainImpl : public TestMain { rtc::LogMessage::SetLogToStderr(absl::GetFlag(FLAGS_logs) || absl::GetFlag(FLAGS_verbose)); + // The sharding arguments take precedence over the sharding environment + // variables. + if (!absl::GetFlag(FLAGS_test_launcher_shard_index).empty() && + !absl::GetFlag(FLAGS_test_launcher_total_shards).empty()) { + std::string shard_index = + "GTEST_SHARD_INDEX=" + absl::GetFlag(FLAGS_test_launcher_shard_index); + std::string total_shards = + "GTEST_TOTAL_SHARDS=" + + absl::GetFlag(FLAGS_test_launcher_total_shards); + putenv(shard_index.data()); + putenv(total_shards.data()); + } + // InitFieldTrialsFromString stores the char*, so the char array must // outlive the application. field_trials_ = absl::GetFlag(FLAGS_force_fieldtrials); @@ -150,10 +184,11 @@ class TestMainImpl : public TestMain { } #if defined(WEBRTC_IOS) - rtc::test::InitTestSuite(RUN_ALL_TESTS, argc, argv, - absl::GetFlag(FLAGS_write_perf_output_on_ios), - absl::GetFlag(FLAGS_export_perf_results_new_api), - metrics_to_plot); + rtc::test::InitTestSuite( + RUN_ALL_TESTS, argc, argv, + absl::GetFlag(FLAGS_write_perf_output_on_ios), + absl::GetFlag(FLAGS_export_perf_results_new_api), + absl::GetFlag(FLAGS_webrtc_test_metrics_output_path), metrics_to_plot); rtc::test::RunTestsFromIOSApp(); int exit_code = 0; #else @@ -162,6 +197,12 @@ class TestMainImpl : public TestMain { std::vector> exporters; if (absl::GetFlag(FLAGS_export_perf_results_new_api)) { exporters.push_back(std::make_unique()); + if (!absl::GetFlag(FLAGS_webrtc_test_metrics_output_path).empty()) { + exporters.push_back( + std::make_unique( + webrtc::test::MetricsSetProtoFileExporter::Options( + absl::GetFlag(FLAGS_webrtc_test_metrics_output_path)))); + } if (!absl::GetFlag(FLAGS_isolated_script_test_perf_output).empty()) { exporters.push_back( std::make_unique( diff --git a/third_party/libwebrtc/test/testsupport/file_utils_override.cc b/third_party/libwebrtc/test/testsupport/file_utils_override.cc index ac44e91b8a2b..7d0a3e33123a 100644 --- a/third_party/libwebrtc/test/testsupport/file_utils_override.cc +++ b/third_party/libwebrtc/test/testsupport/file_utils_override.cc @@ -69,7 +69,10 @@ const absl::string_view kPathDelimiter = "/"; const absl::string_view kAndroidChromiumTestsRoot = "/sdcard/chromium_tests_root/"; #endif - +#if defined(WEBRTC_FUCHSIA) +const absl::string_view kFuchsiaTestRoot = "/pkg/"; +const absl::string_view kFuchsiaTempWritableDir = "/tmp/"; +#endif #if !defined(WEBRTC_IOS) const absl::string_view kResourcesDirName = "resources"; #endif @@ -91,6 +94,11 @@ absl::optional ProjectRootPath() { // the test is bundled (which our tests are not), in which case it's 5 levels. return DirName(DirName(exe_dir)) + std::string(kPathDelimiter); #elif defined(WEBRTC_POSIX) +// Fuchsia uses POSIX defines as well but does not have full POSIX +// functionality. +#if defined(WEBRTC_FUCHSIA) + return std::string(kFuchsiaTestRoot); +#else char buf[PATH_MAX]; ssize_t count = ::readlink("/proc/self/exe", buf, arraysize(buf)); if (count <= 0) { @@ -100,6 +108,7 @@ absl::optional ProjectRootPath() { // On POSIX, tests execute in out/Whatever, so src is two levels up. 
std::string exe_dir = DirName(absl::string_view(buf, count)); return DirName(DirName(exe_dir)) + std::string(kPathDelimiter); +#endif #elif defined(WEBRTC_WIN) wchar_t buf[MAX_PATH]; buf[0] = 0; @@ -117,6 +126,8 @@ std::string OutputPath() { return IOSOutputPath(); #elif defined(WEBRTC_ANDROID) return std::string(kAndroidChromiumTestsRoot); +#elif defined(WEBRTC_FUCHSIA) + return std::string(kFuchsiaTempWritableDir); #else absl::optional path_opt = ProjectRootPath(); RTC_DCHECK(path_opt); diff --git a/third_party/libwebrtc/test/testsupport/frame_reader.h b/third_party/libwebrtc/test/testsupport/frame_reader.h index d2a3b4b06499..7856476ca073 100644 --- a/third_party/libwebrtc/test/testsupport/frame_reader.h +++ b/third_party/libwebrtc/test/testsupport/frame_reader.h @@ -17,6 +17,7 @@ #include "absl/types/optional.h" #include "api/scoped_refptr.h" +#include "api/video/resolution.h" namespace webrtc { class I420Buffer; @@ -25,94 +26,123 @@ namespace test { // Handles reading of I420 frames from video files. class FrameReader { public: + struct Ratio { + int num = 1; + int den = 1; + }; + + static constexpr Ratio kNoScale = Ratio({.num = 1, .den = 1}); + virtual ~FrameReader() {} - // Initializes the frame reader, i.e. opens the input file. - // This must be called before reading of frames has started. - // Returns false if an error has occurred, in addition to printing to stderr. - virtual bool Init() = 0; + // Reads and returns next frame. Returns `nullptr` if reading failed or end of + // stream is reached. + virtual rtc::scoped_refptr PullFrame() = 0; - // Reads a frame from the input file. On success, returns the frame. - // Returns nullptr if encountering end of file or a read error. - virtual rtc::scoped_refptr ReadFrame() = 0; + // Reads and returns next frame. `frame_num` stores unwrapped frame number + // which can be passed to `ReadFrame` to re-read this frame later. Returns + // `nullptr` if reading failed or end of stream is reached. + virtual rtc::scoped_refptr PullFrame(int* frame_num) = 0; - // Closes the input file if open. Essentially makes this class impossible - // to use anymore. Will also be invoked by the destructor. - virtual void Close() = 0; + // Reads and returns frame specified by `frame_num`. Returns `nullptr` if + // reading failed. + virtual rtc::scoped_refptr ReadFrame(int frame_num) = 0; - // Frame length in bytes of a single frame image. - virtual size_t FrameLength() = 0; - // Total number of frames in the input video source. - virtual int NumberOfFrames() = 0; + // Reads next frame, resizes and returns it. `frame_num` stores unwrapped + // frame number which can be passed to `ReadFrame` to re-read this frame + // later. `resolution` specifies resolution of the returned frame. + // `framerate_scale` specifies frame rate scale factor. Frame rate scaling is + // done by skipping or repeating frames. + virtual rtc::scoped_refptr PullFrame(int* frame_num, + Resolution resolution, + Ratio framerate_scale) = 0; + + // Reads frame specified by `frame_num`, resizes and returns it. Returns + // `nullptr` if reading failed. + virtual rtc::scoped_refptr ReadFrame(int frame_num, + Resolution resolution) = 0; + + // Total number of retrievable frames. 
+ virtual int num_frames() const = 0; }; class YuvFrameReaderImpl : public FrameReader { public: enum class RepeatMode { kSingle, kRepeat, kPingPong }; - class DropperUtil { - public: - DropperUtil(int source_fps, int target_fps); - enum class DropDecision { kDropframe, kKeepFrame }; - DropDecision UpdateLevel(); + // Creates the frame reader for a YUV file specified by `filepath`. + // `resolution` specifies width and height of frames in pixels. `repeat_mode` + // specifies behaviour of the reader at reaching the end of file (stop, read + // it over from the beginning or read in reverse order). The file is assumed + // to exist, be readable and to contain at least 1 frame. + YuvFrameReaderImpl(std::string filepath, + Resolution resolution, + RepeatMode repeat_mode); - private: - const double frame_size_buckets_; - double bucket_level_; - }; - - // Creates a file handler. The input file is assumed to exist and be readable. - // Parameters: - // input_filename The file to read from. - // width, height Size of each frame to read. - YuvFrameReaderImpl(std::string input_filename, int width, int height); - YuvFrameReaderImpl(std::string input_filename, - int input_width, - int input_height, - int desired_width, - int desired_height, - RepeatMode repeat_mode, - absl::optional clip_fps, - int target_fps); ~YuvFrameReaderImpl() override; - bool Init() override; - rtc::scoped_refptr ReadFrame() override; - void Close() override; - size_t FrameLength() override; - int NumberOfFrames() override; + + virtual void Init(); + + rtc::scoped_refptr PullFrame() override; + + rtc::scoped_refptr PullFrame(int* frame_num) override; + + rtc::scoped_refptr PullFrame(int* frame_num, + Resolution resolution, + Ratio framerate_scale) override; + + rtc::scoped_refptr ReadFrame(int frame_num) override; + + rtc::scoped_refptr ReadFrame(int frame_num, + Resolution resolution) override; + + int num_frames() const override { return num_frames_; } protected: - const std::string input_filename_; - // It is not const, so subclasses will be able to add frame header size. - size_t frame_length_in_bytes_; - const int input_width_; - const int input_height_; - const int desired_width_; - const int desired_height_; - const size_t frame_size_bytes_; + class RateScaler { + public: + int Skip(Ratio framerate_scale); + + private: + absl::optional ticks_; + }; + + const std::string filepath_; + Resolution resolution_; const RepeatMode repeat_mode_; - int number_of_frames_; - int current_frame_index_; - std::unique_ptr dropper_; - FILE* input_file_; + int num_frames_; + int frame_num_; + int frame_size_bytes_; + int header_size_bytes_; + FILE* file_; + RateScaler framerate_scaler_; }; class Y4mFrameReaderImpl : public YuvFrameReaderImpl { public: - // Creates a file handler. The input file is assumed to exist and be readable. - // Parameters: - // input_filename The file to read from. - // width, height Size of each frame to read. - Y4mFrameReaderImpl(std::string input_filename, int width, int height); - ~Y4mFrameReaderImpl() override; - bool Init() override; - rtc::scoped_refptr ReadFrame() override; + // Creates the frame reader for a Y4M file specified by `filepath`. + // `repeat_mode` specifies behaviour of the reader at reaching the end of file + // (stop, read it over from the beginning or read in reverse order). The file + // is assumed to exist, be readable and to contain at least 1 frame. + Y4mFrameReaderImpl(std::string filepath, RepeatMode repeat_mode); - private: - // Buffer that is used to read file and frame headers. 
- char* buffer_; + void Init() override; }; +std::unique_ptr CreateYuvFrameReader(std::string filepath, + Resolution resolution); + +std::unique_ptr CreateYuvFrameReader( + std::string filepath, + Resolution resolution, + YuvFrameReaderImpl::RepeatMode repeat_mode); + +std::unique_ptr CreateY4mFrameReader(std::string filepath); + +std::unique_ptr CreateY4mFrameReader( + std::string filepath, + YuvFrameReaderImpl::RepeatMode repeat_mode); + } // namespace test } // namespace webrtc diff --git a/third_party/libwebrtc/test/testsupport/mock/mock_frame_reader.h b/third_party/libwebrtc/test/testsupport/mock/mock_frame_reader.h index dbb246cfc853..f68bbf83682a 100644 --- a/third_party/libwebrtc/test/testsupport/mock/mock_frame_reader.h +++ b/third_party/libwebrtc/test/testsupport/mock/mock_frame_reader.h @@ -20,11 +20,18 @@ namespace test { class MockFrameReader : public FrameReader { public: - MOCK_METHOD(bool, Init, (), (override)); - MOCK_METHOD(rtc::scoped_refptr, ReadFrame, (), (override)); - MOCK_METHOD(void, Close, (), (override)); - MOCK_METHOD(size_t, FrameLength, (), (override)); - MOCK_METHOD(int, NumberOfFrames, (), (override)); + MOCK_METHOD(rtc::scoped_refptr, PullFrame, (), (override)); + MOCK_METHOD(rtc::scoped_refptr, PullFrame, (int*), (override)); + MOCK_METHOD(rtc::scoped_refptr, + PullFrame, + (int*, Resolution, Ratio), + (override)); + MOCK_METHOD(rtc::scoped_refptr, ReadFrame, (int), (override)); + MOCK_METHOD(rtc::scoped_refptr, + ReadFrame, + (int, Resolution), + (override)); + MOCK_METHOD(int, num_frames, (), (const override)); }; } // namespace test diff --git a/third_party/libwebrtc/test/testsupport/video_frame_writer_unittest.cc b/third_party/libwebrtc/test/testsupport/video_frame_writer_unittest.cc index 57e2fbf04804..9d59627c0fb9 100644 --- a/third_party/libwebrtc/test/testsupport/video_frame_writer_unittest.cc +++ b/third_party/libwebrtc/test/testsupport/video_frame_writer_unittest.cc @@ -140,13 +140,10 @@ TEST_F(Y4mVideoFrameWriterTest, WriteFrame) { GetFileSize(temp_filename_)); std::unique_ptr frame_reader = - std::make_unique(temp_filename_, kFrameWidth, - kFrameHeight); - ASSERT_TRUE(frame_reader->Init()); - AssertI420BuffersEq(frame_reader->ReadFrame(), expected_buffer); - AssertI420BuffersEq(frame_reader->ReadFrame(), expected_buffer); - EXPECT_FALSE(frame_reader->ReadFrame()); // End of file. - frame_reader->Close(); + CreateY4mFrameReader(temp_filename_); + AssertI420BuffersEq(frame_reader->PullFrame(), expected_buffer); + AssertI420BuffersEq(frame_reader->PullFrame(), expected_buffer); + EXPECT_FALSE(frame_reader->PullFrame()); // End of file. } TEST_F(YuvVideoFrameWriterTest, InitSuccess) {} @@ -164,14 +161,12 @@ TEST_F(YuvVideoFrameWriterTest, WriteFrame) { frame_writer_->Close(); EXPECT_EQ(2 * kFrameLength, GetFileSize(temp_filename_)); - std::unique_ptr frame_reader = - std::make_unique(temp_filename_, kFrameWidth, - kFrameHeight); - ASSERT_TRUE(frame_reader->Init()); - AssertI420BuffersEq(frame_reader->ReadFrame(), expected_buffer); - AssertI420BuffersEq(frame_reader->ReadFrame(), expected_buffer); - EXPECT_FALSE(frame_reader->ReadFrame()); // End of file. - frame_reader->Close(); + std::unique_ptr frame_reader = CreateYuvFrameReader( + temp_filename_, + Resolution({.width = kFrameWidth, .height = kFrameHeight})); + AssertI420BuffersEq(frame_reader->PullFrame(), expected_buffer); + AssertI420BuffersEq(frame_reader->PullFrame(), expected_buffer); + EXPECT_FALSE(frame_reader->PullFrame()); // End of file. 
} } // namespace test diff --git a/third_party/libwebrtc/test/testsupport/y4m_frame_reader.cc b/third_party/libwebrtc/test/testsupport/y4m_frame_reader.cc index 0faa024141f5..72fb9b5188ec 100644 --- a/third_party/libwebrtc/test/testsupport/y4m_frame_reader.cc +++ b/third_party/libwebrtc/test/testsupport/y4m_frame_reader.cc @@ -14,6 +14,7 @@ #include "api/scoped_refptr.h" #include "api/video/i420_buffer.h" +#include "common_video/libyuv/include/webrtc_libyuv.h" #include "rtc_base/logging.h" #include "rtc_base/strings/string_builder.h" #include "test/testsupport/file_utils.h" @@ -22,87 +23,69 @@ namespace webrtc { namespace test { namespace { - -// Size of header: "YUV4MPEG2 WXXXXXX HXXXXXX FXXX:1 C420\n" -// We allocate up to 6 digits for width and height and up to 3 digits for FPS. -const size_t kFileHeaderMaxSize = 38; -// Size of header: "YUV4MPEG2 WX HX FX:1 C420\n" -const size_t kFileHeaderMinSize = 26; -// Size of header: "FRAME\n" -const size_t kFrameHeaderSize = 6; - -std::string GetExpectedHeaderPrefix(int width, int height) { - rtc::StringBuilder out; - out << "YUV4MPEG2 W" << width << " H" << height << " F"; - return out.str(); -} - +constexpr int kFrameHeaderSize = 6; // "FRAME\n" } // namespace -Y4mFrameReaderImpl::Y4mFrameReaderImpl(std::string input_filename, - int width, - int height) - : YuvFrameReaderImpl(input_filename, width, height) { - frame_length_in_bytes_ += kFrameHeaderSize; - buffer_ = new char[kFileHeaderMaxSize]; -} -Y4mFrameReaderImpl::~Y4mFrameReaderImpl() { - delete[] buffer_; +void ParseY4mHeader(std::string filepath, + Resolution* resolution, + int* header_size) { + FILE* file = fopen(filepath.c_str(), "r"); + RTC_CHECK(file != NULL) << "Cannot open " << filepath; + + // Length of Y4M header is technically unlimited due to the comment tag 'X'. + char h[1024]; + RTC_CHECK(fgets(h, sizeof(h), file) != NULL) + << "File " << filepath << " is too small"; + fclose(file); + + RTC_CHECK(sscanf(h, "YUV4MPEG2 W%d H%d", &resolution->width, + &resolution->height) == 2) + << filepath << " is not a valid Y4M file"; + + RTC_CHECK_GT(resolution->width, 0) << "Width must be positive"; + RTC_CHECK_GT(resolution->height, 0) << "Height must be positive"; + + *header_size = strcspn(h, "\n") + 1; + RTC_CHECK(static_cast(*header_size) < sizeof(h)) + << filepath << " has unexpectedly large header"; } -bool Y4mFrameReaderImpl::Init() { - if (input_width_ <= 0 || input_height_ <= 0) { - RTC_LOG(LS_ERROR) << "Frame width and height must be positive. Was: " - << input_width_ << "x" << input_height_; - return false; - } - input_file_ = fopen(input_filename_.c_str(), "rb"); - if (input_file_ == nullptr) { - RTC_LOG(LS_ERROR) << "Couldn't open input file: " << input_filename_; - return false; - } - size_t source_file_size = GetFileSize(input_filename_); - if (source_file_size <= 0u) { - RTC_LOG(LS_ERROR) << "Input file " << input_filename_ << " is empty."; - return false; - } - char* c_file_header = fgets(buffer_, kFileHeaderMaxSize, input_file_); - std::string file_header(c_file_header); - if (file_header.size() < kFileHeaderMinSize) { - RTC_LOG(LS_ERROR) << "Couldn't read Y4M header from input file: " - << input_filename_; - return false; - } - if (file_header.find(GetExpectedHeaderPrefix(input_width_, input_height_)) != - 0) { - RTC_LOG(LS_ERROR) << "Couldn't read Y4M file: " << input_filename_ - << ". 
Input file has different resolution, expected: " - << GetExpectedHeaderPrefix(input_width_, input_height_) - << "[0-9]?:1 C420; got: " << file_header; - return false; - } +Y4mFrameReaderImpl::Y4mFrameReaderImpl(std::string filepath, + RepeatMode repeat_mode) + : YuvFrameReaderImpl(filepath, Resolution(), repeat_mode) {} - number_of_frames_ = static_cast((source_file_size - file_header.size()) / - frame_length_in_bytes_); +void Y4mFrameReaderImpl::Init() { + file_ = fopen(filepath_.c_str(), "rb"); + RTC_CHECK(file_ != nullptr) << "Cannot open " << filepath_; - if (number_of_frames_ == 0) { - RTC_LOG(LS_ERROR) << "Input file " << input_filename_ << " is too small."; - } - return true; + ParseY4mHeader(filepath_, &resolution_, &header_size_bytes_); + frame_size_bytes_ = + CalcBufferSize(VideoType::kI420, resolution_.width, resolution_.height); + frame_size_bytes_ += kFrameHeaderSize; + + size_t file_size_bytes = GetFileSize(filepath_); + RTC_CHECK_GT(file_size_bytes, 0u) << "File " << filepath_ << " is empty"; + RTC_CHECK_GT(file_size_bytes, header_size_bytes_) + << "File " << filepath_ << " is too small"; + + num_frames_ = static_cast((file_size_bytes - header_size_bytes_) / + frame_size_bytes_); + RTC_CHECK_GT(num_frames_, 0u) << "File " << filepath_ << " is too small"; + header_size_bytes_ += kFrameHeaderSize; } -rtc::scoped_refptr Y4mFrameReaderImpl::ReadFrame() { - if (input_file_ == nullptr) { - RTC_LOG(LS_ERROR) << "Y4mFrameReaderImpl is not initialized."; - return nullptr; - } - if (fread(buffer_, 1, kFrameHeaderSize, input_file_) < kFrameHeaderSize && - ferror(input_file_)) { - RTC_LOG(LS_ERROR) << "Couldn't read frame header from input file: " - << input_filename_; - return nullptr; - } - return YuvFrameReaderImpl::ReadFrame(); +std::unique_ptr CreateY4mFrameReader(std::string filepath) { + return CreateY4mFrameReader(filepath, + YuvFrameReaderImpl::RepeatMode::kSingle); +} + +std::unique_ptr CreateY4mFrameReader( + std::string filepath, + YuvFrameReaderImpl::RepeatMode repeat_mode) { + Y4mFrameReaderImpl* frame_reader = + new Y4mFrameReaderImpl(filepath, repeat_mode); + frame_reader->Init(); + return std::unique_ptr(frame_reader); } } // namespace test diff --git a/third_party/libwebrtc/test/testsupport/y4m_frame_reader_unittest.cc b/third_party/libwebrtc/test/testsupport/y4m_frame_reader_unittest.cc index 219ec1bbe31d..df81a8135b71 100644 --- a/third_party/libwebrtc/test/testsupport/y4m_frame_reader_unittest.cc +++ b/third_party/libwebrtc/test/testsupport/y4m_frame_reader_unittest.cc @@ -25,15 +25,14 @@ namespace webrtc { namespace test { namespace { +using Ratio = FrameReader::Ratio; +using RepeatMode = YuvFrameReaderImpl::RepeatMode; -const absl::string_view kFileHeader = "YUV4MPEG2 W2 H2 F30:1 C420\n"; -const absl::string_view kFrameHeader = "FRAME\n"; -const absl::string_view kInputVideoContents = "abcdef"; - -const size_t kFrameWidth = 2; -const size_t kFrameHeight = 2; -const size_t kFrameLength = 3 * kFrameWidth * kFrameHeight / 2; // I420. 
- +constexpr Resolution kResolution({.width = 1, .height = 1}); +constexpr char kFileHeader[] = "YUV4MPEG2 W1 H1 F30:1 C420\n"; +constexpr char kFrameHeader[] = "FRAME\n"; +constexpr char kFrameContent[3][3] = {{0, 1, 2}, {1, 2, 3}, {2, 3, 4}}; +constexpr int kNumFrames = sizeof(kFrameContent) / sizeof(kFrameContent[0]); } // namespace class Y4mFrameReaderTest : public ::testing::Test { @@ -42,63 +41,118 @@ class Y4mFrameReaderTest : public ::testing::Test { ~Y4mFrameReaderTest() override = default; void SetUp() override { - temp_filename_ = webrtc::test::TempFilename(webrtc::test::OutputPath(), - "y4m_frame_reader_unittest"); - FILE* dummy = fopen(temp_filename_.c_str(), "wb"); - fprintf(dummy, "%s", - (std::string(kFileHeader) + std::string(kFrameHeader) + - std::string(kInputVideoContents)) - .c_str()); - fclose(dummy); + filepath_ = webrtc::test::TempFilename(webrtc::test::OutputPath(), + "y4m_frame_reader_unittest"); + FILE* file = fopen(filepath_.c_str(), "wb"); + fwrite(kFileHeader, 1, sizeof(kFileHeader) - 1, file); + for (int n = 0; n < kNumFrames; ++n) { + fwrite(kFrameHeader, 1, sizeof(kFrameHeader) - 1, file); + fwrite(kFrameContent[n], 1, sizeof(kFrameContent[n]), file); + } + fclose(file); - frame_reader_.reset( - new Y4mFrameReaderImpl(temp_filename_, kFrameWidth, kFrameHeight)); - ASSERT_TRUE(frame_reader_->Init()); + reader_ = CreateY4mFrameReader(filepath_); } - void TearDown() override { remove(temp_filename_.c_str()); } + void TearDown() override { remove(filepath_.c_str()); } - std::unique_ptr frame_reader_; - std::string temp_filename_; + std::string filepath_; + std::unique_ptr reader_; }; -TEST_F(Y4mFrameReaderTest, InitSuccess) {} - -TEST_F(Y4mFrameReaderTest, FrameLength) { - EXPECT_EQ(kFrameHeader.size() + kFrameLength, frame_reader_->FrameLength()); +TEST_F(Y4mFrameReaderTest, num_frames) { + EXPECT_EQ(kNumFrames, reader_->num_frames()); } -TEST_F(Y4mFrameReaderTest, NumberOfFrames) { - EXPECT_EQ(1, frame_reader_->NumberOfFrames()); +TEST_F(Y4mFrameReaderTest, PullFrame_frameResolution) { + rtc::scoped_refptr buffer = reader_->PullFrame(); + EXPECT_EQ(kResolution.width, buffer->width()); + EXPECT_EQ(kResolution.height, buffer->height()); } -TEST_F(Y4mFrameReaderTest, ReadFrame) { - rtc::scoped_refptr buffer = frame_reader_->ReadFrame(); - ASSERT_TRUE(buffer); - // Expect I420 packed as YUV. - EXPECT_EQ(kInputVideoContents[0], buffer->DataY()[0]); - EXPECT_EQ(kInputVideoContents[1], buffer->DataY()[1]); - EXPECT_EQ(kInputVideoContents[2], buffer->DataY()[2]); - EXPECT_EQ(kInputVideoContents[3], buffer->DataY()[3]); - EXPECT_EQ(kInputVideoContents[4], buffer->DataU()[0]); - EXPECT_EQ(kInputVideoContents[5], buffer->DataV()[0]); - EXPECT_FALSE(frame_reader_->ReadFrame()); // End of file. 
+TEST_F(Y4mFrameReaderTest, PullFrame_frameContent) { + rtc::scoped_refptr buffer = reader_->PullFrame(); + EXPECT_EQ(kFrameContent[0][0], *buffer->DataY()); + EXPECT_EQ(kFrameContent[0][1], *buffer->DataU()); + EXPECT_EQ(kFrameContent[0][2], *buffer->DataV()); } -TEST_F(Y4mFrameReaderTest, ReadFrameUninitialized) { - Y4mFrameReaderImpl file_reader(temp_filename_, kFrameWidth, kFrameHeight); - EXPECT_FALSE(file_reader.ReadFrame()); +TEST_F(Y4mFrameReaderTest, ReadFrame_randomOrder) { + std::vector expected_frames = {2, 0, 1}; + std::vector actual_frames; + for (int frame_num : expected_frames) { + rtc::scoped_refptr buffer = + reader_->ReadFrame(frame_num); + actual_frames.push_back(*buffer->DataY()); + } + EXPECT_EQ(expected_frames, actual_frames); } -TEST_F(Y4mFrameReaderTest, ReadFrameDifferentWidth) { - Y4mFrameReaderImpl file_reader(temp_filename_, kFrameWidth + 1, kFrameHeight); - EXPECT_FALSE(file_reader.Init()); +TEST_F(Y4mFrameReaderTest, PullFrame_scale) { + rtc::scoped_refptr buffer = reader_->PullFrame( + /*pulled_frame_num=*/nullptr, Resolution({.width = 2, .height = 2}), + FrameReader::kNoScale); + EXPECT_EQ(2, buffer->width()); + EXPECT_EQ(2, buffer->height()); } -TEST_F(Y4mFrameReaderTest, ReadFrameDifferentHeight) { - Y4mFrameReaderImpl file_reader(temp_filename_, kFrameWidth, kFrameHeight + 1); - EXPECT_FALSE(file_reader.Init()); +class Y4mFrameReaderRepeatModeTest + : public Y4mFrameReaderTest, + public ::testing::WithParamInterface< + std::tuple>> {}; + +TEST_P(Y4mFrameReaderRepeatModeTest, PullFrame) { + RepeatMode mode = std::get<0>(GetParam()); + std::vector expected_frames = std::get<1>(GetParam()); + + reader_ = CreateY4mFrameReader(filepath_, mode); + std::vector read_frames; + for (size_t i = 0; i < expected_frames.size(); ++i) { + rtc::scoped_refptr buffer = reader_->PullFrame(); + read_frames.push_back(*buffer->DataY()); + } + EXPECT_EQ(expected_frames, read_frames); } +INSTANTIATE_TEST_SUITE_P( + Y4mFrameReaderTest, + Y4mFrameReaderRepeatModeTest, + ::testing::ValuesIn( + {std::make_tuple(RepeatMode::kSingle, std::vector{0, 1, 2}), + std::make_tuple(RepeatMode::kRepeat, + std::vector{0, 1, 2, 0, 1, 2}), + std::make_tuple(RepeatMode::kPingPong, + std::vector{0, 1, 2, 1, 0, 1, 2})})); + +class Y4mFrameReaderFramerateScaleTest + : public Y4mFrameReaderTest, + public ::testing::WithParamInterface< + std::tuple>> {}; + +TEST_P(Y4mFrameReaderFramerateScaleTest, PullFrame) { + Ratio framerate_scale = std::get<0>(GetParam()); + std::vector expected_frames = std::get<1>(GetParam()); + + std::vector actual_frames; + for (size_t i = 0; i < expected_frames.size(); ++i) { + int pulled_frame; + rtc::scoped_refptr buffer = + reader_->PullFrame(&pulled_frame, kResolution, framerate_scale); + actual_frames.push_back(pulled_frame); + } + EXPECT_EQ(expected_frames, actual_frames); +} + +INSTANTIATE_TEST_SUITE_P(Y4mFrameReaderTest, + Y4mFrameReaderFramerateScaleTest, + ::testing::ValuesIn({ + std::make_tuple(Ratio({.num = 1, .den = 2}), + std::vector{0, 2, 4}), + std::make_tuple(Ratio({.num = 2, .den = 3}), + std::vector{0, 1, 3, 4, 6}), + std::make_tuple(Ratio({.num = 2, .den = 1}), + std::vector{0, 0, 1, 1}), + })); + } // namespace test } // namespace webrtc diff --git a/third_party/libwebrtc/test/testsupport/yuv_frame_reader.cc b/third_party/libwebrtc/test/testsupport/yuv_frame_reader.cc index 330541496bd6..02c1a68008bf 100644 --- a/third_party/libwebrtc/test/testsupport/yuv_frame_reader.cc +++ b/third_party/libwebrtc/test/testsupport/yuv_frame_reader.cc @@ -14,6 +14,7 @@ 
#include "api/scoped_refptr.h" #include "api/video/i420_buffer.h" +#include "common_video/libyuv/include/webrtc_libyuv.h" #include "rtc_base/logging.h" #include "test/frame_utils.h" #include "test/testsupport/file_utils.h" @@ -21,164 +22,140 @@ namespace webrtc { namespace test { -size_t FrameSizeBytes(int width, int height) { - int half_width = (width + 1) / 2; - size_t size_y = static_cast(width) * height; - size_t size_uv = static_cast(half_width) * ((height + 1) / 2); - return size_y + 2 * size_uv; +namespace { +using RepeatMode = YuvFrameReaderImpl::RepeatMode; + +int WrapFrameNum(int frame_num, int num_frames, RepeatMode mode) { + RTC_CHECK_GE(frame_num, 0) << "frame_num cannot be negative"; + RTC_CHECK_GT(num_frames, 0) << "num_frames must be greater than 0"; + if (mode == RepeatMode::kSingle) { + return frame_num; + } + if (mode == RepeatMode::kRepeat) { + return frame_num % num_frames; + } + + RTC_CHECK_EQ(RepeatMode::kPingPong, mode); + int cycle_len = 2 * (num_frames - 1); + int wrapped_num = frame_num % cycle_len; + if (wrapped_num >= num_frames) { + return cycle_len - wrapped_num; + } + return wrapped_num; } -YuvFrameReaderImpl::DropperUtil::DropperUtil(int source_fps, int target_fps) - : frame_size_buckets_( - std::max(1.0, static_cast(source_fps) / target_fps)), - bucket_level_(0.0) {} - -YuvFrameReaderImpl::DropperUtil::DropDecision -YuvFrameReaderImpl::DropperUtil::UpdateLevel() { - DropDecision decision; - if (bucket_level_ <= 0.0) { - decision = DropDecision::kKeepFrame; - bucket_level_ += frame_size_buckets_; - } else { - decision = DropDecision::kDropframe; - } - bucket_level_ -= 1.0; - return decision; -} - -YuvFrameReaderImpl::YuvFrameReaderImpl(std::string input_filename, - int width, - int height) - : YuvFrameReaderImpl(input_filename, - width, - height, - width, - height, - RepeatMode::kSingle, - 30, - 30) {} -YuvFrameReaderImpl::YuvFrameReaderImpl(std::string input_filename, - int input_width, - int input_height, - int desired_width, - int desired_height, - RepeatMode repeat_mode, - absl::optional clip_fps, - int target_fps) - : input_filename_(input_filename), - frame_length_in_bytes_(input_width * input_height + - 2 * ((input_width + 1) / 2) * - ((input_height + 1) / 2)), - input_width_(input_width), - input_height_(input_height), - desired_width_(desired_width), - desired_height_(desired_height), - frame_size_bytes_(FrameSizeBytes(input_width, input_height)), - repeat_mode_(repeat_mode), - number_of_frames_(-1), - current_frame_index_(-1), - dropper_(clip_fps.has_value() ? new DropperUtil(*clip_fps, target_fps) - : nullptr), - input_file_(nullptr) {} - -YuvFrameReaderImpl::~YuvFrameReaderImpl() { - Close(); -} - -bool YuvFrameReaderImpl::Init() { - if (input_width_ <= 0 || input_height_ <= 0) { - RTC_LOG(LS_ERROR) << "Frame width and height must be positive. Was: " - << input_width_ << "x" << input_height_; - return false; - } - input_file_ = fopen(input_filename_.c_str(), "rb"); - if (input_file_ == nullptr) { - RTC_LOG(LS_ERROR) << "Couldn't open input file: " - << input_filename_.c_str(); - return false; - } - // Calculate total number of frames. 
- size_t source_file_size = GetFileSize(input_filename_); - if (source_file_size <= 0u) { - RTC_LOG(LS_ERROR) << "Input file " << input_filename_.c_str() - << " is empty."; - return false; - } - number_of_frames_ = - static_cast(source_file_size / frame_length_in_bytes_); - - if (number_of_frames_ == 0) { - RTC_LOG(LS_ERROR) << "Input file " << input_filename_.c_str() - << " is too small."; - } - - current_frame_index_ = 0; - return true; -} - -rtc::scoped_refptr YuvFrameReaderImpl::ReadFrame() { - if (input_file_ == nullptr) { - RTC_LOG(LS_ERROR) << "YuvFrameReaderImpl is not initialized."; - return nullptr; - } - - rtc::scoped_refptr buffer; - - do { - if (current_frame_index_ >= number_of_frames_) { - switch (repeat_mode_) { - case RepeatMode::kSingle: - return nullptr; - case RepeatMode::kRepeat: - fseek(input_file_, 0, SEEK_SET); - current_frame_index_ = 0; - break; - case RepeatMode::kPingPong: - if (current_frame_index_ == number_of_frames_ * 2) { - fseek(input_file_, 0, SEEK_SET); - current_frame_index_ = 0; - } else { - int reverse_frame_index = current_frame_index_ - number_of_frames_; - int seek_frame_pos = (number_of_frames_ - reverse_frame_index - 1); - fseek(input_file_, seek_frame_pos * frame_size_bytes_, SEEK_SET); - } - break; - } - } - ++current_frame_index_; - - buffer = ReadI420Buffer(input_width_, input_height_, input_file_); - if (!buffer && ferror(input_file_)) { - RTC_LOG(LS_ERROR) << "Couldn't read frame from file: " - << input_filename_.c_str(); - } - } while (dropper_ && - dropper_->UpdateLevel() == DropperUtil::DropDecision::kDropframe); - - if (input_width_ == desired_width_ && input_height_ == desired_height_) { +rtc::scoped_refptr Scale(rtc::scoped_refptr buffer, + Resolution resolution) { + if (buffer->width() == resolution.width && + buffer->height() == resolution.height) { return buffer; } + rtc::scoped_refptr scaled( + I420Buffer::Create(resolution.width, resolution.height)); + scaled->ScaleFrom(*buffer.get()); + return scaled; +} +} // namespace - rtc::scoped_refptr rescaled_buffer( - I420Buffer::Create(desired_width_, desired_height_)); - rescaled_buffer->ScaleFrom(*buffer.get()); - - return rescaled_buffer; +int YuvFrameReaderImpl::RateScaler::Skip(Ratio framerate_scale) { + ticks_ = ticks_.value_or(framerate_scale.num); + int skip = 0; + while (ticks_ <= 0) { + *ticks_ += framerate_scale.num; + ++skip; + } + *ticks_ -= framerate_scale.den; + return skip; } -void YuvFrameReaderImpl::Close() { - if (input_file_ != nullptr) { - fclose(input_file_); - input_file_ = nullptr; +YuvFrameReaderImpl::YuvFrameReaderImpl(std::string filepath, + Resolution resolution, + RepeatMode repeat_mode) + : filepath_(filepath), + resolution_(resolution), + repeat_mode_(repeat_mode), + num_frames_(0), + frame_num_(0), + frame_size_bytes_(0), + header_size_bytes_(0), + file_(nullptr) {} + +YuvFrameReaderImpl::~YuvFrameReaderImpl() { + if (file_ != nullptr) { + fclose(file_); + file_ = nullptr; } } -size_t YuvFrameReaderImpl::FrameLength() { - return frame_length_in_bytes_; +void YuvFrameReaderImpl::Init() { + RTC_CHECK_GT(resolution_.width, 0) << "Width must be positive"; + RTC_CHECK_GT(resolution_.height, 0) << "Height must be positive"; + frame_size_bytes_ = + CalcBufferSize(VideoType::kI420, resolution_.width, resolution_.height); + + file_ = fopen(filepath_.c_str(), "rb"); + RTC_CHECK(file_ != NULL) << "Cannot open " << filepath_; + + size_t file_size_bytes = GetFileSize(filepath_); + RTC_CHECK_GT(file_size_bytes, 0u) << "File " << filepath_ << " is empty"; + + 
num_frames_ = static_cast(file_size_bytes / frame_size_bytes_); + RTC_CHECK_GT(num_frames_, 0u) << "File " << filepath_ << " is too small"; } -int YuvFrameReaderImpl::NumberOfFrames() { - return number_of_frames_; +rtc::scoped_refptr YuvFrameReaderImpl::PullFrame() { + return PullFrame(/*frame_num=*/nullptr); +} + +rtc::scoped_refptr YuvFrameReaderImpl::PullFrame(int* frame_num) { + return PullFrame(frame_num, resolution_, /*framerate_scale=*/kNoScale); +} + +rtc::scoped_refptr YuvFrameReaderImpl::PullFrame( + int* frame_num, + Resolution resolution, + Ratio framerate_scale) { + frame_num_ += framerate_scaler_.Skip(framerate_scale); + auto buffer = ReadFrame(frame_num_, resolution); + if (frame_num != nullptr) { + *frame_num = frame_num_; + } + return buffer; +} + +rtc::scoped_refptr YuvFrameReaderImpl::ReadFrame(int frame_num) { + return ReadFrame(frame_num, resolution_); +} + +rtc::scoped_refptr YuvFrameReaderImpl::ReadFrame( + int frame_num, + Resolution resolution) { + int wrapped_num = WrapFrameNum(frame_num, num_frames_, repeat_mode_); + if (wrapped_num >= num_frames_) { + RTC_CHECK_EQ(RepeatMode::kSingle, repeat_mode_); + return nullptr; + } + fseek(file_, header_size_bytes_ + wrapped_num * frame_size_bytes_, SEEK_SET); + auto buffer = ReadI420Buffer(resolution_.width, resolution_.height, file_); + RTC_CHECK(buffer != nullptr); + + return Scale(buffer, resolution); +} + +std::unique_ptr CreateYuvFrameReader(std::string filepath, + Resolution resolution) { + return CreateYuvFrameReader(filepath, resolution, + YuvFrameReaderImpl::RepeatMode::kSingle); +} + +std::unique_ptr CreateYuvFrameReader( + std::string filepath, + Resolution resolution, + YuvFrameReaderImpl::RepeatMode repeat_mode) { + YuvFrameReaderImpl* frame_reader = + new YuvFrameReaderImpl(filepath, resolution, repeat_mode); + frame_reader->Init(); + return std::unique_ptr(frame_reader); } } // namespace test diff --git a/third_party/libwebrtc/test/testsupport/yuv_frame_reader_unittest.cc b/third_party/libwebrtc/test/testsupport/yuv_frame_reader_unittest.cc index 525f0e9971e9..b9ea2d0c4678 100644 --- a/third_party/libwebrtc/test/testsupport/yuv_frame_reader_unittest.cc +++ b/third_party/libwebrtc/test/testsupport/yuv_frame_reader_unittest.cc @@ -25,11 +25,12 @@ namespace webrtc { namespace test { namespace { -const std::string kInputFileContents = "bazouk"; +using Ratio = FrameReader::Ratio; +using RepeatMode = YuvFrameReaderImpl::RepeatMode; -const size_t kFrameWidth = 2; -const size_t kFrameHeight = 2; -const size_t kFrameLength = 3 * kFrameWidth * kFrameHeight / 2; // I420. 
+constexpr Resolution kResolution({.width = 1, .height = 1}); +constexpr char kFrameContent[3][3] = {{0, 1, 2}, {1, 2, 3}, {2, 3, 4}}; +constexpr int kNumFrames = sizeof(kFrameContent) / sizeof(kFrameContent[0]); } // namespace class YuvFrameReaderTest : public ::testing::Test { @@ -38,50 +39,108 @@ class YuvFrameReaderTest : public ::testing::Test { ~YuvFrameReaderTest() override = default; void SetUp() override { - temp_filename_ = webrtc::test::TempFilename(webrtc::test::OutputPath(), - "yuv_frame_reader_unittest"); - FILE* dummy = fopen(temp_filename_.c_str(), "wb"); - fprintf(dummy, "%s", kInputFileContents.c_str()); - fclose(dummy); + filepath_ = webrtc::test::TempFilename(webrtc::test::OutputPath(), + "yuv_frame_reader_unittest"); + FILE* file = fopen(filepath_.c_str(), "wb"); + fwrite(kFrameContent, 1, sizeof(kFrameContent), file); + fclose(file); - frame_reader_.reset( - new YuvFrameReaderImpl(temp_filename_, kFrameWidth, kFrameHeight)); - ASSERT_TRUE(frame_reader_->Init()); + reader_ = CreateYuvFrameReader(filepath_, kResolution); } - void TearDown() override { remove(temp_filename_.c_str()); } + void TearDown() override { remove(filepath_.c_str()); } - std::unique_ptr frame_reader_; - std::string temp_filename_; + std::string filepath_; + std::unique_ptr reader_; }; -TEST_F(YuvFrameReaderTest, InitSuccess) {} - -TEST_F(YuvFrameReaderTest, FrameLength) { - EXPECT_EQ(kFrameLength, frame_reader_->FrameLength()); +TEST_F(YuvFrameReaderTest, num_frames) { + EXPECT_EQ(kNumFrames, reader_->num_frames()); } -TEST_F(YuvFrameReaderTest, NumberOfFrames) { - EXPECT_EQ(1, frame_reader_->NumberOfFrames()); +TEST_F(YuvFrameReaderTest, PullFrame_frameContent) { + rtc::scoped_refptr buffer = reader_->PullFrame(); + EXPECT_EQ(kFrameContent[0][0], *buffer->DataY()); + EXPECT_EQ(kFrameContent[0][1], *buffer->DataU()); + EXPECT_EQ(kFrameContent[0][2], *buffer->DataV()); } -TEST_F(YuvFrameReaderTest, ReadFrame) { - rtc::scoped_refptr buffer = frame_reader_->ReadFrame(); - ASSERT_TRUE(buffer); - // Expect I420 packed as YUV. - EXPECT_EQ(kInputFileContents[0], buffer->DataY()[0]); - EXPECT_EQ(kInputFileContents[1], buffer->DataY()[1]); - EXPECT_EQ(kInputFileContents[2], buffer->DataY()[2]); - EXPECT_EQ(kInputFileContents[3], buffer->DataY()[3]); - EXPECT_EQ(kInputFileContents[4], buffer->DataU()[0]); - EXPECT_EQ(kInputFileContents[5], buffer->DataV()[0]); - EXPECT_FALSE(frame_reader_->ReadFrame()); // End of file. 
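The repeat-mode and frame-rate-scale behaviour exercised by the parameterized tests below (and by the matching Y4M tests earlier in this patch) can be summarized with a worked example for a 3-frame clip, following WrapFrameNum and RateScaler::Skip in yuv_frame_reader.cc:

    kSingle:   0, 1, 2, then PullFrame() returns nullptr at end of file
    kRepeat:   0, 1, 2, 0, 1, 2, ...
    kPingPong: 0, 1, 2, 1, 0, 1, 2, ...  (cycle length 2 * (num_frames - 1) = 4)

    Ratio{.num = 1, .den = 2} halves the frame rate by skipping every other
    frame (0, 2, 4, ...); Ratio{.num = 2, .den = 3} gives two thirds of the
    rate (0, 1, 3, 4, 6, ...); Ratio{.num = 2, .den = 1} doubles the rate by
    repeating each frame (0, 0, 1, 1, ...).
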
+TEST_F(YuvFrameReaderTest, ReadFrame_randomOrder) { + std::vector expected_frames = {2, 0, 1}; + std::vector actual_frames; + for (int frame_num : expected_frames) { + rtc::scoped_refptr buffer = + reader_->ReadFrame(frame_num); + actual_frames.push_back(*buffer->DataY()); + } + EXPECT_EQ(expected_frames, actual_frames); } -TEST_F(YuvFrameReaderTest, ReadFrameUninitialized) { - YuvFrameReaderImpl file_reader(temp_filename_, kFrameWidth, kFrameHeight); - EXPECT_FALSE(file_reader.ReadFrame()); +TEST_F(YuvFrameReaderTest, PullFrame_scale) { + rtc::scoped_refptr buffer = reader_->PullFrame( + /*pulled_frame_num=*/nullptr, Resolution({.width = 2, .height = 2}), + FrameReader::kNoScale); + EXPECT_EQ(2, buffer->width()); + EXPECT_EQ(2, buffer->height()); } +class YuvFrameReaderRepeatModeTest + : public YuvFrameReaderTest, + public ::testing::WithParamInterface< + std::tuple>> {}; + +TEST_P(YuvFrameReaderRepeatModeTest, PullFrame) { + RepeatMode mode = std::get<0>(GetParam()); + std::vector expected_frames = std::get<1>(GetParam()); + + reader_ = CreateYuvFrameReader(filepath_, kResolution, mode); + std::vector read_frames; + for (size_t i = 0; i < expected_frames.size(); ++i) { + rtc::scoped_refptr buffer = reader_->PullFrame(); + read_frames.push_back(*buffer->DataY()); + } + EXPECT_EQ(expected_frames, read_frames); +} + +INSTANTIATE_TEST_SUITE_P( + YuvFrameReaderTest, + YuvFrameReaderRepeatModeTest, + ::testing::ValuesIn( + {std::make_tuple(RepeatMode::kSingle, std::vector{0, 1, 2}), + std::make_tuple(RepeatMode::kRepeat, + std::vector{0, 1, 2, 0, 1, 2}), + std::make_tuple(RepeatMode::kPingPong, + std::vector{0, 1, 2, 1, 0, 1, 2})})); + +class YuvFrameReaderFramerateScaleTest + : public YuvFrameReaderTest, + public ::testing::WithParamInterface< + std::tuple>> {}; + +TEST_P(YuvFrameReaderFramerateScaleTest, PullFrame) { + Ratio framerate_scale = std::get<0>(GetParam()); + std::vector expected_frames = std::get<1>(GetParam()); + + std::vector actual_frames; + for (size_t i = 0; i < expected_frames.size(); ++i) { + int pulled_frame; + rtc::scoped_refptr buffer = + reader_->PullFrame(&pulled_frame, kResolution, framerate_scale); + actual_frames.push_back(pulled_frame); + } + EXPECT_EQ(expected_frames, actual_frames); +} + +INSTANTIATE_TEST_SUITE_P(YuvFrameReaderTest, + YuvFrameReaderFramerateScaleTest, + ::testing::ValuesIn({ + std::make_tuple(Ratio({.num = 1, .den = 2}), + std::vector{0, 2, 4}), + std::make_tuple(Ratio({.num = 2, .den = 3}), + std::vector{0, 1, 3, 4, 6}), + std::make_tuple(Ratio({.num = 2, .den = 1}), + std::vector{0, 0, 1, 1}), + })); + } // namespace test } // namespace webrtc diff --git a/third_party/libwebrtc/tools_webrtc/android/build_aar.py b/third_party/libwebrtc/tools_webrtc/android/build_aar.py index 42a902cafd31..d910b39a7cb5 100755 --- a/third_party/libwebrtc/tools_webrtc/android/build_aar.py +++ b/third_party/libwebrtc/tools_webrtc/android/build_aar.py @@ -121,7 +121,7 @@ def _RunGN(args): def _RunNinja(output_directory, args): cmd = [ - os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'ninja'), '-C', + os.path.join(SRC_DIR, 'third_party', 'ninja', 'ninja'), '-C', output_directory ] cmd.extend(args) diff --git a/third_party/libwebrtc/tools_webrtc/flags_compatibility.py b/third_party/libwebrtc/tools_webrtc/flags_compatibility.py deleted file mode 100755 index 72c66a594f6c..000000000000 --- a/third_party/libwebrtc/tools_webrtc/flags_compatibility.py +++ /dev/null @@ -1,51 +0,0 @@ -#!/usr/bin/env vpython3 - -# Copyright (c) 2019 The WebRTC project authors. 
All Rights Reserved. -# -# Use of this source code is governed by a BSD-style license -# that can be found in the LICENSE file in the root of the source -# tree. An additional intellectual property rights grant can be found -# in the file PATENTS. All contributing project authors may -# be found in the AUTHORS file in the root of the source tree. - -import argparse -import logging -import subprocess -import sys - - -def main(): - parser = argparse.ArgumentParser() - parser.add_argument('--isolated-script-test-output') - parser.add_argument('--isolated-script-test-perf-output') - args, unrecognized_args = parser.parse_known_args() - - test_command = _ForcePythonInterpreter(unrecognized_args) - if args.isolated_script_test_output: - test_command += [ - '--isolated_script_test_output', args.isolated_script_test_output - ] - if args.isolated_script_test_perf_output: - test_command += [ - '--isolated_script_test_perf_output=' + - args.isolated_script_test_perf_output - ] - logging.info('Running %r', test_command) - - return subprocess.call(test_command) - - -def _ForcePythonInterpreter(cmd): - """Returns the fixed command line to call the right python executable.""" - out = cmd[:] - if len(out) > 0: - if out[0] == 'python': - out[0] = sys.executable - elif out[0].endswith('.py'): - out.insert(0, sys.executable) - return out - - -if __name__ == '__main__': - logging.basicConfig(level=logging.INFO) - sys.exit(main()) diff --git a/third_party/libwebrtc/tools_webrtc/gtest-parallel-wrapper.py b/third_party/libwebrtc/tools_webrtc/gtest-parallel-wrapper.py index 2972e6c9bf09..a64c7736382f 100755 --- a/third_party/libwebrtc/tools_webrtc/gtest-parallel-wrapper.py +++ b/third_party/libwebrtc/tools_webrtc/gtest-parallel-wrapper.py @@ -63,7 +63,7 @@ Will be converted into: --test_artifacts_dir=SOME_OUTPUT_DIR/test_artifacts \ --some_flag=some_value \ --another_flag \ - --isolated_script_test_perf_output=SOME_OTHER_DIR \ + --isolated-script-test-perf-output=SOME_OTHER_DIR \ --foo=bar \ --baz @@ -155,32 +155,12 @@ def ParseArgs(argv=None): # know what will be the swarming output dir. parser.add_argument('--store-test-artifacts', action='store_true') - # No-sandbox is a Chromium-specific flag, ignore it. - # TODO(bugs.webrtc.org/8115): Remove workaround when fixed. 
- parser.add_argument('--no-sandbox', - action='store_true', - help=argparse.SUPPRESS) - parser.add_argument('executable') parser.add_argument('executable_args', nargs='*') options, unrecognized_args = parser.parse_known_args(argv) - webrtc_flags_to_change = { - '--isolated-script-test-perf-output': - '--isolated_script_test_perf_output', - '--isolated-script-test-output': '--isolated_script_test_output', - } - args_to_pass = [] - for arg in unrecognized_args: - if any(arg.startswith(k) for k in list(webrtc_flags_to_change.keys())): - arg_split = arg.split('=') - args_to_pass.append(webrtc_flags_to_change[arg_split[0]] + '=' + - arg_split[1]) - else: - args_to_pass.append(arg) - - executable_args = options.executable_args + args_to_pass + executable_args = options.executable_args + unrecognized_args if options.store_test_artifacts: assert options.output_dir, ( diff --git a/third_party/libwebrtc/tools_webrtc/gtest_parallel_wrapper_test.py b/third_party/libwebrtc/tools_webrtc/gtest_parallel_wrapper_test.py index 609052d2488e..5fbd52e8db8e 100755 --- a/third_party/libwebrtc/tools_webrtc/gtest_parallel_wrapper_test.py +++ b/third_party/libwebrtc/tools_webrtc/gtest_parallel_wrapper_test.py @@ -146,7 +146,7 @@ class GtestParallelWrapperTest(unittest.TestCase): '--output_dir=' + output_dir, '--dump_json_test_results=SOME_DIR', 'some_test', '--', '--test_artifacts_dir=' + expected_artifacts_dir, '--some_flag=some_value', '--another_flag', - '--isolated_script_test_perf_output=SOME_OTHER_DIR', '--foo=bar', + '--isolated-script-test-perf-output=SOME_OTHER_DIR', '--foo=bar', '--baz' ]) self.assertEqual(result.gtest_parallel_args, expected) diff --git a/third_party/libwebrtc/tools_webrtc/ios/build_ios_libs.py b/third_party/libwebrtc/tools_webrtc/ios/build_ios_libs.py index 093a002d76b6..764c378c7b1d 100755 --- a/third_party/libwebrtc/tools_webrtc/ios/build_ios_libs.py +++ b/third_party/libwebrtc/tools_webrtc/ios/build_ios_libs.py @@ -197,7 +197,7 @@ def BuildWebRTC(output_dir, target_environment, target_arch, flavor, logging.info('Building target: %s', gn_target_name) cmd = [ - os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'ninja'), + os.path.join(SRC_DIR, 'third_party', 'ninja', 'ninja'), '-C', output_dir, gn_target_name, diff --git a/third_party/libwebrtc/tools_webrtc/iwyu/mappings.imp b/third_party/libwebrtc/tools_webrtc/iwyu/mappings.imp index fa111faed942..a79997bdceb4 100644 --- a/third_party/libwebrtc/tools_webrtc/iwyu/mappings.imp +++ b/third_party/libwebrtc/tools_webrtc/iwyu/mappings.imp @@ -32,11 +32,7 @@ { symbol: ["std::unique_ptr", "public", "", "public"] }, # Needed to avoid { symbol: ["std::ostringstream", "public", "", "public"] }, -# Needed for unknown reasons -{ include: ['<__algorithm/equal.h>', "private", '', "public"] }, -{ include: ['<__algorithm/unique.h>', "private", '', "public"] }, -{ include: ['<__type_traits/remove_reference.h>', "private", '', "public"] }, -{ include: ['<__functional/function.h>', "private", '', "public"] }, -{ include: ['<__utility/pair.h>', "private", '', "public"] }, + +{ ref: "../../buildtools/third_party/libc++/trunk/include/libcxx.imp" }, ] diff --git a/third_party/libwebrtc/tools_webrtc/mb/mb.py b/third_party/libwebrtc/tools_webrtc/mb/mb.py index 07cb14c01672..762df9a05c1f 100755 --- a/third_party/libwebrtc/tools_webrtc/mb/mb.py +++ b/third_party/libwebrtc/tools_webrtc/mb/mb.py @@ -53,8 +53,9 @@ class WebRTCMetaBuildWrapper(mb.MetaBuildWrapper): is_android = 'target_os="android"' in vals['gn_args'] is_fuchsia = 'target_os="fuchsia"' in 
vals['gn_args'] - is_linux = self.platform.startswith('linux') and not is_android is_ios = 'target_os="ios"' in vals['gn_args'] + is_linux = self.platform.startswith('linux') and not is_android + is_win = self.platform.startswith('win') if test_type == 'nontest': self.WriteFailureAndRaise('We should not be isolating %s.' % target, @@ -81,21 +82,17 @@ class WebRTCMetaBuildWrapper(mb.MetaBuildWrapper): ] elif is_android: cmdline += [ - vpython_exe, '../../build/android/test_wrapper/logdog_wrapper.py', - '--target', target, '--logdog-bin-cmd', '../../bin/logdog_butler', - '--logcat-output-file', '${ISOLATED_OUTDIR}/logcats', - '--store-tombstones' + 'luci-auth', 'context', '--', vpython_exe, + '../../build/android/test_wrapper/logdog_wrapper.py', '--target', + target, '--logdog-bin-cmd', + '../../.task_template_packages/logdog_butler', '--logcat-output-file', + '${ISOLATED_OUTDIR}/logcats', '--store-tombstones' ] - elif is_ios or is_fuchsia: - cmdline += [ - vpython_exe, '../../tools_webrtc/flags_compatibility.py', - 'bin/run_%s' % target - ] - extra_files.append('../../tools_webrtc/flags_compatibility.py') - elif test_type == 'raw': - cmdline += [vpython_exe, '../../tools_webrtc/flags_compatibility.py'] - extra_files.append('../../tools_webrtc/flags_compatibility.py') - cmdline.append(_GetExecutable(target, self.platform)) + elif is_ios or is_fuchsia or test_type == 'raw': + if is_win: + cmdline += ['bin\\run_{}.bat'.format(target)] + else: + cmdline += ['bin/run_{}'.format(target)] else: if isolate_map[target].get('use_webcam', False): cmdline += [ diff --git a/third_party/libwebrtc/tools_webrtc/mb/mb_config.pyl b/third_party/libwebrtc/tools_webrtc/mb/mb_config.pyl index e8acbce10601..138ecf275c22 100644 --- a/third_party/libwebrtc/tools_webrtc/mb/mb_config.pyl +++ b/third_party/libwebrtc/tools_webrtc/mb/mb_config.pyl @@ -31,8 +31,8 @@ }, 'client.webrtc': { # Android - 'Android32 (M Nexus5X)': 'android_release_bot_arm_reclient', - 'Android32 (M Nexus5X)(dbg)': 'android_debug_static_bot_arm', + 'Android32': 'android_release_bot_arm_reclient', + 'Android32 (dbg)': 'android_debug_static_bot_arm', 'Android32 (more configs)': { 'bwe_test_logging': 'bwe_test_logging_android_arm', 'dummy_audio_file_devices_no_protobuf': @@ -42,12 +42,13 @@ 'Android32 Builder arm': 'android_pure_release_bot_arm', 'Android32 Builder x86': 'android_release_bot_x86', 'Android32 Builder x86 (dbg)': 'android_debug_static_bot_x86', - 'Android64 (M Nexus5X)': 'android_release_bot_arm64', - 'Android64 (M Nexus5X)(dbg)': 'android_debug_static_bot_arm64', + 'Android64': 'android_release_bot_arm64', + 'Android64 (dbg)': 'android_debug_static_bot_arm64', 'Android64 Builder arm64': 'android_pure_release_bot_arm64', 'Android64 Builder x64 (dbg)': 'android_debug_static_bot_x64', # Fuchsia + 'Fuchsia Builder': 'release_bot_x64_fuchsia', 'Fuchsia Release': 'release_bot_x64_fuchsia', # Linux @@ -90,10 +91,10 @@ 'dummy_audio_file_devices_no_protobuf_x86', 'rtti_no_sctp': 'rtti_no_sctp_no_unicode_win_x86', }, - 'Win32 Builder (Clang)': 'win_clang_pure_release_bot_x86', 'Win32 Debug (Clang)': 'win_clang_debug_bot_x86', 'Win32 Release (Clang)': 'win_clang_release_bot_x86', 'Win64 ASan': 'win_asan_clang_release_bot_x64', + 'Win64 Builder (Clang)': 'win_clang_pure_release_bot_x64', 'Win64 Debug (Clang)': 'win_clang_debug_bot_x64', 'Win64 Release (Clang)': 'win_clang_release_bot_x64', @@ -130,11 +131,12 @@ 'Perf Android64 (M Nexus5X)': 'release_bot_x64', 'Perf Android64 (O Pixel2)': 'release_bot_x64', 'Perf Android64 (R Pixel5)': 
'release_bot_x64', + 'Perf Fuchsia': 'release_bot_x64_fuchsia', 'Perf Linux Bionic': 'release_bot_x64', 'Perf Linux Trusty': 'release_bot_x64', 'Perf Mac 11': 'release_bot_x64', 'Perf Mac M1 Arm64 12': 'release_bot_x64', - 'Perf Win7': 'release_bot_x64', + 'Perf Win 10': 'release_bot_x64', }, 'internal.client.webrtc': { 'iOS64 Debug': 'ios_internal_debug_bot_arm64', @@ -153,6 +155,7 @@ 'rtti_no_sctp': 'rtti_no_sctp_android_arm', }, 'android_arm_rel': 'android_release_bot_arm', + 'android_arm_rel_reclient': 'android_release_bot_arm_reclient', 'android_compile_arm64_dbg': 'android_debug_static_bot_arm64', 'android_compile_arm64_rel': 'android_pure_release_bot_arm64', 'android_compile_arm_dbg': 'android_debug_static_bot_arm', @@ -168,6 +171,7 @@ # iOS 'ios_compile_arm64_dbg': 'ios_debug_bot_arm64', 'ios_compile_arm64_rel': 'ios_release_bot_arm64', + 'ios_compile_arm64_rel_reclient': 'ios_release_bot_arm64_reclient', 'ios_sim_x64_dbg_ios12': 'ios_debug_bot_x64', 'ios_sim_x64_dbg_ios13': 'ios_debug_bot_x64', 'ios_sim_x64_dbg_ios14': 'ios_debug_bot_x64', @@ -182,6 +186,7 @@ 'linux_compile_rel': 'pure_release_bot_x64', 'linux_compile_x86_dbg': 'debug_bot_x86', 'linux_compile_x86_rel': 'pure_release_bot_x86', + 'linux_coverage': 'code_coverage_bot_x64', 'linux_dbg': 'debug_bot_x64', 'linux_libfuzzer_rel': 'libfuzzer_asan_release_bot_x64', 'linux_more_configs': { @@ -192,6 +197,7 @@ }, 'linux_msan': 'msan_clang_release_bot_x64', 'linux_rel': 'release_bot_x64', + 'linux_rel_reclient': 'release_bot_x64_reclient', 'linux_tsan2': 'tsan_clang_release_bot_x64', 'linux_ubsan': 'ubsan_clang_release_bot_x64', 'linux_ubsan_vptr': 'ubsan_vptr_clang_release_bot_x64', @@ -206,15 +212,17 @@ 'mac_dbg_m1': 'debug_bot_arm64', 'mac_rel': 'release_bot_x64', 'mac_rel_m1': 'release_bot_arm64', + 'mac_rel_reclient': 'release_bot_x64_reclient', # Windows 'win_asan': 'win_asan_clang_release_bot_x64', 'win_compile_x64_clang_dbg': 'win_clang_debug_bot_x64', 'win_compile_x64_clang_rel': 'win_clang_release_bot_x64', + 'win_compile_x64_clang_rel_reclient': + 'win_clang_release_bot_x64_reclient', 'win_compile_x86_clang_dbg': 'win_clang_debug_bot_x86', 'win_compile_x86_clang_rel': 'win_clang_release_bot_x86', 'win_x64_clang_dbg': 'win_clang_debug_bot_x64', - 'win_x64_clang_dbg_win10': 'win_clang_debug_bot_x64', 'win_x64_clang_rel': 'win_clang_release_bot_x64', 'win_x86_clang_dbg': 'win_clang_debug_bot_x86', 'win_x86_clang_rel': 'win_clang_release_bot_x86', @@ -242,7 +250,8 @@ 'android_pure_release_bot_arm64': ['android', 'pure_release_bot', 'arm64'], 'android_release_bot_arm': ['android', 'release_bot', 'arm'], 'android_release_bot_arm64': ['android', 'release_bot', 'arm64'], - 'android_release_bot_arm_reclient': ['android', 'release_bot_reclient', 'arm'], + 'android_release_bot_arm_reclient': + ['android', 'release_bot_reclient', 'arm'], 'android_release_bot_x64': ['android', 'release_bot', 'x64'], 'android_release_bot_x86': ['android', 'release_bot', 'x86'], 'asan_lsan_clang_release_bot_x64': @@ -251,6 +260,10 @@ ['android', 'debug_static_bot', 'arm', 'bwe_test_logging'], 'bwe_test_logging_x64': ['debug_bot', 'x64', 'bwe_test_logging'], 'bwe_test_logging_x86': ['debug_bot', 'x86', 'bwe_test_logging'], + 'code_coverage_bot_x64': [ + 'openh264', 'release_bot', 'x64', 'code_coverage', + 'partial_code_coverage_instrumentation' + ], 'codesearch_gen_linux_bot': ['openh264', 'debug_bot', 'minimal_symbols'], 'debug_bot_arm': ['openh264', 'debug_bot', 'arm'], 'debug_bot_arm64': ['openh264', 'debug_bot', 'arm64'], @@ -282,6 +295,10 @@ 
'ios_release_bot_arm64': [ 'ios', 'release_bot', 'arm64', 'no_ios_code_signing', 'xctest', ], + 'ios_release_bot_arm64_reclient': [ + 'ios', 'release_bot', 'arm64', 'no_ios_code_signing', 'xctest', + 'no_goma', 'reclient', + ], 'libfuzzer_asan_release_bot_x64': [ 'libfuzzer', 'asan', 'optimize_for_fuzzing', 'openh264', 'pure_release_bot', 'x64' @@ -306,22 +323,21 @@ 'release_bot_arm64': ['openh264', 'release_bot', 'arm64'], 'release_bot_x64': ['openh264', 'release_bot', 'x64'], 'release_bot_x64_fuchsia': ['openh264', 'release_bot', 'x64', 'fuchsia'], - 'release_bot_x64_reclient': [ - 'openh264', 'release_bot_reclient', 'x64', - ], + 'release_bot_x64_reclient': ['openh264', 'release_bot_reclient', 'x64'], 'release_bot_x86': ['openh264', 'release_bot', 'x86'], - 'rtti_no_sctp_android_arm': [ - 'android', 'debug_static_bot', 'arm', 'rtti', 'no_sctp' - ], + 'rtti_no_sctp_android_arm': + ['android', 'debug_static_bot', 'arm', 'rtti', 'no_sctp'], 'rtti_no_sctp_no_unicode_win_x86': ['debug_bot', 'x86', 'rtti', 'no_sctp', 'win_undef_unicode'], 'rtti_no_sctp_x64': ['debug_bot', 'x64', 'rtti', 'no_sctp'], 'tsan_clang_release_bot_x64': ['tsan', 'clang', 'openh264', 'pure_release_bot', 'x64'], - 'ubsan_clang_release_bot_x64': - ['ubsan', 'clang', 'openh264', 'pure_release_bot', 'x64'], - 'ubsan_vptr_clang_release_bot_x64': - ['ubsan_vptr', 'clang', 'openh264', 'pure_release_bot', 'x64'], + 'ubsan_clang_release_bot_x64': [ + 'ubsan', 'clang', 'openh264', 'pure_release_bot', 'x64' + ], + 'ubsan_vptr_clang_release_bot_x64': [ + 'ubsan_vptr', 'clang', 'openh264', 'pure_release_bot', 'x64' + ], 'win_asan_clang_release_bot_x64': [ 'asan', 'clang', @@ -343,11 +359,11 @@ 'debug_bot', 'x86', ], - 'win_clang_pure_release_bot_x86': [ + 'win_clang_pure_release_bot_x64': [ 'clang', 'openh264', 'pure_release_bot', - 'x86', + 'x64', ], 'win_clang_release_bot_x64': [ 'clang', @@ -355,6 +371,14 @@ 'release_bot', 'x64', ], + 'win_clang_release_bot_x64_reclient': [ + 'clang', + 'openh264', + 'release_bot', + 'x64', + 'no_goma', + 'reclient', + ], 'win_clang_release_bot_x86': [ 'clang', 'openh264', @@ -385,6 +409,9 @@ 'clang': { 'gn_args': 'is_clang=true', }, + 'code_coverage': { + 'gn_args': 'use_clang_coverage=true', + }, 'dcheck_always_on': { 'gn_args': 'dcheck_always_on=true', }, @@ -428,7 +455,11 @@ 'gn_args': 'symbol_level=1', }, 'msan': { - 'gn_args': 'is_msan=true msan_track_origins=2', + 'gn_args': 'is_msan=true msan_track_origins=2' + ' instrumented_libraries_release = "xenial"', + }, + 'no_goma': { + 'gn_args': 'use_goma=false', }, 'no_ios_code_signing': { 'gn_args': 'ios_enable_code_signing=false', @@ -445,6 +476,10 @@ 'optimize_for_fuzzing': { 'gn_args': 'optimize_for_fuzzing=true', }, + 'partial_code_coverage_instrumentation': { + 'gn_args': + 'coverage_instrumentation_input_file="//.code-coverage/files_to_instrument.txt"' + }, # The 'pure_release_bot' configuration is for release bots that are doing a # 100% release build without DCHECKs while 'release_bot' is a partial # release configs since `dcheck_always_on` is set to true. 
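# Annotation, not part of this patch: the new 'linux_coverage' trybot entry
# maps to 'code_coverage_bot_x64', whose mixin list expands to the
# concatenation of each mixin's gn_args. On top of the existing
# 'openh264'/'release_bot'/'x64' settings, the two new mixins therefore add
# use_clang_coverage=true (from 'code_coverage') and
# coverage_instrumentation_input_file="//.code-coverage/files_to_instrument.txt"
# (from 'partial_code_coverage_instrumentation') to the generated GN arguments.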
diff --git a/third_party/libwebrtc/tools_webrtc/mb/mb_unittest.py b/third_party/libwebrtc/tools_webrtc/mb/mb_unittest.py index 109d2bcb2da9..40c12eda178a 100755 --- a/third_party/libwebrtc/tools_webrtc/mb/mb_unittest.py +++ b/third_party/libwebrtc/tools_webrtc/mb/mb_unittest.py @@ -91,10 +91,11 @@ class FakeMBW(mb.WebRTCMetaBuildWrapper): abpath = self._AbsPath(path) self.files[abpath] = contents - def Call(self, cmd, env=None, capture_output=True, stdin=None): + def Call(self, cmd, env=None, capture_output=True, input=None): + # pylint: disable=redefined-builtin del env del capture_output - del stdin + del input self.calls.append(cmd) if self.cmds: return self.cmds.pop(0) @@ -324,12 +325,15 @@ class UnitTest(unittest.TestCase): files, ['../../.vpython3', '../../testing/test_env.py', 'foo_unittests']) self.assertEqual(command, [ + 'luci-auth', + 'context', + '--', 'vpython3', '../../build/android/test_wrapper/logdog_wrapper.py', '--target', 'foo_unittests', '--logdog-bin-cmd', - '../../bin/logdog_butler', + '../../.task_template_packages/logdog_butler', '--logcat-output-file', '${ISOLATED_OUTDIR}/logcats', '--store-tombstones', @@ -363,12 +367,15 @@ class UnitTest(unittest.TestCase): files, ['../../.vpython3', '../../testing/test_env.py', 'foo_unittests']) self.assertEqual(command, [ + 'luci-auth', + 'context', + '--', 'vpython3', '../../build/android/test_wrapper/logdog_wrapper.py', '--target', 'foo_unittests', '--logdog-bin-cmd', - '../../bin/logdog_butler', + '../../.task_template_packages/logdog_butler', '--logcat-output-file', '${ISOLATED_OUTDIR}/logcats', '--store-tombstones', @@ -489,14 +496,9 @@ class UnitTest(unittest.TestCase): self.assertEqual(files, [ '../../.vpython3', '../../testing/test_env.py', - '../../tools_webrtc/flags_compatibility.py', 'foo_unittests', ]) - self.assertEqual(command, [ - 'vpython3', - '../../tools_webrtc/flags_compatibility.py', - './foo_unittests', - ]) + self.assertEqual(command, ['bin/run_foo_unittests']) def test_gen_non_parallel_console_test_launcher(self): test_files = { diff --git a/third_party/libwebrtc/tools_webrtc/presubmit_checks_lib/build_helpers.py b/third_party/libwebrtc/tools_webrtc/presubmit_checks_lib/build_helpers.py index 86fc1a0bbdc9..3386d6d40cf3 100644 --- a/third_party/libwebrtc/tools_webrtc/presubmit_checks_lib/build_helpers.py +++ b/third_party/libwebrtc/tools_webrtc/presubmit_checks_lib/build_helpers.py @@ -69,7 +69,7 @@ def RunGnCheck(root_dir=None): def RunNinjaCommand(args, root_dir=None): """Runs ninja quietly. Any failure (e.g. clang not found) is silently discarded, since this is unlikely an error in submitted CL.""" - command = [os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'ninja')] + args + command = [os.path.join(SRC_DIR, 'third_party', 'ninja', 'ninja')] + args p = subprocess.Popen(command, cwd=root_dir, stdout=subprocess.PIPE, diff --git a/third_party/libwebrtc/tools_webrtc/ubsan/suppressions.txt b/third_party/libwebrtc/tools_webrtc/ubsan/suppressions.txt index dc76f38c2069..2ece795570a9 100644 --- a/third_party/libwebrtc/tools_webrtc/ubsan/suppressions.txt +++ b/third_party/libwebrtc/tools_webrtc/ubsan/suppressions.txt @@ -6,10 +6,6 @@ # the RTC_NO_SANITIZE macro. Please think twice before adding new exceptions. ############################################################################# -# YASM does some funny things that UBsan doesn't like. -# https://crbug.com/489901 -src:*/third_party/yasm/* - # OpenH264 triggers some errors that are out of our control. 
src:*/third_party/ffmpeg/libavcodec/* src:*/third_party/openh264/* @@ -22,3 +18,9 @@ src:*/third_party/libvpx/source/libvpx/vp8/* ############################################################################# # Ignore system libraries. src:*/usr/* + +############################################################################# +[alignment] +# Libaom and libsrtp are doing unaligned memory access. +src:*/third_party/libaom/source/libaom/* +src:*/third_party/libsrtp/srtp/srtp.c diff --git a/third_party/libwebrtc/video/BUILD.gn b/third_party/libwebrtc/video/BUILD.gn index 7990d207ef6e..04e966d056aa 100644 --- a/third_party/libwebrtc/video/BUILD.gn +++ b/third_party/libwebrtc/video/BUILD.gn @@ -15,7 +15,9 @@ rtc_library("video_stream_encoder_interface") { ] deps = [ "../api:fec_controller_api", + "../api:rtc_error", "../api:rtp_parameters", + "../api:rtp_sender_setparameters_callback", "../api:scoped_refptr", "../api/adaptation:resource_adaptation_api", "../api/units:data_rate", @@ -407,6 +409,7 @@ rtc_library("video_stream_encoder_impl") { ":video_stream_encoder_interface", "../api:field_trials_view", "../api:rtp_parameters", + "../api:rtp_sender_setparameters_callback", "../api:sequence_checker", "../api/adaptation:resource_adaptation_api", "../api/task_queue:pending_task_safety_flag", @@ -426,6 +429,7 @@ rtc_library("video_stream_encoder_impl") { "../api/video_codecs:video_codecs_api", "../call/adaptation:resource_adaptation", "../common_video", + "../media:rtc_media_base", "../modules:module_api_public", "../modules/video_coding", "../modules/video_coding:video_codec_interface", @@ -469,6 +473,7 @@ rtc_library("video_stream_encoder_impl") { "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/base:core_headers", "//third_party/abseil-cpp/absl/cleanup", + "//third_party/abseil-cpp/absl/container:inlined_vector", "//third_party/abseil-cpp/absl/types:optional", ] } @@ -854,6 +859,7 @@ if (rtc_include_tests) { "../api/video:video_frame_type", "../api/video:video_rtp_headers", "../api/video/test:video_frame_matchers", + "../api/video_codecs:scalability_mode", "../api/video_codecs:video_codecs_api", "../api/video_codecs:vp8_temporal_layers_factory", "../call:call_interfaces", diff --git a/third_party/libwebrtc/video/end_to_end_tests/fec_tests.cc b/third_party/libwebrtc/video/end_to_end_tests/fec_tests.cc index 03c966c23e84..ff85dde53b99 100644 --- a/third_party/libwebrtc/video/end_to_end_tests/fec_tests.cc +++ b/third_party/libwebrtc/video/end_to_end_tests/fec_tests.cc @@ -159,7 +159,7 @@ class FlexfecRenderObserver : public test::EndToEndTest, static constexpr uint32_t kFlexfecLocalSsrc = 456; explicit FlexfecRenderObserver(bool enable_nack, bool expect_flexfec_rtcp) - : test::EndToEndTest(test::CallTest::kDefaultTimeout), + : test::EndToEndTest(test::CallTest::kLongTimeout), enable_nack_(enable_nack), expect_flexfec_rtcp_(expect_flexfec_rtcp), received_flexfec_rtcp_(false), diff --git a/third_party/libwebrtc/video/receive_statistics_proxy2_unittest.cc b/third_party/libwebrtc/video/receive_statistics_proxy2_unittest.cc index 5031cdfd403a..f0869c434194 100644 --- a/third_party/libwebrtc/video/receive_statistics_proxy2_unittest.cc +++ b/third_party/libwebrtc/video/receive_statistics_proxy2_unittest.cc @@ -1753,12 +1753,7 @@ TEST_P(ReceiveStatisticsProxy2TestWithContent, DownscalesReported) { statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), nullptr); const int kExpectedDownscales = 30; // 2 per 4 seconds = 30 per minute. 
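// Annotation, not part of this patch: the screenshare-prefixed
// NumberResolutionDownswitchesPerMinute histogram is no longer recorded, so
// the branch below keeps the expectation only for non-screenshare content.
// The matching production change is the `if (!screenshare)` guard added in
// video_quality_observer2.cc later in this patch.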
- if (videocontenttypehelpers::IsScreenshare(content_type_)) { - EXPECT_METRIC_EQ( - kExpectedDownscales, - metrics::MinSample("WebRTC.Video.Screenshare." - "NumberResolutionDownswitchesPerMinute")); - } else { + if (!videocontenttypehelpers::IsScreenshare(content_type_)) { EXPECT_METRIC_EQ(kExpectedDownscales, metrics::MinSample( "WebRTC.Video.NumberResolutionDownswitchesPerMinute")); diff --git a/third_party/libwebrtc/video/rtp_video_stream_receiver_frame_transformer_delegate.cc b/third_party/libwebrtc/video/rtp_video_stream_receiver_frame_transformer_delegate.cc index e6f33262b2e7..16015beee5e6 100644 --- a/third_party/libwebrtc/video/rtp_video_stream_receiver_frame_transformer_delegate.cc +++ b/third_party/libwebrtc/video/rtp_video_stream_receiver_frame_transformer_delegate.cc @@ -26,7 +26,7 @@ class TransformableVideoReceiverFrame TransformableVideoReceiverFrame(std::unique_ptr frame, uint32_t ssrc) : frame_(std::move(frame)), - metadata_(frame_->GetRtpVideoHeader()), + metadata_(frame_->GetRtpVideoHeader().GetAsMetadata()), ssrc_(ssrc) {} ~TransformableVideoReceiverFrame() override = default; diff --git a/third_party/libwebrtc/video/send_statistics_proxy.cc b/third_party/libwebrtc/video/send_statistics_proxy.cc index 523781558ba5..b6c2d60a73c9 100644 --- a/third_party/libwebrtc/video/send_statistics_proxy.cc +++ b/third_party/libwebrtc/video/send_statistics_proxy.cc @@ -985,6 +985,8 @@ void SendStatisticsProxy::OnSendEncodedImage( stats->frames_encoded++; stats->total_encode_time_ms += encoded_image.timing_.encode_finish_ms - encoded_image.timing_.encode_start_ms; + if (codec_info) + stats->scalability_mode = codec_info->scalability_mode; // Report resolution of the top spatial layer. bool is_top_spatial_layer = codec_info == nullptr || codec_info->end_of_picture; diff --git a/third_party/libwebrtc/video/send_statistics_proxy_unittest.cc b/third_party/libwebrtc/video/send_statistics_proxy_unittest.cc index d24b3c80a6e5..af3b0208e269 100644 --- a/third_party/libwebrtc/video/send_statistics_proxy_unittest.cc +++ b/third_party/libwebrtc/video/send_statistics_proxy_unittest.cc @@ -21,6 +21,7 @@ #include "api/video/video_adaptation_reason.h" #include "api/video/video_bitrate_allocation.h" #include "api/video/video_codec_type.h" +#include "api/video_codecs/scalability_mode.h" #include "api/video_codecs/video_codec.h" #include "rtc_base/fake_clock.h" #include "system_wrappers/include/metrics.h" @@ -32,6 +33,9 @@ namespace webrtc { namespace { + +using ::testing::Optional; + const uint32_t kFirstSsrc = 17; const uint32_t kSecondSsrc = 42; const uint32_t kFirstRtxSsrc = 18; @@ -397,6 +401,34 @@ TEST_F(SendStatisticsProxyTest, OnSendEncodedImageWithoutQpQpSumWontExist) { statistics_proxy_->GetStats().substreams[ssrc].qp_sum); } +TEST_F(SendStatisticsProxyTest, + OnSendEncodedImageSetsScalabilityModeOfCurrentLayer) { + EncodedImage encoded_image; + CodecSpecificInfo codec_info; + ScalabilityMode layer0_mode = ScalabilityMode::kL1T1; + ScalabilityMode layer1_mode = ScalabilityMode::kL1T3; + auto ssrc0 = config_.rtp.ssrcs[0]; + auto ssrc1 = config_.rtp.ssrcs[1]; + EXPECT_EQ(absl::nullopt, + statistics_proxy_->GetStats().substreams[ssrc0].scalability_mode); + EXPECT_EQ(absl::nullopt, + statistics_proxy_->GetStats().substreams[ssrc1].scalability_mode); + encoded_image.SetSpatialIndex(0); + codec_info.scalability_mode = layer0_mode; + statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info); + EXPECT_THAT(statistics_proxy_->GetStats().substreams[ssrc0].scalability_mode, + layer0_mode); + 
EXPECT_EQ(absl::nullopt, + statistics_proxy_->GetStats().substreams[ssrc1].scalability_mode); + encoded_image.SetSpatialIndex(1); + codec_info.scalability_mode = layer1_mode; + statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info); + EXPECT_THAT(statistics_proxy_->GetStats().substreams[ssrc0].scalability_mode, + layer0_mode); + EXPECT_THAT(statistics_proxy_->GetStats().substreams[ssrc1].scalability_mode, + layer1_mode); +} + TEST_F(SendStatisticsProxyTest, TotalEncodedBytesTargetFirstFrame) { const uint32_t kTargetBytesPerSecond = 100000; statistics_proxy_->OnSetEncoderTargetRate(kTargetBytesPerSecond * 8); diff --git a/third_party/libwebrtc/video/test/mock_video_stream_encoder.h b/third_party/libwebrtc/video/test/mock_video_stream_encoder.h index 2f982158f0c7..946f45cc7627 100644 --- a/third_party/libwebrtc/video/test/mock_video_stream_encoder.h +++ b/third_party/libwebrtc/video/test/mock_video_stream_encoder.h @@ -55,12 +55,20 @@ class MockVideoStreamEncoder : public VideoStreamEncoderInterface { MOCK_METHOD(void, MockedConfigureEncoder, (const VideoEncoderConfig&, size_t)); + MOCK_METHOD(void, + MockedConfigureEncoder, + (const VideoEncoderConfig&, size_t, SetParametersCallback)); // gtest generates implicit copy which is not allowed on VideoEncoderConfig, // so we can't mock ConfigureEncoder directly. void ConfigureEncoder(VideoEncoderConfig config, size_t max_data_payload_length) { MockedConfigureEncoder(config, max_data_payload_length); } + void ConfigureEncoder(VideoEncoderConfig config, + size_t max_data_payload_length, + SetParametersCallback) { + MockedConfigureEncoder(config, max_data_payload_length); + } }; } // namespace webrtc diff --git a/third_party/libwebrtc/video/video_quality_observer2.cc b/third_party/libwebrtc/video/video_quality_observer2.cc index 0751d3f4edbd..0afc2f5235a1 100644 --- a/third_party/libwebrtc/video/video_quality_observer2.cc +++ b/third_party/libwebrtc/video/video_quality_observer2.cc @@ -109,11 +109,13 @@ void VideoQualityObserver::UpdateHistograms(bool screenshare) { int num_resolution_downgrades_per_minute = num_resolution_downgrades_ * 60000 / video_duration_ms; - RTC_HISTOGRAM_COUNTS_SPARSE_100( - uma_prefix + ".NumberResolutionDownswitchesPerMinute", - num_resolution_downgrades_per_minute); - log_stream << uma_prefix << ".NumberResolutionDownswitchesPerMinute " - << num_resolution_downgrades_per_minute << "\n"; + if (!screenshare) { + RTC_HISTOGRAM_COUNTS_SPARSE_100( + uma_prefix + ".NumberResolutionDownswitchesPerMinute", + num_resolution_downgrades_per_minute); + log_stream << uma_prefix << ".NumberResolutionDownswitchesPerMinute " + << num_resolution_downgrades_per_minute << "\n"; + } int num_freezes_per_minute = freezes_durations_.NumSamples() * 60000 / video_duration_ms; diff --git a/third_party/libwebrtc/video/video_receive_stream2.cc b/third_party/libwebrtc/video/video_receive_stream2.cc index 151dce1bb9ab..78e8f1c017d7 100644 --- a/third_party/libwebrtc/video/video_receive_stream2.cc +++ b/third_party/libwebrtc/video/video_receive_stream2.cc @@ -665,11 +665,20 @@ int VideoReceiveStream2::GetBaseMinimumPlayoutDelayMs() const { } void VideoReceiveStream2::OnFrame(const VideoFrame& video_frame) { - VideoFrameMetaData frame_meta(video_frame, clock_->CurrentTime()); + source_tracker_.OnFrameDelivered(video_frame.packet_infos()); + config_.renderer->OnFrame(video_frame); // TODO(bugs.webrtc.org/10739): we should set local capture clock offset for // `video_frame.packet_infos`. But VideoFrame is const qualified here. 
+ // For frame delay metrics, calculated in `OnRenderedFrame`, to better reflect + // user experience measurements must be done as close as possible to frame + // rendering moment. Capture current time, which is used for calculation of + // delay metrics in `OnRenderedFrame`, right after frame is passed to + // renderer. Frame may or may be not rendered by this time. This results in + // inaccuracy but is still the best we can do in the absence of "frame + // rendered" callback from the renderer. + VideoFrameMetaData frame_meta(video_frame, clock_->CurrentTime()); call_->worker_thread()->PostTask( SafeTask(task_safety_.flag(), [frame_meta, this]() { RTC_DCHECK_RUN_ON(&worker_sequence_checker_); @@ -685,8 +694,6 @@ void VideoReceiveStream2::OnFrame(const VideoFrame& video_frame) { stats_proxy_.OnRenderedFrame(frame_meta); })); - source_tracker_.OnFrameDelivered(video_frame.packet_infos()); - config_.renderer->OnFrame(video_frame); webrtc::MutexLock lock(&pending_resolution_mutex_); if (pending_resolution_.has_value()) { if (!pending_resolution_->empty() && diff --git a/third_party/libwebrtc/video/video_send_stream.cc b/third_party/libwebrtc/video/video_send_stream.cc index bf5f99aca5eb..e5545e761cca 100644 --- a/third_party/libwebrtc/video/video_send_stream.cc +++ b/third_party/libwebrtc/video/video_send_stream.cc @@ -209,8 +209,12 @@ VideoSendStream::~VideoSendStream() { transport_->DestroyRtpVideoSender(rtp_video_sender_); } -void VideoSendStream::UpdateActiveSimulcastLayers( - const std::vector active_layers) { +void VideoSendStream::Start() { + const std::vector active_layers(config_.rtp.ssrcs.size(), true); + StartPerRtpStream(active_layers); +} + +void VideoSendStream::StartPerRtpStream(const std::vector active_layers) { RTC_DCHECK_RUN_ON(&thread_checker_); // Keep our `running_` flag expected state in sync with active layers since @@ -232,35 +236,16 @@ void VideoSendStream::UpdateActiveSimulcastLayers( } } active_layers_string << "}"; - RTC_LOG(LS_INFO) << "UpdateActiveSimulcastLayers: " - << active_layers_string.str(); + RTC_LOG(LS_INFO) << "StartPerRtpStream: " << active_layers_string.str(); rtp_transport_queue_->RunOrPost( SafeTask(transport_queue_safety_, [this, active_layers] { - send_stream_.UpdateActiveSimulcastLayers(active_layers); + send_stream_.StartPerRtpStream(active_layers); })); running_ = running; } -void VideoSendStream::Start() { - RTC_DCHECK_RUN_ON(&thread_checker_); - RTC_DLOG(LS_INFO) << "VideoSendStream::Start"; - if (running_) - return; - - running_ = true; - - // It is expected that after VideoSendStream::Start has been called, incoming - // frames are not dropped in VideoStreamEncoder. To ensure this, Start has to - // be synchronized. - // TODO(tommi): ^^^ Validate if this still holds. 
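// Annotation, not part of this patch: the dedicated Start() body removed here
// is superseded by the new Start() earlier in this hunk, which builds a vector
// with one `true` entry per configured RTP SSRC and forwards it to
// StartPerRtpStream() (the renamed UpdateActiveSimulcastLayers), so starting
// the stream and toggling individual simulcast layers now share one code path.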
- rtp_transport_queue_->RunSynchronous([this] { - transport_queue_safety_->SetAlive(); - send_stream_.Start(); - }); -} - void VideoSendStream::Stop() { RTC_DCHECK_RUN_ON(&thread_checker_); if (!running_) @@ -301,11 +286,17 @@ void VideoSendStream::SetSource( } void VideoSendStream::ReconfigureVideoEncoder(VideoEncoderConfig config) { + ReconfigureVideoEncoder(std::move(config), nullptr); +} + +void VideoSendStream::ReconfigureVideoEncoder(VideoEncoderConfig config, + SetParametersCallback callback) { RTC_DCHECK_RUN_ON(&thread_checker_); RTC_DCHECK_EQ(content_type_, config.content_type); video_stream_encoder_->ConfigureEncoder( std::move(config), - config_.rtp.max_packet_size - CalculateMaxHeaderSize(config_.rtp)); + config_.rtp.max_packet_size - CalculateMaxHeaderSize(config_.rtp), + std::move(callback)); } VideoSendStream::Stats VideoSendStream::GetStats() { diff --git a/third_party/libwebrtc/video/video_send_stream.h b/third_party/libwebrtc/video/video_send_stream.h index 4df51905e1c2..404873fd39bd 100644 --- a/third_party/libwebrtc/video/video_send_stream.h +++ b/third_party/libwebrtc/video/video_send_stream.h @@ -77,8 +77,8 @@ class VideoSendStream : public webrtc::VideoSendStream { void DeliverRtcp(const uint8_t* packet, size_t length); // webrtc::VideoSendStream implementation. - void UpdateActiveSimulcastLayers(std::vector active_layers) override; void Start() override; + void StartPerRtpStream(std::vector active_layers) override; void Stop() override; bool started() override; @@ -88,7 +88,9 @@ class VideoSendStream : public webrtc::VideoSendStream { void SetSource(rtc::VideoSourceInterface* source, const DegradationPreference& degradation_preference) override; - void ReconfigureVideoEncoder(VideoEncoderConfig) override; + void ReconfigureVideoEncoder(VideoEncoderConfig config) override; + void ReconfigureVideoEncoder(VideoEncoderConfig config, + SetParametersCallback callback) override; Stats GetStats() override; void StopPermanentlyAndGetRtpStates(RtpStateMap* rtp_state_map, diff --git a/third_party/libwebrtc/video/video_send_stream_impl.cc b/third_party/libwebrtc/video/video_send_stream_impl.cc index 06f6a05e2f15..f34388e56aeb 100644 --- a/third_party/libwebrtc/video/video_send_stream_impl.cc +++ b/third_party/libwebrtc/video/video_send_stream_impl.cc @@ -312,31 +312,18 @@ void VideoSendStreamImpl::DeliverRtcp(const uint8_t* packet, size_t length) { rtp_video_sender_->DeliverRtcp(packet, length); } -void VideoSendStreamImpl::UpdateActiveSimulcastLayers( +void VideoSendStreamImpl::StartPerRtpStream( const std::vector active_layers) { RTC_DCHECK_RUN_ON(rtp_transport_queue_); bool previously_active = rtp_video_sender_->IsActive(); rtp_video_sender_->SetActiveModules(active_layers); if (!rtp_video_sender_->IsActive() && previously_active) { - // Payload router switched from active to inactive. StopVideoSendStream(); } else if (rtp_video_sender_->IsActive() && !previously_active) { - // Payload router switched from inactive to active. 
StartupVideoSendStream(); } } -void VideoSendStreamImpl::Start() { - RTC_DCHECK_RUN_ON(rtp_transport_queue_); - RTC_LOG(LS_INFO) << "VideoSendStream::Start"; - if (rtp_video_sender_->IsActive()) - return; - - TRACE_EVENT_INSTANT0("webrtc", "VideoSendStream::Start"); - rtp_video_sender_->SetActive(true); - StartupVideoSendStream(); -} - void VideoSendStreamImpl::StartupVideoSendStream() { RTC_DCHECK_RUN_ON(rtp_transport_queue_); transport_queue_safety_->SetAlive(); @@ -378,7 +365,7 @@ void VideoSendStreamImpl::Stop() { RTC_DCHECK(transport_queue_safety_->alive()); TRACE_EVENT_INSTANT0("webrtc", "VideoSendStream::Stop"); - rtp_video_sender_->SetActive(false); + rtp_video_sender_->Stop(); StopVideoSendStream(); } diff --git a/third_party/libwebrtc/video/video_send_stream_impl.h b/third_party/libwebrtc/video/video_send_stream_impl.h index d444eabc21e1..f1454506555a 100644 --- a/third_party/libwebrtc/video/video_send_stream_impl.h +++ b/third_party/libwebrtc/video/video_send_stream_impl.h @@ -79,8 +79,7 @@ class VideoSendStreamImpl : public webrtc::BitrateAllocatorObserver, ~VideoSendStreamImpl() override; void DeliverRtcp(const uint8_t* packet, size_t length); - void UpdateActiveSimulcastLayers(std::vector active_layers); - void Start(); + void StartPerRtpStream(std::vector active_layers); void Stop(); // TODO(holmer): Move these to RtpTransportControllerSend. diff --git a/third_party/libwebrtc/video/video_send_stream_impl_unittest.cc b/third_party/libwebrtc/video/video_send_stream_impl_unittest.cc index 8a88ba0f167a..c38dcd0e1edf 100644 --- a/third_party/libwebrtc/video/video_send_stream_impl_unittest.cc +++ b/third_party/libwebrtc/video/video_send_stream_impl_unittest.cc @@ -66,8 +66,8 @@ std::string GetAlrProbingExperimentString() { } class MockRtpVideoSender : public RtpVideoSenderInterface { public: - MOCK_METHOD(void, SetActive, (bool), (override)); - MOCK_METHOD(void, SetActiveModules, (const std::vector), (override)); + MOCK_METHOD(void, SetActiveModules, (const std::vector&), (override)); + MOCK_METHOD(void, Stop, (), (override)); MOCK_METHOD(bool, IsActive, (), (override)); MOCK_METHOD(void, OnNetworkAvailability, (bool), (override)); MOCK_METHOD((std::map), @@ -139,12 +139,19 @@ class VideoSendStreamImplTest : public ::testing::Test { .WillRepeatedly(Return(&packet_router_)); EXPECT_CALL(transport_controller_, CreateRtpVideoSender) .WillRepeatedly(Return(&rtp_video_sender_)); - EXPECT_CALL(rtp_video_sender_, SetActive(_)) - .WillRepeatedly(::testing::Invoke( - [&](bool active) { rtp_video_sender_active_ = active; })); - EXPECT_CALL(rtp_video_sender_, IsActive()) - .WillRepeatedly( - ::testing::Invoke([&]() { return rtp_video_sender_active_; })); + ON_CALL(rtp_video_sender_, Stop()).WillByDefault(::testing::Invoke([&] { + active_modules_.clear(); + })); + ON_CALL(rtp_video_sender_, IsActive()) + .WillByDefault(::testing::Invoke([&]() { + for (bool enabled : active_modules_) { + if (enabled) + return true; + } + return false; + })); + ON_CALL(rtp_video_sender_, SetActiveModules) + .WillByDefault(::testing::SaveArg<0>(&active_modules_)); ON_CALL(transport_controller_, GetWorkerQueue()) .WillByDefault(Return(&worker_queue_)); } @@ -183,8 +190,8 @@ class VideoSendStreamImplTest : public ::testing::Test { NiceMock bitrate_allocator_; NiceMock video_stream_encoder_; NiceMock rtp_video_sender_; + std::vector active_modules_; - bool rtp_video_sender_active_ = false; RtcEventLogNull event_log_; VideoSendStream::Config config_; SendDelayStats send_delay_stats_; @@ -210,7 +217,7 @@ 
TEST_F(VideoSendStreamImplTest, RegistersAsBitrateObserverOnStart) { EXPECT_EQ(config.bitrate_priority, kDefaultBitratePriority); })); worker_queue_.RunSynchronous([&] { - vss_impl->Start(); + vss_impl->StartPerRtpStream({true}); EXPECT_CALL(bitrate_allocator_, RemoveObserver(vss_impl.get())).Times(1); vss_impl->Stop(); }); @@ -225,7 +232,7 @@ TEST_F(VideoSendStreamImplTest, UpdatesObserverOnConfigurationChange) { kDefaultInitialBitrateBps, kDefaultBitratePriority, VideoEncoderConfig::ContentType::kRealtimeVideo); - worker_queue_.RunSynchronous([&] { vss_impl->Start(); }); + worker_queue_.RunSynchronous([&] { vss_impl->StartPerRtpStream({true}); }); // QVGA + VGA configuration matching defaults in // media/engine/simulcast.cc. @@ -291,7 +298,7 @@ TEST_F(VideoSendStreamImplTest, UpdatesObserverOnConfigurationChangeWithAlr) { auto vss_impl = CreateVideoSendStreamImpl( kDefaultInitialBitrateBps, kDefaultBitratePriority, VideoEncoderConfig::ContentType::kScreen); - worker_queue_.RunSynchronous([&] { vss_impl->Start(); }); + worker_queue_.RunSynchronous([&] { vss_impl->StartPerRtpStream({true}); }); // Simulcast screenshare. VideoStream low_stream; @@ -357,7 +364,7 @@ TEST_F(VideoSendStreamImplTest, kDefaultInitialBitrateBps, kDefaultBitratePriority, VideoEncoderConfig::ContentType::kRealtimeVideo); - worker_queue_.RunSynchronous([&] { vss_impl->Start(); }); + worker_queue_.RunSynchronous([&] { vss_impl->StartPerRtpStream({true}); }); // 2-layer video simulcast. VideoStream low_stream; low_stream.width = 320; @@ -422,7 +429,7 @@ TEST_F(VideoSendStreamImplTest, SetsScreensharePacingFactorWithFeedback) { kDefaultInitialBitrateBps, kDefaultBitratePriority, VideoEncoderConfig::ContentType::kScreen); worker_queue_.RunSynchronous([&] { - vss_impl->Start(); + vss_impl->StartPerRtpStream({true}); vss_impl->Stop(); }); } @@ -434,7 +441,7 @@ TEST_F(VideoSendStreamImplTest, DoesNotSetPacingFactorWithoutFeedback) { VideoEncoderConfig::ContentType::kScreen); worker_queue_.RunSynchronous([&] { EXPECT_CALL(transport_controller_, SetPacingFactor(_)).Times(0); - vss_impl->Start(); + vss_impl->StartPerRtpStream({true}); vss_impl->Stop(); }); } @@ -447,7 +454,7 @@ TEST_F(VideoSendStreamImplTest, ForwardsVideoBitrateAllocationWhenEnabled) { EXPECT_CALL(transport_controller_, SetPacingFactor(_)).Times(0); VideoStreamEncoderInterface::EncoderSink* const sink = static_cast(vss_impl.get()); - worker_queue_.RunSynchronous([&] { vss_impl->Start(); }); + worker_queue_.RunSynchronous([&] { vss_impl->StartPerRtpStream({true}); }); // Populate a test instance of video bitrate allocation. VideoBitrateAllocation alloc; alloc.SetBitrate(0, 0, 10000); @@ -494,7 +501,7 @@ TEST_F(VideoSendStreamImplTest, ThrottlesVideoBitrateAllocationWhenTooSimilar) { kDefaultInitialBitrateBps, kDefaultBitratePriority, VideoEncoderConfig::ContentType::kScreen); worker_queue_.RunSynchronous([&] { - vss_impl->Start(); + vss_impl->StartPerRtpStream({true}); // Unpause encoder, to allows allocations to be passed through. const uint32_t kBitrateBps = 100000; EXPECT_CALL(rtp_video_sender_, GetPayloadBitrateBps()) @@ -556,7 +563,7 @@ TEST_F(VideoSendStreamImplTest, ForwardsVideoBitrateAllocationOnLayerChange) { VideoEncoderConfig::ContentType::kScreen); worker_queue_.RunSynchronous([&] { - vss_impl->Start(); + vss_impl->StartPerRtpStream({true}); // Unpause encoder, to allows allocations to be passed through. 
const uint32_t kBitrateBps = 100000; EXPECT_CALL(rtp_video_sender_, GetPayloadBitrateBps()) @@ -599,7 +606,7 @@ TEST_F(VideoSendStreamImplTest, ForwardsVideoBitrateAllocationAfterTimeout) { kDefaultInitialBitrateBps, kDefaultBitratePriority, VideoEncoderConfig::ContentType::kScreen); worker_queue_.RunSynchronous([&] { - vss_impl->Start(); + vss_impl->StartPerRtpStream({true}); const uint32_t kBitrateBps = 100000; // Unpause encoder, to allows allocations to be passed through. EXPECT_CALL(rtp_video_sender_, GetPayloadBitrateBps()) @@ -709,7 +716,7 @@ TEST_F(VideoSendStreamImplTest, CallsVideoStreamEncoderOnBitrateUpdate) { auto vss_impl = CreateVideoSendStreamImpl( kDefaultInitialBitrateBps, kDefaultBitratePriority, VideoEncoderConfig::ContentType::kRealtimeVideo); - worker_queue_.RunSynchronous([&] { vss_impl->Start(); }); + worker_queue_.RunSynchronous([&] { vss_impl->StartPerRtpStream({true}); }); VideoStream qvga_stream; qvga_stream.width = 320; qvga_stream.height = 180; @@ -842,7 +849,7 @@ TEST_F(VideoSendStreamImplTest, DisablesPaddingOnPausedEncoder) { int min_transmit_bitrate_bps = 30000; config_.rtp.ssrcs.emplace_back(1); - worker_queue_.RunSynchronous([&] { vss_impl->Start(); }); + worker_queue_.RunSynchronous([&] { vss_impl->StartPerRtpStream({true}); }); // Starts without padding. EXPECT_EQ(0, padding_bitrate); encoder_queue_->PostTask([&] { @@ -893,7 +900,7 @@ TEST_F(VideoSendStreamImplTest, KeepAliveOnDroppedFrame) { VideoEncoderConfig::ContentType::kRealtimeVideo); EXPECT_CALL(bitrate_allocator_, RemoveObserver(vss_impl.get())).Times(0); worker_queue_.RunSynchronous([&] { - vss_impl->Start(); + vss_impl->StartPerRtpStream({true}); const uint32_t kBitrateBps = 100000; EXPECT_CALL(rtp_video_sender_, GetPayloadBitrateBps()) .Times(1) @@ -941,7 +948,7 @@ TEST_F(VideoSendStreamImplTest, ConfiguresBitratesForSvc) { ? VideoEncoderConfig::ContentType::kScreen : VideoEncoderConfig::ContentType::kRealtimeVideo); - worker_queue_.RunSynchronous([&] { vss_impl->Start(); }); + worker_queue_.RunSynchronous([&] { vss_impl->StartPerRtpStream({true}); }); // Svc VideoStream stream; diff --git a/third_party/libwebrtc/video/video_send_stream_tests.cc b/third_party/libwebrtc/video/video_send_stream_tests.cc index 958d04e24745..923c318c6d62 100644 --- a/third_party/libwebrtc/video/video_send_stream_tests.cc +++ b/third_party/libwebrtc/video/video_send_stream_tests.cc @@ -2694,9 +2694,7 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) { // bitrates than expected by this test, due to encoder pushback and subtracted // overhead. 
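  // Annotation, not part of this patch: the WebRTC-SendSideBwe-WithOverhead
  // override is dropped just below, so packet overhead is now deducted from
  // the target bitrate by default. The exact-match checks in this test
  // therefore become tolerance-based: WaitForSetRates() gains an abs_error
  // parameter and the start-bitrate assertions switch to EXPECT_NEAR.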
webrtc::test::ScopedKeyValueConfig field_trials( - field_trials_, - "WebRTC-VideoRateControl/bitrate_adjuster:false/" - "WebRTC-SendSideBwe-WithOverhead/Disabled/"); + field_trials_, "WebRTC-VideoRateControl/bitrate_adjuster:false/"); class EncoderBitrateThresholdObserver : public test::SendTest, public VideoBitrateAllocatorFactory, @@ -2722,8 +2720,8 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) { EXPECT_LE(codec.startBitrate, codec.maxBitrate); if (num_rate_allocator_creations_ == 0) { EXPECT_EQ(static_cast(kMinBitrateKbps), codec.minBitrate); - EXPECT_EQ(static_cast(kStartBitrateKbps), - codec.startBitrate); + EXPECT_NEAR(static_cast(kStartBitrateKbps), + codec.startBitrate, 10); EXPECT_EQ(static_cast(kMaxBitrateKbps), codec.maxBitrate); } else if (num_rate_allocator_creations_ == 1) { EXPECT_EQ(static_cast(kLowerMaxBitrateKbps), @@ -2749,8 +2747,8 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) { EXPECT_EQ(0, num_encoder_initializations_); EXPECT_EQ(static_cast(kMinBitrateKbps), codecSettings->minBitrate); - EXPECT_EQ(static_cast(kStartBitrateKbps), - codecSettings->startBitrate); + EXPECT_NEAR(static_cast(kStartBitrateKbps), + codecSettings->startBitrate, 10); EXPECT_EQ(static_cast(kMaxBitrateKbps), codecSettings->maxBitrate); @@ -2775,14 +2773,18 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) { FakeEncoder::SetRates(parameters); } - void WaitForSetRates(uint32_t expected_bitrate) { + void WaitForSetRates(uint32_t expected_bitrate, int abs_error) { // Wait for the expected rate to be set. In some cases there can be // more than one update pending, in which case we keep waiting // until the correct value has been observed. + // The target_bitrate_ is reduced by the calculated packet overhead. const int64_t start_time = rtc::TimeMillis(); do { MutexLock lock(&mutex_); - if (target_bitrate_ == expected_bitrate) { + + int error = target_bitrate_ - expected_bitrate; + if ((error < 0 && error >= -abs_error) || + (error >= 0 && error <= abs_error)) { return; } } while (bitrate_changed_event_.Wait( @@ -2790,7 +2792,7 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) { VideoSendStreamTest::kDefaultTimeout - TimeDelta::Millis(rtc::TimeMillis() - start_time)))); MutexLock lock(&mutex_); - EXPECT_EQ(target_bitrate_, expected_bitrate) + EXPECT_NEAR(target_bitrate_, expected_bitrate, abs_error) << "Timed out while waiting encoder rate to be set."; } @@ -2832,7 +2834,7 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) { << "Timed out while waiting for rate allocator to be created."; ASSERT_TRUE(init_encode_event_.Wait(VideoSendStreamTest::kDefaultTimeout)) << "Timed out while waiting for encoder to be configured."; - WaitForSetRates(kStartBitrateKbps); + WaitForSetRates(kStartBitrateKbps, 80); BitrateConstraints bitrate_config; bitrate_config.start_bitrate_bps = kIncreasedStartBitrateKbps * 1000; bitrate_config.max_bitrate_bps = kIncreasedMaxBitrateKbps * 1000; @@ -2841,7 +2843,7 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) { bitrate_config); }); // Encoder rate is capped by EncoderConfig max_bitrate_bps. 
- WaitForSetRates(kMaxBitrateKbps); + WaitForSetRates(kMaxBitrateKbps, 10); encoder_config_.max_bitrate_bps = kLowerMaxBitrateKbps * 1000; SendTask(task_queue_, [&]() { send_stream_->ReconfigureVideoEncoder(encoder_config_.Copy()); @@ -2851,7 +2853,7 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) { EXPECT_EQ(2, num_rate_allocator_creations_) << "Rate allocator should have been recreated."; - WaitForSetRates(kLowerMaxBitrateKbps); + WaitForSetRates(kLowerMaxBitrateKbps, 10); EXPECT_EQ(1, num_encoder_initializations_); encoder_config_.max_bitrate_bps = kIncreasedMaxBitrateKbps * 1000; @@ -2865,7 +2867,7 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) { // Expected target bitrate is the start bitrate set in the call to // call_->GetTransportControllerSend()->SetSdpBitrateParameters. - WaitForSetRates(kIncreasedStartBitrateKbps); + WaitForSetRates(kIncreasedStartBitrateKbps, 10); EXPECT_EQ(1, num_encoder_initializations_); } @@ -3701,8 +3703,6 @@ TEST_F(VideoSendStreamTest, EncoderConfigMaxFramerateReportedToSource) { // testing that the maximum possible target payload rate is smaller than the // maximum bandwidth estimate by the overhead rate. TEST_F(VideoSendStreamTest, RemoveOverheadFromBandwidth) { - test::ScopedFieldTrials override_field_trials( - "WebRTC-SendSideBwe-WithOverhead/Enabled/"); class RemoveOverheadFromBandwidthTest : public test::EndToEndTest, public test::FakeEncoder { public: diff --git a/third_party/libwebrtc/video/video_stream_encoder.cc b/third_party/libwebrtc/video/video_stream_encoder.cc index de40ccf34812..c680fe12c8fa 100644 --- a/third_party/libwebrtc/video/video_stream_encoder.cc +++ b/third_party/libwebrtc/video/video_stream_encoder.cc @@ -35,6 +35,7 @@ #include "call/adaptation/resource_adaptation_processor.h" #include "call/adaptation/video_source_restrictions.h" #include "call/adaptation/video_stream_adapter.h" +#include "media/base/media_channel.h" #include "modules/video_coding/include/video_codec_initializer.h" #include "modules/video_coding/svc/svc_rate_allocator.h" #include "modules/video_coding/utility/vp8_constants.h" @@ -149,6 +150,10 @@ bool RequiresEncoderReset(const VideoCodec& prev_send_codec, if (new_send_codec.codecType == kVideoCodecVP9) { size_t num_spatial_layers = new_send_codec.VP9().numberOfSpatialLayers; for (unsigned char i = 0; i < num_spatial_layers; ++i) { + if (!new_send_codec.spatialLayers[i].active) { + // No need to reset when layer is inactive. 
+ continue; + } if (new_send_codec.spatialLayers[i].width != prev_send_codec.spatialLayers[i].width || new_send_codec.spatialLayers[i].height != @@ -156,7 +161,8 @@ bool RequiresEncoderReset(const VideoCodec& prev_send_codec, new_send_codec.spatialLayers[i].numberOfTemporalLayers != prev_send_codec.spatialLayers[i].numberOfTemporalLayers || new_send_codec.spatialLayers[i].qpMax != - prev_send_codec.spatialLayers[i].qpMax) { + prev_send_codec.spatialLayers[i].qpMax || + !prev_send_codec.spatialLayers[i].active) { return true; } } @@ -879,9 +885,16 @@ void VideoStreamEncoder::SetStartBitrate(int start_bitrate_bps) { void VideoStreamEncoder::ConfigureEncoder(VideoEncoderConfig config, size_t max_data_payload_length) { + ConfigureEncoder(std::move(config), max_data_payload_length, nullptr); +} + +void VideoStreamEncoder::ConfigureEncoder(VideoEncoderConfig config, + size_t max_data_payload_length, + SetParametersCallback callback) { RTC_DCHECK_RUN_ON(worker_queue_); encoder_queue_.PostTask( - [this, config = std::move(config), max_data_payload_length]() mutable { + [this, config = std::move(config), max_data_payload_length, + callback = std::move(callback)]() mutable { RTC_DCHECK_RUN_ON(&encoder_queue_); RTC_DCHECK(sink_); RTC_LOG(LS_INFO) << "ConfigureEncoder requested."; @@ -912,7 +925,13 @@ void VideoStreamEncoder::ConfigureEncoder(VideoEncoderConfig config, // minimize the number of reconfigurations. The codec configuration // depends on incoming video frame size. if (last_frame_info_) { + if (callback) { + encoder_configuration_callbacks_.push_back(std::move(callback)); + } + ReconfigureEncoder(); + } else { + webrtc::InvokeSetParametersCallback(callback, webrtc::RTCError::OK()); } }); } @@ -1369,6 +1388,8 @@ void VideoStreamEncoder::ReconfigureEncoder() { stream_resource_manager_.ConfigureQualityScaler(info); stream_resource_manager_.ConfigureBandwidthQualityScaler(info); + webrtc::RTCError encoder_configuration_result = webrtc::RTCError::OK(); + if (!encoder_initialized_) { RTC_LOG(LS_WARNING) << "Failed to initialize " << CodecTypeToPayloadString(codec.codecType) @@ -1378,8 +1399,19 @@ void VideoStreamEncoder::ReconfigureEncoder() { if (switch_encoder_on_init_failures_) { RequestEncoderSwitch(); + } else { + encoder_configuration_result = + webrtc::RTCError(RTCErrorType::UNSUPPORTED_OPERATION); } } + + if (!encoder_configuration_callbacks_.empty()) { + for (auto& callback : encoder_configuration_callbacks_) { + webrtc::InvokeSetParametersCallback(callback, + encoder_configuration_result); + } + encoder_configuration_callbacks_.clear(); + } } void VideoStreamEncoder::RequestEncoderSwitch() { diff --git a/third_party/libwebrtc/video/video_stream_encoder.h b/third_party/libwebrtc/video/video_stream_encoder.h index e94c369a1912..ccff3ffefd77 100644 --- a/third_party/libwebrtc/video/video_stream_encoder.h +++ b/third_party/libwebrtc/video/video_stream_encoder.h @@ -17,8 +17,10 @@ #include #include +#include "absl/container/inlined_vector.h" #include "api/adaptation/resource.h" #include "api/field_trials_view.h" +#include "api/rtp_sender_interface.h" #include "api/sequence_checker.h" #include "api/task_queue/pending_task_safety_flag.h" #include "api/units/data_rate.h" @@ -106,6 +108,9 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, void ConfigureEncoder(VideoEncoderConfig config, size_t max_data_payload_length) override; + void ConfigureEncoder(VideoEncoderConfig config, + size_t max_data_payload_length, + SetParametersCallback callback) override; // Permanently stop 
encoding. After this method has returned, it is // guaranteed that no encoded frames will be delivered to the sink. @@ -302,6 +307,8 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, // Set when configuration must create a new encoder object, e.g., // because of a codec change. bool pending_encoder_creation_ RTC_GUARDED_BY(&encoder_queue_); + absl::InlinedVector encoder_configuration_callbacks_ + RTC_GUARDED_BY(&encoder_queue_); absl::optional last_frame_info_ RTC_GUARDED_BY(&encoder_queue_); diff --git a/third_party/libwebrtc/video/video_stream_encoder_interface.h b/third_party/libwebrtc/video/video_stream_encoder_interface.h index e716572e682c..25190aa474a9 100644 --- a/third_party/libwebrtc/video/video_stream_encoder_interface.h +++ b/third_party/libwebrtc/video/video_stream_encoder_interface.h @@ -15,7 +15,9 @@ #include "api/adaptation/resource.h" #include "api/fec_controller_override.h" +#include "api/rtc_error.h" #include "api/rtp_parameters.h" // For DegradationPreference. +#include "api/rtp_sender_interface.h" #include "api/scoped_refptr.h" #include "api/units/data_rate.h" #include "api/video/video_bitrate_allocator.h" @@ -131,6 +133,9 @@ class VideoStreamEncoderInterface { // packetization for H.264. virtual void ConfigureEncoder(VideoEncoderConfig config, size_t max_data_payload_length) = 0; + virtual void ConfigureEncoder(VideoEncoderConfig config, + size_t max_data_payload_length, + SetParametersCallback callback) = 0; // Permanently stop encoding. After this method has returned, it is // guaranteed that no encoded frames will be delivered to the sink. diff --git a/third_party/libwebrtc/video/video_stream_encoder_unittest.cc b/third_party/libwebrtc/video/video_stream_encoder_unittest.cc index 5271654ac9c6..cdd4c75ab74b 100644 --- a/third_party/libwebrtc/video/video_stream_encoder_unittest.cc +++ b/third_party/libwebrtc/video/video_stream_encoder_unittest.cc @@ -885,7 +885,7 @@ class VideoStreamEncoderTest : public ::testing::Test { &video_source_, webrtc::DegradationPreference::MAINTAIN_FRAMERATE); video_stream_encoder_->SetStartBitrate(kTargetBitrate.bps()); video_stream_encoder_->ConfigureEncoder(std::move(video_encoder_config), - kMaxPayloadLength); + kMaxPayloadLength, nullptr); video_stream_encoder_->WaitUntilTaskQueueIsIdle(); } @@ -1097,7 +1097,8 @@ class VideoStreamEncoderTest : public ::testing::Test { quality_scaling_ = b; } - void SetRequestedResolutionAlignment(int requested_resolution_alignment) { + void SetRequestedResolutionAlignment( + uint32_t requested_resolution_alignment) { MutexLock lock(&local_mutex_); requested_resolution_alignment_ = requested_resolution_alignment; } @@ -1331,7 +1332,7 @@ class VideoStreamEncoderTest : public ::testing::Test { int last_input_width_ RTC_GUARDED_BY(local_mutex_) = 0; int last_input_height_ RTC_GUARDED_BY(local_mutex_) = 0; bool quality_scaling_ RTC_GUARDED_BY(local_mutex_) = true; - int requested_resolution_alignment_ RTC_GUARDED_BY(local_mutex_) = 1; + uint32_t requested_resolution_alignment_ RTC_GUARDED_BY(local_mutex_) = 1; bool apply_alignment_to_all_simulcast_layers_ RTC_GUARDED_BY(local_mutex_) = false; bool is_hardware_accelerated_ RTC_GUARDED_BY(local_mutex_) = false; @@ -2472,7 +2473,7 @@ class ResolutionAlignmentTest scale_factors_(::testing::get<1>(GetParam())) {} protected: - const int requested_alignment_; + const uint32_t requested_alignment_; const std::vector scale_factors_; }; @@ -2538,8 +2539,8 @@ TEST_P(ResolutionAlignmentTest, SinkWantsAlignmentApplied) { 
EXPECT_EQ(codec.numberOfSimulcastStreams, num_streams); // Frame size should be a multiple of the requested alignment. for (int i = 0; i < codec.numberOfSimulcastStreams; ++i) { - EXPECT_EQ(codec.simulcastStream[i].width % requested_alignment_, 0); - EXPECT_EQ(codec.simulcastStream[i].height % requested_alignment_, 0); + EXPECT_EQ(codec.simulcastStream[i].width % requested_alignment_, 0u); + EXPECT_EQ(codec.simulcastStream[i].height % requested_alignment_, 0u); // Aspect ratio should match. EXPECT_EQ(codec.width * codec.simulcastStream[i].height, codec.height * codec.simulcastStream[i].width); @@ -8648,6 +8649,64 @@ TEST_F(VideoStreamEncoderTest, video_stream_encoder_->Stop(); } +TEST_F(VideoStreamEncoderTest, RecreatesEncoderWhenEnableVp9SpatialLayer) { + // Set up encoder to use VP9 SVC using two spatial layers. + fake_encoder_.SetTemporalLayersSupported(/*spatial_idx=*/0, true); + fake_encoder_.SetTemporalLayersSupported(/*spatial_idx*/ 1, true); + VideoEncoderConfig video_encoder_config; + test::FillEncoderConfiguration(VideoCodecType::kVideoCodecVP9, + /* num_streams*/ 1, &video_encoder_config); + video_encoder_config.max_bitrate_bps = 2 * kTargetBitrate.bps(); + video_encoder_config.content_type = + VideoEncoderConfig::ContentType::kRealtimeVideo; + VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings(); + vp9_settings.numberOfSpatialLayers = 2; + vp9_settings.numberOfTemporalLayers = 2; + vp9_settings.interLayerPred = InterLayerPredMode::kOn; + vp9_settings.automaticResizeOn = false; + video_encoder_config.encoder_specific_settings = + rtc::make_ref_counted( + vp9_settings); + video_encoder_config.spatial_layers = GetSvcConfig(1280, 720, + /*fps=*/30.0, + /*first_active_layer=*/0, + /*num_spatial_layers=*/2, + /*num_temporal_layers=*/3, + /*is_screenshare=*/false); + ConfigureEncoder(video_encoder_config.Copy(), + VideoStreamEncoder::BitrateAllocationCallbackType:: + kVideoLayersAllocation); + + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0); + + video_source_.IncomingCapturedFrame(CreateFrame(CurrentTimeMs(), 1280, 720)); + WaitForEncodedFrame(CurrentTimeMs()); + EXPECT_EQ(fake_encoder_.GetNumInitializations(), 1); + + // Turn off the top spatial layer. This does not require an encoder reset. + video_encoder_config.spatial_layers[1].active = false; + video_stream_encoder_->ConfigureEncoder(video_encoder_config.Copy(), + kMaxPayloadLength, nullptr); + + time_controller_.AdvanceTime(TimeDelta::Millis(33)); + video_source_.IncomingCapturedFrame(CreateFrame(CurrentTimeMs(), 1280, 720)); + WaitForEncodedFrame(CurrentTimeMs()); + EXPECT_EQ(fake_encoder_.GetNumInitializations(), 1); + + // Turn on the top spatial layer again, this does require an encoder reset. 
+ video_encoder_config.spatial_layers[1].active = true; + video_stream_encoder_->ConfigureEncoder(video_encoder_config.Copy(), + kMaxPayloadLength, nullptr); + + time_controller_.AdvanceTime(TimeDelta::Millis(33)); + video_source_.IncomingCapturedFrame(CreateFrame(CurrentTimeMs(), 1280, 720)); + WaitForEncodedFrame(CurrentTimeMs()); + EXPECT_EQ(fake_encoder_.GetNumInitializations(), 2); + + video_stream_encoder_->Stop(); +} + #endif // !defined(WEBRTC_IOS) // Test parameters: (VideoCodecType codec, bool allow_i420_conversion) diff --git a/third_party/libwebrtc/webrtc_gn/moz.build b/third_party/libwebrtc/webrtc_gn/moz.build index 8e6ad57a2a25..55364f4fffe2 100644 --- a/third_party/libwebrtc/webrtc_gn/moz.build +++ b/third_party/libwebrtc/webrtc_gn/moz.build @@ -170,12 +170,6 @@ if CONFIG["CPU_ARCH"] == "arm": DEFINES["WEBRTC_ARCH_ARM_V7"] = True DEFINES["WEBRTC_HAS_NEON"] = True -if CONFIG["CPU_ARCH"] == "ppc64": - - OS_LIBS += [ - "m" - ] - if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Android": DEFINES["_DEBUG"] = True @@ -229,10 +223,6 @@ if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Linux": DEFINES["WEBRTC_ENABLE_AVX2"] = True DEFINES["_GNU_SOURCE"] = True - OS_LIBS += [ - "m" - ] - if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Linux": DEFINES["WEBRTC_ENABLE_AVX2"] = True @@ -243,17 +233,9 @@ if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux": DEFINES["WEBRTC_ENABLE_AVX2"] = True DEFINES["_GNU_SOURCE"] = True - OS_LIBS += [ - "m" - ] - if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Linux": DEFINES["WEBRTC_ENABLE_AVX2"] = True DEFINES["_GNU_SOURCE"] = True - OS_LIBS += [ - "m" - ] - Library("webrtc_gn") diff --git a/third_party/libwebrtc/whitespace.txt b/third_party/libwebrtc/whitespace.txt index daecb0a09d20..42d622a4cb0e 100644 --- a/third_party/libwebrtc/whitespace.txt +++ b/third_party/libwebrtc/whitespace.txt @@ -4,4 +4,3 @@ Try to write something funny. And please don't add trailing whitespace. Once upon a time there was an elephant in Stockholm. Everyone knew about it, but nobody dared say anything. In the end it didn't make a difference since everyone was working from home. -
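Note (not part of the vendored patch): the video_stream_encoder hunks above add a ConfigureEncoder() overload on VideoStreamEncoder/VideoStreamEncoderInterface that accepts a SetParametersCallback and resolves it once the encoder queue has applied the new configuration -- with RTCError::OK() on success, or UNSUPPORTED_OPERATION when encoder initialization fails and no encoder switch is requested. Passing nullptr, as the updated unit tests do, keeps the previous fire-and-forget behaviour. The sketch below is purely illustrative of how a caller could observe the reconfiguration result through the new overload; the helper name ReconfigureWithResult and the logging are assumptions, not code from this patch.

// Illustrative sketch only -- not part of the vendored patch.
#include <cstddef>
#include <utility>

#include "api/rtc_error.h"
#include "rtc_base/logging.h"
// Also provides the VideoEncoderConfig and SetParametersCallback types used
// by the ConfigureEncoder() signatures in the hunks above.
#include "video/video_stream_encoder_interface.h"

// Hypothetical helper: reconfigure the encoder and observe the result via the
// SetParametersCallback parameter added by this patch.
void ReconfigureWithResult(webrtc::VideoStreamEncoderInterface& encoder,
                           webrtc::VideoEncoderConfig config,
                           size_t max_data_payload_length) {
  encoder.ConfigureEncoder(
      std::move(config), max_data_payload_length,
      [](webrtc::RTCError error) {
        // Resolved via webrtc::InvokeSetParametersCallback() on the encoder
        // queue after ReconfigureEncoder() runs, or immediately with OK() if
        // no frame has been received yet and no reconfiguration is pending.
        if (!error.ok()) {
          RTC_LOG(LS_WARNING) << "Encoder reconfiguration failed: "
                              << error.message();
        }
      });
}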