Backed out 6 changesets (bug 1692873) for causing wpt failures @ screenshare.https.html

Backed out changeset d4850b36e2ff (bug 1692873)
Backed out changeset 26a05286fa42 (bug 1692873)
Backed out changeset d88f2751aa16 (bug 1692873)
Backed out changeset fd77389cbc70 (bug 1692873)
Backed out changeset 154f072ea936 (bug 1692873)
Backed out changeset 88a33c282af1 (bug 1692873)
This commit is contained in:
Alexandru Marc 2024-11-19 19:11:49 +02:00
parent 791097f054
commit b15977e5aa
16 changed files with 82 additions and 245 deletions

View File

@ -198,7 +198,11 @@ void VideoStreamFactory::SelectMaxFramerate(
std::vector<webrtc::VideoStream> VideoStreamFactory::CreateEncoderStreams(
const webrtc::FieldTrialsView& field_trials, int aWidth, int aHeight,
const webrtc::VideoEncoderConfig& aConfig) {
const size_t streamCount = aConfig.number_of_streams;
// We only allow one layer when screensharing
const size_t streamCount =
mCodecMode == webrtc::VideoCodecMode::kScreensharing
? 1
: aConfig.number_of_streams;
MOZ_RELEASE_ASSERT(streamCount >= 1, "Should request at least one stream");
MOZ_RELEASE_ASSERT(streamCount <= aConfig.simulcast_layers.size());
@ -237,7 +241,11 @@ std::vector<webrtc::VideoStream> VideoStreamFactory::CreateEncoderStreams(
video_stream.max_qp = kQpMax;
if (streamCount > 1) {
video_stream.num_temporal_layers = 2;
if (mCodecMode == webrtc::VideoCodecMode::kScreensharing) {
video_stream.num_temporal_layers = 1;
} else {
video_stream.num_temporal_layers = 2;
}
// XXX Bug 1390215 investigate using more of
// simulcast.cc:GetSimulcastConfig() or our own algorithm to replace it
}
@ -379,7 +387,10 @@ unsigned int VideoStreamFactory::SelectFrameRate(
bool VideoStreamFactory::ShouldDropFrame(const webrtc::VideoFrame& aFrame) {
bool hasNonZeroLayer = false;
{
const size_t streamCount = mCodecConfig.mEncodings.size();
const size_t streamCount =
mCodecMode == webrtc::VideoCodecMode::kScreensharing
? 1
: mCodecConfig.mEncodings.size();
for (int idx = streamCount - 1; idx >= 0; --idx) {
const auto& encoding = mCodecConfig.mEncodings[idx];
if (aFrame.width() / encoding.constraints.scaleDownBy >= 1.0 &&

View File

@ -58,7 +58,8 @@ class VideoStreamFactory
// This gets called off-main thread and may hold internal webrtc.org
// locks. May *NOT* lock the conduit's mutex, to avoid deadlocks.
std::vector<webrtc::VideoStream> CreateEncoderStreams(
const webrtc::FieldTrialsView& field_trials, int aWidth, int aHeight,
const webrtc::FieldTrialsView& field_trials,
int aWidth, int aHeight,
const webrtc::VideoEncoderConfig& aConfig) override;
/**

View File

@ -398,9 +398,6 @@ skip-if = [
["test_peerConnection_simulcastOffer_oldSetParameters.html"]
skip-if = ["os == 'android'"] # no simulcast support on android
["test_peerConnection_simulcastOffer_screenshare.html"]
skip-if = ["os == 'android'"] # no simulcast support on android
["test_peerConnection_stats.html"]
["test_peerConnection_stats_jitter.html"]

View File

@ -219,8 +219,6 @@ function createPlaybackElement(track) {
elem.autoplay = true;
elem.srcObject = new MediaStream([track]);
elem.id = track.id;
elem.width = 240;
elem.height = 180;
document.body.appendChild(elem);
return elem;
}

View File

@ -1,113 +0,0 @@
<!DOCTYPE HTML>
<!-- Mochitest for bug 1692873: sends a screen-capture (getDisplayMedia) track
     over an RTCPeerConnection with three simulcast encodings and verifies the
     resolution of each received stream plus the sender's RTP stats. -->
<html>
<head>
<script type="application/javascript" src="pc.js"></script>
<script type="application/javascript" src="parser_rtp.js"></script>
<script type="application/javascript" src="/tests/dom/canvas/test/captureStream_common.js"></script>
<script type="application/javascript" src="helpers_from_wpt/sdp.js"></script>
<script type="application/javascript" src="simulcast.js"></script>
<script type="application/javascript" src="stats.js"></script>
</head>
<body>
<pre id="test">
<script type="application/javascript">
createHTML({
bug: "1692873",
title: "Screensharing peer connection with Simulcast offer",
visible: true
});
runNetworkTest(async () => {
await pushPrefs(
// 400Kbps was determined empirically, set well-higher than
// the 140Kbps+overhead needed for the two restricted simulcast streams.
['media.peerconnection.video.min_bitrate_estimate', 400*1000],
["media.navigator.permission.disabled", true],
["media.peerconnection.video.lock_scaling", true],
);
const offerer = new RTCPeerConnection();
const answerer = new RTCPeerConnection();
// Trickle ICE candidates directly between the two in-page peer connections.
const add = (pc, can, failed) => can && pc.addIceCandidate(can).catch(failed);
offerer.onicecandidate = e => add(answerer, e.candidate, generateErrorCallback());
answerer.onicecandidate = e => add(offerer, e.candidate, generateErrorCallback());
// One playback-readiness promise is collected per incoming track; the test
// later requires exactly three of them (one per simulcast layer).
const metadataToBeLoaded = [];
answerer.ontrack = e => {
metadataToBeLoaded.push(getPlaybackWithLoadedMetadata(e.track));
};
// One send transceiver, that will be used to send both simulcast streams
SpecialPowers.wrap(document).notifyUserGestureActivation();
const videoStream = await navigator.mediaDevices.getDisplayMedia();
// Three encodings: full resolution, plus two half-resolution layers capped
// at 100kbps and 40kbps respectively.
const sendEncodings = [
{ rid: '0' },
{ rid: '1', maxBitrate: 100000, scaleResolutionDownBy: 2 },
{ rid: '2', maxBitrate: 40000, scaleResolutionDownBy: 2 }
];
offerer.addTransceiver(videoStream.getVideoTracks()[0], {sendEncodings});
const [sender] = offerer.getSenders();
const offer = await offerer.createOffer();
// Rewrite the rid-based simulcast offer into separate m-sections so the
// answerer receives each layer as an independent track.
const mungedOffer = ridToMid(offer);
info(`Transformed send simulcast offer to multiple m-sections: ${offer.sdp} to ${mungedOffer}`);
await answerer.setRemoteDescription({type: 'offer', sdp: mungedOffer});
await offerer.setLocalDescription(offer);
const rids = answerer.getTransceivers().map(({mid}) => mid);
is(rids.length, 3, 'Should have 3 mids in offer');
isnot(rids[0], '', 'First mid should be non-empty');
isnot(rids[1], '', 'Second mid should be non-empty');
isnot(rids[2], '', 'Third mid should be non-empty');
const answer = await answerer.createAnswer();
// Convert the multi-m-section answer back into a rid-based simulcast answer
// for the offerer.
let mungedAnswer = midToRid(answer);
// Allow sending up to 4k without having to account for max-fs defaults
// when checking sizes below. 3840*2160/(16*16)=34560
mungedAnswer = mungedAnswer.replace(/max-fs=\d+/g, "max-fs=34560");
info(`Transformed recv answer to simulcast: ${answer.sdp} to ${mungedAnswer}`);
await offerer.setRemoteDescription({type: 'answer', sdp: mungedAnswer});
await answerer.setLocalDescription(answer);
is(metadataToBeLoaded.length, 3, 'Offerer should have gotten 3 ontrack events');
info('Waiting for 3 loadedmetadata events');
const videoElems = await Promise.all(metadataToBeLoaded);
// Kick off RTCP sync for both ends before checking stats below.
const statsReady =
Promise.all([waitForSyncedRtcp(offerer), waitForSyncedRtcp(answerer)]);
const {width} = videoStream.getVideoTracks()[0].getSettings();
const {height} = videoStream.getVideoTracks()[0].getSettings();
// Layer 0 is unscaled; layers 1 and 2 both use scaleResolutionDownBy: 2.
is(videoElems[0].videoWidth, width,
"sink is same width as source, modulo our cropping algorithm");
is(videoElems[0].videoHeight, height,
"sink is same height as source, modulo our cropping algorithm");
is(videoElems[1].videoWidth, Math.trunc(width / 2),
"sink is 1/2 width of source, modulo our cropping algorithm");
is(videoElems[1].videoHeight, Math.trunc(height / 2),
"sink is 1/2 height of source, modulo our cropping algorithm");
is(videoElems[2].videoWidth, Math.trunc(width / 2),
"sink is 1/2 width of source, modulo our cropping algorithm");
is(videoElems[2].videoHeight, Math.trunc(height / 2),
"sink is 1/2 height of source, modulo our cropping algorithm");
await statsReady;
// Validate sender-side stats for all three outbound RTP streams.
const senderStats = await sender.getStats();
checkSenderStats(senderStats, 3);
checkExpectedFields(senderStats);
pedanticChecks(senderStats);
videoStream.getVideoTracks()[0].stop();
offerer.close();
answerer.close();
});
</script>
</pre>
</body>
</html>

View File

@ -91,14 +91,6 @@ class VideoConduitTest : public Test {
ConcreteControl mControl;
};
class VideoConduitCodecModeTest
: public VideoConduitTest,
public WithParamInterface<webrtc::VideoCodecMode> {};
INSTANTIATE_TEST_SUITE_P(WebRtcCodecModes, VideoConduitCodecModeTest,
Values(webrtc::VideoCodecMode::kRealtimeVideo,
webrtc::VideoCodecMode::kScreensharing));
TEST_F(VideoConduitTest, TestConfigureReceiveMediaCodecs) {
// No codecs
mControl.Update([&](auto& aControl) {
@ -461,7 +453,6 @@ TEST_F(VideoConduitTest, TestConfigureSendMediaCodecTias) {
aControl.mVideoSendRtpRtcpConfig =
Some(RtpRtcpConfig(webrtc::RtcpMode::kCompound));
});
ASSERT_EQ(Call()->mVideoSendEncoderConfig->max_bitrate_bps, 1000000);
{
ASSERT_TRUE(Call()->mVideoSendEncoderConfig);
SendVideoFrame(1280, 720, 1);
@ -480,7 +471,6 @@ TEST_F(VideoConduitTest, TestConfigureSendMediaCodecTias) {
codecConfigTiasLow.mTias = 1000;
aControl.mVideoSendCodec = Some(codecConfigTiasLow);
});
ASSERT_EQ(Call()->mVideoSendEncoderConfig->max_bitrate_bps, 1000);
{
ASSERT_TRUE(Call()->mVideoSendEncoderConfig);
SendVideoFrame(1280, 720, 1);
@ -812,8 +802,7 @@ TEST_F(VideoConduitTestScalingLocked, TestOnSinkWantsChanged) {
}
}
TEST_P(VideoConduitCodecModeTest,
TestConfigureSendMediaCodecSimulcastOddResolution) {
TEST_F(VideoConduitTest, TestConfigureSendMediaCodecSimulcastOddScreen) {
mControl.Update([&](auto& aControl) {
aControl.mTransmitting = true;
{
@ -831,7 +820,6 @@ TEST_P(VideoConduitCodecModeTest,
}
aControl.mVideoSendRtpRtcpConfig =
Some(RtpRtcpConfig(webrtc::RtcpMode::kCompound));
aControl.mVideoCodecMode = GetParam();
aControl.mLocalSsrcs = {42, 43, 44};
aControl.mLocalVideoRtxSsrcs = {45, 46, 47};
});
@ -841,14 +829,14 @@ TEST_P(VideoConduitCodecModeTest,
rtc::VideoSinkWants wants;
mVideoConduit->AddOrUpdateSink(sink.get(), wants);
SendVideoFrame(27, 25, 1);
SendVideoFrame(26, 24, 1);
{
const std::vector<webrtc::VideoStream> videoStreams =
Call()->CreateEncoderStreams(sink->mVideoFrame.width(),
sink->mVideoFrame.height());
ASSERT_EQ(videoStreams.size(), 3U);
EXPECT_EQ(videoStreams[0].width, 27U);
EXPECT_EQ(videoStreams[0].height, 25U);
EXPECT_EQ(videoStreams[0].width, 26U);
EXPECT_EQ(videoStreams[0].height, 24U);
EXPECT_EQ(videoStreams[1].width, 13U);
EXPECT_EQ(videoStreams[1].height, 12U);
EXPECT_EQ(videoStreams[2].width, 6U);
@ -864,19 +852,18 @@ TEST_P(VideoConduitCodecModeTest,
aControl.mLocalVideoRtxSsrcs = {43};
});
ASSERT_TRUE(Call()->mVideoSendEncoderConfig);
SendVideoFrame(27, 25, 2);
SendVideoFrame(26, 24, 2);
{
const std::vector<webrtc::VideoStream> videoStreams =
Call()->CreateEncoderStreams(sink->mVideoFrame.width(),
sink->mVideoFrame.height());
ASSERT_EQ(videoStreams.size(), 1U);
EXPECT_EQ(videoStreams[0].width, 27U);
EXPECT_EQ(videoStreams[0].height, 25U);
EXPECT_EQ(videoStreams[0].width, 26U);
EXPECT_EQ(videoStreams[0].height, 24U);
}
}
TEST_P(VideoConduitCodecModeTest,
TestConfigureSendMediaCodecSimulcastAllScaling) {
TEST_F(VideoConduitTest, TestConfigureSendMediaCodecSimulcastAllScaling) {
mControl.Update([&](auto& aControl) {
aControl.mTransmitting = true;
VideoCodecConfig codecConfig(120, "VP8", EncodingConstraints());
@ -895,7 +882,6 @@ TEST_P(VideoConduitCodecModeTest,
aControl.mVideoSendCodec = Some(codecConfig);
aControl.mVideoSendRtpRtcpConfig =
Some(RtpRtcpConfig(webrtc::RtcpMode::kCompound));
aControl.mVideoCodecMode = GetParam();
aControl.mLocalSsrcs = {42, 43, 44};
aControl.mLocalVideoRtxSsrcs = {45, 46, 47};
});
@ -970,6 +956,33 @@ TEST_P(VideoConduitCodecModeTest,
}
}
TEST_F(VideoConduitTest, TestConfigureSendMediaCodecSimulcastScreenshare) {
mControl.Update([&](auto& aControl) {
VideoCodecConfig codecConfig(120, "VP8", EncodingConstraints());
codecConfig.mEncodings.emplace_back();
{
auto& encoding = codecConfig.mEncodings.emplace_back();
encoding.constraints.scaleDownBy = 2;
}
{
auto& encoding = codecConfig.mEncodings.emplace_back();
encoding.constraints.scaleDownBy = 4;
}
aControl.mTransmitting = true;
aControl.mVideoSendCodec = Some(codecConfig);
aControl.mVideoSendRtpRtcpConfig =
Some(RtpRtcpConfig(webrtc::RtcpMode::kCompound));
aControl.mLocalSsrcs = {42, 43, 44};
aControl.mLocalVideoRtxSsrcs = {45, 46, 47};
aControl.mVideoCodecMode = webrtc::VideoCodecMode::kScreensharing;
});
ASSERT_TRUE(Call()->mVideoSendEncoderConfig);
const std::vector<webrtc::VideoStream> videoStreams =
Call()->CreateEncoderStreams(640, 480);
ASSERT_EQ(videoStreams.size(), 1U);
}
TEST_F(VideoConduitTest, TestReconfigureReceiveMediaCodecs) {
// Defaults
mControl.Update([&](auto& aControl) {
@ -1120,14 +1133,13 @@ TEST_F(VideoConduitTest, TestReconfigureReceiveMediaCodecs) {
Call()->mVideoReceiveConfig->rtp.rtx_associated_payload_types.size(), 0U);
}
TEST_P(VideoConduitCodecModeTest, TestReconfigureSendMediaCodec) {
TEST_F(VideoConduitTest, TestReconfigureSendMediaCodec) {
mControl.Update([&](auto& aControl) {
VideoCodecConfig codecConfig(120, "VP8", EncodingConstraints());
codecConfig.mEncodings.emplace_back();
aControl.mVideoSendCodec = Some(codecConfig);
aControl.mVideoSendRtpRtcpConfig =
Some(RtpRtcpConfig(webrtc::RtcpMode::kCompound));
aControl.mVideoCodecMode = GetParam();
});
ASSERT_FALSE(Call()->mVideoSendConfig);
@ -1140,9 +1152,7 @@ TEST_P(VideoConduitCodecModeTest, TestReconfigureSendMediaCodec) {
webrtc::RtcpMode::kCompound);
ASSERT_EQ(Call()->mVideoSendConfig->rtp.max_packet_size, kVideoMtu);
ASSERT_EQ(Call()->mVideoSendEncoderConfig->content_type,
GetParam() == webrtc::VideoCodecMode::kRealtimeVideo
? VideoEncoderConfig::ContentType::kRealtimeVideo
: VideoEncoderConfig::ContentType::kScreen);
VideoEncoderConfig::ContentType::kRealtimeVideo);
ASSERT_EQ(Call()->mVideoSendEncoderConfig->min_transmit_bitrate_bps, 0);
ASSERT_EQ(Call()->mVideoSendEncoderConfig->max_bitrate_bps, KBPS(10000));
ASSERT_EQ(Call()->mVideoSendEncoderConfig->number_of_streams, 1U);
@ -1187,7 +1197,6 @@ TEST_P(VideoConduitCodecModeTest, TestReconfigureSendMediaCodec) {
aControl.mVideoSendCodec = Some(codecConfigTias);
});
ASSERT_TRUE(Call()->mVideoSendEncoderConfig);
ASSERT_EQ(Call()->mVideoSendEncoderConfig->max_bitrate_bps, 1000000);
SendVideoFrame(1280, 720, 1);
{
@ -1273,15 +1282,13 @@ TEST_P(VideoConduitCodecModeTest, TestReconfigureSendMediaCodec) {
}
}
TEST_P(VideoConduitCodecModeTest,
TestReconfigureSendMediaCodecWhileTransmitting) {
TEST_F(VideoConduitTest, TestReconfigureSendMediaCodecWhileTransmitting) {
mControl.Update([&](auto& aControl) {
VideoCodecConfig codecConfig(120, "VP8", EncodingConstraints());
codecConfig.mEncodings.emplace_back();
aControl.mVideoSendCodec = Some(codecConfig);
aControl.mVideoSendRtpRtcpConfig =
Some(RtpRtcpConfig(webrtc::RtcpMode::kCompound));
aControl.mVideoCodecMode = GetParam();
});
ASSERT_FALSE(Call()->mVideoSendConfig);
@ -1294,9 +1301,7 @@ TEST_P(VideoConduitCodecModeTest,
webrtc::RtcpMode::kCompound);
ASSERT_EQ(Call()->mVideoSendConfig->rtp.max_packet_size, kVideoMtu);
ASSERT_EQ(Call()->mVideoSendEncoderConfig->content_type,
GetParam() == webrtc::VideoCodecMode::kRealtimeVideo
? VideoEncoderConfig::ContentType::kRealtimeVideo
: VideoEncoderConfig::ContentType::kScreen);
VideoEncoderConfig::ContentType::kRealtimeVideo);
ASSERT_EQ(Call()->mVideoSendEncoderConfig->min_transmit_bitrate_bps, 0);
ASSERT_EQ(Call()->mVideoSendEncoderConfig->max_bitrate_bps, KBPS(10000));
ASSERT_EQ(Call()->mVideoSendEncoderConfig->number_of_streams, 1U);
@ -1312,7 +1317,6 @@ TEST_P(VideoConduitCodecModeTest,
aControl.mVideoSendCodec = Some(codecConfigTias);
});
ASSERT_TRUE(Call()->mVideoSendEncoderConfig);
ASSERT_EQ(Call()->mVideoSendEncoderConfig->max_bitrate_bps, 1000000);
SendVideoFrame(1280, 720, 1);
{
@ -1435,7 +1439,7 @@ TEST_P(VideoConduitCodecModeTest,
}
}
TEST_P(VideoConduitCodecModeTest, TestVideoEncode) {
TEST_F(VideoConduitTest, TestVideoEncode) {
mControl.Update([&](auto& aControl) {
aControl.mTransmitting = true;
VideoCodecConfig codecConfig(120, "VP8", EncodingConstraints());
@ -1443,7 +1447,6 @@ TEST_P(VideoConduitCodecModeTest, TestVideoEncode) {
aControl.mVideoSendCodec = Some(codecConfig);
aControl.mVideoSendRtpRtcpConfig =
Some(RtpRtcpConfig(webrtc::RtcpMode::kCompound));
aControl.mVideoCodecMode = GetParam();
});
ASSERT_TRUE(Call()->mVideoSendEncoderConfig);
@ -1472,7 +1475,7 @@ TEST_P(VideoConduitCodecModeTest, TestVideoEncode) {
mVideoConduit->RemoveSink(sink.get());
}
TEST_P(VideoConduitCodecModeTest, TestVideoEncodeMaxFs) {
TEST_F(VideoConduitTest, TestVideoEncodeMaxFs) {
mControl.Update([&](auto& aControl) {
aControl.mTransmitting = true;
VideoCodecConfig codecConfig(120, "VP8", EncodingConstraints());
@ -1481,7 +1484,6 @@ TEST_P(VideoConduitCodecModeTest, TestVideoEncodeMaxFs) {
aControl.mVideoSendCodec = Some(codecConfig);
aControl.mVideoSendRtpRtcpConfig =
Some(RtpRtcpConfig(webrtc::RtcpMode::kCompound));
aControl.mVideoCodecMode = GetParam();
});
ASSERT_TRUE(Call()->mVideoSendEncoderConfig);
@ -1563,7 +1565,7 @@ TEST_P(VideoConduitCodecModeTest, TestVideoEncodeMaxFs) {
mVideoConduit->RemoveSink(sink.get());
}
TEST_P(VideoConduitCodecModeTest, TestVideoEncodeMaxFsNegotiatedThenSinkWants) {
TEST_F(VideoConduitTest, TestVideoEncodeMaxFsNegotiatedThenSinkWants) {
mControl.Update([&](auto& aControl) {
aControl.mTransmitting = true;
VideoCodecConfig codecConfig(120, "VP8", EncodingConstraints());
@ -1572,7 +1574,6 @@ TEST_P(VideoConduitCodecModeTest, TestVideoEncodeMaxFsNegotiatedThenSinkWants) {
aControl.mVideoSendCodec = Some(codecConfig);
aControl.mVideoSendRtpRtcpConfig =
Some(RtpRtcpConfig(webrtc::RtcpMode::kCompound));
aControl.mVideoCodecMode = GetParam();
});
ASSERT_TRUE(Call()->mVideoSendEncoderConfig);
@ -1610,7 +1611,7 @@ TEST_P(VideoConduitCodecModeTest, TestVideoEncodeMaxFsNegotiatedThenSinkWants) {
mVideoConduit->RemoveSink(sink.get());
}
TEST_P(VideoConduitCodecModeTest, TestVideoEncodeMaxFsCodecChange) {
TEST_F(VideoConduitTest, TestVideoEncodeMaxFsCodecChange) {
mControl.Update([&](auto& aControl) {
aControl.mTransmitting = true;
VideoCodecConfig codecConfig(120, "VP8", EncodingConstraints());
@ -1619,7 +1620,6 @@ TEST_P(VideoConduitCodecModeTest, TestVideoEncodeMaxFsCodecChange) {
aControl.mVideoSendCodec = Some(codecConfig);
aControl.mVideoSendRtpRtcpConfig =
Some(RtpRtcpConfig(webrtc::RtcpMode::kCompound));
aControl.mVideoCodecMode = GetParam();
});
ASSERT_TRUE(Call()->mVideoSendEncoderConfig);
@ -1662,8 +1662,7 @@ TEST_P(VideoConduitCodecModeTest, TestVideoEncodeMaxFsCodecChange) {
mVideoConduit->RemoveSink(sink.get());
}
TEST_P(VideoConduitCodecModeTest,
TestVideoEncodeMaxFsSinkWantsThenCodecChange) {
TEST_F(VideoConduitTest, TestVideoEncodeMaxFsSinkWantsThenCodecChange) {
mControl.Update([&](auto& aControl) {
aControl.mTransmitting = true;
VideoCodecConfig codecConfig(120, "VP8", EncodingConstraints());
@ -1671,7 +1670,6 @@ TEST_P(VideoConduitCodecModeTest,
aControl.mVideoSendCodec = Some(codecConfig);
aControl.mVideoSendRtpRtcpConfig =
Some(RtpRtcpConfig(webrtc::RtcpMode::kCompound));
aControl.mVideoCodecMode = GetParam();
});
ASSERT_TRUE(Call()->mVideoSendEncoderConfig);
@ -1712,7 +1710,7 @@ TEST_P(VideoConduitCodecModeTest,
mVideoConduit->RemoveSink(sink.get());
}
TEST_P(VideoConduitCodecModeTest, TestVideoEncodeMaxFsNegotiated) {
TEST_F(VideoConduitTest, TestVideoEncodeMaxFsNegotiated) {
mControl.Update([&](auto& aControl) {
aControl.mTransmitting = true;
VideoCodecConfig codecConfig(120, "VP8", EncodingConstraints());
@ -1720,7 +1718,6 @@ TEST_P(VideoConduitCodecModeTest, TestVideoEncodeMaxFsNegotiated) {
aControl.mVideoSendCodec = Some(codecConfig);
aControl.mVideoSendRtpRtcpConfig =
Some(RtpRtcpConfig(webrtc::RtcpMode::kCompound));
aControl.mVideoCodecMode = GetParam();
});
ASSERT_TRUE(Call()->mVideoSendEncoderConfig);
@ -1771,7 +1768,7 @@ TEST_P(VideoConduitCodecModeTest, TestVideoEncodeMaxFsNegotiated) {
mVideoConduit->RemoveSink(sink.get());
}
TEST_P(VideoConduitCodecModeTest, TestVideoEncodeMaxWidthAndHeight) {
TEST_F(VideoConduitTest, TestVideoEncodeMaxWidthAndHeight) {
mControl.Update([&](auto& aControl) {
aControl.mTransmitting = true;
VideoCodecConfig codecConfig(120, "VP8", EncodingConstraints());
@ -1781,7 +1778,6 @@ TEST_P(VideoConduitCodecModeTest, TestVideoEncodeMaxWidthAndHeight) {
aControl.mVideoSendCodec = Some(codecConfig);
aControl.mVideoSendRtpRtcpConfig =
Some(RtpRtcpConfig(webrtc::RtcpMode::kCompound));
aControl.mVideoCodecMode = GetParam();
});
ASSERT_TRUE(Call()->mVideoSendEncoderConfig);
@ -1815,7 +1811,7 @@ TEST_P(VideoConduitCodecModeTest, TestVideoEncodeMaxWidthAndHeight) {
mVideoConduit->RemoveSink(sink.get());
}
TEST_P(VideoConduitCodecModeTest, TestVideoEncodeScaleResolutionBy) {
TEST_F(VideoConduitTest, TestVideoEncodeScaleResolutionBy) {
mControl.Update([&](auto& aControl) {
aControl.mTransmitting = true;
VideoCodecConfig codecConfig(120, "VP8", EncodingConstraints());
@ -1825,7 +1821,6 @@ TEST_P(VideoConduitCodecModeTest, TestVideoEncodeScaleResolutionBy) {
aControl.mVideoSendCodec = Some(codecConfig);
aControl.mVideoSendRtpRtcpConfig =
Some(RtpRtcpConfig(webrtc::RtcpMode::kCompound));
aControl.mVideoCodecMode = GetParam();
});
ASSERT_TRUE(Call()->mVideoSendEncoderConfig);
@ -1857,7 +1852,7 @@ TEST_P(VideoConduitCodecModeTest, TestVideoEncodeScaleResolutionBy) {
}
}
TEST_P(VideoConduitCodecModeTest, TestVideoEncodeSimulcastScaleResolutionBy) {
TEST_F(VideoConduitTest, TestVideoEncodeSimulcastScaleResolutionBy) {
mControl.Update([&](auto& aControl) {
VideoCodecConfig codecConfig(120, "VP8", EncodingConstraints());
{
@ -1877,7 +1872,6 @@ TEST_P(VideoConduitCodecModeTest, TestVideoEncodeSimulcastScaleResolutionBy) {
aControl.mVideoSendCodec = Some(codecConfig);
aControl.mVideoSendRtpRtcpConfig =
Some(RtpRtcpConfig(webrtc::RtcpMode::kCompound));
aControl.mVideoCodecMode = GetParam();
aControl.mLocalSsrcs = {42, 43, 44};
aControl.mLocalVideoRtxSsrcs = {45, 46, 47};
});
@ -1911,8 +1905,7 @@ TEST_P(VideoConduitCodecModeTest, TestVideoEncodeSimulcastScaleResolutionBy) {
}
}
TEST_P(VideoConduitCodecModeTest,
TestVideoEncodeLargeScaleResolutionByFrameDropping) {
TEST_F(VideoConduitTest, TestVideoEncodeLargeScaleResolutionByFrameDropping) {
for (const auto& scales :
{std::vector{200U}, std::vector{200U, 300U}, std::vector{300U, 200U}}) {
mControl.Update([&](auto& aControl) {
@ -1925,7 +1918,6 @@ TEST_P(VideoConduitCodecModeTest,
aControl.mVideoSendCodec = Some(codecConfig);
aControl.mVideoSendRtpRtcpConfig =
Some(RtpRtcpConfig(webrtc::RtcpMode::kCompound));
aControl.mVideoCodecMode = GetParam();
aControl.mLocalSsrcs = scales;
});
ASSERT_TRUE(Call()->mVideoSendEncoderConfig);
@ -1965,8 +1957,7 @@ TEST_P(VideoConduitCodecModeTest,
}
}
TEST_P(VideoConduitCodecModeTest,
TestVideoEncodeLargeScaleResolutionByStreamCreation) {
TEST_F(VideoConduitTest, TestVideoEncodeLargeScaleResolutionByStreamCreation) {
for (const auto& scales :
{std::vector{200U}, std::vector{200U, 300U}, std::vector{300U, 200U}}) {
mControl.Update([&](auto& aControl) {
@ -1979,7 +1970,6 @@ TEST_P(VideoConduitCodecModeTest,
aControl.mVideoSendCodec = Some(codecConfig);
aControl.mVideoSendRtpRtcpConfig =
Some(RtpRtcpConfig(webrtc::RtcpMode::kCompound));
aControl.mVideoCodecMode = GetParam();
aControl.mLocalSsrcs = scales;
});
ASSERT_TRUE(Call()->mVideoSendEncoderConfig);
@ -2060,7 +2050,7 @@ TEST_P(VideoConduitCodecModeTest,
}
}
TEST_P(VideoConduitCodecModeTest, TestVideoEncodeResolutionAlignment) {
TEST_F(VideoConduitTest, TestVideoEncodeResolutionAlignment) {
UniquePtr<MockVideoSink> sink(new MockVideoSink());
for (const auto& scales : {std::vector{1U}, std::vector{1U, 9U}}) {
@ -2074,7 +2064,6 @@ TEST_P(VideoConduitCodecModeTest, TestVideoEncodeResolutionAlignment) {
aControl.mVideoSendCodec = Some(codecConfig);
aControl.mVideoSendRtpRtcpConfig =
Some(RtpRtcpConfig(webrtc::RtcpMode::kCompound));
aControl.mVideoCodecMode = GetParam();
aControl.mLocalSsrcs = scales;
});
ASSERT_TRUE(Call()->mVideoSendEncoderConfig);

View File

@ -1,10 +0,0 @@
[basic.https.html]
expected:
if (os == "win") and not swgl and not debug and (processor == "x86"): [OK, TIMEOUT]
if (os == "linux") and not debug: [OK, TIMEOUT]
if (os == "win") and swgl: [OK, TIMEOUT]
[Basic simulcast setup with two spatial layers]
expected:
if (os == "win") and not swgl and not debug and (processor == "x86"): [PASS, TIMEOUT]
if (os == "linux") and not debug: [PASS, TIMEOUT]
if (os == "win") and swgl: [PASS, TIMEOUT]

View File

@ -18,6 +18,6 @@ promise_test(async t => {
const pc2 = new RTCPeerConnection();
t.add_cleanup(() => pc2.close());
await negotiateSimulcastAndWaitForVideo(t, await getCameraStream(t), rids, pc1, pc2);
return negotiateSimulcastAndWaitForVideo(t, rids, pc1, pc2);
}, 'Basic simulcast setup with two spatial layers');
</script>

View File

@ -18,7 +18,7 @@ promise_test(async t => {
const pc2 = new RTCPeerConnection();
t.add_cleanup(() => pc2.close());
await negotiateSimulcastAndWaitForVideo(t, await getCameraStream(t), rids, pc1, pc2);
await negotiateSimulcastAndWaitForVideo(t, rids, pc1, pc2);
const outboundStats = [];
const senderStats = await pc1.getSenders()[0].getStats();

View File

@ -26,7 +26,6 @@ promise_test(async t => {
const pc2 = new RTCPeerConnection();
t.add_cleanup(() => pc2.close());
await negotiateSimulcastAndWaitForVideo(t, await getCameraStream(t), rids, pc1, pc2,
{mimeType: 'video/H264'});
return negotiateSimulcastAndWaitForVideo(t, rids, pc1, pc2, {mimeType: 'video/H264'});
}, 'H264 simulcast setup with two streams');
</script>

View File

@ -1,28 +0,0 @@
<!doctype html>
<!-- WPT: negotiates a two-layer rid-based simulcast send of a
     getDisplayMedia (screen-capture) track between two peer connections and
     waits for video to arrive for both layers. -->
<meta charset=utf-8>
<title>RTCPeerConnection Screen-sharing Simulcast Tests</title>
<meta name="timeout" content="long">
<script src="../third_party/sdp/sdp.js"></script>
<script src="simulcast.js"></script>
<script src="../RTCPeerConnection-helper.js"></script>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="/resources/testdriver.js"></script>
<script src="/resources/testdriver-vendor.js"></script>
<script>
promise_test(async t => {
// Test getDisplayMedia with simulcast
// testdriver bless() supplies the user activation getDisplayMedia requires.
await test_driver.bless('getDisplayMedia');
const stream = await navigator.mediaDevices.getDisplayMedia({
video: {width: 1280, height: 720}
});
t.add_cleanup(() => stream.getTracks().forEach(track => track.stop()));
// Two simulcast layers, identified by rids '0' and '1'; the shared helper
// performs the offer/answer munging and resolves once video flows for both.
const rids = [0, 1];
const pc1 = new RTCPeerConnection();
t.add_cleanup(() => pc1.close());
const pc2 = new RTCPeerConnection();
t.add_cleanup(() => pc2.close());
return negotiateSimulcastAndWaitForVideo(t, stream, rids, pc1, pc2);
}, 'Basic simulcast setup with two spatial layers');
</script>

View File

@ -33,7 +33,7 @@ promise_test(async t => {
const pc2 = new RTCPeerConnection();
t.add_cleanup(() => pc2.close());
await negotiateSimulcastAndWaitForVideo(t, await getCameraStream(t), rids, pc1, pc2);
await negotiateSimulcastAndWaitForVideo(t, rids, pc1, pc2);
// Deactivate first sender.
const parameters = pc1.getSenders()[0].getParameters();
@ -58,7 +58,7 @@ promise_test(async t => {
const pc2 = new RTCPeerConnection();
t.add_cleanup(() => pc2.close());
await negotiateSimulcastAndWaitForVideo(t, await getCameraStream(t), rids, pc1, pc2);
await negotiateSimulcastAndWaitForVideo(t, rids, pc1, pc2);
// Deactivate second sender.
const parameters = pc1.getSenders()[0].getParameters();
@ -83,7 +83,7 @@ promise_test(async t => {
const pc2 = new RTCPeerConnection();
t.add_cleanup(() => pc2.close());
await negotiateSimulcastAndWaitForVideo(t, await getCameraStream(t), rids, pc1, pc2);
await negotiateSimulcastAndWaitForVideo(t, rids, pc1, pc2);
// Deactivate all senders.
const parameters = pc1.getSenders()[0].getParameters();

View File

@ -220,7 +220,7 @@ function swapRidAndMidExtensionsInSimulcastAnswer(answer, localDescription, rids
}
async function negotiateSimulcastAndWaitForVideo(
t, stream, rids, pc1, pc2, codec, scalabilityMode = undefined) {
t, rids, pc1, pc2, codec, scalabilityMode = undefined) {
exchangeIceCandidates(pc1, pc2);
const metadataToBeLoaded = [];
@ -251,6 +251,10 @@ async function negotiateSimulcastAndWaitForVideo(
scaleResolutionDownBy *= 2;
}
// Use getUserMedia as getNoiseStream does not have enough entropy to ramp-up.
await setMediaPermission();
const stream = await navigator.mediaDevices.getUserMedia({video: {width: 1280, height: 720}});
t.add_cleanup(() => stream.getTracks().forEach(track => track.stop()));
const transceiver = pc1.addTransceiver(stream.getVideoTracks()[0], {
streams: [stream],
sendEncodings: sendEncodings,
@ -274,11 +278,3 @@ async function negotiateSimulcastAndWaitForVideo(
assert_equals(metadataToBeLoaded.length, rids.length);
return Promise.all(metadataToBeLoaded);
}
async function getCameraStream(t) {
// Use getUserMedia as getNoiseStream does not have enough entropy to ramp-up.
await setMediaPermission();
const stream = await navigator.mediaDevices.getUserMedia({video: {width: 1280, height: 720}});
t.add_cleanup(() => stream.getTracks().forEach(track => track.stop()));
return stream;
}

View File

@ -21,7 +21,6 @@ promise_test(async t => {
const pc2 = new RTCPeerConnection();
t.add_cleanup(() => pc2.close());
return negotiateSimulcastAndWaitForVideo(t, await getCameraStream(t), rids, pc1, pc2,
{mimeType: 'video/VP8'});
return negotiateSimulcastAndWaitForVideo(t, rids, pc1, pc2, {mimeType: 'video/VP8'});
}, 'VP8 simulcast setup with two streams');
</script>

View File

@ -23,14 +23,13 @@ promise_test(async t => {
// This is not a scalability mode test (see wpt/webrtc-svc/ for those) but a
// VP9 simulcast test. Setting `scalabilityMode` should not be needed, however
// some browsers interpret multiple VP9 encodings to mean multiple spatial
// many browsers interprets multiple VP9 encodings to mean multiple spatial
// layers by default. During a transition period, Chromium-based browsers
// requires explicitly specifying the scalability mode as a way to opt-in to
// spec-compliant simulcast. See also wpt/webrtc/simulcast/vp9.https.html for
// a version of this test that does not set the scalability mode.
const scalabilityMode = 'L1T2';
return negotiateSimulcastAndWaitForVideo(
t, await getCameraStream(t), rids, pc1, pc2, {mimeType: 'video/VP9'},
scalabilityMode);
t, rids, pc1, pc2, {mimeType: 'video/VP9'}, scalabilityMode);
}, 'VP9 simulcast setup with two streams and L1T2 set');
</script>

View File

@ -21,7 +21,6 @@ promise_test(async t => {
const pc2 = new RTCPeerConnection();
t.add_cleanup(() => pc2.close());
return negotiateSimulcastAndWaitForVideo(t, await getCameraStream(t), rids, pc1, pc2,
{mimeType: 'video/VP9'});
return negotiateSimulcastAndWaitForVideo(t, rids, pc1, pc2, {mimeType: 'video/VP9'});
}, 'VP9 simulcast setup with two streams');
</script>