fune/browser/base/content/test/webrtc/get_user_media.html
Karl Tomlinson d86987c6d7
Bug 1705289 require user gesture for screen capture through non-standard mediaSource in getUserMedia() r=jib

for consistency with getDisplayMedia().

System callers are trusted and so are excluded from the requirement.
The exclusion may not be strictly necessary for capture previews, but it
removes any doubt and simplifies testing.

Differential Revision: https://phabricator.services.mozilla.com/D122193
2021-08-24 23:41:18 +00:00
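The commit makes screen capture through getUserMedia() behave like getDisplayMedia(): without transient user activation the request is rejected. A minimal sketch of how this is expected to surface to content follows; the function name and the exact error name are assumptions, not taken from the patch. The test page below instead simulates the gesture with SpecialPowers.wrap(document).notifyUserGestureActivation().

async function sketchScreenCapture() {
  try {
    // Expected to succeed only when called with transient user activation,
    // e.g. from a click handler (or after notifyUserGestureActivation() in
    // a test harness).
    const stream = await navigator.mediaDevices.getUserMedia({
      video: { mediaSource: "screen" },
    });
    stream.getTracks().forEach(t => t.stop());
  } catch (err) {
    // Without a gesture the promise should reject; "InvalidStateError" is an
    // assumed error name here, by analogy with getDisplayMedia().
    console.log("screen capture rejected: " + err.name);
  }
}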

<!DOCTYPE html>
<html>
<head><meta charset="UTF-8"></head>
<body>
<div id="message"></div>
<script>
// Specifies whether we are using fake streams to run this automation
var useFakeStreams = true;
try {
  var audioDevice = SpecialPowers.getCharPref("media.audio_loopback_dev");
  var videoDevice = SpecialPowers.getCharPref("media.video_loopback_dev");
  dump("TEST DEVICES: Using media devices:\n");
  dump("audio: " + audioDevice + "\nvideo: " + videoDevice + "\n");
  useFakeStreams = false;
} catch (e) {
  dump("TEST DEVICES: No test devices found (in media.{audio,video}_loopback_dev), using fake streams.\n");
  useFakeStreams = true;
}
function message(m) {
  // eslint-disable-next-line no-unsanitized/property
  document.getElementById("message").innerHTML = m;
  top.postMessage(m, "*");
}
var gStreams = [];
var gVideoEvents = [];
var gAudioEvents = [];
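// Request a getUserMedia() stream. aAudio/aVideo are passed through as the
// audio/video constraints; aShare, when set, selects a non-standard
// screen-sharing mediaSource (e.g. "screen"); aBadDevice requests a
// nonexistent deviceId to exercise error paths. Reports "ok" or
// "error: ..." to the embedding page.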
async function requestDevice(aAudio, aVideo, aShare, aBadDevice = false) {
  const opts = {video: aVideo, audio: aAudio};
  if (aShare) {
    opts.video = { mediaSource: aShare };
    // Screen capture through a non-standard mediaSource requires transient
    // user activation; simulate it for the test.
    SpecialPowers.wrap(document).notifyUserGestureActivation();
  }
  if (useFakeStreams) {
    opts.fake = true;
  }
  if (aVideo && aBadDevice) {
    opts.video = {
      deviceId: "bad device",
    };
    opts.fake = true;
  }
  if (aAudio && aBadDevice) {
    opts.audio = {
      deviceId: "bad device",
    };
    opts.fake = true;
  }
  try {
    const stream = await navigator.mediaDevices.getUserMedia(opts);
    gStreams.push(stream);
    const videoTrack = stream.getVideoTracks()[0];
    if (videoTrack) {
      for (const name of ["mute", "unmute", "ended"]) {
        videoTrack.addEventListener(name, () => gVideoEvents.push(name));
      }
    }
    const audioTrack = stream.getAudioTracks()[0];
    if (audioTrack) {
      for (const name of ["mute", "unmute", "ended"]) {
        audioTrack.addEventListener(name, () => gAudioEvents.push(name));
      }
    }
    message("ok");
  } catch (err) {
    message("error: " + err);
  }
}
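// selectAudioOutput() also requires transient user activation, so simulate a
// user gesture before calling it.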
async function requestAudioOutput() {
  SpecialPowers.wrap(document).notifyUserGestureActivation();
  try {
    await navigator.mediaDevices.selectAudioOutput();
    message("ok");
  } catch (err) {
    message("error: " + err);
  }
}
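// Tell the embedding test that the page has loaded and is ready for commands.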
message("pending");
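// Stop and remove all tracks of the given kind ("audio" or "video"), drop
// streams that end up empty, and reset the corresponding event log.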
function stopTracks(aKind) {
  for (let stream of gStreams) {
    for (let track of stream.getTracks()) {
      if (track.kind == aKind) {
        track.stop();
        stream.removeTrack(track);
      }
    }
  }
  gStreams = gStreams.filter(s => !!s.getTracks().length);
  if (aKind == "video") {
    gVideoEvents = [];
  } else if (aKind == "audio") {
    gAudioEvents = [];
  }
}
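// Stop every track on every stream and reset all capture state.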
function closeStream() {
  for (let stream of gStreams) {
    for (let track of stream.getTracks()) {
      track.stop();
    }
  }
  gStreams = [];
  gVideoEvents = [];
  gAudioEvents = [];
  message("closed");
}
</script>
</body>
</html>