Merge autoland to mozilla-central a=merge. CLOSED TREE

This commit is contained in:
Cristina Coroiu 2020-11-16 18:20:41 +02:00
commit 1eba664ba7
47 changed files with 528 additions and 252 deletions

View file

@ -59,10 +59,6 @@ about-debugging-sidebar-item-connect-button-connection-not-responding = Connecti
# Text displayed as connection error in sidebar item when the connection has timed out.
about-debugging-sidebar-item-connect-button-connection-timeout = Connection timed out
# Temporary text displayed in sidebar items representing remote runtimes after
# successfully connecting to them. Temporary UI, do not localize.
about-debugging-sidebar-item-connected-label = Connected
# Text displayed in sidebar items for remote devices where a compatible browser (eg
# Firefox) has not been detected yet. Typically, Android phones connected via USB with
# USB debugging enabled, but where Firefox is not started.

View file

@ -471,7 +471,6 @@ const ErrorCodes = {
NS_ERROR_STORAGE_CONSTRAINT: 0x80630003,
NS_ERROR_DOM_FILE_NOT_FOUND_ERR: 0x80650000,
NS_ERROR_DOM_FILE_NOT_READABLE_ERR: 0x80650001,
NS_ERROR_DOM_FILE_ABORT_ERR: 0x80650002,
NS_ERROR_DOM_INDEXEDDB_UNKNOWN_ERR: 0x80660001,
NS_ERROR_DOM_INDEXEDDB_NOT_FOUND_ERR: 0x80660003,
NS_ERROR_DOM_INDEXEDDB_CONSTRAINT_ERR: 0x80660004,

View file

@ -133,7 +133,7 @@ XPCOMUtils.defineLazyGetter(this, "newLinesRegex", () => /[\r\n]/g);
XPCOMUtils.defineLazyGetter(
this,
"possibleProtocolRegex",
() => /^([a-z][a-z0-9.+\t-]*):/i
() => /^([a-z][a-z0-9.+\t-]*)(:|;)?(\/\/)?/i
);
// Regex used to match IPs. Note that these are not made to validate IPs, but
@ -272,7 +272,14 @@ URIFixup.prototype = {
let info = new URIFixupInfo(uriString);
let scheme = extractScheme(uriString);
const {
scheme,
fixedSchemeUriString,
fixupChangedProtocol,
} = extractScheme(uriString, fixupFlags);
uriString = fixedSchemeUriString;
info.fixupChangedProtocol = fixupChangedProtocol;
if (scheme == "view-source") {
let { preferredURI, postData } = fixupViewSource(uriString, fixupFlags);
info.preferredURI = info.fixedURI = preferredURI;
@ -291,34 +298,7 @@ URIFixup.prototype = {
}
}
// Fix up common scheme typos.
// TODO: Use levenshtein distance here?
let isCommonProtocol = COMMON_PROTOCOLS.includes(scheme);
if (
fixupSchemeTypos &&
fixupFlags & FIXUP_FLAG_FIX_SCHEME_TYPOS &&
scheme &&
!isCommonProtocol
) {
info.fixupChangedProtocol = [
["ttp", "http"],
["htp", "http"],
["ttps", "https"],
["tps", "https"],
["ps", "https"],
["htps", "https"],
["ile", "file"],
["le", "file"],
].some(([typo, fixed]) => {
if (uriString.startsWith(typo + ":")) {
scheme = fixed;
uriString = scheme + uriString.substring(typo.length);
isCommonProtocol = true;
return true;
}
return false;
});
}
const isCommonProtocol = COMMON_PROTOCOLS.includes(scheme);
let canHandleProtocol =
scheme &&
@ -936,12 +916,73 @@ function keywordURIFixup(uriString, fixupInfo, isPrivateContext) {
/**
* Mimics the logic in Services.io.extractScheme, but avoids crossing XPConnect.
* This also tries to fixup the scheme if it was clearly mistyped.
* @param {string} uriString the string to examine
* @returns {string} a scheme or empty string if one could not be identified
* @param {integer} fixupFlags The original fixup flags
* @returns {object}
* scheme: a typo fixed scheme or empty string if one could not be identified
* fixedSchemeUriString: uri string with a typo fixed scheme
* fixupChangedProtocol: true if the scheme is fixed up
*/
function extractScheme(uriString) {
let matches = uriString.match(possibleProtocolRegex);
return matches ? matches[1].replace("\t", "").toLowerCase() : "";
function extractScheme(uriString, fixupFlags = FIXUP_FLAG_NONE) {
  const matches = uriString.match(possibleProtocolRegex);
  const hasColon = matches?.[2] === ":";
  const hasSlash2 = matches?.[3] === "//";
  const isFixupSchemeTypos =
    fixupSchemeTypos && fixupFlags & FIXUP_FLAG_FIX_SCHEME_TYPOS;
  // Bail out when no scheme-like prefix was found, or when what we matched
  // cannot be treated as a scheme: without a colon we only proceed if a
  // "//" separator is present AND scheme-typo fixup is enabled.
  if (
    !matches ||
    (!hasColon && !hasSlash2) ||
    (!hasColon && !isFixupSchemeTypos)
  ) {
    return {
      scheme: "",
      fixedSchemeUriString: uriString,
      fixupChangedProtocol: false,
    };
  }
  // possibleProtocolRegex tolerates embedded tabs inside the scheme, so strip
  // them ALL. A plain string replace("\t", "") only removes the first
  // occurrence and would leave tabs behind for input like "h\tt\ttp:".
  let scheme = matches[1].replace(/\t/g, "").toLowerCase();
  let fixedSchemeUriString = uriString;
  if (isFixupSchemeTypos && hasSlash2) {
    // Fix up typos for a string that the user would have intended as a
    // protocol, e.g. "http;//host" or "https//host" become "scheme://host".
    const afterProtocol = uriString.substring(matches[0].length);
    fixedSchemeUriString = `${scheme}://${afterProtocol}`;
  }
  let fixupChangedProtocol = false;
  if (isFixupSchemeTypos) {
    // Fix up common scheme typos.
    // TODO: Use levenshtein distance here?
    fixupChangedProtocol = [
      ["ttp", "http"],
      ["htp", "http"],
      ["ttps", "https"],
      ["tps", "https"],
      ["ps", "https"],
      ["htps", "https"],
      ["ile", "file"],
      ["le", "file"],
    ].some(([typo, fixed]) => {
      if (scheme === typo) {
        scheme = fixed;
        fixedSchemeUriString =
          scheme + fixedSchemeUriString.substring(typo.length);
        return true;
      }
      return false;
    });
  }
  return {
    scheme,
    fixedSchemeUriString,
    fixupChangedProtocol,
  };
}
/**
@ -965,7 +1006,7 @@ function fixupViewSource(uriString, fixupFlags) {
let innerURIString = uriString.substring(12).trim();
// Prevent recursion.
let innerScheme = extractScheme(innerURIString);
const { scheme: innerScheme } = extractScheme(innerURIString);
if (innerScheme == "view-source") {
throw new Components.Exception(
"Prevent view-source recursion",

View file

@ -41,6 +41,24 @@ var data = [
wrong: "le:///this/is/a/test.html",
fixed: "file:///this/is/a/test.html",
},
{
// Replace ';' with ':'.
wrong: "http;//www.example.com/",
fixed: "http://www.example.com/",
noPrefValue: "http://http;//www.example.com/",
},
{
// Missing ':'.
wrong: "https//www.example.com/",
fixed: "https://www.example.com/",
noPrefValue: "http://https//www.example.com/",
},
{
// Missing ':' for file scheme.
wrong: "file///this/is/a/test.html",
fixed: "file:///this/is/a/test.html",
noPrefValue: "http://file///this/is/a/test.html",
},
{
// Valid should not be changed.
wrong: "https://example.com/this/is/a/test.html",
@ -86,7 +104,7 @@ add_task(function test_false_pref_keeps_typos() {
item.wrong,
Services.uriFixup.FIXUP_FLAG_FIX_SCHEME_TYPOS
);
Assert.equal(preferredURI.spec, item.wrong);
Assert.equal(preferredURI.spec, item.noPrefValue || item.wrong);
}
});

View file

@ -635,6 +635,30 @@ var testcases = [
fixedURI: "http://pserver:8080/",
protocolChange: true,
},
{
input: "http;mozilla",
fixedURI: "http://http;mozilla/",
alternateURI: "http://www.http;mozilla.com/",
keywordLookup: true,
protocolChange: true,
affectedByDNSForSingleWordHosts: true,
},
{
input: "http//mozilla.org",
fixedURI: "http://mozilla.org/",
shouldRunTest: flags =>
flags & Services.uriFixup.FIXUP_FLAG_FIX_SCHEME_TYPOS,
},
{
input: "http//mozilla.org",
fixedURI: "http://http//mozilla.org",
alternateURI: "http://www.http.com//mozilla.org",
keywordLookup: true,
protocolChange: true,
affectedByDNSForSingleWordHosts: true,
shouldRunTest: flags =>
!(flags & Services.uriFixup.FIXUP_FLAG_FIX_SCHEME_TYPOS),
},
];
if (AppConstants.platform == "win") {
@ -752,6 +776,7 @@ async function do_single_test_run() {
protocolChange: expectProtocolChange,
inWhitelist: inWhitelist,
affectedByDNSForSingleWordHosts: affectedByDNSForSingleWordHosts,
shouldRunTest,
} of relevantTests) {
// Explicitly force these into a boolean
expectKeywordLookup = !!expectKeywordLookup;
@ -774,6 +799,10 @@ async function do_single_test_run() {
")"
);
if (shouldRunTest && !shouldRunTest(flags)) {
continue;
}
let URIInfo;
try {
URIInfo = Services.uriFixup.getFixupURIInfo(testInput, flags);

View file

@ -157,6 +157,7 @@ already_AddRefed<Exception> CreateException(nsresult aRv,
switch (NS_ERROR_GET_MODULE(aRv)) {
case NS_ERROR_MODULE_DOM:
case NS_ERROR_MODULE_SVG:
case NS_ERROR_MODULE_DOM_FILE:
case NS_ERROR_MODULE_DOM_XPATH:
case NS_ERROR_MODULE_DOM_INDEXEDDB:
case NS_ERROR_MODULE_DOM_FILEHANDLE:

View file

@ -24,15 +24,20 @@ already_AddRefed<SpeechRecognitionError> SpeechRecognitionError::Constructor(
new SpeechRecognitionError(t, nullptr, nullptr);
bool trusted = e->Init(t);
e->InitSpeechRecognitionError(aType, aParam.mBubbles, aParam.mCancelable,
aParam.mError, aParam.mMessage);
aParam.mError,
NS_ConvertUTF16toUTF8(aParam.mMessage));
e->SetTrusted(trusted);
e->SetComposed(aParam.mComposed);
return e.forget();
}
void SpeechRecognitionError::GetMessage(nsAString& aString) {
CopyUTF8toUTF16(mMessage, aString);
}
void SpeechRecognitionError::InitSpeechRecognitionError(
const nsAString& aType, bool aCanBubble, bool aCancelable,
SpeechRecognitionErrorCode aError, const nsAString& aMessage) {
SpeechRecognitionErrorCode aError, const nsACString& aMessage) {
Event::InitEvent(aType, aCanBubble, aCancelable);
mError = aError;
mMessage = aMessage;

View file

@ -29,18 +29,19 @@ class SpeechRecognitionError : public Event {
aGivenProto);
}
void GetMessage(nsAString& aString) { aString = mMessage; }
void GetMessage(nsAString& aString);
SpeechRecognitionErrorCode Error() { return mError; }
// aMessage should be valid UTF-8, but invalid UTF-8 byte sequences are
// replaced with the REPLACEMENT CHARACTER on conversion to UTF-16.
void InitSpeechRecognitionError(const nsAString& aType, bool aCanBubble,
bool aCancelable,
SpeechRecognitionErrorCode aError,
const nsAString& aMessage);
const nsACString& aMessage);
protected:
SpeechRecognitionErrorCode mError;
nsString mMessage;
nsCString mMessage;
};
} // namespace dom

View file

@ -63,7 +63,7 @@ already_AddRefed<Promise> MediaDevices::GetUserMedia(
if (!window) {
return; // Leave Promise pending after navigation by design.
}
p->MaybeReject(MakeRefPtr<MediaStreamError>(window, *error));
error->Reject(p);
});
return p.forget();
}
@ -127,7 +127,7 @@ already_AddRefed<Promise> MediaDevices::EnumerateDevices(CallerType aCallerType,
if (!window) {
return; // Leave Promise pending after navigation by design.
}
p->MaybeReject(MakeRefPtr<MediaStreamError>(window, *error));
error->Reject(p);
});
return p.forget();
}
@ -155,7 +155,7 @@ already_AddRefed<Promise> MediaDevices::GetDisplayMedia(
if (!window) {
return; // leave promise pending after navigation.
}
p->MaybeReject(MakeRefPtr<MediaStreamError>(window, *error));
error->Reject(p);
});
return p.forget();
}

View file

@ -1340,7 +1340,7 @@ class GetUserMediaStreamRunnable : public Runnable {
mHolder.Reject(MakeRefPtr<MediaMgrError>(
MediaMgrError::Name::AbortError,
sHasShutdown ? u"In shutdown"_ns : u"No stream."_ns),
sHasShutdown ? "In shutdown"_ns : "No stream."_ns),
__func__);
return NS_OK;
}
@ -1379,7 +1379,7 @@ class GetUserMediaStreamRunnable : public Runnable {
return SourceListener::SourceListenerPromise::
CreateAndReject(MakeRefPtr<MediaMgrError>(
MediaMgrError::Name::AbortError,
u"In shutdown"_ns),
"In shutdown"),
__func__);
});
return resolvePromise;
@ -1568,7 +1568,7 @@ class GetUserMediaTask : public Runnable {
}
}
void Fail(MediaMgrError::Name aName, const nsString& aMessage = u""_ns,
void Fail(MediaMgrError::Name aName, const nsCString& aMessage = ""_ns,
const nsString& aConstraint = u""_ns) {
NS_DispatchToMainThread(NS_NewRunnableFunction(
"GetUserMediaTask::Fail",
@ -1639,11 +1639,10 @@ class GetUserMediaTask : public Runnable {
if (errorMsg) {
LOG("%s %" PRIu32, errorMsg, static_cast<uint32_t>(rv));
if (badConstraint) {
Fail(MediaMgrError::Name::OverconstrainedError, u""_ns,
Fail(MediaMgrError::Name::OverconstrainedError, ""_ns,
NS_ConvertUTF8toUTF16(badConstraint));
} else {
Fail(MediaMgrError::Name::NotReadableError,
NS_ConvertUTF8toUTF16(errorMsg));
Fail(MediaMgrError::Name::NotReadableError, nsCString(errorMsg));
}
NS_DispatchToMainThread(
NS_NewRunnableFunction("MediaManager::SendPendingGUMRequest", []() {
@ -1668,7 +1667,7 @@ class GetUserMediaTask : public Runnable {
}
nsresult Denied(MediaMgrError::Name aName,
const nsString& aMessage = u""_ns) {
const nsCString& aMessage = ""_ns) {
// We add a disabled listener to the StreamListeners array until accepted
// If this was the only active MediaStream, remove the window from the list.
if (NS_IsMainThread()) {
@ -2248,7 +2247,7 @@ void MediaManager::DeviceListChanged() {
if (!MediaManager::GetIfExists()) {
return MgrPromise::CreateAndReject(
MakeRefPtr<MediaMgrError>(MediaMgrError::Name::AbortError,
u"In shutdown"_ns),
"In shutdown"),
__func__);
}
return EnumerateRawDevices(
@ -2382,7 +2381,7 @@ RefPtr<MediaManager::StreamPromise> MediaManager::GetUserMedia(
if (!IsOn(c.mVideo) && !IsOn(c.mAudio)) {
return StreamPromise::CreateAndReject(
MakeRefPtr<MediaMgrError>(MediaMgrError::Name::TypeError,
u"audio and/or video is required"_ns),
"audio and/or video is required"),
__func__);
}
@ -2395,7 +2394,7 @@ RefPtr<MediaManager::StreamPromise> MediaManager::GetUserMedia(
if (sHasShutdown) {
return StreamPromise::CreateAndReject(
MakeRefPtr<MediaMgrError>(MediaMgrError::Name::AbortError,
u"In shutdown"_ns),
"In shutdown"),
__func__);
}
@ -2506,7 +2505,7 @@ RefPtr<MediaManager::StreamPromise> MediaManager::GetUserMedia(
default: {
return StreamPromise::CreateAndReject(
MakeRefPtr<MediaMgrError>(MediaMgrError::Name::OverconstrainedError,
u""_ns, u"mediaSource"_ns),
"", u"mediaSource"_ns),
__func__);
}
}
@ -2584,7 +2583,7 @@ RefPtr<MediaManager::StreamPromise> MediaManager::GetUserMedia(
default: {
return StreamPromise::CreateAndReject(
MakeRefPtr<MediaMgrError>(MediaMgrError::Name::OverconstrainedError,
u""_ns, u"mediaSource"_ns),
"", u"mediaSource"_ns),
__func__);
}
}
@ -2775,7 +2774,7 @@ RefPtr<MediaManager::StreamPromise> MediaManager::GetUserMedia(
windowListener->Remove(sourceListener);
return StreamPromise::CreateAndReject(
MakeRefPtr<MediaMgrError>(
MediaMgrError::Name::OverconstrainedError, u""_ns,
MediaMgrError::Name::OverconstrainedError, "",
constraint),
__func__);
}
@ -2882,15 +2881,15 @@ RefPtr<MediaManager::StreamPromise> MediaManager::GetDisplayMedia(
if (!doc->HasBeenUserGestureActivated()) {
return StreamPromise::CreateAndReject(
MakeRefPtr<MediaMgrError>(MediaMgrError::Name::InvalidStateError,
u"getDisplayMedia must be called from a user "
u"gesture handler."_ns),
"getDisplayMedia must be called from a user "
"gesture handler."),
__func__);
}
if (!IsOn(aConstraintsPassedIn.mVideo)) {
return StreamPromise::CreateAndReject(
MakeRefPtr<MediaMgrError>(MediaMgrError::Name::TypeError,
u"video is required"_ns),
"video is required"),
__func__);
}
@ -2902,7 +2901,7 @@ RefPtr<MediaManager::StreamPromise> MediaManager::GetDisplayMedia(
if (vc.mAdvanced.WasPassed()) {
return StreamPromise::CreateAndReject(
MakeRefPtr<MediaMgrError>(MediaMgrError::Name::TypeError,
u"advanced not allowed"_ns),
"advanced not allowed"),
__func__);
}
auto getCLR = [](const auto& aCon) -> const ConstrainLongRange& {
@ -2923,13 +2922,13 @@ RefPtr<MediaManager::StreamPromise> MediaManager::GetDisplayMedia(
if (w.mMin.WasPassed() || h.mMin.WasPassed() || f.mMin.WasPassed()) {
return StreamPromise::CreateAndReject(
MakeRefPtr<MediaMgrError>(MediaMgrError::Name::TypeError,
u"min not allowed"_ns),
"min not allowed"),
__func__);
}
if (w.mExact.WasPassed() || h.mExact.WasPassed() || f.mExact.WasPassed()) {
return StreamPromise::CreateAndReject(
MakeRefPtr<MediaMgrError>(MediaMgrError::Name::TypeError,
u"exact not allowed"_ns),
"exact not allowed"),
__func__);
}
// As a UA optimization, we fail early without incurring a prompt, on
@ -2948,8 +2947,7 @@ RefPtr<MediaManager::StreamPromise> MediaManager::GetDisplayMedia(
if (badConstraint) {
return StreamPromise::CreateAndReject(
MakeRefPtr<MediaMgrError>(MediaMgrError::Name::OverconstrainedError,
u""_ns,
NS_ConvertASCIItoUTF16(badConstraint)),
"", NS_ConvertASCIItoUTF16(badConstraint)),
__func__);
}
}
@ -3184,7 +3182,7 @@ RefPtr<MediaManager::DevicesPromise> MediaManager::EnumerateDevices(
if (sHasShutdown) {
return DevicesPromise::CreateAndReject(
MakeRefPtr<MediaMgrError>(MediaMgrError::Name::AbortError,
u"In shutdown"_ns),
"In shutdown"),
__func__);
}
uint64_t windowId = aWindow->WindowID();
@ -3888,7 +3886,7 @@ nsresult MediaManager::Observe(nsISupports* aSubject, const char* aTopic,
}
if (sHasShutdown) {
return task->Denied(MediaMgrError::Name::AbortError, u"In shutdown"_ns);
return task->Denied(MediaMgrError::Name::AbortError, "In shutdown"_ns);
}
// Reuse the same thread to save memory.
MediaManager::Dispatch(task.forget());
@ -4205,7 +4203,7 @@ SourceListener::InitializeAsync() {
rv = audioDevice->Start();
}
if (NS_FAILED(rv)) {
nsString log;
nsCString log;
if (rv == NS_ERROR_NOT_AVAILABLE) {
log.AssignLiteral("Concurrent mic process limit.");
aHolder.Reject(
@ -4230,10 +4228,9 @@ SourceListener::InitializeAsync() {
MOZ_ASSERT_UNREACHABLE("Stopping audio failed");
}
}
nsString log;
log.AssignLiteral("Starting video failed");
aHolder.Reject(MakeRefPtr<MediaMgrError>(
MediaMgrError::Name::AbortError, log),
MediaMgrError::Name::AbortError,
"Starting video failed"),
__func__);
return;
}
@ -4774,7 +4771,7 @@ SourceListener::ApplyConstraintsToTrack(
}
aHolder.Reject(MakeRefPtr<MediaMgrError>(
MediaMgrError::Name::OverconstrainedError, u""_ns,
MediaMgrError::Name::OverconstrainedError, "",
NS_ConvertASCIItoUTF16(badConstraint)),
__func__);
return;

View file

@ -6,11 +6,12 @@
#include "MediaStreamError.h"
#include "mozilla/dom/MediaStreamErrorBinding.h"
#include "mozilla/dom/Promise.h"
#include "nsContentUtils.h"
namespace mozilla {
BaseMediaMgrError::BaseMediaMgrError(Name aName, const nsAString& aMessage,
BaseMediaMgrError::BaseMediaMgrError(Name aName, const nsACString& aMessage,
const nsAString& aConstraint)
: mMessage(aMessage), mConstraint(aConstraint), mName(aName) {
#define MAP_MEDIAERR(name, msg) \
@ -28,7 +29,6 @@ BaseMediaMgrError::BaseMediaMgrError(Name aName, const nsAString& aMessage,
"or the platform in the current context."),
MAP_MEDIAERR(NotFoundError, "The object can not be found here."),
MAP_MEDIAERR(NotReadableError, "The I/O read operation failed."),
MAP_MEDIAERR(NotSupportedError, "The operation is not supported."),
MAP_MEDIAERR(OverconstrainedError, "Constraints could be not satisfied."),
MAP_MEDIAERR(SecurityError, "The operation is insecure."),
MAP_MEDIAERR(TypeError, ""),
@ -47,12 +47,42 @@ BaseMediaMgrError::BaseMediaMgrError(Name aName, const nsAString& aMessage,
NS_IMPL_ISUPPORTS0(MediaMgrError)
namespace dom {
void MediaMgrError::Reject(dom::Promise* aPromise) {
switch (mName) {
case Name::AbortError:
aPromise->MaybeRejectWithAbortError(mMessage);
return;
case Name::InvalidStateError:
aPromise->MaybeRejectWithInvalidStateError(mMessage);
return;
case Name::NotAllowedError:
aPromise->MaybeRejectWithNotAllowedError(mMessage);
return;
case Name::NotFoundError:
aPromise->MaybeRejectWithNotFoundError(mMessage);
return;
case Name::NotReadableError:
aPromise->MaybeRejectWithNotReadableError(mMessage);
return;
case Name::OverconstrainedError: {
// TODO: Add OverconstrainedError type.
// https://bugzilla.mozilla.org/show_bug.cgi?id=1453013
nsCOMPtr<nsPIDOMWindowInner> window =
do_QueryInterface(aPromise->GetGlobalObject());
aPromise->MaybeReject(MakeRefPtr<dom::MediaStreamError>(window, *this));
return;
}
case Name::SecurityError:
aPromise->MaybeRejectWithSecurityError(mMessage);
return;
case Name::TypeError:
aPromise->MaybeRejectWithTypeError(mMessage);
return;
// -Wswitch ensures all cases are covered so don't add default:.
}
}
MediaStreamError::MediaStreamError(nsPIDOMWindowInner* aParent, Name aName,
const nsAString& aMessage,
const nsAString& aConstraint)
: BaseMediaMgrError(aName, aMessage, aConstraint), mParent(aParent) {}
namespace dom {
NS_IMPL_CYCLE_COLLECTION_WRAPPERCACHE(MediaStreamError, mParent)
NS_IMPL_CYCLE_COLLECTING_ADDREF(MediaStreamError)
@ -71,7 +101,7 @@ JSObject* MediaStreamError::WrapObject(JSContext* aCx,
void MediaStreamError::GetName(nsAString& aName) const { aName = mNameString; }
void MediaStreamError::GetMessage(nsAString& aMessage) const {
aMessage = mMessage;
CopyUTF8toUTF16(mMessage, aMessage);
}
void MediaStreamError::GetConstraint(nsAString& aConstraint) const {

View file

@ -42,31 +42,38 @@ class BaseMediaMgrError {
NotAllowedError,
NotFoundError,
NotReadableError,
NotSupportedError,
OverconstrainedError,
SecurityError,
TypeError,
};
protected:
BaseMediaMgrError(Name aName, const nsAString& aMessage,
BaseMediaMgrError(Name aName, const nsACString& aMessage,
const nsAString& aConstraint);
public:
nsString mNameString;
nsString mMessage;
nsCString mMessage;
const nsString mConstraint;
const Name mName;
};
class MediaMgrError final : public nsISupports, public BaseMediaMgrError {
public:
explicit MediaMgrError(Name aName, const nsAString& aMessage = u""_ns,
// aMessage should be valid UTF-8, but invalid UTF-8 byte sequences are
// replaced with the REPLACEMENT CHARACTER on conversion to UTF-16.
explicit MediaMgrError(Name aName, const nsACString& aMessage = ""_ns,
const nsAString& aConstraint = u""_ns)
: BaseMediaMgrError(aName, aMessage, aConstraint) {}
template <int N>
explicit MediaMgrError(Name aName, const char (&aMessage)[N],
const nsAString& aConstraint = u""_ns)
: BaseMediaMgrError(aName, nsLiteralCString(aMessage), aConstraint) {}
NS_DECL_THREADSAFE_ISUPPORTS
void Reject(dom::Promise* aPromise);
private:
~MediaMgrError() = default;
};
@ -76,10 +83,6 @@ class MediaStreamError final : public nsISupports,
public BaseMediaMgrError,
public nsWrapperCache {
public:
MediaStreamError(nsPIDOMWindowInner* aParent, Name aName,
const nsAString& aMessage = u""_ns,
const nsAString& aConstraint = u""_ns);
MediaStreamError(nsPIDOMWindowInner* aParent, const BaseMediaMgrError& aOther)
: BaseMediaMgrError(aOther.mName, aOther.mMessage, aOther.mConstraint),
mParent(aParent) {}

View file

@ -50,8 +50,7 @@ auto MediaStreamTrackSource::ApplyConstraints(
const dom::MediaTrackConstraints& aConstraints, CallerType aCallerType)
-> RefPtr<ApplyConstraintsPromise> {
return ApplyConstraintsPromise::CreateAndReject(
MakeRefPtr<MediaMgrError>(MediaMgrError::Name::OverconstrainedError,
u""_ns),
MakeRefPtr<MediaMgrError>(MediaMgrError::Name::OverconstrainedError, ""),
__func__);
}

View file

@ -15,7 +15,7 @@ https://bugzilla.mozilla.org/show_bug.cgi?id=802982
function boom()
{
for (var j = 0; j < 100; ++j) {
navigator.mozGetUserMedia({}, function(){}, function(){});
navigator.mozGetUserMedia({video:true}, function(){}, function(){});
}
finish(); // we're not waiting for success/error callbacks here
}

View file

@ -25,7 +25,7 @@ class RemoteTrackSource : public dom::MediaStreamTrackSource {
dom::CallerType aCallerType) override {
return ApplyConstraintsPromise::CreateAndReject(
MakeRefPtr<MediaMgrError>(
dom::MediaStreamError::Name::OverconstrainedError, u""_ns),
dom::MediaStreamError::Name::OverconstrainedError, ""),
__func__);
}

View file

@ -96,14 +96,14 @@ OnlineSpeechRecognitionService::OnStopRequest(nsIRequest* aRequest,
bool parsingSuccessful;
nsAutoCString result;
nsAutoCString hypoValue;
nsAutoString errorMsg;
nsAutoCString errorMsg;
SpeechRecognitionErrorCode errorCode;
SR_LOG("STT Result: %s", mBuf.get());
if (NS_FAILED(aStatusCode)) {
success = false;
errorMsg.Assign(u"Error connecting to the service."_ns);
errorMsg.AssignLiteral("Error connecting to the service.");
errorCode = SpeechRecognitionErrorCode::Network;
} else {
success = true;
@ -113,7 +113,7 @@ OnlineSpeechRecognitionService::OnStopRequest(nsIRequest* aRequest,
if (!parsingSuccessful) {
// there's an internal server error
success = false;
errorMsg.Assign(u"Internal server error"_ns);
errorMsg.AssignLiteral("Internal server error");
errorCode = SpeechRecognitionErrorCode::Network;
} else {
result.Assign(root.get("status", "error").asString().c_str());
@ -124,13 +124,12 @@ OnlineSpeechRecognitionService::OnStopRequest(nsIRequest* aRequest,
confidence = root["data"][0].get("confidence", "0").asFloat();
} else {
success = false;
errorMsg.Assign(u"Error reading result data."_ns);
errorMsg.AssignLiteral("Error reading result data.");
errorCode = SpeechRecognitionErrorCode::Network;
}
} else {
success = false;
NS_ConvertUTF8toUTF16 error(root.get("message", "").asString().c_str());
errorMsg.Assign(error);
errorMsg.Assign(root.get("message", "").asString().c_str());
errorCode = SpeechRecognitionErrorCode::No_speech;
}
}
@ -237,7 +236,7 @@ void OnlineSpeechRecognitionService::EncoderError() {
}
mRecognition->DispatchError(
SpeechRecognition::EVENT_RECOGNITIONSERVICE_ERROR,
SpeechRecognitionErrorCode::Audio_capture, u"Encoder error"_ns);
SpeechRecognitionErrorCode::Audio_capture, "Encoder error");
}));
}
@ -309,7 +308,7 @@ void OnlineSpeechRecognitionService::DoSTT() {
if (NS_WARN_IF(NS_FAILED(rv))) {
mRecognition->DispatchError(
SpeechRecognition::EVENT_RECOGNITIONSERVICE_ERROR,
SpeechRecognitionErrorCode::Network, u"Unknown URI"_ns);
SpeechRecognitionErrorCode::Network, "Unknown URI");
return;
}
@ -324,7 +323,7 @@ void OnlineSpeechRecognitionService::DoSTT() {
if (NS_WARN_IF(!window)) {
mRecognition->DispatchError(
SpeechRecognition::EVENT_RECOGNITIONSERVICE_ERROR,
SpeechRecognitionErrorCode::Aborted, u"No window"_ns);
SpeechRecognitionErrorCode::Aborted, "No window");
return;
}
@ -332,7 +331,7 @@ void OnlineSpeechRecognitionService::DoSTT() {
if (NS_WARN_IF(!doc)) {
mRecognition->DispatchError(
SpeechRecognition::EVENT_RECOGNITIONSERVICE_ERROR,
SpeechRecognitionErrorCode::Aborted, u"No document"_ns);
SpeechRecognitionErrorCode::Aborted, "No document");
}
rv = NS_NewChannel(getter_AddRefs(chan), uri, doc->NodePrincipal(), secFlags,
contentPolicy, nullptr, nullptr, nullptr, nullptr,
@ -340,7 +339,7 @@ void OnlineSpeechRecognitionService::DoSTT() {
if (NS_WARN_IF(NS_FAILED(rv))) {
mRecognition->DispatchError(
SpeechRecognition::EVENT_RECOGNITIONSERVICE_ERROR,
SpeechRecognitionErrorCode::Network, u"Failed to open channel"_ns);
SpeechRecognitionErrorCode::Network, "Failed to open channel");
return;
}
@ -385,7 +384,7 @@ void OnlineSpeechRecognitionService::DoSTT() {
if (!audio.SetCapacity(length, fallible)) {
mRecognition->DispatchError(
SpeechRecognition::EVENT_RECOGNITIONSERVICE_ERROR,
SpeechRecognitionErrorCode::Audio_capture, u"Allocation error"_ns);
SpeechRecognitionErrorCode::Audio_capture, "Allocation error");
return;
}
@ -399,7 +398,7 @@ void OnlineSpeechRecognitionService::DoSTT() {
if (NS_WARN_IF(NS_FAILED(rv))) {
mRecognition->DispatchError(
SpeechRecognition::EVENT_RECOGNITIONSERVICE_ERROR,
SpeechRecognitionErrorCode::Network, u"Failed to open stream"_ns);
SpeechRecognitionErrorCode::Network, "Failed to open stream");
return;
}
if (bodyStream) {
@ -413,7 +412,7 @@ void OnlineSpeechRecognitionService::DoSTT() {
if (NS_WARN_IF(NS_FAILED(rv))) {
mRecognition->DispatchError(
SpeechRecognition::EVENT_RECOGNITIONSERVICE_ERROR,
SpeechRecognitionErrorCode::Network, u"Internal server error"_ns);
SpeechRecognitionErrorCode::Network, "Internal server error");
}
}

View file

@ -698,7 +698,7 @@ SpeechRecognition::Observe(nsISupports* aSubject, const char* aTopic,
StateBetween(STATE_IDLE, STATE_WAITING_FOR_SPEECH)) {
DispatchError(SpeechRecognition::EVENT_AUDIO_ERROR,
SpeechRecognitionErrorCode::No_speech,
u"No speech detected (timeout)"_ns);
"No speech detected (timeout)");
} else if (!strcmp(aTopic, SPEECH_RECOGNITION_TEST_END_TOPIC)) {
nsCOMPtr<nsIObserverService> obs = services::GetObserverService();
obs->RemoveObserver(this, SPEECH_RECOGNITION_TEST_EVENT_REQUEST_TOPIC);
@ -719,7 +719,7 @@ void SpeechRecognition::ProcessTestEventRequest(nsISupports* aSubject,
DispatchError(
SpeechRecognition::EVENT_AUDIO_ERROR,
SpeechRecognitionErrorCode::Audio_capture, // TODO different codes?
u"AUDIO_ERROR test event"_ns);
"AUDIO_ERROR test event");
} else {
NS_ASSERTION(StaticPrefs::media_webspeech_test_fake_recognition_service(),
"Got request for fake recognition service event, but "
@ -963,7 +963,7 @@ void SpeechRecognition::NotifyTrackAdded(
void SpeechRecognition::DispatchError(EventType aErrorType,
SpeechRecognitionErrorCode aErrorCode,
const nsAString& aMessage) {
const nsACString& aMessage) {
MOZ_ASSERT(NS_IsMainThread());
MOZ_ASSERT(aErrorType == EVENT_RECOGNITIONSERVICE_ERROR ||
aErrorType == EVENT_AUDIO_ERROR,

View file

@ -134,10 +134,17 @@ class SpeechRecognition final : public DOMEventTargetHelper,
};
void NotifyTrackAdded(const RefPtr<MediaStreamTrack>& aTrack) override;
// aMessage should be valid UTF-8, but invalid UTF-8 byte sequences are
// replaced with the REPLACEMENT CHARACTER on conversion to UTF-16.
void DispatchError(EventType aErrorType,
SpeechRecognitionErrorCode aErrorCode,
const nsAString& aMessage);
const nsACString& aMessage);
template <int N>
void DispatchError(EventType aErrorType,
SpeechRecognitionErrorCode aErrorCode,
const char (&aMessage)[N]) {
DispatchError(aErrorType, aErrorCode, nsLiteralCString(aMessage));
}
uint32_t FillSamplesBuffer(const int16_t* aSamples, uint32_t aSampleCount);
uint32_t SplitSamplesBuffer(const int16_t* aSamplesBuffer,
uint32_t aSampleCount,

View file

@ -84,7 +84,7 @@ FakeSpeechRecognitionService::Observe(nsISupports* aSubject, const char* aTopic,
mRecognition->DispatchError(
SpeechRecognition::EVENT_RECOGNITIONSERVICE_ERROR,
SpeechRecognitionErrorCode::Network, // TODO different codes?
u"RECOGNITIONSERVICE_ERROR test event"_ns);
"RECOGNITIONSERVICE_ERROR test event");
} else if (eventName.EqualsLiteral("EVENT_RECOGNITIONSERVICE_FINAL_RESULT")) {
RefPtr<SpeechEvent> event = new SpeechEvent(

View file

@ -356,7 +356,17 @@
await Assert.rejects(
IOUtils.read(tmpFileName, { decompress: true }),
/Could not decompress file because it has an invalid LZ4 header \(wrong magic number: .*\)/,
(actual) => {
is(actual.constructor, DOMException,
"rejection reason constructor for decompress with bad header");
is(actual.name, "NotReadableError",
"rejection error name for decompress with bad header");
ok(/Could not decompress file because it has an invalid LZ4 header \(wrong magic number: .*\)/
.test(actual.message),
"rejection error message for decompress with bad header. Got "
+ actual.message);
return true;
},
"IOUtils::read fails to decompress LZ4 data with a bad header"
);

View file

@ -225,8 +225,9 @@ bool RenderCompositorD3D11SWGL::MapTile(wr::NativeTileId aId,
mCompositor->GetDevice()->GetImmediateContext(getter_AddRefs(context));
D3D11_MAPPED_SUBRESOURCE mappedSubresource;
DebugOnly<HRESULT> hr = context->Map(mCurrentTile.mStagingTexture, 0,
D3D11_MAP_WRITE, 0, &mappedSubresource);
DebugOnly<HRESULT> hr =
context->Map(mCurrentTile.mStagingTexture, 0, D3D11_MAP_READ_WRITE, 0,
&mappedSubresource);
MOZ_ASSERT(SUCCEEDED(hr));
// aData is expected to contain a pointer to the first pixel within the valid
@ -309,7 +310,7 @@ void RenderCompositorD3D11SWGL::CreateTile(wr::NativeSurfaceId aId, int32_t aX,
// add an extra row instead.
desc.Height += 1;
desc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE;
desc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE | D3D11_CPU_ACCESS_READ;
desc.Usage = D3D11_USAGE_STAGING;
desc.BindFlags = 0;

View file

@ -468,7 +468,6 @@ impl FrameBuilder {
resource_cache.block_until_all_resources_added(
gpu_cache,
render_tasks,
profile,
);
}

View file

@ -218,8 +218,6 @@ impl GlyphRasterizer {
glyph_cache: &mut GlyphCache,
texture_cache: &mut TextureCache,
gpu_cache: &mut GpuCache,
_: &mut RenderTaskCache,
_: &mut RenderTaskGraph,
profile: &mut TransactionProfile,
) {
profile.start_time(profiler::GLYPH_RESOLVE_TIME);
@ -1207,8 +1205,6 @@ mod test_glyph_rasterizer {
&mut glyph_cache,
&mut TextureCache::new_for_testing(4096, FORMAT),
&mut gpu_cache,
&mut render_task_cache,
&mut render_task_tree,
&mut TransactionProfile::new(),
);
}
@ -1292,8 +1288,6 @@ mod test_glyph_rasterizer {
&mut glyph_cache,
&mut TextureCache::new_for_testing(4096, FORMAT),
&mut gpu_cache,
&mut render_task_cache,
&mut render_task_tree,
&mut TransactionProfile::new(),
);
}

View file

@ -122,7 +122,7 @@ use crate::print_tree::{PrintTree, PrintTreePrinter};
use crate::render_backend::{DataStores, FrameId};
use crate::render_task_graph::RenderTaskId;
use crate::render_target::RenderTargetKind;
use crate::render_task::{RenderTask, RenderTaskLocation, BlurTaskCache, ClearMode};
use crate::render_task::{RenderTask, RenderTaskLocation, BlurTaskCache};
use crate::resource_cache::{ResourceCache, ImageGeneration};
use crate::space::{SpaceMapper, SpaceSnapper};
use crate::scene::SceneProperties;
@ -4959,7 +4959,6 @@ impl PicturePrimitive {
picture_task_id,
frame_state.render_tasks,
RenderTargetKind::Color,
ClearMode::Transparent,
None,
original_size.to_i32(),
);
@ -5041,7 +5040,6 @@ impl PicturePrimitive {
picture_task_id,
frame_state.render_tasks,
RenderTargetKind::Color,
ClearMode::Transparent,
Some(&mut blur_tasks),
device_rect.size.to_i32(),
);

View file

@ -20,7 +20,7 @@ use crate::picture::{SliceId, SurfaceInfo, ResolvedSurfaceTexture, TileCacheInst
use crate::prim_store::{PrimitiveStore, DeferredResolve, PrimitiveScratchBuffer};
use crate::prim_store::gradient::GRADIENT_FP_STOPS;
use crate::render_backend::DataStores;
use crate::render_task::{RenderTaskKind, RenderTaskAddress, ClearMode, BlitSource};
use crate::render_task::{RenderTaskKind, RenderTaskAddress, BlitSource};
use crate::render_task::{RenderTask, ScalingTask, SvgFilterInfo};
use crate::render_task_graph::{RenderTaskGraph, RenderTaskId};
use crate::resource_cache::ResourceCache;
@ -190,7 +190,7 @@ impl<T: RenderTarget> RenderTargetList<T> {
max_dynamic_size: DeviceIntSize::new(0, 0),
targets: Vec::new(),
saved_index: None,
alloc_tracker: ArrayAllocationTracker::new(),
alloc_tracker: ArrayAllocationTracker::new(None),
gpu_supports_fast_clears,
}
}
@ -342,15 +342,6 @@ impl RenderTarget for ColorRenderTarget {
profile_scope!("alpha_task");
let task = &render_tasks[*task_id];
match task.clear_mode {
ClearMode::One |
ClearMode::Zero => {
panic!("bug: invalid clear mode for color task");
}
ClearMode::DontCare |
ClearMode::Transparent => {}
}
match task.kind {
RenderTaskKind::Picture(ref pic_task) => {
let pic = &ctx.prim_store.pictures[pic_task.pic_index.0];
@ -619,19 +610,6 @@ impl RenderTarget for AlphaRenderTarget {
let task = &render_tasks[task_id];
let (target_rect, _) = task.get_target_rect();
match task.clear_mode {
ClearMode::Zero => {
self.zero_clears.push(task_id);
}
ClearMode::One => {
self.one_clears.push(task_id);
}
ClearMode::DontCare => {}
ClearMode::Transparent => {
panic!("bug: invalid clear mode for alpha task");
}
}
match task.kind {
RenderTaskKind::Readback |
RenderTaskKind::Picture(..) |
@ -643,6 +621,7 @@ impl RenderTarget for AlphaRenderTarget {
panic!("BUG: should not be added to alpha target!");
}
RenderTaskKind::VerticalBlur(..) => {
self.zero_clears.push(task_id);
add_blur_instances(
&mut self.vertical_blurs,
BlurDirection::Vertical,
@ -651,6 +630,7 @@ impl RenderTarget for AlphaRenderTarget {
);
}
RenderTaskKind::HorizontalBlur(..) => {
self.zero_clears.push(task_id);
add_blur_instances(
&mut self.horizontal_blurs,
BlurDirection::Horizontal,
@ -659,6 +639,9 @@ impl RenderTarget for AlphaRenderTarget {
);
}
RenderTaskKind::CacheMask(ref task_info) => {
if task_info.clear_to_one {
self.one_clears.push(task_id);
}
self.clip_batcher.add(
task_info.clip_node_range,
task_info.root_spatial_node_index,
@ -676,6 +659,9 @@ impl RenderTarget for AlphaRenderTarget {
);
}
RenderTaskKind::ClipRegion(ref region_task) => {
if region_task.clear_to_one {
self.one_clears.push(task_id);
}
let device_rect = DeviceRect::new(
DevicePoint::zero(),
target_rect.size.to_f32(),

View file

@ -129,6 +129,7 @@ pub struct CacheMaskTask {
pub root_spatial_node_index: SpatialNodeIndex,
pub clip_node_range: ClipNodeRange,
pub device_pixel_scale: DevicePixelScale,
pub clear_to_one: bool,
}
#[derive(Debug)]
@ -138,6 +139,7 @@ pub struct ClipRegionTask {
pub local_pos: LayoutPoint,
pub device_pixel_scale: DevicePixelScale,
pub clip_data: ClipData,
pub clear_to_one: bool,
}
#[cfg_attr(feature = "capture", derive(Serialize))]
@ -309,20 +311,6 @@ impl RenderTaskKind {
}
}
#[derive(Debug, Copy, Clone, PartialEq)]
#[cfg_attr(feature = "capture", derive(Serialize))]
#[cfg_attr(feature = "replay", derive(Deserialize))]
pub enum ClearMode {
// Applicable to color and alpha targets.
Zero,
One,
/// This task doesn't care what it is cleared to - it will completely overwrite it.
DontCare,
// Applicable to color targets only.
Transparent,
}
/// In order to avoid duplicating the down-scaling and blur passes when a picture has several blurs,
/// we use a local (primitive-level) cache of the render tasks generated for a single shadowed primitive
/// in a single frame.
@ -361,7 +349,6 @@ pub struct RenderTask {
pub location: RenderTaskLocation,
pub children: TaskDependencies,
pub kind: RenderTaskKind,
pub clear_mode: ClearMode,
pub saved_index: Option<SavedTargetIndex>,
}
@ -371,7 +358,6 @@ impl RenderTask {
size: DeviceIntSize,
children: TaskDependencies,
kind: RenderTaskKind,
clear_mode: ClearMode,
) -> Self {
render_task_sanity_check(&size);
@ -379,7 +365,6 @@ impl RenderTask {
location: RenderTaskLocation::Dynamic(None, size),
children,
kind,
clear_mode,
saved_index: None,
}
}
@ -394,7 +379,6 @@ impl RenderTask {
location,
children,
kind: RenderTaskKind::Test(target),
clear_mode: ClearMode::Transparent,
saved_index: None,
}
}
@ -438,7 +422,6 @@ impl RenderTask {
scissor_rect,
valid_rect,
}),
clear_mode: ClearMode::Transparent,
saved_index: None,
}
}
@ -459,7 +442,6 @@ impl RenderTask {
start_point,
end_point,
}),
ClearMode::DontCare,
)
}
@ -468,7 +450,6 @@ impl RenderTask {
size,
TaskDependencies::new(),
RenderTaskKind::Readback,
ClearMode::Transparent,
)
}
@ -501,7 +482,6 @@ impl RenderTask {
source,
padding,
}),
ClearMode::Transparent,
)
}
@ -521,7 +501,6 @@ impl RenderTask {
wavy_line_thickness,
local_size,
}),
ClearMode::Transparent,
)
}
@ -593,7 +572,6 @@ impl RenderTask {
mask_task_id,
render_tasks,
RenderTargetKind::Alpha,
ClearMode::Zero,
None,
cache_size,
)
@ -618,12 +596,6 @@ impl RenderTask {
// If we have a potentially tiled clip mask, clear the mask area first. Otherwise,
// the first (primary) clip mask will overwrite all the clip mask pixels with
// blending disabled to set to the initial value.
let clear_mode = if needs_clear {
ClearMode::One
} else {
ClearMode::DontCare
};
render_tasks.add().init(
RenderTask::with_dynamic_location(
outer_rect.size.to_i32(),
@ -633,8 +605,8 @@ impl RenderTask {
clip_node_range,
root_spatial_node_index,
device_pixel_scale,
clear_to_one: needs_clear,
}),
clear_mode,
)
)
}
@ -646,12 +618,6 @@ impl RenderTask {
device_pixel_scale: DevicePixelScale,
fb_config: &FrameBuilderConfig,
) -> Self {
let clear_mode = if fb_config.gpu_supports_fast_clears {
ClearMode::One
} else {
ClearMode::DontCare
};
RenderTask::with_dynamic_location(
size,
TaskDependencies::new(),
@ -659,8 +625,8 @@ impl RenderTask {
local_pos,
device_pixel_scale,
clip_data,
clear_to_one: fb_config.gpu_supports_fast_clears,
}),
clear_mode,
)
}
@ -706,7 +672,6 @@ impl RenderTask {
src_task_id: RenderTaskId,
render_tasks: &mut RenderTaskGraph,
target_kind: RenderTargetKind,
clear_mode: ClearMode,
mut blur_cache: Option<&mut BlurTaskCache>,
blur_region: DeviceIntSize,
) -> RenderTaskId {
@ -772,7 +737,6 @@ impl RenderTask {
blur_region,
uv_rect_kind,
}),
clear_mode,
));
render_tasks.add().init(RenderTask::with_dynamic_location(
@ -785,7 +749,6 @@ impl RenderTask {
blur_region,
uv_rect_kind,
}),
clear_mode,
))
});
@ -806,7 +769,6 @@ impl RenderTask {
RenderTaskKind::Border(BorderTask {
instances,
}),
ClearMode::Transparent,
)
}
@ -847,7 +809,6 @@ impl RenderTask {
uv_rect_kind,
padding,
}),
ClearMode::DontCare,
)
)
}
@ -981,7 +942,6 @@ impl RenderTask {
)),
render_tasks,
RenderTargetKind::Color,
ClearMode::Transparent,
None,
content_size,
)
@ -1050,7 +1010,6 @@ impl RenderTask {
offset_task_id,
render_tasks,
RenderTargetKind::Color,
ClearMode::Transparent,
None,
content_size,
);
@ -1167,7 +1126,6 @@ impl RenderTask {
uv_rect_kind,
info,
}),
ClearMode::Transparent,
)
}
@ -1547,7 +1505,6 @@ impl RenderTask {
}
}
pt.add_item(format!("clear to: {:?}", self.clear_mode));
pt.add_item(format!("dimensions: {:?}", self.location.size()));
for &child_id in &self.children {

View file

@ -4119,7 +4119,7 @@ impl Renderer {
if use_batch_upload {
let (allocator, buffers) = batch_upload_buffers.entry(texture.get_format())
.or_insert_with(|| (ArrayAllocationTracker::new(), Vec::new()));
.or_insert_with(|| (ArrayAllocationTracker::new(None), Vec::new()));
// Allocate a region within the staging buffer for this update. If there is
// no room in an existing buffer then allocate another texture and buffer.
@ -5990,11 +5990,11 @@ impl Renderer {
let transformed_dirty_rect = if let Some(transform) = tile.transform {
transform.outer_transformed_rect(&tile_dirty_rect)
} else {
Some(tile_dirty_rect)
Some(tile_dirty_rect)
};
if let Some(dirty_rect) = transformed_dirty_rect {
combined_dirty_rect = combined_dirty_rect.union(&dirty_rect);
combined_dirty_rect = combined_dirty_rect.union(&dirty_rect);
}
}

View file

@ -1168,7 +1168,6 @@ impl ResourceCache {
pub fn block_until_all_resources_added(
&mut self,
gpu_cache: &mut GpuCache,
render_tasks: &mut RenderTaskGraph,
profile: &mut TransactionProfile,
) {
profile_scope!("block_until_all_resources_added");
@ -1180,8 +1179,6 @@ impl ResourceCache {
&mut self.cached_glyphs,
&mut self.texture_cache,
gpu_cache,
&mut self.cached_render_tasks,
render_tasks,
profile,
);

View file

@ -64,14 +64,23 @@ pub struct ArrayAllocationTracker {
}
impl ArrayAllocationTracker {
pub fn new() -> Self {
ArrayAllocationTracker {
pub fn new(initial_size: Option<DeviceIntSize>) -> Self {
let mut allocator = ArrayAllocationTracker {
bins: [
Vec::new(),
Vec::new(),
Vec::new(),
],
};
if let Some(initial_size) = initial_size {
allocator.push(
FreeRectSlice(0),
initial_size.into(),
);
}
allocator
}
fn push(&mut self, slice: FreeRectSlice, rect: DeviceIntRect) {
@ -213,7 +222,7 @@ fn random_fill(count: usize, texture_size: i32) -> f32 {
DeviceIntSize::new(texture_size, texture_size),
);
let mut rng = thread_rng();
let mut allocator = ArrayAllocationTracker::new();
let mut allocator = ArrayAllocationTracker::new(None);
// check for empty allocation
assert_eq!(

View file

@ -19,5 +19,5 @@ origin:
license: "ISC"
# update.sh will update this value
release: "4e851e996031381d03acd26b93e0578101eade33 (2020-11-03 16:36:58 +0100)"
release: "df5fe422b77a58fd8f7e0b3953e83807ae04c060 (2020-11-16 09:19:25 +0000)"

View file

@ -2085,9 +2085,11 @@ int setup_wasapi_stream_one_side(cubeb_stream * stm,
cubeb_device_info device_info;
int rv = wasapi_create_device(stm->context, device_info, stm->device_enumerator.get(), device.get());
if (rv == CUBEB_OK) {
const char* HANDSFREE_TAG = "BTHHFEENUM";
const char* HANDSFREE_TAG = "BTHHFENUM";
size_t len = sizeof(HANDSFREE_TAG);
if (direction == eCapture && strncmp(device_info.group_id, HANDSFREE_TAG, len) == 0) {
if (direction == eCapture &&
strlen(device_info.group_id) >= len &&
strncmp(device_info.group_id, HANDSFREE_TAG, len) == 0) {
// Rather high-latency to prevent constant under-runs in this particular
// case of an input device using bluetooth handsfree.
uint32_t default_period_frames = hns_to_frames(device_info.default_rate, default_period);

View file

@ -21,6 +21,13 @@ const nsIBinaryInputStream = Components.Constructor(
"setInputStream"
);
// Enable the collection (during test) for all products so even products
// that don't collect the data will be able to run the test without failure.
Services.prefs.setBoolPref(
"toolkit.telemetry.testing.overrideProductsCheck",
true
);
const fileBase = "test_empty_file.zip";
const file = do_get_file("data/" + fileBase);
const jarBase = "jar:" + Services.io.newFileURI(file).spec + "!";

View file

@ -152,6 +152,7 @@ int16_t gBadPortList[] = {
532, // netnews
540, // uucp
548, // afp
554, // rtsp
556, // remotefs
563, // nntp+ssl
587, // smtp (outgoing)
@ -159,6 +160,8 @@ int16_t gBadPortList[] = {
636, // ldap+ssl
993, // imap+ssl
995, // pop3+ssl
1720, // h323hostcall
1723, // pptp
2049, // nfs
3659, // apple-sasl
4045, // lockd

View file

@ -19,7 +19,6 @@ import time
import mozlog.structured
from marionette_driver import Wait
from marionette_driver.legacy_actions import Actions
from marionette_driver.errors import JavascriptException, ScriptTimeoutException
from marionette_driver.keys import Keys
from marionette_harness import MarionetteTestCase
@ -380,10 +379,13 @@ class AwsyTestCase(MarionetteTestCase):
browser will see the user as becoming inactive and trigger
appropriate GCs, as would have happened in real use.
"""
action = Actions(self.marionette)
action.key_down(Keys.SHIFT)
action.key_up(Keys.SHIFT)
action.perform()
try:
action = self.marionette.actions.sequence("key", "keyboard_id")
action.key_down(Keys.SHIFT)
action.key_up(Keys.SHIFT)
action.perform()
finally:
self.marionette.actions.release()
def open_pages(self):
"""

View file

@ -1,2 +1,2 @@
[GUM-deny.https.html]
prefs: [media.navigator.permission.disabled:false,media.getusermedia.camera.deny:true]
prefs: [media.navigator.permission.disabled:false, media.getusermedia.camera.deny:true]

View file

@ -53,6 +53,7 @@ var BLOCKED_PORTS_LIST = [
532, // netnews
540, // uucp
548, // afp
554, // rtsp
556, // remotefs
563, // nntp+ssl
587, // smtp (outgoing)
@ -60,6 +61,8 @@ var BLOCKED_PORTS_LIST = [
636, // ldap+ssl
993, // ldap+ssl
995, // pop3+ssl
1720, // h323hostcall
1723, // pptp
2049, // nfs
3659, // apple-sasl
4045, // lockd

View file

@ -3,8 +3,7 @@
<head>
<title>getUserMedia() triggers error callback when auth is denied</title>
<link rel="author" title="Dr. A. Gouaillard" href="mailto:agouaillard@gmail.com"/>
<link rel="help" href="http://w3c.github.io/mediacapture-main/getusermedia.html#methods-5">
<link rel="help" href="http://w3c.github.io/mediacapture-main/getusermedia.html#navigatorusermediaerrorcallback">
<link rel="help" href="https://w3c.github.io/mediacapture-main/#dom-mediadevices-getusermedia">
</head>
<body>
<p class="instructions">When prompted, <strong>please deny</strong> access to
@ -16,18 +15,17 @@
<script src=/resources/testharness.js></script>
<script src=/resources/testharnessreport.js></script>
<script>
var t = async_test("Tests that the error callback is triggered when permission is denied");
t.step(function() {
navigator.mediaDevices.getUserMedia({video: true})
.then(t.step_func(function (stream) {
assert_unreached("The success callback should not be triggered since access is to be denied");
t.done();
}), t.step_func(function (error) {
assert_equals(error.name, "NotAllowedError", "NotAllowedError returned as expected");
assert_equals(error.constraintName, undefined, "constraintName attribute not set as expected");
t.done();
}))
});
promise_test(async () => {
try {
await navigator.mediaDevices.getUserMedia({video: true})
} catch (error) {
assert_throws_dom("NotAllowedError", () => { throw error });
assert_false('constraintName' in error,
"constraintName attribute not set as expected");
return;
};
assert_unreached("The success callback should not be triggered since access is to be denied");
}, "Tests that the error callback is triggered when permission is denied");
</script>
</body>
</html>

View file

@ -3,7 +3,7 @@
<head>
<title>getUserMedia({}) rejects with TypeError</title>
<link rel="author" title="Dominique Hazael-Massieux" href="mailto:dom@w3.org"/>
<link rel="help" href="http://dev.w3.org/2011/webrtc/editor/getusermedia.html#widl-NavigatorUserMedia-getUserMedia-void-MediaStreamConstraints-constraints-NavigatorUserMediaSuccessCallback-successCallback-NavigatorUserMediaErrorCallback-errorCallback">
<link rel="help" href="https://w3c.github.io/mediacapture-main/#dom-mediadevices-getusermedia">
</head>
<body>
<h1 class="instructions">Description</h1>
@ -14,18 +14,17 @@ options parameter raises a TypeError exception.</p>
<script src=/resources/testharness.js></script>
<script src=/resources/testharnessreport.js></script>
<script>
var t = async_test("Tests that getUserMedia is rejected with a TypeError when used with an empty options parameter");
t.step( function () {
navigator.mediaDevices.getUserMedia({})
.then(t.step_func(function () {
assert_unreached("This should never be triggered since the constraints parameter is empty");
}), t.step_func(function (error) {
assert_equals(error.name, "TypeError", "TypeError returned as expected");
assert_equals(error.constraintName, undefined, "constraintName attribute not set as expected");
t.done();
}));
t.done();
});
promise_test(async () => {
try {
await navigator.mediaDevices.getUserMedia({});
} catch (error) {
assert_throws_js(TypeError, () => { throw error });
assert_false('constraintName' in error,
"constraintName attribute not set as expected");
return;
}
assert_unreached("This should never be triggered since the constraints parameter is empty.");
}, "Tests that getUserMedia is rejected with a TypeError when used with an empty options parameter");
</script>
</body>

View file

@ -24,7 +24,6 @@ t.step(function () {
assert_equals(error.constraintName, undefined, "constraintName attribute not set as expected");
t.done();
}));
t.done();
});
</script>
</body>

View file

@ -27,7 +27,7 @@ promise_test(async t => {
try {
stopTracks(await navigator.mediaDevices.getDisplayMedia({video: true}));
} catch (err) {
assert_equals(err.name, 'InvalidStateError', err.message);
assert_throws_dom('InvalidStateError', () => { throw err });
return;
}
assert_unreached('getDisplayMedia should have failed');
@ -62,7 +62,7 @@ promise_test(async t => {
try {
stopTracks(await getDisplayMedia(constraints));
} catch (err) {
assert_equals(err.name, 'TypeError', err.message);
assert_throws_js(TypeError, () => { throw err });
return;
}
assert_unreached('getDisplayMedia should have failed');

View file

@ -120,6 +120,7 @@ def is_bad_port(port):
532, # netnews
540, # uucp
548, # afp
554, # rtsp
556, # remotefs
563, # nntp+ssl
587, # smtp (outgoing)
@ -127,6 +128,8 @@ def is_bad_port(port):
636, # ldap+ssl
993, # ldap+ssl
995, # pop3+ssl
1720, # h323hostcall
1723, # pptp
2049, # nfs
3659, # apple-sasl
4045, # lockd

View file

@ -58,6 +58,7 @@ async_test(t => {
532, // netnews
540, // uucp
548, // afp
554, // rtsp
556, // remotefs
563, // nntp+ssl
587, // smtp (outgoing)
@ -65,6 +66,8 @@ async_test(t => {
636, // ldap+ssl
993, // ldap+ssl
995, // pop3+ssl
1720, // h323hostcall
1723, // pptp
2049, // nfs
3659, // apple-sasl
4045, // lockd

View file

@ -129,6 +129,16 @@ class PointConduit extends BaseConduit {
} catch (ex) {}
}
}
this.closeCallback?.();
this.closeCallback = null;
}
/**
* Set the callback to be called when the conduit is closed.
* @param {function} callback
*/
setCloseCallback(callback) {
this.closeCallback = callback;
}
}

View file

@ -504,6 +504,9 @@ class BaseContext {
...address,
});
this.callOnClose(conduit);
conduit.setCloseCallback(() => {
this.forgetOnClose(conduit);
});
return conduit;
}

View file

@ -0,0 +1,168 @@
"use strict";
let gcExperimentAPIs = {
gcHelper: {
schema: "schema.json",
child: {
scopes: ["addon_child"],
script: "child.js",
paths: [["gcHelper"]],
},
},
};
let gcExperimentFiles = {
"schema.json": JSON.stringify([
{
namespace: "gcHelper",
functions: [
{
name: "forceGarbageCollect",
type: "function",
parameters: [],
async: true,
},
{
name: "registerWitness",
type: "function",
parameters: [
{
name: "obj",
// Expected type is "object", but using "any" here to ensure that
// the parameter is untouched (not normalized).
type: "any",
},
],
returns: { type: "number" },
},
{
name: "isGarbageCollected",
type: "function",
parameters: [
{
name: "witnessId",
description: "return value of registerWitness",
type: "number",
},
],
returns: { type: "boolean" },
},
],
},
]),
"child.js": () => {
let { setTimeout } = ChromeUtils.import("resource://gre/modules/Timer.jsm");
/* globals ExtensionAPI */
this.gcHelper = class extends ExtensionAPI {
getAPI(context) {
let witnesses = new Map();
return {
gcHelper: {
async forceGarbageCollect() {
// Logic copied from test_ext_contexts_gc.js
for (let i = 0; i < 3; ++i) {
Cu.forceShrinkingGC();
Cu.forceCC();
Cu.forceGC();
await new Promise(resolve => setTimeout(resolve, 0));
}
},
registerWitness(obj) {
let witnessId = witnesses.size;
witnesses.set(witnessId, Cu.getWeakReference(obj));
return witnessId;
},
isGarbageCollected(witnessId) {
return witnesses.get(witnessId).get() === null;
},
},
};
}
};
},
};
// Verify that the experiment is working as intended before using it in tests.
add_task(async function test_gc_experiment() {
let extension = ExtensionTestUtils.loadExtension({
isPrivileged: true,
manifest: {
experiment_apis: gcExperimentAPIs,
},
files: gcExperimentFiles,
async background() {
let obj1 = {};
let obj2 = {};
let witness1 = browser.gcHelper.registerWitness(obj1);
let witness2 = browser.gcHelper.registerWitness(obj2);
obj1 = null;
await browser.gcHelper.forceGarbageCollect();
browser.test.assertTrue(
browser.gcHelper.isGarbageCollected(witness1),
"obj1 should have been garbage-collected"
);
browser.test.assertFalse(
browser.gcHelper.isGarbageCollected(witness2),
"obj2 should not have been garbage-collected"
);
browser.test.sendMessage("done");
},
});
await extension.startup();
await extension.awaitMessage("done");
await extension.unload();
});
add_task(async function test_port_gc() {
let extension = ExtensionTestUtils.loadExtension({
isPrivileged: true,
manifest: {
experiment_apis: gcExperimentAPIs,
},
files: gcExperimentFiles,
async background() {
let witnessPortSender;
let witnessPortReceiver;
browser.runtime.onConnect.addListener(port => {
browser.test.assertEq("daName", port.name, "expected port");
witnessPortReceiver = browser.gcHelper.registerWitness(port);
port.disconnect();
});
// runtime.connect() only triggers onConnect for different contexts,
// so create a frame to have a different context.
// A blank frame in a moz-extension:-document will have access to the
// extension APIs.
let frameWindow = await new Promise(resolve => {
let f = document.createElement("iframe");
f.onload = () => resolve(f.contentWindow);
document.body.append(f);
});
await new Promise(resolve => {
let port = frameWindow.browser.runtime.connect({ name: "daName" });
witnessPortSender = browser.gcHelper.registerWitness(port);
port.onDisconnect.addListener(() => resolve());
});
await browser.gcHelper.forceGarbageCollect();
browser.test.assertTrue(
browser.gcHelper.isGarbageCollected(witnessPortSender),
"runtime.connect() port should have been garbage-collected"
);
browser.test.assertTrue(
browser.gcHelper.isGarbageCollected(witnessPortReceiver),
"runtime.onConnect port should have been garbage-collected"
);
browser.test.sendMessage("done");
},
});
await extension.startup();
await extension.awaitMessage("done");
await extension.unload();
});

View file

@ -138,6 +138,7 @@ skip-if = ccov && os == 'linux' # bug 1607581
[test_ext_runtime_messaging_self.js]
[test_ext_runtime_onInstalled_and_onStartup.js]
[test_ext_runtime_ports.js]
[test_ext_runtime_ports_gc.js]
[test_ext_runtime_sendMessage.js]
[test_ext_runtime_sendMessage_errors.js]
[test_ext_runtime_sendMessage_multiple.js]

View file

@ -1086,7 +1086,6 @@ with modules["STORAGE"]:
with modules["DOM_FILE"]:
errors["NS_ERROR_DOM_FILE_NOT_FOUND_ERR"] = FAILURE(0)
errors["NS_ERROR_DOM_FILE_NOT_READABLE_ERR"] = FAILURE(1)
errors["NS_ERROR_DOM_FILE_ABORT_ERR"] = FAILURE(2)
# =======================================================================