From b7eb95f7b4b832a57a370e42687416a8d0836b39 Mon Sep 17 00:00:00 2001 From: Jon Coppeard Date: Thu, 24 Nov 2022 15:47:55 +0000 Subject: [PATCH 01/37] Bug 1802172 - Pack the CyclicModuleFields structure r=arai This reduces the size from 184 to 160 bytes on 64 bit. and from 128 to 104 bytes on 32 bit builds. Differential Revision: https://phabricator.services.mozilla.com/D162869 --- js/src/builtin/ModuleObject.cpp | 83 ++++++++++++++++++++++++--------- js/src/builtin/ModuleObject.h | 28 +++++------ 2 files changed, 72 insertions(+), 39 deletions(-) diff --git a/js/src/builtin/ModuleObject.cpp b/js/src/builtin/ModuleObject.cpp index db4d58cc7641..fa3aeed12819 100644 --- a/js/src/builtin/ModuleObject.cpp +++ b/js/src/builtin/ModuleObject.cpp @@ -669,7 +669,18 @@ void ModuleNamespaceObject::ProxyHandler::finalize(JS::GCContext* gcx, // https://tc39.es/ecma262/#sec-cyclic-module-records class js::CyclicModuleFields { public: + CyclicModuleFields(); + ModuleStatus status = ModuleStatus::Unlinked; + + // Flag bits. Some of these determine whether other fields are present, + // for example hasDfsIndex and dfsIndex. 
+ bool hasDfsIndex : 1; + bool hasDfsAncestorIndex : 1; + bool hasTopLevelAwait : 1; + bool isAsyncEvaluating : 1; + bool hasPendingAsyncDependencies : 1; + HeapPtr evaluationError; HeapPtr metaObject; HeapPtr scriptSourceObject; @@ -680,18 +691,24 @@ class js::CyclicModuleFields { HeapPtr starExportEntries; IndirectBindingMap importBindings; UniquePtr functionDeclarations; - Maybe dfsIndex; - Maybe dfsAncestorIndex; - bool hasTopLevelAwait = false; - Maybe asyncEvaluatingPostOrder; HeapPtr topLevelCapability; HeapPtr asyncParentModules; - Maybe pendingAsyncDependencies; HeapPtr cycleRoot; + uint32_t dfsIndex = 0; + uint32_t dfsAncestorIndex = 0; + uint32_t asyncEvaluatingPostOrder = 0; + uint32_t pendingAsyncDependencies = 0; void trace(JSTracer* trc); }; +CyclicModuleFields::CyclicModuleFields() + : hasDfsIndex(false), + hasDfsAncestorIndex(false), + hasTopLevelAwait(false), + isAsyncEvaluating(false), + hasPendingAsyncDependencies(false) {} + void CyclicModuleFields::trace(JSTracer* trc) { TraceEdge(trc, &evaluationError, "CyclicModuleFields::evaluationError"); TraceNullableEdge(trc, &metaObject, "CyclicModuleFields::metaObject"); @@ -706,12 +723,12 @@ void CyclicModuleFields::trace(JSTracer* trc) { "CyclicModuleFields::indirectExportEntries"); TraceNullableEdge(trc, &starExportEntries, "CyclicModuleFields::starExportEntries"); + importBindings.trace(trc); TraceNullableEdge(trc, &topLevelCapability, "CyclicModuleFields::topLevelCapability"); TraceNullableEdge(trc, &asyncParentModules, "CyclicModuleFields::asyncParentModules"); TraceNullableEdge(trc, &cycleRoot, "CyclicModuleFields::cycleRoot"); - importBindings.trace(trc); } /////////////////////////////////////////////////////////////////////////// @@ -851,7 +868,7 @@ void ModuleObject::initAsyncSlots(JSContext* cx, bool hasTopLevelAwait, static uint32_t NextPostOrder(JSRuntime* rt) { uint32_t ordinal = rt->moduleAsyncEvaluatingPostOrder; - MOZ_ASSERT(ordinal != ASYNC_EVALUATING_POST_ORDER_TRUE); + 
MOZ_ASSERT(ordinal != ASYNC_EVALUATING_POST_ORDER_CLEARED); MOZ_ASSERT(ordinal < MAX_UINT32); rt->moduleAsyncEvaluatingPostOrder++; return ordinal; @@ -870,8 +887,10 @@ static void MaybeResetPostOrderCounter(JSRuntime* rt, } void ModuleObject::setAsyncEvaluating() { - cyclicModuleFields()->asyncEvaluatingPostOrder = - Some(NextPostOrder(runtimeFromMainThread())); + CyclicModuleFields* fields = cyclicModuleFields(); + MOZ_ASSERT(!fields->isAsyncEvaluating); + fields->isAsyncEvaluating = true; + fields->asyncEvaluatingPostOrder = NextPostOrder(runtimeFromMainThread()); } void ModuleObject::initScriptSlots(HandleScript script) { @@ -1017,21 +1036,26 @@ bool ModuleObject::hasTopLevelAwait() const { } bool ModuleObject::isAsyncEvaluating() const { - return cyclicModuleFields()->asyncEvaluatingPostOrder.isSome(); + return cyclicModuleFields()->isAsyncEvaluating; } Maybe ModuleObject::maybeDfsIndex() const { - return cyclicModuleFields()->dfsIndex; + const CyclicModuleFields* fields = cyclicModuleFields(); + return fields->hasDfsIndex ? Some(fields->dfsIndex) : Nothing(); } uint32_t ModuleObject::dfsIndex() const { return maybeDfsIndex().value(); } void ModuleObject::setDfsIndex(uint32_t index) { - cyclicModuleFields()->dfsIndex = Some(index); + CyclicModuleFields* fields = cyclicModuleFields(); + fields->dfsIndex = index; + fields->hasDfsIndex = true; } Maybe ModuleObject::maybeDfsAncestorIndex() const { - return cyclicModuleFields()->dfsAncestorIndex; + const CyclicModuleFields* fields = cyclicModuleFields(); + return fields->hasDfsAncestorIndex ? 
Some(fields->dfsAncestorIndex) + : Nothing(); } uint32_t ModuleObject::dfsAncestorIndex() const { @@ -1039,12 +1063,17 @@ uint32_t ModuleObject::dfsAncestorIndex() const { } void ModuleObject::setDfsAncestorIndex(uint32_t index) { - cyclicModuleFields()->dfsAncestorIndex = Some(index); + CyclicModuleFields* fields = cyclicModuleFields(); + fields->dfsAncestorIndex = index; + fields->hasDfsAncestorIndex = true; } void ModuleObject::clearDfsIndexes() { - cyclicModuleFields()->dfsIndex = Nothing(); - cyclicModuleFields()->dfsAncestorIndex = Nothing(); + CyclicModuleFields* fields = cyclicModuleFields(); + fields->dfsIndex = 0; + fields->hasDfsIndex = false; + fields->dfsAncestorIndex = 0; + fields->hasDfsAncestorIndex = false; } PromiseObject* ModuleObject::maybeTopLevelCapability() const { @@ -1088,7 +1117,10 @@ bool ModuleObject::appendAsyncParentModule(JSContext* cx, } Maybe ModuleObject::maybePendingAsyncDependencies() const { - return cyclicModuleFields()->pendingAsyncDependencies; + const CyclicModuleFields* fields = cyclicModuleFields(); + return fields->hasPendingAsyncDependencies + ? Some(fields->pendingAsyncDependencies) + : Nothing(); } uint32_t ModuleObject::pendingAsyncDependencies() const { @@ -1096,17 +1128,20 @@ uint32_t ModuleObject::pendingAsyncDependencies() const { } bool ModuleObject::hasAsyncEvaluatingPostOrder() const { - Maybe value = cyclicModuleFields()->asyncEvaluatingPostOrder; - return value.isSome() && *value != ASYNC_EVALUATING_POST_ORDER_TRUE; + const CyclicModuleFields* fields = cyclicModuleFields(); + return fields->isAsyncEvaluating && fields->asyncEvaluatingPostOrder != + ASYNC_EVALUATING_POST_ORDER_CLEARED; } Maybe ModuleObject::maybeAsyncEvaluatingPostOrder() const { - return cyclicModuleFields()->asyncEvaluatingPostOrder; + const CyclicModuleFields* fields = cyclicModuleFields(); + return fields->isAsyncEvaluating ? 
Some(fields->asyncEvaluatingPostOrder) + : Nothing(); } uint32_t ModuleObject::getAsyncEvaluatingPostOrder() const { MOZ_ASSERT(hasAsyncEvaluatingPostOrder()); - return maybeAsyncEvaluatingPostOrder().value(); + return cyclicModuleFields()->asyncEvaluatingPostOrder; } void ModuleObject::clearAsyncEvaluatingPostOrder() { @@ -1116,11 +1151,13 @@ void ModuleObject::clearAsyncEvaluatingPostOrder() { MaybeResetPostOrderCounter(rt, getAsyncEvaluatingPostOrder()); cyclicModuleFields()->asyncEvaluatingPostOrder = - Some(ASYNC_EVALUATING_POST_ORDER_TRUE); + ASYNC_EVALUATING_POST_ORDER_CLEARED; } void ModuleObject::setPendingAsyncDependencies(uint32_t newValue) { - cyclicModuleFields()->pendingAsyncDependencies = Some(newValue); + CyclicModuleFields* fields = cyclicModuleFields(); + fields->pendingAsyncDependencies = newValue; + fields->hasPendingAsyncDependencies = true; } void ModuleObject::setCycleRoot(ModuleObject* cycleRoot) { diff --git a/js/src/builtin/ModuleObject.h b/js/src/builtin/ModuleObject.h index 905c21e4a5c9..14e4d6835d89 100644 --- a/js/src/builtin/ModuleObject.h +++ b/js/src/builtin/ModuleObject.h @@ -250,7 +250,7 @@ class ModuleNamespaceObject : public ProxyObject { // Value types of [[Status]] in a Cyclic Module Record // https://tc39.es/ecma262/#table-cyclic-module-fields -enum class ModuleStatus : int32_t { +enum class ModuleStatus : int8_t { Unlinked, Linking, Linked, @@ -265,30 +265,26 @@ enum class ModuleStatus : int32_t { Evaluated_Error }; -// Special values for ModuleObject's AsyncEvaluatingPostOrderSlot slot, which is -// used to implement the AsyncEvaluation field of cyclic module records. +// Special values for CyclicModuleFields' asyncEvaluatingPostOrderSlot field, +// which is used as part of the implementation of the AsyncEvaluation field of +// cyclic module records. 
// -// The spec requires us to distinguish true, false, and 'never previously set to -// true', as well as the order in which the field was set to true for async -// evaluating modules. +// The spec requires us to be able to tell the order in which the field was set +// to true for async evaluating modules. // -// This is arranged by using an integer to record the order. Undefined is used -// to mean false and any integer value true. While a module is async evaluating -// the integer value gives the order that the field was set to true. After -// evaluation is complete the value is set to ASYNC_EVALUATING_POST_ORDER_TRUE, -// which still signifies true but loses the order information. +// This is arranged by using an integer to record the order. After evaluation is +// complete the value is set to ASYNC_EVALUATING_POST_ORDER_CLEARED. // // See https://tc39.es/ecma262/#sec-cyclic-module-records for field defintion. // See https://tc39.es/ecma262/#sec-async-module-execution-fulfilled for sort // requirement. -// True value that also indicates that the field was previously true. -constexpr uint32_t ASYNC_EVALUATING_POST_ORDER_TRUE = 0; - -// Initial value for the runtime's counter used to generate these values; the -// first non-false value. +// Initial value for the runtime's counter used to generate these values. constexpr uint32_t ASYNC_EVALUATING_POST_ORDER_INIT = 1; +// Value that the field is set to after being cleared. 
+constexpr uint32_t ASYNC_EVALUATING_POST_ORDER_CLEARED = 0; + class ModuleObject : public NativeObject { public: // Module fields including those for AbstractModuleRecords described by: From a9aceb087b11fb737b1016c0369e68d1f5db6010 Mon Sep 17 00:00:00 2001 From: Jon Coppeard Date: Thu, 24 Nov 2022 15:47:55 +0000 Subject: [PATCH 02/37] Bug 1802172 - Refactor CyclicModuleFields to provide getters and setters for some fields r=arai As suggsted in code review for the previous patch, it's clearer and less error-prone to provide getters and setters for module fields which are represented using more than one field in the structure. This patch changes the operation of ModuleObject::maybeAsyncEvaluatingPostOrder to return Nothing() after the post order is cleared, rather than the magic value ASYNC_EVALUATING_POST_ORDER_CLEARED. This doesn't make any difference to the modules system itself but required some changes to test code expectations. Depends on D162869 Differential Revision: https://phabricator.services.mozilla.com/D162985 --- js/src/builtin/ModuleObject.cpp | 153 ++++++++++++------ js/src/builtin/ModuleObject.h | 1 - .../tests/modules/async-eval-state.js | 26 +-- 3 files changed, 114 insertions(+), 66 deletions(-) diff --git a/js/src/builtin/ModuleObject.cpp b/js/src/builtin/ModuleObject.cpp index fa3aeed12819..b3eaac7bb6d6 100644 --- a/js/src/builtin/ModuleObject.cpp +++ b/js/src/builtin/ModuleObject.cpp @@ -669,18 +669,24 @@ void ModuleNamespaceObject::ProxyHandler::finalize(JS::GCContext* gcx, // https://tc39.es/ecma262/#sec-cyclic-module-records class js::CyclicModuleFields { public: - CyclicModuleFields(); - ModuleStatus status = ModuleStatus::Unlinked; - // Flag bits. Some of these determine whether other fields are present, - // for example hasDfsIndex and dfsIndex. + bool hasTopLevelAwait : 1; + + private: + // Flag bits that determine whether other fields are present. 
bool hasDfsIndex : 1; bool hasDfsAncestorIndex : 1; - bool hasTopLevelAwait : 1; bool isAsyncEvaluating : 1; bool hasPendingAsyncDependencies : 1; + // Fields whose presence is conditional on the flag bits above. + uint32_t dfsIndex = 0; + uint32_t dfsAncestorIndex = 0; + uint32_t asyncEvaluatingPostOrder = 0; + uint32_t pendingAsyncDependencies = 0; + + public: HeapPtr evaluationError; HeapPtr metaObject; HeapPtr scriptSourceObject; @@ -694,18 +700,31 @@ class js::CyclicModuleFields { HeapPtr topLevelCapability; HeapPtr asyncParentModules; HeapPtr cycleRoot; - uint32_t dfsIndex = 0; - uint32_t dfsAncestorIndex = 0; - uint32_t asyncEvaluatingPostOrder = 0; - uint32_t pendingAsyncDependencies = 0; + + public: + CyclicModuleFields(); void trace(JSTracer* trc); + + void setDfsIndex(uint32_t index); + Maybe maybeDfsIndex() const; + void setDfsAncestorIndex(uint32_t index); + Maybe maybeDfsAncestorIndex() const; + void clearDfsIndexes(); + + void setAsyncEvaluating(uint32_t postOrder); + bool getIsAsyncEvaluating() const; + Maybe maybeAsyncEvaluatingPostOrder() const; + void clearAsyncEvaluatingPostOrder(); + + void setPendingAsyncDependencies(uint32_t newValue); + Maybe maybePendingAsyncDependencies() const; }; CyclicModuleFields::CyclicModuleFields() - : hasDfsIndex(false), + : hasTopLevelAwait(false), + hasDfsIndex(false), hasDfsAncestorIndex(false), - hasTopLevelAwait(false), isAsyncEvaluating(false), hasPendingAsyncDependencies(false) {} @@ -731,6 +750,63 @@ void CyclicModuleFields::trace(JSTracer* trc) { TraceNullableEdge(trc, &cycleRoot, "CyclicModuleFields::cycleRoot"); } +void CyclicModuleFields::setDfsIndex(uint32_t index) { + dfsIndex = index; + hasDfsIndex = true; +} + +Maybe CyclicModuleFields::maybeDfsIndex() const { + return hasDfsIndex ? 
Some(dfsIndex) : Nothing(); +} + +void CyclicModuleFields::setDfsAncestorIndex(uint32_t index) { + dfsAncestorIndex = index; + hasDfsAncestorIndex = true; +} + +Maybe CyclicModuleFields::maybeDfsAncestorIndex() const { + return hasDfsAncestorIndex ? Some(dfsAncestorIndex) : Nothing(); +} + +void CyclicModuleFields::clearDfsIndexes() { + dfsIndex = 0; + hasDfsIndex = false; + dfsAncestorIndex = 0; + hasDfsAncestorIndex = false; +} + +void CyclicModuleFields::setAsyncEvaluating(uint32_t postOrder) { + isAsyncEvaluating = true; + asyncEvaluatingPostOrder = postOrder; +} + +bool CyclicModuleFields::getIsAsyncEvaluating() const { + return isAsyncEvaluating; +} + +Maybe CyclicModuleFields::maybeAsyncEvaluatingPostOrder() const { + if (!isAsyncEvaluating || + asyncEvaluatingPostOrder == ASYNC_EVALUATING_POST_ORDER_CLEARED) { + return Nothing(); + } + + return Some(asyncEvaluatingPostOrder); +} + +void CyclicModuleFields::clearAsyncEvaluatingPostOrder() { + asyncEvaluatingPostOrder = ASYNC_EVALUATING_POST_ORDER_CLEARED; +} + +void CyclicModuleFields::setPendingAsyncDependencies(uint32_t newValue) { + pendingAsyncDependencies = newValue; + hasPendingAsyncDependencies = true; +} + +Maybe CyclicModuleFields::maybePendingAsyncDependencies() const { + return hasPendingAsyncDependencies ? 
Some(pendingAsyncDependencies) + : Nothing(); +} + /////////////////////////////////////////////////////////////////////////// // ModuleObject @@ -887,10 +963,9 @@ static void MaybeResetPostOrderCounter(JSRuntime* rt, } void ModuleObject::setAsyncEvaluating() { - CyclicModuleFields* fields = cyclicModuleFields(); - MOZ_ASSERT(!fields->isAsyncEvaluating); - fields->isAsyncEvaluating = true; - fields->asyncEvaluatingPostOrder = NextPostOrder(runtimeFromMainThread()); + MOZ_ASSERT(!isAsyncEvaluating()); + uint32_t postOrder = NextPostOrder(runtimeFromMainThread()); + cyclicModuleFields()->setAsyncEvaluating(postOrder); } void ModuleObject::initScriptSlots(HandleScript script) { @@ -1036,26 +1111,21 @@ bool ModuleObject::hasTopLevelAwait() const { } bool ModuleObject::isAsyncEvaluating() const { - return cyclicModuleFields()->isAsyncEvaluating; + return cyclicModuleFields()->getIsAsyncEvaluating(); } Maybe ModuleObject::maybeDfsIndex() const { - const CyclicModuleFields* fields = cyclicModuleFields(); - return fields->hasDfsIndex ? Some(fields->dfsIndex) : Nothing(); + return cyclicModuleFields()->maybeDfsIndex(); } uint32_t ModuleObject::dfsIndex() const { return maybeDfsIndex().value(); } void ModuleObject::setDfsIndex(uint32_t index) { - CyclicModuleFields* fields = cyclicModuleFields(); - fields->dfsIndex = index; - fields->hasDfsIndex = true; + cyclicModuleFields()->setDfsIndex(index); } Maybe ModuleObject::maybeDfsAncestorIndex() const { - const CyclicModuleFields* fields = cyclicModuleFields(); - return fields->hasDfsAncestorIndex ? 
Some(fields->dfsAncestorIndex) - : Nothing(); + return cyclicModuleFields()->maybeDfsAncestorIndex(); } uint32_t ModuleObject::dfsAncestorIndex() const { @@ -1063,17 +1133,11 @@ uint32_t ModuleObject::dfsAncestorIndex() const { } void ModuleObject::setDfsAncestorIndex(uint32_t index) { - CyclicModuleFields* fields = cyclicModuleFields(); - fields->dfsAncestorIndex = index; - fields->hasDfsAncestorIndex = true; + cyclicModuleFields()->setDfsAncestorIndex(index); } void ModuleObject::clearDfsIndexes() { - CyclicModuleFields* fields = cyclicModuleFields(); - fields->dfsIndex = 0; - fields->hasDfsIndex = false; - fields->dfsAncestorIndex = 0; - fields->hasDfsAncestorIndex = false; + cyclicModuleFields()->clearDfsIndexes(); } PromiseObject* ModuleObject::maybeTopLevelCapability() const { @@ -1117,31 +1181,19 @@ bool ModuleObject::appendAsyncParentModule(JSContext* cx, } Maybe ModuleObject::maybePendingAsyncDependencies() const { - const CyclicModuleFields* fields = cyclicModuleFields(); - return fields->hasPendingAsyncDependencies - ? Some(fields->pendingAsyncDependencies) - : Nothing(); + return cyclicModuleFields()->maybePendingAsyncDependencies(); } uint32_t ModuleObject::pendingAsyncDependencies() const { return maybePendingAsyncDependencies().value(); } -bool ModuleObject::hasAsyncEvaluatingPostOrder() const { - const CyclicModuleFields* fields = cyclicModuleFields(); - return fields->isAsyncEvaluating && fields->asyncEvaluatingPostOrder != - ASYNC_EVALUATING_POST_ORDER_CLEARED; -} - Maybe ModuleObject::maybeAsyncEvaluatingPostOrder() const { - const CyclicModuleFields* fields = cyclicModuleFields(); - return fields->isAsyncEvaluating ? 
Some(fields->asyncEvaluatingPostOrder) - : Nothing(); + return cyclicModuleFields()->maybeAsyncEvaluatingPostOrder(); } uint32_t ModuleObject::getAsyncEvaluatingPostOrder() const { - MOZ_ASSERT(hasAsyncEvaluatingPostOrder()); - return cyclicModuleFields()->asyncEvaluatingPostOrder; + return cyclicModuleFields()->maybeAsyncEvaluatingPostOrder().value(); } void ModuleObject::clearAsyncEvaluatingPostOrder() { @@ -1150,14 +1202,11 @@ void ModuleObject::clearAsyncEvaluatingPostOrder() { JSRuntime* rt = runtimeFromMainThread(); MaybeResetPostOrderCounter(rt, getAsyncEvaluatingPostOrder()); - cyclicModuleFields()->asyncEvaluatingPostOrder = - ASYNC_EVALUATING_POST_ORDER_CLEARED; + cyclicModuleFields()->clearAsyncEvaluatingPostOrder(); } void ModuleObject::setPendingAsyncDependencies(uint32_t newValue) { - CyclicModuleFields* fields = cyclicModuleFields(); - fields->pendingAsyncDependencies = newValue; - fields->hasPendingAsyncDependencies = true; + cyclicModuleFields()->setPendingAsyncDependencies(newValue); } void ModuleObject::setCycleRoot(ModuleObject* cycleRoot) { diff --git a/js/src/builtin/ModuleObject.h b/js/src/builtin/ModuleObject.h index 14e4d6835d89..268242d7ae5f 100644 --- a/js/src/builtin/ModuleObject.h +++ b/js/src/builtin/ModuleObject.h @@ -361,7 +361,6 @@ class ModuleObject : public NativeObject { ListObject* asyncParentModules() const; mozilla::Maybe maybePendingAsyncDependencies() const; uint32_t pendingAsyncDependencies() const; - bool hasAsyncEvaluatingPostOrder() const; mozilla::Maybe maybeAsyncEvaluatingPostOrder() const; uint32_t getAsyncEvaluatingPostOrder() const; void clearAsyncEvaluatingPostOrder(); diff --git a/js/src/jit-test/tests/modules/async-eval-state.js b/js/src/jit-test/tests/modules/async-eval-state.js index e7edbf13a8e5..b3c524d961b2 100644 --- a/js/src/jit-test/tests/modules/async-eval-state.js +++ b/js/src/jit-test/tests/modules/async-eval-state.js @@ -34,7 +34,7 @@ const StatusEvaluated = 5; drainJobQueue(); 
assertEq(m.isAsyncEvaluating, true); assertEq(m.status, StatusEvaluated); - assertEq(m.asyncEvaluatingPostOrder, 0); + assertEq(m.asyncEvaluatingPostOrder, undefined); } { @@ -50,7 +50,7 @@ const StatusEvaluated = 5; assertEq(m.isAsyncEvaluating, true); assertEq(m.status, StatusEvaluated); assertEq(m.evaluationError, 2); - assertEq(m.asyncEvaluatingPostOrder, 0); + assertEq(m.asyncEvaluatingPostOrder, undefined); } { @@ -65,7 +65,7 @@ const StatusEvaluated = 5; assertEq(m.isAsyncEvaluating, true); assertEq(m.status, StatusEvaluated); assertEq(m.evaluationError, 1); - assertEq(m.asyncEvaluatingPostOrder, 0); + assertEq(m.asyncEvaluatingPostOrder, undefined); } { @@ -86,7 +86,7 @@ const StatusEvaluated = 5; assertEq(a.status, StatusEvaluated); assertEq(b.isAsyncEvaluating, true); assertEq(b.status, StatusEvaluated); - assertEq(b.asyncEvaluatingPostOrder, 0); + assertEq(b.asyncEvaluatingPostOrder, undefined); } { @@ -106,10 +106,10 @@ const StatusEvaluated = 5; drainJobQueue(); assertEq(a.isAsyncEvaluating, true); assertEq(a.status, StatusEvaluated); - assertEq(a.asyncEvaluatingPostOrder, 0); + assertEq(a.asyncEvaluatingPostOrder, undefined); assertEq(b.isAsyncEvaluating, true); assertEq(b.status, StatusEvaluated); - assertEq(b.asyncEvaluatingPostOrder, 0); + assertEq(b.asyncEvaluatingPostOrder, undefined); } { @@ -136,13 +136,13 @@ const StatusEvaluated = 5; drainJobQueue(); assertEq(a.isAsyncEvaluating, true); assertEq(a.status, StatusEvaluated); - assertEq(a.asyncEvaluatingPostOrder, 0); + assertEq(a.asyncEvaluatingPostOrder, undefined); assertEq(b.isAsyncEvaluating, true); assertEq(b.status, StatusEvaluated); - assertEq(b.asyncEvaluatingPostOrder, 0); + assertEq(b.asyncEvaluatingPostOrder, undefined); assertEq(c.isAsyncEvaluating, true); assertEq(c.status, StatusEvaluated); - assertEq(c.asyncEvaluatingPostOrder, 0); + assertEq(c.asyncEvaluatingPostOrder, undefined); } { @@ -176,11 +176,11 @@ const StatusEvaluated = 5; assertEq(a.isAsyncEvaluating, true); 
assertEq(a.status, StatusEvaluated); assertEq(a.evaluationError, 1); - assertEq(a.asyncEvaluatingPostOrder, 0); + assertEq(a.asyncEvaluatingPostOrder, undefined); assertEq(b.isAsyncEvaluating, true); assertEq(b.status, StatusEvaluated); assertEq(b.evaluationError, 1); - assertEq(b.asyncEvaluatingPostOrder, 0); + assertEq(b.asyncEvaluatingPostOrder, undefined); } { @@ -199,9 +199,9 @@ const StatusEvaluated = 5; assertEq(a.isAsyncEvaluating, true); assertEq(a.status, StatusEvaluated); assertEq(a.evaluationError, 2); - assertEq(a.asyncEvaluatingPostOrder, 0); + assertEq(a.asyncEvaluatingPostOrder, undefined); assertEq(b.isAsyncEvaluating, true); assertEq(b.status, StatusEvaluated); assertEq(b.evaluationError, 2); - assertEq(b.asyncEvaluatingPostOrder, 0); + assertEq(b.asyncEvaluatingPostOrder, undefined); } From 4cf7602483c3b72088549ba4af4e115295fccae2 Mon Sep 17 00:00:00 2001 From: Andi-Bogdan Postelnicu Date: Thu, 24 Nov 2022 16:15:51 +0000 Subject: [PATCH 03/37] Bug 1799640 - `unused include` files checker in clang-tidy. r=glandium,sergesanspaille Ported from clangd, this still can be improved over time, but it can be landed. This was based on the work from https://bit.ly/3TkV2N1 * The utility makes the assumption that all header are self contained! * It only checkes `Decls` from the main translation file, where SourceLocarion is the passed `cpp` file. * It builds a list with all of the includes from the translation unit. * It matches all of the `Decls` from the main translation units with definitions from the included header files and builds a list with used header files. * All of the includes that are not part of the matched used header files are considered to be unused. Of course this is correct if the first assumption if followed by the coding guide, where all of the header are self contained. 
Since the mozilla code base doesn't follow this approach false positives might appear where the is the following situation: FOO.cpp #include #Include If header `A` defines a symbol that is used by header `B` and `B` doesn't include `A` nor it has symbols defined that are used by `FOO.cpp` then `B` it will be marked as potentially to be removed by the tool. This is the limitation determined by header that are not self contained. The limitation presented above can be fixed in the future with extra work, but it's very time expensive during the runtime of the checker. Differential Revision: https://phabricator.services.mozilla.com/D161583 --- build/build-clang/clang-tidy-linux64.json | 1 + build/build-clang/clang-tidy-macosx64.json | 1 + build/build-clang/clang-tidy-win64.json | 1 + build/build-clang/clang_include_cleaner.patch | 2235 +++++++++++++++++ 4 files changed, 2238 insertions(+) create mode 100644 build/build-clang/clang_include_cleaner.patch diff --git a/build/build-clang/clang-tidy-linux64.json b/build/build-clang/clang-tidy-linux64.json index e654aeef92e2..53dee2120d02 100644 --- a/build/build-clang/clang-tidy-linux64.json +++ b/build/build-clang/clang-tidy-linux64.json @@ -5,6 +5,7 @@ "cxx": "{MOZ_FETCHES_DIR}/clang/bin/clang++", "as": "{MOZ_FETCHES_DIR}/clang/bin/clang", "patches": [ + "clang_include_cleaner.patch", "clang-tidy-ci.patch" ] } diff --git a/build/build-clang/clang-tidy-macosx64.json b/build/build-clang/clang-tidy-macosx64.json index a0d5bfa9b537..ac21a121fc43 100644 --- a/build/build-clang/clang-tidy-macosx64.json +++ b/build/build-clang/clang-tidy-macosx64.json @@ -10,6 +10,7 @@ "libtool": "{MOZ_FETCHES_DIR}/cctools/bin/x86_64-apple-darwin-libtool", "ld": "{MOZ_FETCHES_DIR}/clang/bin/clang", "patches": [ + "clang_include_cleaner.patch", "clang-tidy-ci.patch" ] } diff --git a/build/build-clang/clang-tidy-win64.json b/build/build-clang/clang-tidy-win64.json index 9951d45fc433..38a017bc24c2 100644 --- a/build/build-clang/clang-tidy-win64.json 
+++ b/build/build-clang/clang-tidy-win64.json @@ -5,6 +5,7 @@ "cxx": "cl.exe", "ml": "ml64.exe", "patches": [ + "clang_include_cleaner.patch", "clang-tidy-ci.patch" ] } diff --git a/build/build-clang/clang_include_cleaner.patch b/build/build-clang/clang_include_cleaner.patch new file mode 100644 index 000000000000..0bc3b5b019f8 --- /dev/null +++ b/build/build-clang/clang_include_cleaner.patch @@ -0,0 +1,2235 @@ +Ported from clangd, this still can be improved over time, but it can be landed. +This was based on the work from https://bit.ly/3TkV2N1 + + The utility makes the assumption that all header are self contained! + It only checkes Decls from the main translation file, where SourceLocarion is the passed cpp file. + It builds a list with all of the includes from the translation unit. + It matches all of the Decls from the main translation units with definitions from the included header files and builds a list with used header files. + All of the includes that are not part of the matched used header files are considered to be unused. Of course this is correct if the first assumption if followed by the coding guide, where all of the header are self contained. Since the mozilla code base doesn't follow this approach false positives might appear where the is the following situation: + +FOO.cpp + +#include +#Include + +If header A defines a symbol that is used by header B and B doesn't include A nor +it has symbols defined that are used by FOO.cpp then B it will be marked as potentially to be removed +by the tool. +This is the limitation determined by header that are not self contained. + +The limitation presented above can be fixed in the future with extra work, but it's very time expensive +during the runtime of the checker. 
+ +diff --git a/clang-tools-extra/CMakeLists.txt b/clang-tools-extra/CMakeLists.txt +index 6a3f741721ee..ff17c8e8472a 100644 +--- a/clang-tools-extra/CMakeLists.txt ++++ b/clang-tools-extra/CMakeLists.txt +@@ -16,6 +16,7 @@ endif() + add_subdirectory(clang-apply-replacements) + add_subdirectory(clang-reorder-fields) + add_subdirectory(modularize) ++add_subdirectory(include-cleaner) + add_subdirectory(clang-tidy) + + add_subdirectory(clang-change-namespace) +@@ -23,7 +24,6 @@ add_subdirectory(clang-doc) + add_subdirectory(clang-include-fixer) + add_subdirectory(clang-move) + add_subdirectory(clang-query) +-add_subdirectory(include-cleaner) + add_subdirectory(pp-trace) + add_subdirectory(pseudo) + add_subdirectory(tool-template) +diff --git a/clang-tools-extra/clang-tidy/CMakeLists.txt b/clang-tools-extra/clang-tidy/CMakeLists.txt +index 8a953eeea275..f2edc509acaf 100644 +--- a/clang-tools-extra/clang-tidy/CMakeLists.txt ++++ b/clang-tools-extra/clang-tidy/CMakeLists.txt +@@ -50,6 +50,7 @@ endif() + + # Checks. + # If you add a check, also add it to ClangTidyForceLinker.h in this directory. ++add_subdirectory(alpha) + add_subdirectory(android) + add_subdirectory(abseil) + add_subdirectory(altera) +@@ -77,6 +78,7 @@ add_subdirectory(portability) + add_subdirectory(readability) + add_subdirectory(zircon) + set(ALL_CLANG_TIDY_CHECKS ++ clangTidyAlphaModule + clangTidyAndroidModule + clangTidyAbseilModule + clangTidyAlteraModule +diff --git a/clang-tools-extra/clang-tidy/ClangTidyForceLinker.h b/clang-tools-extra/clang-tidy/ClangTidyForceLinker.h +index 2691d90fa521..2fa064cff22a 100644 +--- a/clang-tools-extra/clang-tidy/ClangTidyForceLinker.h ++++ b/clang-tools-extra/clang-tidy/ClangTidyForceLinker.h +@@ -20,6 +20,11 @@ extern volatile int AbseilModuleAnchorSource; + static int LLVM_ATTRIBUTE_UNUSED AbseilModuleAnchorDestination = + AbseilModuleAnchorSource; + ++// This anchor is used to force the linker to link the AlphaModule. 
++extern volatile int AlphaModuleAnchorSource; ++static int LLVM_ATTRIBUTE_UNUSED AlphaModuleAnchorDestination = ++ AlphaModuleAnchorSource; ++ + // This anchor is used to force the linker to link the AlteraModule. + extern volatile int AlteraModuleAnchorSource; + static int LLVM_ATTRIBUTE_UNUSED AlteraModuleAnchorDestination = +diff --git a/clang-tools-extra/clang-tidy/alpha/AlphaTidyModule.cpp b/clang-tools-extra/clang-tidy/alpha/AlphaTidyModule.cpp +new file mode 100644 +index 000000000000..b598a36cebf7 +--- /dev/null ++++ b/clang-tools-extra/clang-tidy/alpha/AlphaTidyModule.cpp +@@ -0,0 +1,38 @@ ++//===--- AlphaTidyModule.cpp - clang-tidy ----------------------------------===// ++// ++// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. ++// See https://llvm.org/LICENSE.txt for license information. ++// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception ++// ++//===----------------------------------------------------------------------===// ++ ++#include "../ClangTidy.h" ++#include "../ClangTidyModule.h" ++#include "../ClangTidyModuleRegistry.h" ++#include "UnusedIncludesCheck.h" ++ ++ ++namespace clang { ++namespace tidy { ++namespace alpha { ++ ++class AlphaModule : public ClangTidyModule { ++public: ++ void addCheckFactories(ClangTidyCheckFactories &CheckFactories) override { ++ ++ CheckFactories.registerCheck("alpha-unused-includes"); ++ } ++}; ++ ++} // namespace alpha ++ ++// Register the AlphaTidyModule using this statically initialized variable. ++static ClangTidyModuleRegistry::Add ++ X("alpha-module", "Adds alpha lint checks."); ++ ++// This anchor is used to force the linker to link in the generated object file ++// and thus register the AlphaModule. 
++volatile int AlphaModuleAnchorSource = 0; ++ ++} // namespace tidy ++} // namespace clang +diff --git a/clang-tools-extra/clang-tidy/alpha/CMakeLists.txt b/clang-tools-extra/clang-tidy/alpha/CMakeLists.txt +new file mode 100644 +index 000000000000..b50576868645 +--- /dev/null ++++ b/clang-tools-extra/clang-tidy/alpha/CMakeLists.txt +@@ -0,0 +1,32 @@ ++include_directories(${CMAKE_CURRENT_SOURCE_DIR}/../../include-cleaner/include) ++ ++set(LLVM_LINK_COMPONENTS ++ Support ++ ) ++ ++add_clang_library(clangTidyAlphaModule ++ ++ AlphaTidyModule.cpp ++ UnusedIncludesCheck.cpp ++ ++ LINK_LIBS ++ clangAnalysis ++ clangIncludeCleaner ++ clangTidy ++ clangTidyUtils ++ ++ DEPENDS ++ omp_gen ++ ) ++ ++clang_target_link_libraries(clangTidyAlphaModule ++ PRIVATE ++ clangAnalysis ++ clangAST ++ clangASTMatchers ++ clangBasic ++ clangIncludeCleaner ++ clangLex ++ clangSerialization ++ clangTooling ++ ) +diff --git a/clang-tools-extra/clang-tidy/alpha/UnusedIncludesCheck.cpp b/clang-tools-extra/clang-tidy/alpha/UnusedIncludesCheck.cpp +new file mode 100644 +index 000000000000..0d6a6bf7a367 +--- /dev/null ++++ b/clang-tools-extra/clang-tidy/alpha/UnusedIncludesCheck.cpp +@@ -0,0 +1,76 @@ ++//===--- UnusedIncludesCheck.cpp - clang-tidy------------------------------===// ++// ++// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. ++// See https://llvm.org/LICENSE.txt for license information. 
++// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception ++// ++//===----------------------------------------------------------------------===// ++ ++#include "UnusedIncludesCheck.h" ++#include "clang-include-cleaner/Analysis.h" ++#include "clang-include-cleaner/Hooks.h" ++#include "clang/Basic/Diagnostic.h" ++#include "clang/Basic/LLVM.h" ++#include "clang/Basic/SourceLocation.h" ++#include "clang/Lex/Preprocessor.h" ++ ++using namespace clang::ast_matchers; ++ ++namespace clang { ++namespace tidy { ++namespace alpha { ++ ++UnusedIncludesCheck::UnusedIncludesCheck(StringRef Name, ++ ClangTidyContext *Context) ++ : ClangTidyCheck(Name, Context) {} ++ ++void UnusedIncludesCheck::registerPPCallbacks(const SourceManager &SM, ++ Preprocessor *PP, ++ Preprocessor *) { ++ Ctx = std::make_unique<include_cleaner::AnalysisContext>( ++ include_cleaner::Policy{}, *PP); ++ RecordedPP = std::make_unique<include_cleaner::RecordedPP>(); ++ PP->addPPCallbacks(RecordedPP->record(*Ctx)); ++} ++ ++void UnusedIncludesCheck::registerMatchers(MatchFinder *Finder) { ++ Finder->addMatcher( ++ translationUnitDecl(forEach(decl(isExpansionInMainFile()).bind("top"))), ++ this); ++} ++ ++void UnusedIncludesCheck::check(const MatchFinder::MatchResult &Result) { ++ Top.push_back(const_cast<Decl *>(Result.Nodes.getNodeAs<Decl>("top"))); ++} ++ ++void UnusedIncludesCheck::onEndOfTranslationUnit() { ++ llvm::DenseSet Used; ++ llvm::DenseSet Seen; ++ include_cleaner::walkUsed( ++ *Ctx, Top, RecordedPP->MacroReferences, ++ [&](SourceLocation Loc, include_cleaner::Symbol Sym, ++ llvm::ArrayRef Headers) { ++ for (const auto &Header : Headers) { ++ if (!Seen.insert(Header).second) ++ continue; ++ const auto& HeadersToInsert = RecordedPP->Includes.match(Header); ++ Used.insert(HeadersToInsert.begin(), HeadersToInsert.end()); ++ } ++ }); ++ for (const auto &I : RecordedPP->Includes.all()) { ++ if (!Used.contains(&I)) { ++ const auto &SM = Ctx->sourceManager(); ++ FileID FID = SM.getFileID(I.Location); ++ diag(I.Location, "there is a high probability that include is unused") ++ 
<< FixItHint::CreateRemoval(CharSourceRange::getCharRange( ++ SM.translateLineCol(FID, I.Line, 1), ++ SM.translateLineCol(FID, I.Line + 1, 1))); ++ } ++ } ++} ++ ++UnusedIncludesCheck::~UnusedIncludesCheck() = default; ++ ++} // namespace alpha ++} // namespace tidy ++} // namespace clang +diff --git a/clang-tools-extra/clang-tidy/alpha/UnusedIncludesCheck.h b/clang-tools-extra/clang-tidy/alpha/UnusedIncludesCheck.h +new file mode 100644 +index 000000000000..f67c46e6cc3e +--- /dev/null ++++ b/clang-tools-extra/clang-tidy/alpha/UnusedIncludesCheck.h +@@ -0,0 +1,42 @@ ++//===--- UnusedIncludesCheck.h - clang-tidy----------------------*- C++ -*-===// ++// ++// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. ++// See https://llvm.org/LICENSE.txt for license information. ++// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception ++// ++//===----------------------------------------------------------------------===// ++ ++#ifndef LLVM_CLANG_TOOLS_EXTRA_CLANG_TIDY_MISC_UNUSED_INCLUDES_H ++#define LLVM_CLANG_TOOLS_EXTRA_CLANG_TIDY_MISC_UNUSED_INCLUDES_H ++ ++#include "../ClangTidyCheck.h" ++ ++namespace clang { ++namespace include_cleaner { ++class AnalysisContext; ++struct RecordedPP; ++} // namespace include_cleaner ++namespace tidy { ++namespace alpha { ++ ++class UnusedIncludesCheck : public ClangTidyCheck { ++public: ++ UnusedIncludesCheck(StringRef Name, ClangTidyContext *Context); ++ ~UnusedIncludesCheck(); ++ void registerPPCallbacks(const SourceManager &SM, Preprocessor *, ++ Preprocessor *) override; ++ void registerMatchers(ast_matchers::MatchFinder *Finder) override; ++ void check(const ast_matchers::MatchFinder::MatchResult &Result) override; ++ void onEndOfTranslationUnit() override; ++ ++private: ++ std::unique_ptr<include_cleaner::AnalysisContext> Ctx; ++ std::unique_ptr<include_cleaner::RecordedPP> RecordedPP; ++ std::vector<Decl *> Top; ++}; ++ ++} // namespace alpha ++} // namespace tidy ++} // namespace clang ++ ++#endif // LLVM_CLANG_TOOLS_EXTRA_CLANG_TIDY_MISC_UNUSED_INCLUDES_H +diff --git 
a/clang-tools-extra/clangd/CMakeLists.txt b/clang-tools-extra/clangd/CMakeLists.txt +index de8f087a52a5..14f605b1efaf 100644 +--- a/clang-tools-extra/clangd/CMakeLists.txt ++++ b/clang-tools-extra/clangd/CMakeLists.txt +@@ -2,6 +2,8 @@ + include_directories(${CMAKE_CURRENT_SOURCE_DIR}) + include_directories(${CMAKE_CURRENT_BINARY_DIR}) + ++include_directories(${CMAKE_CURRENT_SOURCE_DIR}/../include-cleaner/include) ++ + add_subdirectory(support) + + # Configure the Features.inc file. +@@ -153,6 +155,7 @@ clang_target_link_libraries(clangDaemon + clangDriver + clangFormat + clangFrontend ++ clangIncludeCleaner + clangIndex + clangLex + clangSema +diff --git a/clang-tools-extra/clangd/Hover.cpp b/clang-tools-extra/clangd/Hover.cpp +index 26eb2574195d..a3cbc8894f6d 100644 +--- a/clang-tools-extra/clangd/Hover.cpp ++++ b/clang-tools-extra/clangd/Hover.cpp +@@ -12,9 +12,11 @@ + #include "CodeCompletionStrings.h" + #include "Config.h" + #include "FindTarget.h" ++#include "IncludeCleaner.h" + #include "ParsedAST.h" + #include "Selection.h" + #include "SourceCode.h" ++#include "clang-include-cleaner/Analysis.h" + #include "index/SymbolCollector.h" + #include "support/Markup.h" + #include "clang/AST/ASTContext.h" +@@ -985,6 +987,23 @@ llvm::Optional getHover(ParsedAST &AST, Position Pos, + // FIXME: We don't have a fitting value for Kind. + HI.Definition = + URIForFile::canonicalize(Inc.Resolved, *MainFilePath).file().str(); ++ ++ // FIXME: share code, macros too... 
++ include_cleaner::AnalysisContext Ctx(include_cleaner::Policy{}, ++ AST.getPreprocessor()); ++ std::vector Provides; ++ include_cleaner::walkUsed( ++ Ctx, AST.getLocalTopLevelDecls(), /*Macros=*/{}, ++ [&](SourceLocation Loc, include_cleaner::Symbol S, ++ llvm::ArrayRef Headers) { ++ for (const auto &H : Headers) ++ if (match(H, Inc, AST.getIncludeStructure())) ++ Provides.push_back(S.name()); ++ }); ++ llvm::sort(Provides); ++ Provides.erase(std::unique(Provides.begin(), Provides.end()), ++ Provides.end()); ++ HI.Documentation = "provides " + llvm::join(Provides, ", "); + HI.DefinitionLanguage = ""; + return HI; + } +diff --git a/clang-tools-extra/clangd/IncludeCleaner.cpp b/clang-tools-extra/clangd/IncludeCleaner.cpp +index e5b5187e030c..3c0ba06316ac 100644 +--- a/clang-tools-extra/clangd/IncludeCleaner.cpp ++++ b/clang-tools-extra/clangd/IncludeCleaner.cpp +@@ -12,6 +12,8 @@ + #include "ParsedAST.h" + #include "Protocol.h" + #include "SourceCode.h" ++#include "clang-include-cleaner/Analysis.h" ++#include "clang-include-cleaner/Types.h" + #include "index/CanonicalIncludes.h" + #include "support/Logger.h" + #include "support/Trace.h" +@@ -40,181 +42,6 @@ void setIncludeCleanerAnalyzesStdlib(bool B) { AnalyzeStdlib = B; } + + namespace { + +-/// Crawler traverses the AST and feeds in the locations of (sometimes +-/// implicitly) used symbols into \p Result. 
+-class ReferencedLocationCrawler +- : public RecursiveASTVisitor { +-public: +- ReferencedLocationCrawler(ReferencedLocations &Result, +- const SourceManager &SM) +- : Result(Result), SM(SM) {} +- +- bool VisitDeclRefExpr(DeclRefExpr *DRE) { +- add(DRE->getDecl()); +- add(DRE->getFoundDecl()); +- return true; +- } +- +- bool VisitMemberExpr(MemberExpr *ME) { +- add(ME->getMemberDecl()); +- add(ME->getFoundDecl().getDecl()); +- return true; +- } +- +- bool VisitTagType(TagType *TT) { +- add(TT->getDecl()); +- return true; +- } +- +- bool VisitFunctionDecl(FunctionDecl *FD) { +- // Function definition will require redeclarations to be included. +- if (FD->isThisDeclarationADefinition()) +- add(FD); +- return true; +- } +- +- bool VisitCXXConstructExpr(CXXConstructExpr *CCE) { +- add(CCE->getConstructor()); +- return true; +- } +- +- bool VisitTemplateSpecializationType(TemplateSpecializationType *TST) { +- // Using templateName case is handled by the override TraverseTemplateName. +- if (TST->getTemplateName().getKind() == TemplateName::UsingTemplate) +- return true; +- add(TST->getAsCXXRecordDecl()); // Specialization +- return true; +- } +- +- // There is no VisitTemplateName in RAV, thus we override the Traverse version +- // to handle the Using TemplateName case. +- bool TraverseTemplateName(TemplateName TN) { +- VisitTemplateName(TN); +- return Base::TraverseTemplateName(TN); +- } +- // A pseudo VisitTemplateName, dispatched by the above TraverseTemplateName! +- bool VisitTemplateName(TemplateName TN) { +- if (const auto *USD = TN.getAsUsingShadowDecl()) { +- add(USD); +- return true; +- } +- add(TN.getAsTemplateDecl()); // Primary template. +- return true; +- } +- +- bool VisitUsingType(UsingType *UT) { +- add(UT->getFoundDecl()); +- return true; +- } +- +- bool VisitTypedefType(TypedefType *TT) { +- add(TT->getDecl()); +- return true; +- } +- +- // Consider types of any subexpression used, even if the type is not named. 
+- // This is helpful in getFoo().bar(), where Foo must be complete. +- // FIXME(kirillbobyrev): Should we tweak this? It may not be desirable to +- // consider types "used" when they are not directly spelled in code. +- bool VisitExpr(Expr *E) { +- TraverseType(E->getType()); +- return true; +- } +- +- bool TraverseType(QualType T) { +- if (isNew(T.getTypePtrOrNull())) // don't care about quals +- Base::TraverseType(T); +- return true; +- } +- +- bool VisitUsingDecl(UsingDecl *D) { +- for (const auto *Shadow : D->shadows()) +- add(Shadow->getTargetDecl()); +- return true; +- } +- +- // Enums may be usefully forward-declared as *complete* types by specifying +- // an underlying type. In this case, the definition should see the declaration +- // so they can be checked for compatibility. +- bool VisitEnumDecl(EnumDecl *D) { +- if (D->isThisDeclarationADefinition() && D->getIntegerTypeSourceInfo()) +- add(D); +- return true; +- } +- +- // When the overload is not resolved yet, mark all candidates as used. +- bool VisitOverloadExpr(OverloadExpr *E) { +- for (const auto *ResolutionDecl : E->decls()) +- add(ResolutionDecl); +- return true; +- } +- +-private: +- using Base = RecursiveASTVisitor; +- +- void add(const Decl *D) { +- if (!D || !isNew(D->getCanonicalDecl())) +- return; +- if (auto SS = StdRecognizer(D)) { +- Result.Stdlib.insert(*SS); +- return; +- } +- // Special case RecordDecls, as it is common for them to be forward +- // declared multiple times. The most common cases are: +- // - Definition available in TU, only mark that one as usage. The rest is +- // likely to be unnecessary. This might result in false positives when an +- // internal definition is visible. +- // - There's a forward declaration in the main file, no need for other +- // redecls. 
+- if (const auto *RD = llvm::dyn_cast(D)) { +- if (const auto *Definition = RD->getDefinition()) { +- Result.User.insert(Definition->getLocation()); +- return; +- } +- if (SM.isInMainFile(RD->getMostRecentDecl()->getLocation())) +- return; +- } +- for (const Decl *Redecl : D->redecls()) +- Result.User.insert(Redecl->getLocation()); +- } +- +- bool isNew(const void *P) { return P && Visited.insert(P).second; } +- +- ReferencedLocations &Result; +- llvm::DenseSet Visited; +- const SourceManager &SM; +- tooling::stdlib::Recognizer StdRecognizer; +-}; +- +-// Given a set of referenced FileIDs, determines all the potentially-referenced +-// files and macros by traversing expansion/spelling locations of macro IDs. +-// This is used to map the referenced SourceLocations onto real files. +-struct ReferencedFilesBuilder { +- ReferencedFilesBuilder(const SourceManager &SM) : SM(SM) {} +- llvm::DenseSet Files; +- llvm::DenseSet Macros; +- const SourceManager &SM; +- +- void add(SourceLocation Loc) { add(SM.getFileID(Loc), Loc); } +- +- void add(FileID FID, SourceLocation Loc) { +- if (FID.isInvalid()) +- return; +- assert(SM.isInFileID(Loc, FID)); +- if (Loc.isFileID()) { +- Files.insert(FID); +- return; +- } +- // Don't process the same macro FID twice. +- if (!Macros.insert(FID).second) +- return; +- const auto &Exp = SM.getSLocEntry(FID).getExpansion(); +- add(Exp.getSpellingLoc()); +- add(Exp.getExpansionLocStart()); +- add(Exp.getExpansionLocEnd()); +- } +-}; +- + // Returns the range starting at '#' and ending at EOL. Escaped newlines are not + // handled. + clangd::Range getDiagnosticRange(llvm::StringRef Code, unsigned HashOffset) { +@@ -231,10 +58,10 @@ clangd::Range getDiagnosticRange(llvm::StringRef Code, unsigned HashOffset) { + + // Finds locations of macros referenced from within the main file. That includes + // references that were not yet expanded, e.g `BAR` in `#define FOO BAR`. 
+-void findReferencedMacros(const SourceManager &SM, Preprocessor &PP, +- const syntax::TokenBuffer *Tokens, +- ReferencedLocations &Result) { ++std::vector ++findReferencedMacros(ParsedAST &AST, include_cleaner::AnalysisContext &Ctx) { + trace::Span Tracer("IncludeCleaner::findReferencedMacros"); ++ std::vector Result; + // FIXME(kirillbobyrev): The macros from the main file are collected in + // ParsedAST's MainFileMacros. However, we can't use it here because it + // doesn't handle macro references that were not expanded, e.g. in macro +@@ -244,15 +71,19 @@ void findReferencedMacros(const SourceManager &SM, Preprocessor &PP, + // this mechanism (as opposed to iterating through all tokens) will improve + // the performance of findReferencedMacros and also improve other features + // relying on MainFileMacros. +- for (const syntax::Token &Tok : Tokens->spelledTokens(SM.getMainFileID())) { +- auto Macro = locateMacroAt(Tok, PP); ++ for (const syntax::Token &Tok : ++ AST.getTokens().spelledTokens(AST.getSourceManager().getMainFileID())) { ++ auto Macro = locateMacroAt(Tok, AST.getPreprocessor()); + if (!Macro) + continue; + auto Loc = Macro->Info->getDefinitionLoc(); + if (Loc.isValid()) +- Result.User.insert(Loc); +- // FIXME: support stdlib macros ++ Result.push_back(include_cleaner::SymbolReference{ ++ Tok.location(), ++ Ctx.macro(AST.getPreprocessor().getIdentifierInfo(Macro->Name), ++ Loc)}); + } ++ return Result; + } + + static bool mayConsiderUnused(const Inclusion &Inc, ParsedAST &AST, +@@ -296,110 +127,8 @@ static bool mayConsiderUnused(const Inclusion &Inc, ParsedAST &AST, + } + return true; + } +- +-// In case symbols are coming from non self-contained header, we need to find +-// its first includer that is self-contained. This is the header users can +-// include, so it will be responsible for bringing the symbols from given +-// header into the scope. 
+-FileID headerResponsible(FileID ID, const SourceManager &SM, +- const IncludeStructure &Includes) { +- // Unroll the chain of non self-contained headers until we find the one that +- // can be included. +- for (const FileEntry *FE = SM.getFileEntryForID(ID); ID != SM.getMainFileID(); +- FE = SM.getFileEntryForID(ID)) { +- // If FE is nullptr, we consider it to be the responsible header. +- if (!FE) +- break; +- auto HID = Includes.getID(FE); +- assert(HID && "We're iterating over headers already existing in " +- "IncludeStructure"); +- if (Includes.isSelfContained(*HID)) +- break; +- // The header is not self-contained: put the responsibility for its symbols +- // on its includer. +- ID = SM.getFileID(SM.getIncludeLoc(ID)); +- } +- return ID; +-} +- + } // namespace + +-ReferencedLocations findReferencedLocations(ASTContext &Ctx, Preprocessor &PP, +- const syntax::TokenBuffer *Tokens) { +- trace::Span Tracer("IncludeCleaner::findReferencedLocations"); +- ReferencedLocations Result; +- const auto &SM = Ctx.getSourceManager(); +- ReferencedLocationCrawler Crawler(Result, SM); +- Crawler.TraverseAST(Ctx); +- if (Tokens) +- findReferencedMacros(SM, PP, Tokens, Result); +- return Result; +-} +- +-ReferencedLocations findReferencedLocations(ParsedAST &AST) { +- return findReferencedLocations(AST.getASTContext(), AST.getPreprocessor(), +- &AST.getTokens()); +-} +- +-ReferencedFiles findReferencedFiles( +- const ReferencedLocations &Locs, const SourceManager &SM, +- llvm::function_ref HeaderResponsible, +- llvm::function_ref(FileID)> UmbrellaHeader) { +- std::vector Sorted{Locs.User.begin(), Locs.User.end()}; +- llvm::sort(Sorted); // Group by FileID. +- ReferencedFilesBuilder Builder(SM); +- for (auto It = Sorted.begin(); It < Sorted.end();) { +- FileID FID = SM.getFileID(*It); +- Builder.add(FID, *It); +- // Cheaply skip over all the other locations from the same FileID. +- // This avoids lots of redundant Loc->File lookups for the same file. 
+- do +- ++It; +- while (It != Sorted.end() && SM.isInFileID(*It, FID)); +- } +- +- // If a header is not self-contained, we consider its symbols a logical part +- // of the including file. Therefore, mark the parents of all used +- // non-self-contained FileIDs as used. Perform this on FileIDs rather than +- // HeaderIDs, as each inclusion of a non-self-contained file is distinct. +- llvm::DenseSet UserFiles; +- llvm::StringSet<> PublicHeaders; +- for (FileID ID : Builder.Files) { +- UserFiles.insert(HeaderResponsible(ID)); +- if (auto PublicHeader = UmbrellaHeader(ID)) { +- PublicHeaders.insert(*PublicHeader); +- } +- } +- +- llvm::DenseSet StdlibFiles; +- for (const auto &Symbol : Locs.Stdlib) +- for (const auto &Header : Symbol.headers()) +- StdlibFiles.insert(Header); +- +- return {std::move(UserFiles), std::move(StdlibFiles), +- std::move(PublicHeaders)}; +-} +- +-ReferencedFiles findReferencedFiles(const ReferencedLocations &Locs, +- const IncludeStructure &Includes, +- const CanonicalIncludes &CanonIncludes, +- const SourceManager &SM) { +- return findReferencedFiles( +- Locs, SM, +- [&SM, &Includes](FileID ID) { +- return headerResponsible(ID, SM, Includes); +- }, +- [&SM, &CanonIncludes](FileID ID) -> Optional { +- auto Entry = SM.getFileEntryRefForID(ID); +- if (!Entry) +- return llvm::None; +- auto PublicHeader = CanonIncludes.mapHeader(*Entry); +- if (PublicHeader.empty()) +- return llvm::None; +- return PublicHeader; +- }); +-} +- + std::vector + getUnused(ParsedAST &AST, + const llvm::DenseSet &ReferencedFiles, +@@ -426,51 +155,50 @@ getUnused(ParsedAST &AST, + return Unused; + } + +-#ifndef NDEBUG +-// Is FID a , etc? 
+-static bool isSpecialBuffer(FileID FID, const SourceManager &SM) { +- const SrcMgr::FileInfo &FI = SM.getSLocEntry(FID).getFile(); +- return FI.getName().startswith("<"); +-} +-#endif +- +-llvm::DenseSet +-translateToHeaderIDs(const ReferencedFiles &Files, +- const IncludeStructure &Includes, +- const SourceManager &SM) { +- trace::Span Tracer("IncludeCleaner::translateToHeaderIDs"); +- llvm::DenseSet TranslatedHeaderIDs; +- TranslatedHeaderIDs.reserve(Files.User.size()); +- for (FileID FID : Files.User) { +- const FileEntry *FE = SM.getFileEntryForID(FID); +- if (!FE) { +- assert(isSpecialBuffer(FID, SM)); +- continue; +- } +- const auto File = Includes.getID(FE); +- assert(File); +- TranslatedHeaderIDs.insert(*File); +- } +- for (tooling::stdlib::Header StdlibUsed : Files.Stdlib) +- for (auto HID : Includes.StdlibHeaders.lookup(StdlibUsed)) +- TranslatedHeaderIDs.insert(HID); +- return TranslatedHeaderIDs; ++bool match(const include_cleaner::Header &H, const Inclusion &I, ++ const IncludeStructure &S) { ++ switch (H.kind()) { ++ case include_cleaner::Header::Physical: ++ if (auto HID = S.getID(H.getPhysical())) ++ if (static_cast(*HID) == I.HeaderID) ++ return true; ++ break; ++ case include_cleaner::Header::StandardLibrary: ++ return I.Written == H.getStandardLibrary().name(); ++ case include_cleaner::Header::Verbatim: ++ return llvm::StringRef(I.Written).trim("\"<>") == H.getVerbatimSpelling(); ++ case include_cleaner::Header::Builtin: ++ case include_cleaner::Header::MainFile: ++ break; ++ } ++ return false; + } + + std::vector computeUnusedIncludes(ParsedAST &AST) { +- const auto &SM = AST.getSourceManager(); +- +- auto Refs = findReferencedLocations(AST); +- auto ReferencedFiles = +- findReferencedFiles(Refs, AST.getIncludeStructure(), +- AST.getCanonicalIncludes(), AST.getSourceManager()); +- auto ReferencedHeaders = +- translateToHeaderIDs(ReferencedFiles, AST.getIncludeStructure(), SM); +- return getUnused(AST, ReferencedHeaders, 
ReferencedFiles.SpelledUmbrellas); ++ include_cleaner::AnalysisContext Ctx(include_cleaner::Policy{}, ++ AST.getPreprocessor()); ++ llvm::DenseSet Used; ++ include_cleaner::walkUsed( ++ Ctx, AST.getLocalTopLevelDecls(), ++ /*MacroRefs=*/findReferencedMacros(AST, Ctx), ++ [&](SourceLocation Loc, include_cleaner::Symbol Sym, ++ llvm::ArrayRef Headers) { ++ for (const auto &I : AST.getIncludeStructure().MainFileIncludes) ++ for (const auto &H : Headers) ++ if (match(H, I, AST.getIncludeStructure())) ++ Used.insert(&I); ++ }); ++ std::vector Unused; ++ const Config &Cfg = Config::current(); ++ for (const auto &I : AST.getIncludeStructure().MainFileIncludes) { ++ if (!Used.contains(&I) && mayConsiderUnused(I, AST, Cfg)) ++ Unused.push_back(&I); ++ } ++ return Unused; + } + +-std::vector issueUnusedIncludesDiagnostics(ParsedAST &AST, +- llvm::StringRef Code) { ++auto issueUnusedIncludesDiagnostics(ParsedAST &AST, ++ llvm::StringRef Code) -> std::vector { + const Config &Cfg = Config::current(); + if (Cfg.Diagnostics.UnusedIncludes != Config::UnusedIncludesPolicy::Strict || + Cfg.Diagnostics.SuppressAll || +diff --git a/clang-tools-extra/clangd/IncludeCleaner.h b/clang-tools-extra/clangd/IncludeCleaner.h +index 4ce31baaa067..c858a60c5db7 100644 +--- a/clang-tools-extra/clangd/IncludeCleaner.h ++++ b/clang-tools-extra/clangd/IncludeCleaner.h +@@ -23,6 +23,7 @@ + #include "index/CanonicalIncludes.h" + #include "clang/Basic/SourceLocation.h" + #include "clang/Tooling/Inclusions/StandardLibrary.h" ++#include "clang-include-cleaner/Types.h" + #include "llvm/ADT/DenseSet.h" + #include "llvm/ADT/STLFunctionalExtras.h" + #include "llvm/ADT/StringSet.h" +@@ -100,6 +101,10 @@ std::vector computeUnusedIncludes(ParsedAST &AST); + std::vector issueUnusedIncludesDiagnostics(ParsedAST &AST, + llvm::StringRef Code); + ++// Does an include-cleaner header spec match a clangd recorded inclusion? 
++bool match(const include_cleaner::Header &H, const Inclusion &I, ++ const IncludeStructure &S); ++ + /// Affects whether standard library includes should be considered for + /// removal. This is off by default for now due to implementation limitations: + /// - macros are not tracked +diff --git a/clang-tools-extra/include-cleaner/CMakeLists.txt b/clang-tools-extra/include-cleaner/CMakeLists.txt +index 0550b02f603b..325186879a47 100644 +--- a/clang-tools-extra/include-cleaner/CMakeLists.txt ++++ b/clang-tools-extra/include-cleaner/CMakeLists.txt +@@ -1,4 +1,8 @@ ++include_directories(include) ++include_directories(${CMAKE_CURRENT_BINARY_DIR}/include) + add_subdirectory(lib) ++add_subdirectory(tool) ++ + if(CLANG_INCLUDE_TESTS) + add_subdirectory(test) + add_subdirectory(unittests) +diff --git a/clang-tools-extra/include-cleaner/README.md b/clang-tools-extra/include-cleaner/README.md +deleted file mode 100644 +index e69de29bb2d1..000000000000 +diff --git a/clang-tools-extra/include-cleaner/include/clang-include-cleaner/Analysis.h b/clang-tools-extra/include-cleaner/include/clang-include-cleaner/Analysis.h +new file mode 100644 +index 000000000000..4e5cc8d03814 +--- /dev/null ++++ b/clang-tools-extra/include-cleaner/include/clang-include-cleaner/Analysis.h +@@ -0,0 +1,77 @@ ++//===--- Analysis.h - Analyze used files --------------------------- C++-*-===// ++// ++// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. ++// See https://llvm.org/LICENSE.txt for license information. 
++// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception ++// ++//===----------------------------------------------------------------------===// ++ ++#ifndef CLANG_INCLUDE_CLEANER_ANALYSIS_H ++#define CLANG_INCLUDE_CLEANER_ANALYSIS_H ++ ++#include "clang-include-cleaner/Policy.h" ++#include "clang-include-cleaner/Types.h" ++ ++namespace clang { ++namespace include_cleaner { ++class Cache; ++ ++// Bundles the policy, compiler state, and caches for one include-cleaner run. ++// (This is needed everywhere, but shouldn't be used to propagate state around!) ++class AnalysisContext { ++public: ++ AnalysisContext(const Policy &, const Preprocessor &); ++ AnalysisContext(AnalysisContext &&) = delete; ++ AnalysisContext &operator=(AnalysisContext &&) = delete; ++ ~AnalysisContext(); ++ ++ const Policy &policy() const { return P; } ++ ++ const SourceManager &sourceManager() const { return *SM; } ++ const Preprocessor &preprocessor() const { return *PP; } ++ ++ // Only for internal use (the Cache class definition is not exposed). ++ // This allows us to reuse e.g. mappings from symbols to their locations. ++ Cache &cache() { return *C; } ++ // FIXME: does this need to be public? ++ Symbol macro(const IdentifierInfo *, SourceLocation); ++ ++private: ++ Policy P; ++ const SourceManager *SM; ++ const Preprocessor *PP; ++ std::unique_ptr C; ++}; ++ ++// A UsedSymbolVisitor is a callback invoked for each symbol reference seen. ++// ++// References occur at a particular location, refer to a single symbol, and ++// that symbol may be provided by any of several headers. ++// ++// The first element of ProvidedBy is the *preferred* header, e.g. to insert. ++using UsedSymbolVisitor = ++ llvm::function_ref ProvidedBy)>; ++ ++// Find and report all references to symbols in a region of code. ++// ++// The AST traversal is rooted at ASTRoots - typically top-level declarations ++// of a single source file. 
MacroRefs are additional recorded references to ++// macros, which do not appear in the AST. ++// ++// This is the main entrypoint of the include-cleaner library, and can be used: ++// - to diagnose missing includes: a referenced symbol is provided by ++// headers which don't match any #include in the main file ++// - to diagnose unused includes: an #include in the main file does not match ++// the headers for any referenced symbol ++// ++// Mapping between Header and #include directives is not provided here, but see ++// RecordedPP::Includes::match() in Hooks.h. ++void walkUsed(AnalysisContext &, llvm::ArrayRef ASTRoots, ++ llvm::ArrayRef MacroRefs, ++ UsedSymbolVisitor Callback); ++ ++} // namespace include_cleaner ++} // namespace clang ++ ++#endif +diff --git a/clang-tools-extra/include-cleaner/include/clang-include-cleaner/Hooks.h b/clang-tools-extra/include-cleaner/include/clang-include-cleaner/Hooks.h +new file mode 100644 +index 000000000000..39e11653b210 +--- /dev/null ++++ b/clang-tools-extra/include-cleaner/include/clang-include-cleaner/Hooks.h +@@ -0,0 +1,87 @@ ++//===--- Hooks.h - Record compiler events -------------------------- C++-*-===// ++// ++// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. ++// See https://llvm.org/LICENSE.txt for license information. ++// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception ++// ++//===----------------------------------------------------------------------===// ++// ++// Where Analysis.h analyzes AST nodes and recorded preprocessor events, this ++// file defines ways to capture AST and preprocessor information from a parse. ++// ++// These are the simplest way to connect include-cleaner logic to the parser, ++// but other ways are possible (for example clangd records includes separately). 
++// ++//===----------------------------------------------------------------------===// ++ ++#ifndef CLANG_INCLUDE_CLEANER_HOOKS_H ++#define CLANG_INCLUDE_CLEANER_HOOKS_H ++ ++#include "Analysis.h" ++#include "Types.h" ++#include "clang/Basic/FileEntry.h" ++#include "clang/Basic/SourceLocation.h" ++#include "llvm/ADT/DenseMap.h" ++#include "llvm/ADT/SmallVector.h" ++#include "llvm/ADT/StringMap.h" ++#include "llvm/ADT/StringRef.h" ++#include ++ ++namespace clang { ++class FileEntry; ++class PPCallbacks; ++namespace include_cleaner { ++class PPRecorder; ++ ++// Contains recorded preprocessor events relevant to include-cleaner. ++struct RecordedPP { ++ // The callback (when installed into clang) tracks macros/includes in this. ++ std::unique_ptr record(AnalysisContext &Ctx); ++ // FIXME: probably also want a comment handler to capture IWYU pragmas. ++ ++ // Describes where macros were used from the main file. ++ std::vector MacroReferences; ++ ++ // A single #include directive from the main file. ++ struct Include { ++ llvm::StringRef Spelled; // e.g. vector ++ const FileEntry *Resolved; // e.g. /path/to/c++/v1/vector ++ SourceLocation Location; // of hash in #include ++ unsigned Line; // 1-based line number for #include ++ }; ++ // The set of includes recorded from the main file. ++ class RecordedIncludes { ++ public: ++ // All #includes seen, in the order they appear. ++ llvm::ArrayRef all() const { return All; } ++ // Determine #includes that match a header (that provides a used symbol). ++ // ++ // Matching is based on the type of Header specified: ++ // - for a physical file like /path/to/foo.h, we check Resolved ++ // - for a logical file like , we check Spelled ++ llvm::SmallVector match(Header H) const; ++ ++ private: ++ std::vector All; ++ llvm::StringMap> BySpelling; ++ llvm::DenseMap> ByFile; ++ friend PPRecorder; ++ } Includes; ++}; ++ ++// Contains recorded parser events relevant to include-cleaner. 
++struct RecordedAST { ++ // The consumer (when installed into clang) tracks declarations in this. ++ std::unique_ptr record(AnalysisContext &Ctx); ++ ++ // The set of declarations written at file scope inside the main file. ++ // ++ // These are the roots of the subtrees that should be traversed to find uses. ++ // (Traversing the TranslationUnitDecl would find uses inside headers!) ++ std::vector TopLevelDecls; ++}; ++ ++} // namespace include_cleaner ++} // namespace clang ++ ++#endif +diff --git a/clang-tools-extra/include-cleaner/include/clang-include-cleaner/Policy.h b/clang-tools-extra/include-cleaner/include/clang-include-cleaner/Policy.h +new file mode 100644 +index 000000000000..142887b85529 +--- /dev/null ++++ b/clang-tools-extra/include-cleaner/include/clang-include-cleaner/Policy.h +@@ -0,0 +1,35 @@ ++//===--- Policy.h - Tuning what is considered used ----------------- C++-*-===// ++// ++// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. ++// See https://llvm.org/LICENSE.txt for license information. ++// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception ++// ++//===----------------------------------------------------------------------===// ++ ++#ifndef CLANG_INCLUDE_CLEANER_POLICY_H ++#define CLANG_INCLUDE_CLEANER_POLICY_H ++ ++namespace clang { ++namespace include_cleaner { ++ ++// Provides some fine-tuning of include-cleaner's choices about what is used. ++// ++// Changing the policy serves two purposes: ++// - marking more things used reduces the false-positives for "unused include", ++// while marking fewer things improves "missing include" in the same way. ++// - different coding styles may make different decisions about which includes ++// are required. ++struct Policy { ++ // Does construction count as use of the type, when the type is not named? ++ // e.g. printVector({x, y, z}); - is std::vector used? ++ bool Construction = false; ++ // Is member access tracked as a reference? 
++ bool Members = false; ++ // Are operator calls tracked as references? ++ bool Operators = false; ++}; ++ ++} // namespace include_cleaner ++} // namespace clang ++ ++#endif +diff --git a/clang-tools-extra/include-cleaner/include/clang-include-cleaner/Types.h b/clang-tools-extra/include-cleaner/include/clang-include-cleaner/Types.h +new file mode 100644 +index 000000000000..2a91473b926e +--- /dev/null ++++ b/clang-tools-extra/include-cleaner/include/clang-include-cleaner/Types.h +@@ -0,0 +1,219 @@ ++//===--- Types.h - Data structures for used-symbol analysis -------- C++-*-===// ++// ++// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. ++// See https://llvm.org/LICENSE.txt for license information. ++// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception ++// ++//===----------------------------------------------------------------------===// ++// ++// Find referenced files is mostly a matter of translating: ++// AST Node => declaration => source location => file ++// ++// clang has types for these (DynTypedNode, Decl, SourceLocation, FileID), but ++// there are special cases: macros are not declarations, the concrete file where ++// a standard library symbol was defined doesn't matter, etc. ++// ++// We define some slightly more abstract sum types to handle these cases while ++// keeping the API clean. For example, Symbol is Decl+DefinedMacro. ++// ++//===----------------------------------------------------------------------===// ++ ++#ifndef CLANG_INCLUDE_CLEANER_TYPES_H ++#define CLANG_INCLUDE_CLEANER_TYPES_H ++ ++#include "clang/AST/DeclBase.h" ++#include "clang/Tooling/Inclusions/StandardLibrary.h" ++#include "llvm/ADT/BitmaskEnum.h" ++#include "llvm/ADT/PointerSumType.h" ++ ++namespace clang { ++class IdentifierInfo; ++class MacroDirective; ++namespace include_cleaner { ++ ++// Identifies a macro, along with a particular definition of it. ++// We generally consider redefined macros to be different symbols. 
++struct DefinedMacro { ++ const IdentifierInfo *Name; ++ const SourceLocation Definition; ++}; ++ ++// A Symbol is an entity that can be referenced. ++// It is either a declaration (NamedDecl) or a macro (DefinedMacro). ++class Symbol { ++public: ++ enum Kind { ++ Macro, ++ Declaration, ++ }; ++ Symbol(NamedDecl *ND) : Target(ND) {} ++ Symbol(const DefinedMacro *M) : Target(M) {} ++ ++ std::string name() const; ++ std::string nodeName() const; ++ Kind kind() const { return Target.is() ? Declaration : Macro; } ++ ++ NamedDecl *getDeclaration() const { return Target.get(); } ++ const DefinedMacro *getMacro() const { ++ return Target.get(); ++ } ++ ++private: ++ llvm::PointerUnion Target; ++}; ++ ++// A usage of a Symbol seen in our source code. ++struct SymbolReference { ++ // The point in the code where the reference occurred. ++ // We could track the DynTypedNode we found it in if it's important. ++ SourceLocation Location; ++ Symbol Target; ++}; ++ ++// A Location is a place where a symbol can be provided. ++// It is either a physical part of the TU (SourceLocation) or a logical location ++// in the standard library (stdlib::Symbol). ++class Location { ++public: ++ enum Kind : uint8_t { ++ Physical, ++ StandardLibrary, ++ }; ++ ++ Location(SourceLocation S) : K(Physical), SrcLoc(S) {} ++ Location(tooling::stdlib::Symbol S) : K(StandardLibrary), StdlibSym(S) {} ++ ++ std::string name(const SourceManager &SM) const; ++ Kind kind() const { return K; } ++ ++ SourceLocation getPhysical() const { ++ assert(kind() == Physical); ++ return SrcLoc; ++ }; ++ tooling::stdlib::Symbol getStandardLibrary() const { ++ assert(kind() == StandardLibrary); ++ return StdlibSym; ++ }; ++ ++private: ++ Kind K; ++ union { ++ SourceLocation SrcLoc; ++ tooling::stdlib::Symbol StdlibSym; ++ }; ++}; ++ ++// A Header is an includable file that can provide access to Locations. 
++// It is either a physical file (FileEntry), a logical location in the standard ++// library (stdlib::Header), or a verbatim header spelling (StringRef). ++class Header { ++public: ++ enum Kind : uint8_t { ++ Physical, ++ StandardLibrary, ++ Verbatim, ++ Builtin, ++ MainFile, ++ }; ++ ++ Header(const FileEntry *FE) : K(Physical), PhysicalFile(FE) {} ++ Header(tooling::stdlib::Header H) : K(StandardLibrary), StdlibHeader(H) {} ++ Header(const char *V) : K(Verbatim), VerbatimSpelling(V) {} ++ static Header builtin() { return Header{Builtin}; }; ++ static Header mainFile() { return Header{MainFile}; }; ++ ++ std::string name() const; ++ Kind kind() const { return K; } ++ ++ const FileEntry *getPhysical() const { ++ assert(kind() == Physical); ++ return PhysicalFile; ++ }; ++ tooling::stdlib::Header getStandardLibrary() const { ++ assert(kind() == StandardLibrary); ++ return StdlibHeader; ++ }; ++ llvm::StringRef getVerbatimSpelling() const { ++ assert(kind() == Verbatim); ++ return VerbatimSpelling; ++ }; ++ ++private: ++ Header(Kind K) : K(K) {} ++ ++ Kind K; ++ union { ++ const FileEntry *PhysicalFile; ++ tooling::stdlib::Header StdlibHeader; ++ const char *VerbatimSpelling; ++ }; ++ ++ friend bool operator==(const Header &L, const Header &R) { ++ if (L.kind() != R.kind()) ++ return false; ++ switch (L.kind()) { ++ case Physical: ++ return L.getPhysical() == R.getPhysical(); ++ case StandardLibrary: ++ return L.getStandardLibrary() == R.getStandardLibrary(); ++ case Verbatim: ++ return L.getVerbatimSpelling() == R.getVerbatimSpelling(); ++ case Builtin: ++ case MainFile: ++ return true; // no payload ++ } ++ llvm_unreachable("unhandled Header kind"); ++ } ++ ++ friend bool operator<(const Header &L, const Header &R) { ++ if (L.kind() != R.kind()) ++ return L.kind() < R.kind(); ++ switch (L.kind()) { ++ case Physical: ++ return L.getPhysical() == R.getPhysical(); ++ case StandardLibrary: ++ return L.getStandardLibrary() < R.getStandardLibrary(); ++ case Verbatim: 
++ return L.getVerbatimSpelling() < R.getVerbatimSpelling(); ++ case Builtin: ++ case MainFile: ++ return false; // no payload ++ } ++ llvm_unreachable("unhandled Header kind"); ++ } ++ ++ friend llvm::hash_code hash_value(const Header &H) { ++ switch (H.K) { ++ case Header::Physical: ++ return llvm::hash_combine(H.K, H.getPhysical()); ++ case Header::StandardLibrary: ++ // FIXME: make StdlibHeader hashable instead. ++ return llvm::hash_combine(H.K, H.getStandardLibrary().name()); ++ case Header::Verbatim: ++ return llvm::hash_combine(H.K, llvm::StringRef(H.VerbatimSpelling)); ++ case Header::Builtin: ++ case Header::MainFile: ++ return llvm::hash_value(H.K); ++ } ++ } ++}; ++ ++template struct DefaultDenseMapInfo { ++ static T isEqual(const T &L, const T &R) { return L == R; } ++ static unsigned getHashValue(const T &V) { return hash_value(V); } ++}; ++ ++} // namespace include_cleaner ++} // namespace clang ++ ++namespace llvm { ++template <> struct DenseMapInfo { ++ using Header = clang::include_cleaner::Header; ++ static Header getTombstoneKey() { return Header("__tombstone__"); } ++ static Header getEmptyKey() { return Header("__empty__"); } ++ static bool isEqual(const Header &L, const Header &R) { return L == R; } ++ static unsigned getHashValue(const Header &V) { return hash_value(V); } ++}; ++} // namespace llvm ++ ++#endif +diff --git a/clang-tools-extra/include-cleaner/lib/Analysis.cpp b/clang-tools-extra/include-cleaner/lib/Analysis.cpp +new file mode 100644 +index 000000000000..5ac0008b07e8 +--- /dev/null ++++ b/clang-tools-extra/include-cleaner/lib/Analysis.cpp +@@ -0,0 +1,101 @@ ++//===--- Analysis.cpp - Analyze used files --------------------------------===// ++// ++// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. ++// See https://llvm.org/LICENSE.txt for license information. 
++// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception ++// ++//===----------------------------------------------------------------------===// ++ ++#include "clang-include-cleaner/Analysis.h" ++#include "AnalysisInternal.h" ++#include "clang/Lex/Preprocessor.h" ++ ++namespace clang { ++namespace include_cleaner { ++ ++AnalysisContext::AnalysisContext(const Policy &P, const Preprocessor &PP) ++ : P(P), SM(&PP.getSourceManager()), PP(&PP), C(std::make_unique()) {} ++AnalysisContext::~AnalysisContext() = default; ++ ++static bool prefer(AnalysisContext &Ctx, Hint L, Hint R) { ++ return std::make_tuple(bool(L & Hint::NameMatch), bool(L & Hint::Complete)) > ++ std::make_tuple(bool(R & Hint::NameMatch), bool(R & Hint::Complete)); ++} ++ ++// Is this hint actually useful? ++static void addNameMatchHint(const IdentifierInfo *II, ++ llvm::SmallVector> &H) { ++ if (!II) ++ return; ++ for (auto &HH : H) ++ if (HH->kind() == Header::Physical && ++ II->getName().equals_insensitive(HH->getPhysical()->getName())) ++ HH.Hint |= Hint::NameMatch; ++} ++ ++static llvm::SmallVector
++rank(AnalysisContext &Ctx, llvm::SmallVector> &Candidates) { ++ // Sort by Header, so we can deduplicate (and combine flags). ++ llvm::stable_sort(Candidates, ++ [&](const Hinted
&L, const Hinted
&R) { ++ return *L < *R; ++ }); ++ // Like unique(), but merge hints. ++ auto *Write = Candidates.begin(); ++ for (auto *Read = Candidates.begin(); Read != Candidates.end(); ++Write) { ++ *Write = *Read; ++ for (++Read; Read != Candidates.end() && Read->Value == Write->Value; ++ ++Read) ++ Write->Hint |= Read->Hint; ++ } ++ Candidates.erase(Write, Candidates.end()); ++ // Now sort by hints. ++ llvm::stable_sort(Candidates, ++ [&](const Hinted
&L, const Hinted
&R) { ++ return prefer(Ctx, L.Hint, R.Hint); ++ }); ++ // Drop hints to return clean result list. ++ llvm::SmallVector
Result; ++ for (const auto &H : Candidates) ++ Result.push_back(*H); ++ return Result; ++} ++ ++template void addHint(Hint H, T &Items) { ++ for (auto &Item : Items) ++ Item.Hint |= H; ++} ++ ++void walkUsed(AnalysisContext &Ctx, llvm::ArrayRef ASTRoots, ++ llvm::ArrayRef MacroRefs, ++ UsedSymbolVisitor Callback) { ++ for (Decl *Root : ASTRoots) { ++ walkAST(Ctx, *Root, [&](SourceLocation RefLoc, Hinted ND) { ++ auto Locations = locateDecl(Ctx, *ND); ++ llvm::SmallVector> Headers; ++ for (const auto &Loc : Locations) { ++ auto LocHeaders = includableHeader(Ctx, *Loc); ++ addHint(Loc.Hint, LocHeaders); ++ Headers.append(std::move(LocHeaders)); ++ } ++ addHint(ND.Hint, Headers); ++ addNameMatchHint(ND.Value.getDeclName().getAsIdentifierInfo(), Headers); ++ Callback(RefLoc, &ND.Value, rank(Ctx, Headers)); ++ }); ++ } ++ for (const SymbolReference &MacroRef : MacroRefs) { ++ assert(MacroRef.Target.kind() == Symbol::Macro); ++ auto Loc = locateMacro(Ctx, *MacroRef.Target.getMacro()); ++ auto Headers = includableHeader(Ctx, *Loc); ++ addHint(Loc.Hint, Headers); ++ addNameMatchHint(MacroRef.Target.getMacro()->Name, Headers); ++ Callback(MacroRef.Location, MacroRef.Target, rank(Ctx, Headers)); ++ } ++} ++ ++Symbol AnalysisContext::macro(const IdentifierInfo *II, SourceLocation Loc) { ++ return cache().macro(II, Loc); ++} ++ ++} // namespace include_cleaner ++} // namespace clang +diff --git a/clang-tools-extra/include-cleaner/lib/AnalysisInternal.h b/clang-tools-extra/include-cleaner/lib/AnalysisInternal.h +index 8b0c73fe7997..31b1ad8039d8 100644 +--- a/clang-tools-extra/include-cleaner/lib/AnalysisInternal.h ++++ b/clang-tools-extra/include-cleaner/lib/AnalysisInternal.h +@@ -21,6 +21,95 @@ + #ifndef CLANG_INCLUDE_CLEANER_ANALYSISINTERNAL_H + #define CLANG_INCLUDE_CLEANER_ANALYSISINTERNAL_H + ++#include "clang-include-cleaner/Analysis.h" ++#include "clang-include-cleaner/Types.h" ++#include "clang/Tooling/Inclusions/StandardLibrary.h" ++ ++namespace clang { ++namespace 
include_cleaner { ++ ++// FIXME: Right now we cache nothing, this is just used as an arena for macros. ++// Verify we're burning time in repeated analysis and cache partial operations. ++class Cache { ++public: ++ Symbol macro(const IdentifierInfo *Name, const SourceLocation Def) { ++ auto &DMS = DefinedMacros[Name->getName()]; ++ // Linear search. We probably only saw ~1 definition of each macro name. ++ for (const DefinedMacro &DM : DMS) ++ if (DM.Definition == Def) ++ return &DM; ++ DMS.push_back(DefinedMacro{Name, Def}); ++ return &DMS.back(); ++ } ++ ++ tooling::stdlib::Recognizer StdlibRecognizer; ++ ++private: ++ llvm::StringMap> DefinedMacros; ++}; ++ ++enum class Hint : uint16_t { ++ None = 0, ++ Complete = 1, // Provides a complete definition that is often needed. ++ // e.g. classes, templates. ++ NameMatch = 1, // Header name matches the symbol name. ++ LLVM_MARK_AS_BITMASK_ENUM(Hint::Complete) ++}; ++LLVM_ENABLE_BITMASK_ENUMS_IN_NAMESPACE(); ++ ++template struct Hinted { ++ Hinted(T Value, Hint H = Hint::None) : Value(Value), Hint(H) {} ++ T Value; ++ include_cleaner::Hint Hint; ++ ++ T &operator*() { return Value; } ++ const T &operator*() const { return Value; } ++ std::remove_reference_t *operator->() { return &Value; } ++ const std::remove_reference_t *operator->() const { return &Value; } ++}; ++ ++// Traverses a subtree of the AST, reporting declarations referenced. ++void walkAST(AnalysisContext &, Decl &Root, ++ llvm::function_ref)>); ++ ++// Finds the locations where a declaration is provided. ++llvm::SmallVector> locateDecl(AnalysisContext &, ++ const NamedDecl &); ++ ++// Finds the locations where a macro is provided. ++Hinted locateMacro(AnalysisContext &, const DefinedMacro &); ++ ++// Finds the headers that provide a location. 
++llvm::SmallVector> includableHeader(AnalysisContext &, ++ const Location &); ++ ++} // namespace include_cleaner ++} // namespace clang ++ ++#endif ++//===--- AnalysisInternal.h - Analysis building blocks ------------- C++-*-===// ++// ++// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. ++// See https://llvm.org/LICENSE.txt for license information. ++// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception ++// ++//===----------------------------------------------------------------------===// ++// ++// This file provides smaller, testable pieces of the used-header analysis. ++// We find the headers by chaining together several mappings. ++// ++// AST => AST node => Symbol => Location => Header ++// / ++// Macro expansion => ++// ++// The individual steps are declared here. ++// (AST => AST Node => Symbol is one API to avoid materializing DynTypedNodes). ++// ++//===----------------------------------------------------------------------===// ++ ++#ifndef CLANG_INCLUDE_CLEANER_ANALYSISINTERNAL_H ++#define CLANG_INCLUDE_CLEANER_ANALYSISINTERNAL_H ++ + #include "clang/Basic/SourceLocation.h" + #include "llvm/ADT/STLFunctionalExtras.h" + +diff --git a/clang-tools-extra/include-cleaner/lib/CMakeLists.txt b/clang-tools-extra/include-cleaner/lib/CMakeLists.txt +index 5e2807332f94..25d66b4f30df 100644 +--- a/clang-tools-extra/include-cleaner/lib/CMakeLists.txt ++++ b/clang-tools-extra/include-cleaner/lib/CMakeLists.txt +@@ -1,10 +1,15 @@ + set(LLVM_LINK_COMPONENTS Support) + + add_clang_library(clangIncludeCleaner ++ Analysis.cpp ++ Headers.cpp ++ Hooks.cpp ++ Locations.cpp ++ Types.cpp + WalkAST.cpp + + LINK_LIBS + clangBasic ++ clangLex + clangAST + ) +- +diff --git a/clang-tools-extra/include-cleaner/lib/Headers.cpp b/clang-tools-extra/include-cleaner/lib/Headers.cpp +new file mode 100644 +index 000000000000..f41bbe4c59c8 +--- /dev/null ++++ b/clang-tools-extra/include-cleaner/lib/Headers.cpp +@@ -0,0 +1,46 @@ ++//===--- Headers.cpp - 
Find headers that provide locations ----------------===// ++// ++// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. ++// See https://llvm.org/LICENSE.txt for license information. ++// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception ++// ++//===----------------------------------------------------------------------===// ++ ++#include "AnalysisInternal.h" ++#include "clang/Basic/SourceManager.h" ++#include "clang/Lex/Preprocessor.h" ++ ++namespace clang { ++namespace include_cleaner { ++ ++llvm::SmallVector> includableHeader(AnalysisContext &Ctx, ++ const Location &Loc) { ++ switch (Loc.kind()) { ++ case Location::Physical: { ++ FileID FID = Ctx.sourceManager().getFileID( ++ Ctx.sourceManager().getExpansionLoc(Loc.getPhysical())); ++ if (FID == Ctx.sourceManager().getMainFileID()) ++ return {Header::mainFile()}; ++ if (FID == Ctx.preprocessor().getPredefinesFileID()) ++ return {Header::builtin()}; ++ // FIXME: if the file is not self-contained, find its umbrella header: ++ // - files that lack header guards (e.g. *.def) ++ // - IWYU private pragmas (and maybe export?) ++ // - #pragma clang include_instead ++ // - headers containing "#error ... 
include" clangd isDontIncludeMeHeader ++ // - apple framework header layout ++ if (auto *FE = Ctx.sourceManager().getFileEntryForID(FID)) ++ return {{FE}}; ++ return {}; ++ } ++ case Location::StandardLibrary: ++ // FIXME: some symbols are provided by multiple stdlib headers: ++ // - for historical reasons, like size_t ++ // - some headers are guaranteed to include others () ++ // - ::printf is de facto provided by cstdio and stdio.h, etc ++ return {{Loc.getStandardLibrary().header()}}; ++ } ++} ++ ++} // namespace include_cleaner ++} // namespace clang +diff --git a/clang-tools-extra/include-cleaner/lib/Hooks.cpp b/clang-tools-extra/include-cleaner/lib/Hooks.cpp +new file mode 100644 +index 000000000000..decb83110c65 +--- /dev/null ++++ b/clang-tools-extra/include-cleaner/lib/Hooks.cpp +@@ -0,0 +1,166 @@ ++//===--- Hooks.cpp - Record events from the compiler --------------- C++-*-===// ++// ++// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. ++// See https://llvm.org/LICENSE.txt for license information. 
++// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception ++// ++//===----------------------------------------------------------------------===// ++ ++#include "clang-include-cleaner/Hooks.h" ++#include "AnalysisInternal.h" ++#include "clang-include-cleaner/Analysis.h" ++#include "clang/AST/ASTConsumer.h" ++#include "clang/AST/DeclCXX.h" ++#include "clang/AST/DeclGroup.h" ++#include "clang/AST/DeclObjC.h" ++#include "clang/Lex/MacroInfo.h" ++#include "clang/Lex/PPCallbacks.h" ++#include "clang/Lex/Preprocessor.h" ++#include "clang/Lex/Token.h" ++ ++namespace clang { ++namespace include_cleaner { ++ ++class PPRecorder : public PPCallbacks { ++public: ++ PPRecorder(AnalysisContext &Ctx, RecordedPP &Recorded) ++ : Ctx(Ctx), Recorded(Recorded) {} ++ ++ virtual void FileChanged(SourceLocation Loc, FileChangeReason Reason, ++ SrcMgr::CharacteristicKind FileType, ++ FileID PrevFID) override { ++ Active = Ctx.sourceManager().isWrittenInMainFile(Loc); ++ } ++ ++ void InclusionDirective(SourceLocation Hash, const Token &IncludeTok, ++ StringRef SpelledFilename, bool IsAngled, ++ CharSourceRange FilenameRange, Optional File, ++ StringRef SearchPath, StringRef RelativePath, ++ const Module *, SrcMgr::CharacteristicKind) override { ++ if (!Active) ++ return; ++ ++ unsigned Index = Recorded.Includes.All.size(); ++ Recorded.Includes.All.emplace_back(); ++ RecordedPP::Include &I = Recorded.Includes.All.back(); ++ const auto *const RawFile = &(*File).getFileEntry(); ++ I.Location = Hash; ++ I.Resolved = RawFile; ++ I.Line = Ctx.sourceManager().getSpellingLineNumber(Hash); ++ auto BySpellingIt = ++ Recorded.Includes.BySpelling.try_emplace(SpelledFilename).first; ++ I.Spelled = BySpellingIt->first(); ++ ++ BySpellingIt->second.push_back(Index); ++ Recorded.Includes.ByFile[RawFile].push_back(Index); ++ } ++ ++ void MacroExpands(const Token &MacroName, const MacroDefinition &MD, ++ SourceRange Range, const MacroArgs *Args) override { ++ if (!Active) ++ return; ++ 
recordMacroRef(MacroName, *MD.getMacroInfo()); ++ } ++ ++ void MacroDefined(const Token &MacroName, const MacroDirective *MD) override { ++ if (!Active) ++ return; ++ ++ const auto *MI = MD->getMacroInfo(); ++ // The tokens of a macro definition could refer to a macro. ++ // Formally this reference isn't resolved until this macro is expanded, ++ // but we want to treat it as a reference anyway. ++ for (const auto &Tok : MI->tokens()) { ++ auto *II = Tok.getIdentifierInfo(); ++ // Could this token be a reference to a macro? (Not param to this macro). ++ if (!II || !II->hadMacroDefinition() || ++ llvm::is_contained(MI->params(), II)) ++ continue; ++ if (const MacroInfo *MI = Ctx.preprocessor().getMacroInfo(II)) ++ recordMacroRef(Tok, *MI); ++ } ++ } ++ ++private: ++ void recordMacroRef(const Token &Tok, const MacroInfo &MI) { ++ if (MI.isBuiltinMacro()) ++ return; // __FILE__ is not a reference. ++ Recorded.MacroReferences.push_back(SymbolReference{ ++ Tok.getLocation(), ++ Ctx.cache().macro(Tok.getIdentifierInfo(), MI.getDefinitionLoc())}); ++ } ++ ++ bool Active = false; ++ AnalysisContext &Ctx; ++ RecordedPP &Recorded; ++}; ++ ++llvm::SmallVector ++RecordedPP::RecordedIncludes::match(Header H) const { ++ llvm::SmallVector Result; ++ switch (H.kind()) { ++ case Header::Physical: ++ for (unsigned I : ByFile.lookup(H.getPhysical())) ++ Result.push_back(&All[I]); ++ break; ++ case Header::StandardLibrary: ++ for (unsigned I : ++ BySpelling.lookup(H.getStandardLibrary().name().trim("<>"))) ++ Result.push_back(&All[I]); ++ break; ++ case Header::Verbatim: ++ for (unsigned I : BySpelling.lookup(H.getVerbatimSpelling())) ++ Result.push_back(&All[I]); ++ break; ++ case Header::Builtin: ++ case Header::MainFile: ++ break; ++ } ++ llvm::sort(Result); ++ Result.erase(std::unique(Result.begin(), Result.end()), Result.end()); ++ return Result; ++} ++ ++class ASTRecorder : public ASTConsumer { ++public: ++ ASTRecorder(AnalysisContext &Ctx, RecordedAST &Recorded) ++ : Ctx(Ctx), 
Recorded(Recorded) {} ++ ++ bool HandleTopLevelDecl(DeclGroupRef DG) override { ++ for (Decl *D : DG) { ++ if (!Ctx.sourceManager().isWrittenInMainFile( ++ Ctx.sourceManager().getExpansionLoc(D->getLocation()))) ++ continue; ++ if (const auto *T = llvm::dyn_cast(D)) ++ if (T->getTemplateSpecializationKind() == TSK_ImplicitInstantiation) ++ continue; ++ if (const auto *T = llvm::dyn_cast(D)) ++ if (T->getTemplateSpecializationKind() == TSK_ImplicitInstantiation) ++ continue; ++ if (const auto *T = llvm::dyn_cast(D)) ++ if (T->getTemplateSpecializationKind() == TSK_ImplicitInstantiation) ++ continue; ++ // ObjCMethodDecl are not actually top-level! ++ if (isa(D)) ++ continue; ++ ++ Recorded.TopLevelDecls.push_back(D); ++ } ++ return true; ++ } ++ ++private: ++ AnalysisContext &Ctx; ++ RecordedAST &Recorded; ++}; ++ ++std::unique_ptr RecordedPP::record(AnalysisContext &Ctx) { ++ return std::make_unique(Ctx, *this); ++} ++ ++std::unique_ptr RecordedAST::record(AnalysisContext &Ctx) { ++ return std::make_unique(Ctx, *this); ++} ++ ++} // namespace include_cleaner ++} // namespace clang +\ No newline at end of file +diff --git a/clang-tools-extra/include-cleaner/lib/Locations.cpp b/clang-tools-extra/include-cleaner/lib/Locations.cpp +new file mode 100644 +index 000000000000..7e23c56c1dfc +--- /dev/null ++++ b/clang-tools-extra/include-cleaner/lib/Locations.cpp +@@ -0,0 +1,60 @@ ++//===--- Locations.cpp - Find the locations that provide symbols ----------===// ++// ++// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. ++// See https://llvm.org/LICENSE.txt for license information. 
++// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception ++// ++//===----------------------------------------------------------------------===// ++ ++#include "AnalysisInternal.h" ++#include "clang-include-cleaner/Analysis.h" ++#include "clang-include-cleaner/Types.h" ++#include "clang/AST/Decl.h" ++#include "clang/AST/DeclBase.h" ++#include "clang/AST/DeclTemplate.h" ++#include "clang/Basic/SourceLocation.h" ++#include "llvm/ADT/SmallVector.h" ++ ++namespace clang { ++namespace include_cleaner { ++ ++Hint declHint(const NamedDecl &D) { ++ Hint H = Hint::None; ++ if (auto *TD = llvm::dyn_cast(&D)) ++ if (TD->isThisDeclarationADefinition()) ++ H |= Hint::Complete; ++ if (auto *CTD = llvm::dyn_cast(&D)) ++ if (CTD->isThisDeclarationADefinition()) ++ H |= Hint::Complete; ++ // A function template being defined is similar to a class being defined. ++ if (auto *FTD = llvm::dyn_cast(&D)) ++ if (FTD->isThisDeclarationADefinition()) ++ H |= Hint::Complete; ++ return H; ++} ++ ++llvm::SmallVector> locateDecl(AnalysisContext &Ctx, ++ const NamedDecl &ND) { ++ if (auto StdlibSym = Ctx.cache().StdlibRecognizer(&ND)) ++ return {{*StdlibSym}}; ++ ++ llvm::SmallVector> Result; ++ // Is accepting all the redecls too naive? ++ for (const Decl *RD : ND.redecls()) { ++ // `friend X` is not an interesting location for X unless it's acting as a ++ // forward-declaration. 
++ if (RD->getFriendObjectKind() == Decl::FOK_Declared) ++ continue; ++ SourceLocation Loc = RD->getLocation(); ++ if (Loc.isValid()) ++ Result.push_back({Loc, declHint(*cast(RD))}); ++ } ++ return Result; ++} ++ ++Hinted locateMacro(AnalysisContext &Ctx, const DefinedMacro &M) { ++ return {M.Definition}; ++} ++ ++} // namespace include_cleaner ++} // namespace clang +diff --git a/clang-tools-extra/include-cleaner/lib/Types.cpp b/clang-tools-extra/include-cleaner/lib/Types.cpp +new file mode 100644 +index 000000000000..6b79c603a70d +--- /dev/null ++++ b/clang-tools-extra/include-cleaner/lib/Types.cpp +@@ -0,0 +1,61 @@ ++//===--- Types.cpp - Data structures for used-symbol analysis -------------===// ++// ++// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. ++// See https://llvm.org/LICENSE.txt for license information. ++// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception ++// ++//===----------------------------------------------------------------------===// ++ ++#include "clang-include-cleaner/Types.h" ++#include "clang/AST/Decl.h" ++#include "clang/Basic/FileEntry.h" ++#include "clang/Basic/IdentifierTable.h" ++#include "clang/Tooling/Inclusions/StandardLibrary.h" ++ ++namespace clang { ++namespace include_cleaner { ++ ++std::string Symbol::name() const { ++ switch (kind()) { ++ case Macro: ++ return getMacro()->Name->getName().str(); ++ case Declaration: ++ return getDeclaration()->getNameAsString(); ++ } ++ llvm_unreachable("Unhandled Symbol kind"); ++} ++ ++std::string Symbol::nodeName() const { ++ if (kind() == Macro) ++ return "macro"; ++ return getDeclaration()->getDeclKindName(); ++} ++ ++std::string Location::name(const SourceManager &SM) const { ++ switch (K) { ++ case Physical: ++ return SrcLoc.printToString(SM); ++ case StandardLibrary: ++ return StdlibSym.name().str(); ++ } ++ llvm_unreachable("Unhandled Location kind"); ++} ++ ++std::string Header::name() const { ++ switch (K) { ++ case Physical: ++ return 
PhysicalFile->getName().str(); ++ case StandardLibrary: ++ return StdlibHeader.name().str(); ++ case Verbatim: ++ return VerbatimSpelling; ++ case Builtin: ++ return ""; ++ case MainFile: ++ return ""; ++ } ++ llvm_unreachable("Unhandled Header kind"); ++} ++ ++} // namespace include_cleaner ++} // namespace clang +diff --git a/clang-tools-extra/include-cleaner/lib/WalkAST.cpp b/clang-tools-extra/include-cleaner/lib/WalkAST.cpp +index b7354fe300e0..02a27977005f 100644 +--- a/clang-tools-extra/include-cleaner/lib/WalkAST.cpp ++++ b/clang-tools-extra/include-cleaner/lib/WalkAST.cpp +@@ -7,40 +7,132 @@ + //===----------------------------------------------------------------------===// + + #include "AnalysisInternal.h" ++#include "clang-include-cleaner/Analysis.h" + #include "clang/AST/RecursiveASTVisitor.h" ++#include "clang/Basic/SourceManager.h" ++#include "llvm/Support/SaveAndRestore.h" + + namespace clang { + namespace include_cleaner { + namespace { +-using DeclCallback = llvm::function_ref; + ++using DeclCallback = ++ llvm::function_ref)>; ++ ++// Traverses part of the AST, looking for references and reporting them. 
+ class ASTWalker : public RecursiveASTVisitor { +- DeclCallback Callback; ++public: ++ ASTWalker(AnalysisContext &Ctx, DeclCallback Callback) ++ : Ctx(Ctx), Callback(Callback) {} + +- void report(SourceLocation Loc, NamedDecl *ND) { +- if (!ND || Loc.isInvalid()) +- return; +- Callback(Loc, *cast(ND->getCanonicalDecl())); ++ bool VisitDeclRefExpr(DeclRefExpr *E) { ++ if (!Ctx.policy().Operators) ++ if (auto *FD = E->getDecl()->getAsFunction()) ++ if (FD->isOverloadedOperator()) ++ return true; ++ report(E->getLocation(), E->getFoundDecl()); ++ return true; + } + +-public: +- ASTWalker(DeclCallback Callback) : Callback(Callback) {} ++ bool VisitMemberExpr(MemberExpr *ME) { ++ if (Ctx.policy().Members) ++ report(ME->getMemberLoc(), ME->getFoundDecl().getDecl()); ++ return true; ++ } ++ ++ bool VisitTagType(TagType *TT) { ++ report(LocationOfType, TT->getDecl()); ++ return true; ++ } ++ ++ bool VisitFunctionDecl(FunctionDecl *FD) { ++ // Count function definitions as a reference to their declarations. ++ if (FD->isThisDeclarationADefinition() && FD->getCanonicalDecl() != FD) ++ report(FD->getLocation(), FD->getCanonicalDecl()); ++ return true; ++ } ++ ++ bool VisitCXXConstructExpr(CXXConstructExpr *E) { ++ if (!Ctx.policy().Construction) ++ return true; ++ SaveAndRestore Loc(LocationOfType, E->getLocation()); ++ LocationOfType = E->getLocation(); ++ return TraverseType(E->getType()); ++ } ++ ++ // We handle TypeLocs by saving their loc and consuming it in Visit*Type(). ++ // ++ // Handling Visit*TypeLoc() directly would be simpler, but sometimes unwritten ++ // types count as references (e.g. implicit conversions, with no TypeLoc). ++ // Stashing the location and visiting the contained type lets us handle both ++ // cases in VisitTagType() etc. 
++ bool TraverseTypeLoc(TypeLoc TL) { ++ SaveAndRestore Loc(LocationOfType, TL.getBeginLoc()); ++ // The base implementation calls: ++ // - Visit*TypeLoc() - does nothing ++ // - Visit*Type() - where we handle type references ++ // - TraverseTypeLoc for each lexically nested type. ++ return Base::TraverseTypeLoc(TL); ++ } + +- bool VisitTagTypeLoc(TagTypeLoc TTL) { +- report(TTL.getNameLoc(), TTL.getDecl()); ++ bool VisitTemplateSpecializationType(TemplateSpecializationType *TST) { ++ report(LocationOfType, ++ TST->getTemplateName().getAsTemplateDecl()); // Primary template. ++ report(LocationOfType, TST->getAsCXXRecordDecl()); // Specialization + return true; + } + +- bool VisitDeclRefExpr(DeclRefExpr *DRE) { +- report(DRE->getLocation(), DRE->getFoundDecl()); ++ bool VisitUsingType(UsingType *UT) { ++ report(LocationOfType, UT->getFoundDecl()); + return true; + } ++ ++ bool VisitTypedefType(TypedefType *TT) { ++ report(LocationOfType, TT->getDecl()); ++ return true; ++ } ++ ++ bool VisitUsingDecl(UsingDecl *UD) { ++ for (const auto *USD : UD->shadows()) ++ report(UD->getLocation(), USD->getTargetDecl()); ++ return true; ++ } ++ ++ bool VisitOverloadExpr(OverloadExpr *E) { ++ if (llvm::isa(E) && !Ctx.policy().Members) ++ return true; ++ for (auto *Candidate : E->decls()) ++ report(E->getExprLoc(), Candidate); ++ return true; ++ } ++ ++private: ++ void report(SourceLocation Loc, NamedDecl *ND) { ++ while (Loc.isMacroID()) { ++ auto DecLoc = Ctx.sourceManager().getDecomposedLoc(Loc); ++ const SrcMgr::ExpansionInfo &Expansion = ++ Ctx.sourceManager().getSLocEntry(DecLoc.first).getExpansion(); ++ if (!Expansion.isMacroArgExpansion()) ++ return; // Names within macro bodies are not considered references. ++ Loc = Expansion.getSpellingLoc().getLocWithOffset(DecLoc.second); ++ } ++ // FIXME: relevant ranking hints? 
++ if (ND) ++ Callback(Loc, *cast(ND->getCanonicalDecl())); ++ } ++ ++ using Base = RecursiveASTVisitor; ++ ++ AnalysisContext &Ctx; ++ DeclCallback Callback; ++ ++ SourceLocation LocationOfType; + }; + + } // namespace + +-void walkAST(Decl &Root, DeclCallback Callback) { +- ASTWalker(Callback).TraverseDecl(&Root); ++void walkAST(AnalysisContext &Ctx, Decl &Root, DeclCallback Callback) { ++ ASTWalker(Ctx, Callback).TraverseDecl(&Root); + } + + } // namespace include_cleaner +diff --git a/clang-tools-extra/include-cleaner/tool/CMakeLists.txt b/clang-tools-extra/include-cleaner/tool/CMakeLists.txt +new file mode 100644 +index 000000000000..f8f7c81c761b +--- /dev/null ++++ b/clang-tools-extra/include-cleaner/tool/CMakeLists.txt +@@ -0,0 +1,17 @@ ++set(LLVM_LINK_COMPONENTS support) ++ ++add_clang_tool(clang-include-cleaner ++ ClangIncludeCleaner.cpp ++ ) ++ ++clang_target_link_libraries(clang-include-cleaner ++ PRIVATE ++ clangBasic ++ clangFrontend ++ clangTooling ++ ) ++ ++target_link_libraries(clang-include-cleaner ++ PRIVATE ++ clangIncludeCleaner ++ ) +\ No newline at end of file +diff --git a/clang-tools-extra/include-cleaner/tool/ClangIncludeCleaner.cpp b/clang-tools-extra/include-cleaner/tool/ClangIncludeCleaner.cpp +new file mode 100644 +index 000000000000..aad70eabdae9 +--- /dev/null ++++ b/clang-tools-extra/include-cleaner/tool/ClangIncludeCleaner.cpp +@@ -0,0 +1,187 @@ ++//===--- ClangIncludeCleaner.cpp - Standalone used-header analysis --------===// ++// ++// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. ++// See https://llvm.org/LICENSE.txt for license information. ++// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception ++// ++//===----------------------------------------------------------------------===// ++// ++// clang-include-cleaner finds violations of include-what-you-use policy. ++// ++// It scans a file, finding referenced symbols and headers providing them. 
++// - if a reference is satisfied only by indirect #include dependencies, ++// this violates the policy and direct #includes are suggested. ++// - if some #include directive doesn't satisfy any references, this violates ++// the policy (don't include what you don't use!) and removal is suggested. ++// ++// With the -satisfied flag, it will also explain things that were OK: ++// satisfied references and used #includes. ++// ++// This tool doesn't fix broken code where missing #includes prevent parsing, ++// try clang-include-fixer for this instead. ++// ++//===----------------------------------------------------------------------===// ++ ++#include "clang-include-cleaner/Analysis.h" ++#include "clang-include-cleaner/Hooks.h" ++#include "clang/Basic/Diagnostic.h" ++#include "clang/Frontend/CompilerInstance.h" ++#include "clang/Frontend/FrontendAction.h" ++#include "clang/Tooling/CommonOptionsParser.h" ++#include "clang/Tooling/Tooling.h" ++#include "llvm/Support/CommandLine.h" ++#include "llvm/Support/InitLLVM.h" ++ ++llvm::cl::OptionCategory OptionsCat{"clang-include-cleaner"}; ++llvm::cl::opt ShowSatisfied{ ++ "satisfied", ++ llvm::cl::cat(OptionsCat), ++ llvm::cl::desc( ++ "Show references whose header is included, and used includes"), ++ llvm::cl::init(false), ++}; ++llvm::cl::opt Recover{ ++ "recover", ++ llvm::cl::cat(OptionsCat), ++ llvm::cl::desc("Suppress further errors for the same header"), ++ llvm::cl::init(true), ++}; ++ ++namespace clang { ++namespace include_cleaner { ++namespace { ++ ++class Action : public clang::ASTFrontendAction { ++public: ++ bool BeginSourceFileAction(CompilerInstance &CI) override { ++ Diag = &CI.getDiagnostics(); ++ ID.emplace(Diag); ++ Ctx.emplace(Policy{}, CI.getPreprocessor()); ++ CI.getPreprocessor().addPPCallbacks(PP.record(*Ctx)); ++ return true; ++ } ++ ++ void EndSourceFile() override { ++ llvm::DenseSet
Recovered; ++ llvm::DenseMap Used; ++ walkUsed(*Ctx, AST.TopLevelDecls, PP.MacroReferences, ++ [&](SourceLocation Loc, Symbol Sym, ArrayRef
Headers) { ++ diagnoseReference(Loc, Sym, Headers, Recovered, Used); ++ }); ++ diagnoseIncludes(PP.Includes.all(), Used); ++ Ctx.reset(); ++ ++ ASTFrontendAction::EndSourceFile(); ++ } ++ ++ virtual std::unique_ptr ++ CreateASTConsumer(CompilerInstance &CI, StringRef InFile) override { ++ return AST.record(*Ctx); ++ } ++ ++private: ++ // The diagnostics that we issue. ++ struct CustomDiagnosticIDs { ++ // References ++ unsigned Satisfied; ++ unsigned Unsatisfied; ++ unsigned NoHeader; ++ unsigned NoteHeader; ++ // #includes ++ unsigned Used; ++ unsigned Unused; ++ ++ CustomDiagnosticIDs(DiagnosticsEngine *D) { ++ auto SatisfiedLevel = ShowSatisfied ? DiagnosticsEngine::Remark ++ : DiagnosticsEngine::Ignored; ++ auto Error = DiagnosticsEngine::Error; ++ auto Note = DiagnosticsEngine::Note; ++ auto Warn = DiagnosticsEngine::Warning; ++ ++ Satisfied = D->getCustomDiagID(SatisfiedLevel, "%0 '%1' provided by %2"); ++ Unsatisfied = D->getCustomDiagID(Error, "no header included for %0 '%1'"); ++ NoHeader = D->getCustomDiagID(Warn, "unknown header provides %0 '%1'"); ++ NoteHeader = D->getCustomDiagID(Note, "provided by %0"); ++ Used = D->getCustomDiagID(SatisfiedLevel, "include provides %0 '%1'"); ++ Unused = D->getCustomDiagID(Error, "include is unused"); ++ } ++ }; ++ ++ void ++ diagnoseReference(SourceLocation Loc, Symbol Sym, ArrayRef
Headers, ++ llvm::DenseSet
&Recovered, ++ llvm::DenseMap &Used) { ++ bool Diagnosed = false; ++ for (const auto &H : Headers) { ++ if (H.kind() == Header::Builtin || H.kind() == Header::MainFile) { ++ if (!Diagnosed) { ++ Diag->Report(Loc, ID->Satisfied) ++ << Sym.nodeName() << Sym.name() << H.name(); ++ Diagnosed = true; ++ } ++ } ++ for (const auto *I : PP.Includes.match(H)) { ++ Used.try_emplace(I, Sym); ++ if (!Diagnosed) { ++ Diag->Report(Loc, ID->Satisfied) ++ << Sym.nodeName() << Sym.name() << I->Spelled; ++ Diagnosed = true; ++ } ++ } ++ } ++ if (Diagnosed) ++ return; ++ for (const auto &H : Headers) { ++ if (Recovered.contains(H)) { ++ Diag->Report(Loc, ID->Satisfied) ++ << Sym.nodeName() << Sym.name() << H.name(); ++ return; ++ } ++ } ++ Diag->Report(Loc, Headers.empty() ? ID->NoHeader : ID->Unsatisfied) ++ << Sym.nodeName() << Sym.name(); ++ for (const auto &H : Headers) { ++ Recovered.insert(H); ++ Diag->Report(ID->NoteHeader) << H.name(); ++ } ++ } ++ ++ void diagnoseIncludes( ++ ArrayRef Includes, ++ const llvm::DenseMap &Used) { ++ for (const auto &I : Includes) { ++ auto It = Used.find(&I); ++ if (It == Used.end()) ++ Diag->Report(I.Location, ID->Unused); ++ else ++ Diag->Report(I.Location, ID->Used) ++ << It->second.nodeName() << It->second.name(); ++ } ++ } ++ ++ llvm::Optional Ctx; ++ RecordedPP PP; ++ RecordedAST AST; ++ DiagnosticsEngine *Diag; ++ llvm::Optional ID; ++}; ++ ++} // namespace ++} // namespace include_cleaner ++} // namespace clang ++ ++int main(int Argc, const char **Argv) { ++ llvm::InitLLVM X(Argc, Argv); ++ auto OptionsParser = ++ clang::tooling::CommonOptionsParser::create(Argc, Argv, OptionsCat); ++ if (!OptionsParser) { ++ llvm::errs() << toString(OptionsParser.takeError()); ++ return 1; ++ } ++ ++ return clang::tooling::ClangTool(OptionsParser->getCompilations(), ++ OptionsParser->getSourcePathList()) ++ .run(clang::tooling::newFrontendActionFactory< ++ clang::include_cleaner::Action>() ++ .get()); ++} +diff --git 
a/clang/include/clang/Tooling/Inclusions/StandardLibrary.h b/clang/include/clang/Tooling/Inclusions/StandardLibrary.h +index c6ce2780dae6..e94a7fb9304a 100644 +--- a/clang/include/clang/Tooling/Inclusions/StandardLibrary.h ++++ b/clang/include/clang/Tooling/Inclusions/StandardLibrary.h +@@ -49,6 +49,9 @@ private: + friend bool operator==(const Header &L, const Header &R) { + return L.ID == R.ID; + } ++ friend bool operator<(const Header &L, const Header &R) { ++ return L.ID < R.ID; ++ } + }; + + // A top-level standard library symbol, such as std::vector From 9fcf807281b07797a0aa00dd171b98f9aafba6d1 Mon Sep 17 00:00:00 2001 From: Mark Banner Date: Thu, 24 Nov 2022 16:17:46 +0000 Subject: [PATCH 04/37] Bug 1801368 - Drop unused mutator argument on StructuredLogger. r=ahal Differential Revision: https://phabricator.services.mozilla.com/D162489 --- testing/modules/StructuredLog.jsm | 11 +---------- testing/modules/tests/xpcshell/test_structuredlog.js | 8 +------- 2 files changed, 2 insertions(+), 17 deletions(-) diff --git a/testing/modules/StructuredLog.jsm b/testing/modules/StructuredLog.jsm index 7aadc4574d68..17751a680242 100644 --- a/testing/modules/StructuredLog.jsm +++ b/testing/modules/StructuredLog.jsm @@ -15,15 +15,10 @@ var EXPORTED_SYMBOLS = ["StructuredLogger", "StructuredFormatter"]; * @param dumpFun * An underlying function to be used to log raw messages. This function * will receive the complete serialized json string to log. - * @param mutators - * An array of functions used to add global context to log messages. - * These will each be called with the complete object to log as an - * argument. 
*/ -var StructuredLogger = function(name, dumpFun = dump, mutators = []) { +var StructuredLogger = function(name, dumpFun = dump) { this.name = name; this._dumpFun = dumpFun; - this._mutatorFuns = mutators; }; /** @@ -215,10 +210,6 @@ StructuredLogger.prototype = { allData[field] = data[field]; } - for (var fun of this._mutatorFuns) { - fun(allData); - } - this._dumpFun(allData); }, diff --git a/testing/modules/tests/xpcshell/test_structuredlog.js b/testing/modules/tests/xpcshell/test_structuredlog.js index 79f4162ccbf1..f615d722080a 100644 --- a/testing/modules/tests/xpcshell/test_structuredlog.js +++ b/testing/modules/tests/xpcshell/test_structuredlog.js @@ -21,15 +21,9 @@ function run_test() { } // The logger should always set the source to the logger name. equal(lastMsg.source, "test_log"); - // The source_file field is always set by the mutator function. - equal(lastMsg.source_file, "test_structuredlog.js"); }; - let addFileName = function(data) { - data.source_file = "test_structuredlog.js"; - }; - - let logger = new StructuredLogger("test_log", appendBuffer, [addFileName]); + let logger = new StructuredLogger("test_log", appendBuffer); // Test unstructured logging logger.info("Test message"); From 55d53c8bb7b0b7bf280662ac6e233fa616dd034f Mon Sep 17 00:00:00 2001 From: Mark Banner Date: Thu, 24 Nov 2022 16:17:47 +0000 Subject: [PATCH 05/37] Bug 1801368 - Change StructuredLogger and StructuredFormatter to use class definitions, improve private properties. 
r=ahal Depends on D162489 Differential Revision: https://phabricator.services.mozilla.com/D162490 --- layout/tools/reftest/reftest.jsm | 2 +- .../mochitest/tests/SimpleTest/TestRunner.js | 4 +- testing/modules/StructuredLog.jsm | 113 +++++++++--------- 3 files changed, 60 insertions(+), 59 deletions(-) diff --git a/layout/tools/reftest/reftest.jsm b/layout/tools/reftest/reftest.jsm index a7aa1be926b9..9727a1501e43 100644 --- a/layout/tools/reftest/reftest.jsm +++ b/layout/tools/reftest/reftest.jsm @@ -915,7 +915,7 @@ function DoneTests() g.suiteStarted = false logger.suiteEnd({'results': g.testResults}); } else { - logger._logData('results', {results: g.testResults}); + logger.logData('results', {results: g.testResults}); } logger.info("Slowest test took " + g.slowestTestTime + "ms (" + g.slowestTestURL + ")"); logger.info("Total canvas count = " + g.recycledCanvases.length); diff --git a/testing/mochitest/tests/SimpleTest/TestRunner.js b/testing/mochitest/tests/SimpleTest/TestRunner.js index dbbd49d46f38..400698444a74 100644 --- a/testing/mochitest/tests/SimpleTest/TestRunner.js +++ b/testing/mochitest/tests/SimpleTest/TestRunner.js @@ -340,10 +340,10 @@ TestRunner.structuredLogger = new StructuredLogger( TestRunner._dumpMessage ); TestRunner.structuredLogger.deactivateBuffering = function() { - TestRunner.structuredLogger._logData("buffering_off"); + TestRunner.structuredLogger.logData("buffering_off"); }; TestRunner.structuredLogger.activateBuffering = function() { - TestRunner.structuredLogger._logData("buffering_on"); + TestRunner.structuredLogger.logData("buffering_on"); }; TestRunner.log = function(msg) { diff --git a/testing/modules/StructuredLog.jsm b/testing/modules/StructuredLog.jsm index 17751a680242..e92bb0e3a675 100644 --- a/testing/modules/StructuredLog.jsm +++ b/testing/modules/StructuredLog.jsm @@ -16,19 +16,19 @@ var EXPORTED_SYMBOLS = ["StructuredLogger", "StructuredFormatter"]; * An underlying function to be used to log raw messages. 
This function * will receive the complete serialized json string to log. */ -var StructuredLogger = function(name, dumpFun = dump) { - this.name = name; - this._dumpFun = dumpFun; -}; +class StructuredLogger { + name = null; + #dumpFun = null; + + constructor(name, dumpFun = dump) { + this.name = name; + this.#dumpFun = dumpFun; + } -/** - * Log functions producing messages in the format specified by mozlog - */ -StructuredLogger.prototype = { testStart(test) { - var data = { test: this._testId(test) }; - this._logData("test_start", data); - }, + var data = { test: this.#testId(test) }; + this.logData("test_start", data); + } testStatus( test, @@ -45,7 +45,7 @@ StructuredLogger.prototype = { } var data = { - test: this._testId(test), + test: this.#testId(test), subtest, status, }; @@ -63,8 +63,8 @@ StructuredLogger.prototype = { data.extra = extra; } - this._logData("test_status", data); - }, + this.logData("test_status", data); + } testEnd( test, @@ -74,7 +74,7 @@ StructuredLogger.prototype = { stack = null, extra = null ) { - var data = { test: this._testId(test), status }; + var data = { test: this.#testId(test), status }; if (expected != status && status != "SKIP") { data.expected = expected; @@ -89,19 +89,19 @@ StructuredLogger.prototype = { data.extra = extra; } - this._logData("test_end", data); - }, + this.logData("test_end", data); + } assertionCount(test, count, minExpected = 0, maxExpected = 0) { var data = { - test: this._testId(test), + test: this.#testId(test), min_expected: minExpected, max_expected: maxExpected, count, }; - this._logData("assertion_count", data); - }, + this.logData("assertion_count", data); + } suiteStart( ids, @@ -112,7 +112,7 @@ StructuredLogger.prototype = { extra = null ) { Object.keys(ids).map(function(manifest) { - ids[manifest] = ids[manifest].map(x => this._testId(x)); + ids[manifest] = ids[manifest].map(x => this.#testId(x)); }, this); var data = { tests: ids }; @@ -136,8 +136,8 @@ StructuredLogger.prototype = { data.extra 
= extra; } - this._logData("suite_start", data); - }, + this.logData("suite_start", data); + } suiteEnd(extra = null) { var data = {}; @@ -146,8 +146,8 @@ StructuredLogger.prototype = { data.extra = extra; } - this._logData("suite_end", data); - }, + this.logData("suite_end", data); + } /** * Unstructured logging functions. The "extra" parameter can always by used to @@ -167,37 +167,37 @@ StructuredLogger.prototype = { } } - this._logData("log", data); - }, + this.logData("log", data); + } debug(message, extra = null) { this.log("DEBUG", message, extra); - }, + } info(message, extra = null) { this.log("INFO", message, extra); - }, + } warning(message, extra = null) { this.log("WARNING", message, extra); - }, + } error(message, extra = null) { this.log("ERROR", message, extra); - }, + } critical(message, extra = null) { this.log("CRITICAL", message, extra); - }, + } processOutput(thread, message) { - this._logData("process_output", { + this.logData("process_output", { message, thread, }); - }, + } - _logData(action, data = {}) { + logData(action, data = {}) { var allData = { action, time: Date.now(), @@ -210,39 +210,40 @@ StructuredLogger.prototype = { allData[field] = data[field]; } - this._dumpFun(allData); - }, + this.#dumpFun(allData); + } - _testId(test) { + #testId(test) { if (Array.isArray(test)) { return test.join(" "); } return test; - }, -}; + } +} /** * StructuredFormatter: Formatter class turning structured messages * into human-readable messages. */ -var StructuredFormatter = function() { - this.testStartTimes = {}; -}; +class StructuredFormatter { + // The time at which the whole suite of tests started. 
+ #suiteStartTime = null; + + #testStartTimes = new Map(); -StructuredFormatter.prototype = { log(message) { return message.message; - }, + } suite_start(message) { - this.suiteStartTime = message.time; + this.#suiteStartTime = message.time; return "SUITE-START | Running " + message.tests.length + " tests"; - }, + } test_start(message) { - this.testStartTimes[message.test] = new Date().getTime(); + this.#testStartTimes.set(message.test, new Date().getTime()); return "TEST-START | " + message.test; - }, + } test_status(message) { var statusInfo = @@ -261,11 +262,11 @@ StructuredFormatter.prototype = { ); } return "TEST-" + message.status + " | " + statusInfo; - }, + } test_end(message) { - var startTime = this.testStartTimes[message.test]; - delete this.testStartTimes[message.test]; + var startTime = this.#testStartTimes.get(message.test); + this.#testStartTimes.delete(message.test); var statusInfo = message.test + (message.message ? " | " + String(message.message) : ""); var result; @@ -282,9 +283,9 @@ StructuredFormatter.prototype = { } result = result + " | took " + message.time - startTime + "ms"; return result; - }, + } suite_end(message) { - return "SUITE-END | took " + message.time - this.suiteStartTime + "ms"; - }, -}; + return "SUITE-END | took " + message.time - this.#suiteStartTime + "ms"; + } +} From 4a8185fb14bed9db6506a9d9d76961438e43bc99 Mon Sep 17 00:00:00 2001 From: Mark Banner Date: Thu, 24 Nov 2022 16:17:47 +0000 Subject: [PATCH 06/37] Bug 1801368 - Migrate StructuredLog.jsm to an ES module. 
r=ahal Depends on D162490 Differential Revision: https://phabricator.services.mozilla.com/D162491 --- js/xpconnect/tests/unit/test_xpcomutils.js | 2 +- layout/tools/reftest/jar.mn | 2 +- layout/tools/reftest/reftest.jsm | 4 +-- testing/mochitest/moz.build | 2 +- testing/mochitest/server.js | 4 --- .../mochitest/tests/SimpleTest/TestRunner.js | 12 +++++++-- ...tructuredLog.jsm => StructuredLog.sys.mjs} | 25 +++++++++++-------- testing/modules/moz.build | 4 +-- .../tests/xpcshell/test_structuredlog.js | 4 +-- testing/xpcshell/head.js | 4 +-- .../test_ext_scripting_startupCache.js | 4 +-- 11 files changed, 38 insertions(+), 29 deletions(-) rename testing/modules/{StructuredLog.jsm => StructuredLog.sys.mjs} (92%) diff --git a/js/xpconnect/tests/unit/test_xpcomutils.js b/js/xpconnect/tests/unit/test_xpcomutils.js index ff11b60de70e..6aa73cb67025 100644 --- a/js/xpconnect/tests/unit/test_xpcomutils.js +++ b/js/xpconnect/tests/unit/test_xpcomutils.js @@ -156,7 +156,7 @@ add_test(function test_categoryRegistration() const XULAPPINFO_CID = Components.ID("{fc937916-656b-4fb3-a395-8c63569e27a8}"); // Create a fake app entry for our category registration apps filter. 
- let { newAppInfo } = ChromeUtils.import("resource://testing-common/AppInfo.jsm"); + let { newAppInfo } = ChromeUtils.importESModule("resource://testing-common/AppInfo.sys.mjs"); let XULAppInfo = newAppInfo({ name: "catRegTest", ID: "{adb42a9a-0d19-4849-bf4d-627614ca19be}", diff --git a/layout/tools/reftest/jar.mn b/layout/tools/reftest/jar.mn index a2ed6f8b226d..2d6ea891e2a5 100644 --- a/layout/tools/reftest/jar.mn +++ b/layout/tools/reftest/jar.mn @@ -53,7 +53,7 @@ reftest.jar: res/ReftestFissionChild.jsm (ReftestFissionChild.jsm) res/AsyncSpellCheckTestHelper.jsm (../../../editor/AsyncSpellCheckTestHelper.jsm) res/httpd.jsm (../../../netwerk/test/httpserver/httpd.js) - res/StructuredLog.jsm (../../../testing/modules/StructuredLog.jsm) + res/StructuredLog.sys.mjs (../../../testing/modules/StructuredLog.sys.mjs) res/PerTestCoverageUtils.jsm (../../../tools/code-coverage/PerTestCoverageUtils.jsm) res/input.css (../../../editor/reftests/xul/input.css) res/progress.css (../../../layout/reftests/forms/progress/style.css) diff --git a/layout/tools/reftest/reftest.jsm b/layout/tools/reftest/reftest.jsm index 9727a1501e43..90c49a1bacc9 100644 --- a/layout/tools/reftest/reftest.jsm +++ b/layout/tools/reftest/reftest.jsm @@ -48,8 +48,8 @@ const { HttpServer } = ChromeUtils.import("resource://reftest/httpd.jsm"); const { ReadTopManifest, CreateUrls } = ChromeUtils.import( "resource://reftest/manifest.jsm" ); -const { StructuredLogger } = ChromeUtils.import( - "resource://reftest/StructuredLog.jsm" +const { StructuredLogger } = ChromeUtils.importESModule( + "resource://reftest/StructuredLog.sys.mjs" ); const { PerTestCoverageUtils } = ChromeUtils.import( "resource://reftest/PerTestCoverageUtils.jsm" diff --git a/testing/mochitest/moz.build b/testing/mochitest/moz.build index 3251059fa0e8..edae41386eb3 100644 --- a/testing/mochitest/moz.build +++ b/testing/mochitest/moz.build @@ -45,7 +45,7 @@ FINAL_TARGET_FILES.content.static += [ FINAL_TARGET_FILES.content.tests.SimpleTest 
+= [ "../../docshell/test/chrome/docshell_helpers.js", - "../modules/StructuredLog.jsm", + "../modules/StructuredLog.sys.mjs", "tests/SimpleTest/AccessibilityUtils.js", "tests/SimpleTest/EventUtils.js", "tests/SimpleTest/ExtensionTestUtils.js", diff --git a/testing/mochitest/server.js b/testing/mochitest/server.js index 7bcffb7e133c..e8e514d34497 100644 --- a/testing/mochitest/server.js +++ b/testing/mochitest/server.js @@ -766,10 +766,6 @@ function testListing(metadata, response) { type: "text/css", href: "/static/harness.css", }), - SCRIPT({ - type: "text/javascript", - src: "/tests/SimpleTest/StructuredLog.jsm", - }), SCRIPT({ type: "text/javascript", src: "/tests/SimpleTest/LogController.js", diff --git a/testing/mochitest/tests/SimpleTest/TestRunner.js b/testing/mochitest/tests/SimpleTest/TestRunner.js index 400698444a74..2c67b4673b7a 100644 --- a/testing/mochitest/tests/SimpleTest/TestRunner.js +++ b/testing/mochitest/tests/SimpleTest/TestRunner.js @@ -7,7 +7,6 @@ */ // This file expects the following files to be loaded. -/* import-globals-from ../../../modules/StructuredLog.jsm */ /* import-globals-from LogController.js */ /* import-globals-from MemoryStats.js */ /* import-globals-from MozillaLogger.js */ @@ -16,6 +15,13 @@ "use strict"; +const { + StructuredLogger, + StructuredFormatter, +} = SpecialPowers.ChromeUtils.importESModule( + "resource://testing-common/StructuredLog.sys.mjs" +); + function getElement(id) { return typeof id == "string" ? 
document.getElementById(id) : id; } @@ -337,7 +343,9 @@ TestRunner._dumpMessage = function(message) { // From https://searchfox.org/mozilla-central/source/testing/modules/StructuredLog.jsm TestRunner.structuredLogger = new StructuredLogger( "mochitest", - TestRunner._dumpMessage + TestRunner._dumpMessage, + [], + TestRunner ); TestRunner.structuredLogger.deactivateBuffering = function() { TestRunner.structuredLogger.logData("buffering_off"); diff --git a/testing/modules/StructuredLog.jsm b/testing/modules/StructuredLog.sys.mjs similarity index 92% rename from testing/modules/StructuredLog.jsm rename to testing/modules/StructuredLog.sys.mjs index e92bb0e3a675..a4524841a380 100644 --- a/testing/modules/StructuredLog.jsm +++ b/testing/modules/StructuredLog.sys.mjs @@ -2,27 +2,28 @@ * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ -"use strict"; - -var EXPORTED_SYMBOLS = ["StructuredLogger", "StructuredFormatter"]; - /** * TestLogger: Logger class generating messages compliant with the * structured logging protocol for tests exposed by mozlog * - * @param name + * @param {string} name * The name of the logger to instantiate. - * @param dumpFun + * @param {function} [dumpFun] * An underlying function to be used to log raw messages. This function * will receive the complete serialized json string to log. + * @param {object} [scope] + * The scope that the dumpFun is loaded in, so that messages are cloned + * into that scope before passing them. 
*/ -class StructuredLogger { +export class StructuredLogger { name = null; #dumpFun = null; + #dumpScope = null; - constructor(name, dumpFun = dump) { + constructor(name, dumpFun = dump, scope = null) { this.name = name; this.#dumpFun = dumpFun; + this.#dumpScope = scope; } testStart(test) { @@ -210,7 +211,11 @@ class StructuredLogger { allData[field] = data[field]; } - this.#dumpFun(allData); + if (this.#dumpScope) { + this.#dumpFun(Cu.cloneInto(allData, this.#dumpScope)); + } else { + this.#dumpFun(allData); + } } #testId(test) { @@ -225,7 +230,7 @@ class StructuredLogger { * StructuredFormatter: Formatter class turning structured messages * into human-readable messages. */ -class StructuredFormatter { +export class StructuredFormatter { // The time at which the whole suite of tests started. #suiteStartTime = null; diff --git a/testing/modules/moz.build b/testing/modules/moz.build index 9326347a3fe5..d201d9dd524c 100644 --- a/testing/modules/moz.build +++ b/testing/modules/moz.build @@ -16,7 +16,7 @@ TESTING_JS_MODULES += [ "MockRegistrar.sys.mjs", "sinon-7.2.7.js", "Sinon.jsm", - "StructuredLog.jsm", + "StructuredLog.sys.mjs", "TestUtils.sys.mjs", "XPCShellContentUtils.sys.mjs", ] @@ -27,7 +27,7 @@ if CONFIG["MOZ_WIDGET_TOOLKIT"] == "windows": ] -TEST_HARNESS_FILES.testing.mochitest.tests.SimpleTest += ["StructuredLog.jsm"] +TEST_HARNESS_FILES.testing.mochitest.tests.SimpleTest += ["StructuredLog.sys.mjs"] with Files("**"): BUG_COMPONENT = ("Testing", "General") diff --git a/testing/modules/tests/xpcshell/test_structuredlog.js b/testing/modules/tests/xpcshell/test_structuredlog.js index f615d722080a..3802fcc0980e 100644 --- a/testing/modules/tests/xpcshell/test_structuredlog.js +++ b/testing/modules/tests/xpcshell/test_structuredlog.js @@ -2,8 +2,8 @@ http://creativecommons.org/publicdomain/zero/1.0/ */ function run_test() { - const { StructuredLogger } = ChromeUtils.import( - "resource://testing-common/StructuredLog.jsm" + const { StructuredLogger } = 
ChromeUtils.importESModule( + "resource://testing-common/StructuredLog.sys.mjs" ); let testBuffer = []; diff --git a/testing/xpcshell/head.js b/testing/xpcshell/head.js index b458d937876a..646c036badf1 100644 --- a/testing/xpcshell/head.js +++ b/testing/xpcshell/head.js @@ -90,8 +90,8 @@ var _dumpLog = function(raw_msg) { dump("\n" + JSON.stringify(raw_msg) + "\n"); }; -var { StructuredLogger: _LoggerClass } = ChromeUtils.import( - "resource://testing-common/StructuredLog.jsm" +var { StructuredLogger: _LoggerClass } = ChromeUtils.importESModule( + "resource://testing-common/StructuredLog.sys.mjs" ); var _testLogger = new _LoggerClass("xpcshell/head.js", _dumpLog, [_add_params]); diff --git a/toolkit/components/extensions/test/xpcshell/test_ext_scripting_startupCache.js b/toolkit/components/extensions/test/xpcshell/test_ext_scripting_startupCache.js index acea225a31b7..4b35e9153272 100644 --- a/toolkit/components/extensions/test/xpcshell/test_ext_scripting_startupCache.js +++ b/toolkit/components/extensions/test/xpcshell/test_ext_scripting_startupCache.js @@ -15,8 +15,8 @@ AddonTestUtils.createAppInfo( const { ExtensionScriptingStore } = ChromeUtils.import( "resource://gre/modules/ExtensionScriptingStore.jsm" ); -const { TestUtils } = ChromeUtils.import( - "resource://testing-common/TestUtils.jsm" +const { TestUtils } = ChromeUtils.importESModule( + "resource://testing-common/TestUtils.sys.mjs" ); const { sinon } = ChromeUtils.import("resource://testing-common/Sinon.jsm"); From e59ef8d6fd808f6e37c8577ad775960ac641defb Mon Sep 17 00:00:00 2001 From: Mark Banner Date: Thu, 24 Nov 2022 16:18:14 +0000 Subject: [PATCH 07/37] Bug 1801283 - Fix eslint-build tester running from cron, and make sure it doesn't run at other times. 
r=releng-reviewers,ahal Differential Revision: https://phabricator.services.mozilla.com/D162385 --- taskcluster/ci/source-test/mozlint.yml | 4 ++++ taskcluster/gecko_taskgraph/target_tasks.py | 25 +++++++++------------ toolkit/mozapps/installer/packager.mk | 2 +- 3 files changed, 15 insertions(+), 16 deletions(-) diff --git a/taskcluster/ci/source-test/mozlint.yml b/taskcluster/ci/source-test/mozlint.yml index d6798ba35e8d..f994dd88ef69 100644 --- a/taskcluster/ci/source-test/mozlint.yml +++ b/taskcluster/ci/source-test/mozlint.yml @@ -120,6 +120,10 @@ eslint: eslint-build: description: ESLint checks with build data + always-target: false + run-on-projects: [] + attributes: + code-review: false treeherder: symbol: js(ES-B) tier: 3 diff --git a/taskcluster/gecko_taskgraph/target_tasks.py b/taskcluster/gecko_taskgraph/target_tasks.py index 01a3cb61b6e5..d35ddcc5997b 100644 --- a/taskcluster/gecko_taskgraph/target_tasks.py +++ b/taskcluster/gecko_taskgraph/target_tasks.py @@ -4,24 +4,22 @@ import copy -from datetime import datetime, timedelta import os import re +from datetime import datetime, timedelta +from gecko_taskgraph import GECKO, try_option_syntax +from gecko_taskgraph.util.attributes import ( + match_run_on_hg_branches, + match_run_on_projects, +) +from gecko_taskgraph.util.hg import find_hg_revision_push_info, get_hg_commit_message +from gecko_taskgraph.util.platforms import platform_family from redo import retry from taskgraph.parameters import Parameters from taskgraph.target_tasks import _target_task, get_method from taskgraph.util.taskcluster import find_task_id -from gecko_taskgraph import try_option_syntax, GECKO -from gecko_taskgraph.util.attributes import ( - match_run_on_projects, - match_run_on_hg_branches, -) -from gecko_taskgraph.util.platforms import platform_family -from gecko_taskgraph.util.hg import find_hg_revision_push_info, get_hg_commit_message - - # Some tasks show up in the target task set, but are possibly special cases, # uncommon 
tasks, or tasks running against limited hardware set that they # should only be selectable with --full. @@ -1256,10 +1254,7 @@ def target_tasks_backfill_all_browsertime(full_task_graph, parameters, graph_con and landed the day before the cron is running. Trigger backfill-all-browsertime action task on each of them. """ - from gecko_taskgraph.actions.util import ( - get_decision_task_id, - get_pushes, - ) + from gecko_taskgraph.actions.util import get_decision_task_id, get_pushes def date_is_yesterday(date): yesterday = datetime.today() - timedelta(days=1) @@ -1398,5 +1393,5 @@ def target_tasks_eslint_build(full_task_graph, parameters, graph_config): for name, task in full_task_graph.tasks.items(): if task.kind != "source-test": continue - if name == "eslint-build": + if "eslint-build" in name: yield name diff --git a/toolkit/mozapps/installer/packager.mk b/toolkit/mozapps/installer/packager.mk index 0010a2692b6a..001512b1bdb0 100644 --- a/toolkit/mozapps/installer/packager.mk +++ b/toolkit/mozapps/installer/packager.mk @@ -114,7 +114,7 @@ ifndef MOZ_ARTIFACT_BUILDS else @echo 'Packaging existing XPT artifacts from artifact build into archive ($(XPT_ARTIFACTS_ARCHIVE_BASENAME).zip)' $(call py_action,zip,-C $(ABS_DIST)/xpt_artifacts '$(ABS_DIST)/$(PKG_PATH)$(XPT_ARTIFACTS_ARCHIVE_BASENAME).zip' '*.xpt') -endif # COMPILE_ENVIRONMENT +endif # MOZ_ARTIFACT_BUILDS prepare-package: stage-package From c71e992186feb69d1a1ac5e0f3e80100676eb75a Mon Sep 17 00:00:00 2001 From: Cristian Tuns Date: Thu, 24 Nov 2022 11:50:23 -0500 Subject: [PATCH 08/37] Backed out changeset 516ad0cab188 (bug 1801512) for causing reftest failures on 411334-1.xml CLOSED TREE --- gfx/thebes/gfxDWriteCommon.h | 3 --- gfx/thebes/gfxDWriteFonts.cpp | 13 ------------- 2 files changed, 16 deletions(-) diff --git a/gfx/thebes/gfxDWriteCommon.h b/gfx/thebes/gfxDWriteCommon.h index f0a2fc1ec6a2..8daf02fd1458 100644 --- a/gfx/thebes/gfxDWriteCommon.h +++ b/gfx/thebes/gfxDWriteCommon.h @@ -35,9 +35,6 @@ 
#define ENHANCED_CONTRAST_VALUE_NAME L"EnhancedContrastLevel" -#define CONTROL_PANEL_REGISTRY_KEY HKEY_CURRENT_USER, L"Control Panel\\Desktop" -#define FONT_SMOOTHING_GAMMA_VALUE L"FontSmoothingGamma" - // FIXME: This shouldn't look at constants probably. static inline DWRITE_FONT_STRETCH DWriteFontStretchFromStretch( mozilla::FontStretch aStretch) { diff --git a/gfx/thebes/gfxDWriteFonts.cpp b/gfx/thebes/gfxDWriteFonts.cpp index 9600383dbae4..4d9cabbdc626 100644 --- a/gfx/thebes/gfxDWriteFonts.cpp +++ b/gfx/thebes/gfxDWriteFonts.cpp @@ -211,19 +211,6 @@ void gfxDWriteFont::UpdateClearTypeVars() { gamma = defaultRenderingParams->GetGamma(); pixelGeometry = defaultRenderingParams->GetPixelGeometry(); renderingMode = defaultRenderingParams->GetRenderingMode(); - - if (RegOpenKeyExW(CONTROL_PANEL_REGISTRY_KEY, 0, KEY_READ, &hKey) == - ERROR_SUCCESS) { - DWORD type; - DWORD value; - DWORD valueSize = sizeof(value); - if (RegQueryValueExW(hKey, FONT_SMOOTHING_GAMMA_VALUE, nullptr, &type, - (LPBYTE)&value, &valueSize) == ERROR_SUCCESS && - type == REG_DWORD) { - gamma = value / 1000.0f; - } - RegCloseKey(hKey); - } } else { gfxWarning() << "Failed to create default rendering params"; } From a78ecb23ebd67742004c1943807bf179ab1704e4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Emilio=20Cobos=20=C3=81lvarez?= Date: Thu, 24 Nov 2022 16:42:52 +0000 Subject: [PATCH 09/37] Bug 1801840 - Go back to using .browserStack rather than .browserContainer for tab dialog box. r=Gijs,nchevobbe This makes alert take the same area as the status panel, partially backing out the regressing bug. .browserStack is also relatively-positioned, so this works too. I think I didn't realize this while writing bug 1791972 because the rule was in a UA sheet (all s are relatively positioned, apparently). This restores the behavior when devtools is toggled vertically. On responsive mode this still covers the top toolbar, but that was the pre-existing behavior. 
Could be fixed in a follow-up with some z-index tweaking... Differential Revision: https://phabricator.services.mozilla.com/D162739 --- browser/base/content/browser.css | 2 +- browser/base/content/browser.js | 2 +- .../test/tabPrompts/browser_openPromptInBackgroundTab.js | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/browser/base/content/browser.css b/browser/base/content/browser.css index 9223a21ebcc3..b8cdd7effa45 100644 --- a/browser/base/content/browser.css +++ b/browser/base/content/browser.css @@ -1683,7 +1683,7 @@ toolbar[keyNav=true]:not([collapsed=true], [customizing=true]) toolbartabstop { } /* Hide tab-modal dialogs when a window-modal one is up. */ -:root[window-modal-open] .browserContainer > .dialogStack { +:root[window-modal-open] .browserStack > .dialogStack { visibility: hidden; } diff --git a/browser/base/content/browser.js b/browser/base/content/browser.js index e0beb73dea5f..481791be5c08 100644 --- a/browser/base/content/browser.js +++ b/browser/base/content/browser.js @@ -9116,7 +9116,7 @@ const SafeBrowsingNotificationBox = { */ class TabDialogBox { static _containerFor(browser) { - return browser.closest(".browserContainer, .webextension-popup-stack"); + return browser.closest(".browserStack, .webextension-popup-stack"); } constructor(browser) { diff --git a/browser/base/content/test/tabPrompts/browser_openPromptInBackgroundTab.js b/browser/base/content/test/tabPrompts/browser_openPromptInBackgroundTab.js index a63def6a599a..4280e88bef8f 100644 --- a/browser/base/content/test/tabPrompts/browser_openPromptInBackgroundTab.js +++ b/browser/base/content/test/tabPrompts/browser_openPromptInBackgroundTab.js @@ -53,7 +53,7 @@ add_task(async function test_old_modal_ui() { // switch tab back, and check the checkbox is displayed: await BrowserTestUtils.switchTab(gBrowser, openedTab); // check the prompt is there, and the extra row is present - let promptElements = openedTab.linkedBrowser.parentNode.parentNode.querySelectorAll( + 
let promptElements = openedTab.linkedBrowser.parentNode.querySelectorAll( "tabmodalprompt" ); is(promptElements.length, 1, "There should be 1 prompt"); @@ -164,7 +164,7 @@ add_task(async function test_new_modal_ui() { // switch tab back, and check the checkbox is displayed: await BrowserTestUtils.switchTab(gBrowser, openedTab); // check the prompt is there - let promptElements = openedTab.linkedBrowser.parentNode.parentNode.querySelectorAll( + let promptElements = openedTab.linkedBrowser.parentNode.querySelectorAll( ".content-prompt-dialog" ); From 1ed700de369d147eddbfbcf6a8e7d6828be0822f Mon Sep 17 00:00:00 2001 From: Henrik Skupin Date: Thu, 24 Nov 2022 17:09:51 +0000 Subject: [PATCH 10/37] Bug 1802103 - [remote] Disable location change rate limit. r=webdriver-reviewers,Sasha,jdescottes Differential Revision: https://phabricator.services.mozilla.com/D162826 --- remote/shared/RecommendedPreferences.sys.mjs | 3 +++ .../marionette/client/marionette_driver/geckoinstance.py | 8 ++++---- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/remote/shared/RecommendedPreferences.sys.mjs b/remote/shared/RecommendedPreferences.sys.mjs index b286129e91c7..44b816c7c340 100644 --- a/remote/shared/RecommendedPreferences.sys.mjs +++ b/remote/shared/RecommendedPreferences.sys.mjs @@ -178,6 +178,9 @@ const COMMON_PREFERENCES = new Map([ ["dom.max_chrome_script_run_time", 0], ["dom.max_script_run_time", 0], + // Disable location change rate limitation + ["dom.navigation.locationChangeRateLimit.count", 0], + // DOM Push ["dom.push.connection.enabled", false], diff --git a/testing/marionette/client/marionette_driver/geckoinstance.py b/testing/marionette/client/marionette_driver/geckoinstance.py index f53a835e3dd2..83ca08295852 100644 --- a/testing/marionette/client/marionette_driver/geckoinstance.py +++ b/testing/marionette/client/marionette_driver/geckoinstance.py @@ -18,14 +18,12 @@ import sys import tempfile import time import traceback - from copy import deepcopy import 
mozversion - -from mozprofile import Profile -from mozrunner import Runner, FennecEmulatorRunner import six +from mozprofile import Profile +from mozrunner import FennecEmulatorRunner, Runner from six import reraise from . import errors @@ -69,6 +67,8 @@ class GeckoInstance(object): # No slow script dialogs "dom.max_chrome_script_run_time": 0, "dom.max_script_run_time": 0, + # Disable location change rate limitation + "dom.navigation.locationChangeRateLimit.count": 0, # DOM Push "dom.push.connection.enabled": False, # Disable dialog abuse if alerts are triggered too quickly From b76e8951076d079be9f00252d29a864693a412f8 Mon Sep 17 00:00:00 2001 From: Valentin Gosu Date: Thu, 24 Nov 2022 17:17:35 +0000 Subject: [PATCH 11/37] Bug 1797370 - Do not access nsInputStream::mListener without holding mutex r=jesup,necko-reviewers Differential Revision: https://phabricator.services.mozilla.com/D162192 --- netwerk/base/nsInputStreamPump.cpp | 44 ++++++++++++++++-------------- netwerk/base/nsInputStreamPump.h | 4 +-- 2 files changed, 25 insertions(+), 23 deletions(-) diff --git a/netwerk/base/nsInputStreamPump.cpp b/netwerk/base/nsInputStreamPump.cpp index 678f1c9beca1..750e724aa18c 100644 --- a/netwerk/base/nsInputStreamPump.cpp +++ b/netwerk/base/nsInputStreamPump.cpp @@ -491,15 +491,15 @@ uint32_t nsInputStreamPump::OnStateStart() { } { + nsCOMPtr listener = mListener; + // We're on the writing thread + AssertOnThread(); + // Note: Must exit mutex for call to OnStartRequest to avoid // deadlocks when calls to RetargetDeliveryTo for multiple // nsInputStreamPumps are needed (e.g. nsHttpChannel). 
RecursiveMutexAutoUnlock unlock(mMutex); - // We're on the writing thread - MOZ_PUSH_IGNORE_THREAD_SAFETY - AssertOnThread(); - rv = mListener->OnStartRequest(this); - MOZ_POP_THREAD_SAFETY + rv = listener->OnStartRequest(this); } // an error returned from OnStartRequest should cause us to abort; however, @@ -562,6 +562,15 @@ uint32_t nsInputStreamPump::OnStateTransfer() { mStreamOffset, avail, odaAvail)); { + // We may be called on non-MainThread even if mOffMainThread is + // false, due to RetargetDeliveryTo(), so don't use AssertOnThread() + if (mTargetThread) { + MOZ_ASSERT(mTargetThread->IsOnCurrentThread()); + } else { + MOZ_ASSERT(NS_IsMainThread()); + } + + nsCOMPtr listener = mListener; // Note: Must exit mutex for call to OnStartRequest to avoid // deadlocks when calls to RetargetDeliveryTo for multiple // nsInputStreamPumps are needed (e.g. nsHttpChannel). @@ -570,16 +579,9 @@ uint32_t nsInputStreamPump::OnStateTransfer() { // mStreamOffset is only touched in OnStateTransfer, and AsyncRead // shouldn't be called during OnDataAvailable() - // We may be called on non-MainThread even if mOffMainThread is - // false, due to RetargetDeliveryTo(), so don't use AssertOnThread() MOZ_PUSH_IGNORE_THREAD_SAFETY - if (mTargetThread) { - MOZ_ASSERT(mTargetThread->IsOnCurrentThread()); - } else { - MOZ_ASSERT(NS_IsMainThread()); - } - rv = mListener->OnDataAvailable(this, mAsyncStream, mStreamOffset, - odaAvail); + rv = listener->OnDataAvailable(this, mAsyncStream, mStreamOffset, + odaAvail); MOZ_POP_THREAD_SAFETY } @@ -678,16 +680,18 @@ uint32_t nsInputStreamPump::OnStateStop() { mAsyncStream = nullptr; mIsPending = false; { + // We're on the writing thread. + // We believe that mStatus can't be changed on us here. + AssertOnThread(); + + nsCOMPtr listener = mListener; + nsresult status = mStatus; // Note: Must exit mutex for call to OnStartRequest to avoid // deadlocks when calls to RetargetDeliveryTo for multiple // nsInputStreamPumps are needed (e.g. 
nsHttpChannel). RecursiveMutexAutoUnlock unlock(mMutex); - // We're on the writing thread. - // We believe that mStatus can't be changed on us here. - MOZ_PUSH_IGNORE_THREAD_SAFETY - AssertOnThread(); - mListener->OnStopRequest(this, mStatus); - MOZ_POP_THREAD_SAFETY + + listener->OnStopRequest(this, status); } mTargetThread = nullptr; mListener = nullptr; diff --git a/netwerk/base/nsInputStreamPump.h b/netwerk/base/nsInputStreamPump.h index e9a3ae6b5981..7248364a6f94 100644 --- a/netwerk/base/nsInputStreamPump.h +++ b/netwerk/base/nsInputStreamPump.h @@ -80,14 +80,12 @@ class nsInputStreamPump final : public nsIInputStreamPump, nsresult CreateBufferedStreamIfNeeded() MOZ_REQUIRES(mMutex); // This should optimize away in non-DEBUG builds - MOZ_ALWAYS_INLINE void AssertOnThread() const { - MOZ_PUSH_IGNORE_THREAD_SAFETY + MOZ_ALWAYS_INLINE void AssertOnThread() const MOZ_REQUIRES(mMutex) { if (mOffMainThread) { MOZ_ASSERT(mTargetThread->IsOnCurrentThread()); } else { MOZ_ASSERT(NS_IsMainThread()); } - MOZ_POP_THREAD_SAFETY } uint32_t mState MOZ_GUARDED_BY(mMutex){STATE_IDLE}; From 5c8c82f4e50618cab72a76b66b17c5e3ed5c6fff Mon Sep 17 00:00:00 2001 From: Marco Castelluccio Date: Thu, 24 Nov 2022 17:22:21 +0000 Subject: [PATCH 12/37] Bug 1790816 - Reformat security/ with isort. 
r=linter-reviewers,ahal DONTBUILD # ignore-this-changeset Differential Revision: https://phabricator.services.mozilla.com/D162666 --- security/ct/tests/gtest/createSTHTestData.py | 4 ++-- security/generate_certdata.py | 3 ++- .../crtshToIdentifyingStruct.py | 13 +++++-------- security/manager/tools/getCTKnownLogs.py | 5 +++-- security/manager/tools/mach_commands.py | 7 +------ security/manager/tools/pycert.py | 11 +++++------ security/manager/tools/pycms.py | 10 +++++----- security/manager/tools/pyct.py | 4 ++-- security/manager/tools/pykey.py | 11 ++++++----- security/sandbox/test/mac_register_font.py | 5 +++-- 10 files changed, 34 insertions(+), 39 deletions(-) diff --git a/security/ct/tests/gtest/createSTHTestData.py b/security/ct/tests/gtest/createSTHTestData.py index c6fd7588e68c..ab61d4ba0eef 100755 --- a/security/ct/tests/gtest/createSTHTestData.py +++ b/security/ct/tests/gtest/createSTHTestData.py @@ -24,12 +24,12 @@ hash: The name of a hash algorithm to use when signing. Optional. Defaults to 'sha256'. """ -from pyasn1.codec.der import encoder import binascii - import os import sys +from pyasn1.codec.der import encoder + sys.path.append( os.path.join(os.path.dirname(__file__), "..", "..", "..", "manager", "tools") ) diff --git a/security/generate_certdata.py b/security/generate_certdata.py index 3dda68ec8af2..fee6b009ccf5 100644 --- a/security/generate_certdata.py +++ b/security/generate_certdata.py @@ -7,10 +7,11 @@ # This exists to paper over differences between gyp's `action` definitions # and moz.build `GENERATED_FILES` semantics. 
-import buildconfig import os import subprocess +import buildconfig + def main(output, *inputs): env = dict(os.environ) diff --git a/security/manager/tools/crtshToIdentifyingStruct/crtshToIdentifyingStruct.py b/security/manager/tools/crtshToIdentifyingStruct/crtshToIdentifyingStruct.py index f9748eff7ce3..05e0842e2a45 100644 --- a/security/manager/tools/crtshToIdentifyingStruct/crtshToIdentifyingStruct.py +++ b/security/manager/tools/crtshToIdentifyingStruct/crtshToIdentifyingStruct.py @@ -13,20 +13,17 @@ duplicates. Requires Python 3. """ import argparse -import re -import requests -import sys import io +import re +import sys -from pyasn1.codec.der import decoder -from pyasn1.codec.der import encoder -from pyasn1_modules import pem -from pyasn1_modules import rfc5280 - +import requests from cryptography import x509 from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import hashes from cryptography.x509.oid import NameOID +from pyasn1.codec.der import decoder, encoder +from pyasn1_modules import pem, rfc5280 assert sys.version_info >= (3, 2), "Requires Python 3.2 or later" diff --git a/security/manager/tools/getCTKnownLogs.py b/security/manager/tools/getCTKnownLogs.py index a50be4f9de14..3270f567ed16 100755 --- a/security/manager/tools/getCTKnownLogs.py +++ b/security/manager/tools/getCTKnownLogs.py @@ -15,7 +15,7 @@ https://cs.chromium.org/chromium/src/net/cert/ct_known_logs_static-inc.h """ from __future__ import print_function -from string import Template + import argparse import base64 import datetime @@ -23,9 +23,10 @@ import json import os.path import sys import textwrap -import urllib2 +from string import Template import six +import urllib2 def decodebytes(s): diff --git a/security/manager/tools/mach_commands.py b/security/manager/tools/mach_commands.py index bd1d300fd629..58b5e972d321 100644 --- a/security/manager/tools/mach_commands.py +++ b/security/manager/tools/mach_commands.py @@ -4,17 +4,12 @@ import os +from 
mach.decorators import Command, CommandArgument from mach.util import UserError from mozpack.files import FileFinder from mozpack.path import basedir -from mach.decorators import ( - CommandArgument, - Command, -) - - def run_module_main_on(module, input_filename): """Run the given module (pycert or pykey) on the given file.""" diff --git a/security/manager/tools/pycert.py b/security/manager/tools/pycert.py index 896c1d20336a..cac01daff74a 100755 --- a/security/manager/tools/pycert.py +++ b/security/manager/tools/pycert.py @@ -84,21 +84,20 @@ If a serial number is not explicitly specified, it is automatically generated based on the contents of the certificate. """ -from pyasn1.codec.der import decoder -from pyasn1.codec.der import encoder -from pyasn1.type import constraint, tag, univ, useful -from pyasn1_modules import rfc2459 -from struct import pack import base64 import datetime import hashlib import re import socket -import six import sys +from struct import pack import pyct import pykey +import six +from pyasn1.codec.der import decoder, encoder +from pyasn1.type import constraint, tag, univ, useful +from pyasn1_modules import rfc2459 class Error(Exception): diff --git a/security/manager/tools/pycms.py b/security/manager/tools/pycms.py index befe68e34696..1717513fdf9a 100755 --- a/security/manager/tools/pycms.py +++ b/security/manager/tools/pycms.py @@ -26,15 +26,15 @@ information). The certificate specification must come last. 
""" -from pyasn1.codec.der import decoder -from pyasn1.codec.der import encoder -from pyasn1.type import tag, univ -from pyasn1_modules import rfc2315, rfc2459 import base64 +import sys from io import StringIO + import pycert import pykey -import sys +from pyasn1.codec.der import decoder, encoder +from pyasn1.type import tag, univ +from pyasn1_modules import rfc2315, rfc2459 class Error(Exception): diff --git a/security/manager/tools/pyct.py b/security/manager/tools/pyct.py index 125b626fc2cc..8f9d61b72b06 100644 --- a/security/manager/tools/pyct.py +++ b/security/manager/tools/pyct.py @@ -10,13 +10,13 @@ details of a signing key, when to sign, and the certificate data to sign. Currently only supports precert_entry types. See RFC 6962. """ -from pyasn1.codec.der import encoder -from struct import pack import binascii import calendar import hashlib +from struct import pack import pykey +from pyasn1.codec.der import encoder class InvalidKeyError(Exception): diff --git a/security/manager/tools/pykey.py b/security/manager/tools/pykey.py index 05163adc3619..3f08c341de3b 100755 --- a/security/manager/tools/pykey.py +++ b/security/manager/tools/pykey.py @@ -30,17 +30,18 @@ secp384r1: an ECC key on the curve secp384r1 secp521r1: an ECC key on the curve secp521r1 """ -from pyasn1.codec.der import encoder -from pyasn1.type import univ, namedtype, tag -from pyasn1_modules import rfc2459 import base64 import binascii -import ecdsa import hashlib import math +import sys + +import ecdsa import rsa import six -import sys +from pyasn1.codec.der import encoder +from pyasn1.type import namedtype, tag, univ +from pyasn1_modules import rfc2459 # "constants" to make it easier for consumers to specify hash algorithms HASH_MD5 = "hash:md5" diff --git a/security/sandbox/test/mac_register_font.py b/security/sandbox/test/mac_register_font.py index e5996fcb9069..d537e5837609 100755 --- a/security/sandbox/test/mac_register_font.py +++ b/security/sandbox/test/mac_register_font.py @@ -11,11 
+11,12 @@ Mac-specific utility command to register a font file with the OS. from __future__ import print_function -import CoreText -import Cocoa import argparse import sys +import Cocoa +import CoreText + def main(): parser = argparse.ArgumentParser() From d5ba50dc9f9dbae00de71dcd954492a5df6127ac Mon Sep 17 00:00:00 2001 From: Marco Castelluccio Date: Thu, 24 Nov 2022 17:23:27 +0000 Subject: [PATCH 13/37] Bug 1790816 - Reformat layout/ with isort. r=linter-reviewers,ahal DONTBUILD # ignore-this-changeset Differential Revision: https://phabricator.services.mozilla.com/D162669 --- layout/generic/FrameClasses.py | 2 +- layout/generic/GenerateFrameLists.py | 1 - layout/tools/reftest/mach_commands.py | 16 +++++----------- .../tools/reftest/mach_test_package_commands.py | 6 ++---- layout/tools/reftest/reftest/__init__.py | 3 ++- layout/tools/reftest/reftestcommandline.py | 2 +- layout/tools/reftest/remotereftest.py | 8 +++----- layout/tools/reftest/runreftest.py | 9 ++++----- .../reftest/selftest/test_reftest_output.py | 8 ++++---- 9 files changed, 22 insertions(+), 33 deletions(-) diff --git a/layout/generic/FrameClasses.py b/layout/generic/FrameClasses.py index 809842be62a8..61bb3e60d3fb 100644 --- a/layout/generic/FrameClasses.py +++ b/layout/generic/FrameClasses.py @@ -3,7 +3,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. # Frame class definitions, used to generate FrameIdList.h and FrameTypeList.h -from FrameClass import Frame, AbstractFrame, LEAF, NOT_LEAF, DYNAMIC_LEAF +from FrameClass import DYNAMIC_LEAF, LEAF, NOT_LEAF, AbstractFrame, Frame FRAME_CLASSES = [ Frame("BRFrame", "Br", LEAF), diff --git a/layout/generic/GenerateFrameLists.py b/layout/generic/GenerateFrameLists.py index 32f8a2830849..af2c60922bcd 100644 --- a/layout/generic/GenerateFrameLists.py +++ b/layout/generic/GenerateFrameLists.py @@ -4,7 +4,6 @@ from FrameClasses import FRAME_CLASSES - HEADER = "// THIS IS AUTOGENERATED BY GenerateFrameLists.py. 
DO NOT EDIT\n" diff --git a/layout/tools/reftest/mach_commands.py b/layout/tools/reftest/mach_commands.py index baba9a940d0c..96595c3fe37a 100644 --- a/layout/tools/reftest/mach_commands.py +++ b/layout/tools/reftest/mach_commands.py @@ -2,22 +2,16 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -from __future__ import absolute_import, unicode_literals, print_function +from __future__ import absolute_import, print_function, unicode_literals import os import re import sys from argparse import Namespace -from mozbuild.base import ( - MachCommandConditions as conditions, - MozbuildObject, -) - -from mach.decorators import ( - Command, -) - +from mach.decorators import Command +from mozbuild.base import MachCommandConditions as conditions +from mozbuild.base import MozbuildObject parser = None @@ -279,8 +273,8 @@ def _run_reftest(command_context, **kwargs): reftest.log_manager.enable_unstructured() if conditions.is_android(command_context): from mozrunner.devices.android_device import ( - verify_android_device, InstallIntent, + verify_android_device, ) install = InstallIntent.NO if kwargs.get("no_install") else InstallIntent.YES diff --git a/layout/tools/reftest/mach_test_package_commands.py b/layout/tools/reftest/mach_test_package_commands.py index 6b6f2f8fd93b..56599647c5e7 100644 --- a/layout/tools/reftest/mach_test_package_commands.py +++ b/layout/tools/reftest/mach_test_package_commands.py @@ -2,16 +2,14 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-from __future__ import absolute_import, unicode_literals, print_function +from __future__ import absolute_import, print_function, unicode_literals import os import sys from argparse import Namespace from functools import partial -from mach.decorators import ( - Command, -) +from mach.decorators import Command here = os.path.abspath(os.path.dirname(__file__)) logger = None diff --git a/layout/tools/reftest/reftest/__init__.py b/layout/tools/reftest/reftest/__init__.py index 601014a6cac9..e3ed9b53e13d 100644 --- a/layout/tools/reftest/reftest/__init__.py +++ b/layout/tools/reftest/reftest/__init__.py @@ -2,11 +2,12 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -from __future__ import unicode_literals, absolute_import, print_function +from __future__ import absolute_import, print_function, unicode_literals import io import os import re + import six RE_COMMENT = re.compile(r"\s+#") diff --git a/layout/tools/reftest/reftestcommandline.py b/layout/tools/reftest/reftestcommandline.py index a6aac3df791b..70958b2dc9c6 100644 --- a/layout/tools/reftest/reftestcommandline.py +++ b/layout/tools/reftest/reftestcommandline.py @@ -4,9 +4,9 @@ import argparse import os import sys from collections import OrderedDict + import mozinfo import mozlog - from six.moves.urllib.parse import urlparse here = os.path.abspath(os.path.dirname(__file__)) diff --git a/layout/tools/reftest/remotereftest.py b/layout/tools/reftest/remotereftest.py index 77a670519600..8b22fb2c7884 100644 --- a/layout/tools/reftest/remotereftest.py +++ b/layout/tools/reftest/remotereftest.py @@ -16,14 +16,12 @@ import time import traceback from contextlib import closing -from six.moves.urllib_request import urlopen - -from mozdevice import ADBDeviceFactory, RemoteProcessMonitor import mozcrash - +import reftestcommandline +from mozdevice import ADBDeviceFactory, RemoteProcessMonitor from output import OutputHandler from runreftest 
import RefTest, ReftestResolver, build_obj -import reftestcommandline +from six.moves.urllib_request import urlopen # We need to know our current directory so that we can serve our test files from it. SCRIPT_DIRECTORY = os.path.abspath(os.path.realpath(os.path.dirname(__file__))) diff --git a/layout/tools/reftest/runreftest.py b/layout/tools/reftest/runreftest.py index b77e8567da2d..052f037693f5 100644 --- a/layout/tools/reftest/runreftest.py +++ b/layout/tools/reftest/runreftest.py @@ -5,8 +5,6 @@ """ Runs the reftest test harness. """ -from __future__ import print_function - from __future__ import absolute_import, print_function import json @@ -37,9 +35,10 @@ import mozlog import mozprocess import mozprofile import mozrunner -from manifestparser import TestManifest, filters as mpf +from manifestparser import TestManifest +from manifestparser import filters as mpf from mozrunner.utils import get_stack_fixer_function, test_environment -from mozscreenshot import printstatus, dump_screen +from mozscreenshot import dump_screen, printstatus from six import reraise, string_types from six.moves import range @@ -57,8 +56,8 @@ except ImportError as e: # noqa Marionette = reraise_ -from output import OutputHandler, ReftestFormatter import reftestcommandline +from output import OutputHandler, ReftestFormatter here = os.path.abspath(os.path.dirname(__file__)) diff --git a/layout/tools/reftest/selftest/test_reftest_output.py b/layout/tools/reftest/selftest/test_reftest_output.py index 15cffa925bd8..1ca48d42cdb5 100644 --- a/layout/tools/reftest/selftest/test_reftest_output.py +++ b/layout/tools/reftest/selftest/test_reftest_output.py @@ -12,14 +12,14 @@ try: except ImportError: # Python3 from io import StringIO + from functools import partial import mozunit import pytest -from moztest.selftest.output import get_mozharness_status, filter_action - -from mozharness.base.log import INFO, WARNING, ERROR -from mozharness.mozilla.automation import TBPL_SUCCESS, TBPL_WARNING, 
TBPL_FAILURE +from mozharness.base.log import ERROR, INFO, WARNING +from mozharness.mozilla.automation import TBPL_FAILURE, TBPL_SUCCESS, TBPL_WARNING +from moztest.selftest.output import filter_action, get_mozharness_status here = os.path.abspath(os.path.dirname(__file__)) get_mozharness_status = partial(get_mozharness_status, "reftest") From ecb54845b00d02adabb05fd4e88965ad6038dd2e Mon Sep 17 00:00:00 2001 From: Marco Castelluccio Date: Thu, 24 Nov 2022 17:23:47 +0000 Subject: [PATCH 14/37] Bug 1790816 - Reformat build/ with isort. r=linter-reviewers,ahal DONTBUILD # ignore-this-changeset Differential Revision: https://phabricator.services.mozilla.com/D162668 --- build/RunCbindgen.py | 8 +++++--- build/build-clang/build-clang.py | 15 +++++++-------- build/buildconfig.py | 3 ++- build/cargo-linker | 1 - build/checksums.py | 2 +- build/clang-plugin/ThreadAllows.py | 1 - build/clang-plugin/import_mozilla_checks.py | 6 +++--- build/compare-mozconfig/compare-mozconfigs.py | 2 +- build/gen_symverscript.py | 1 + build/gen_test_packages_manifest.py | 1 - build/mach_initialize.py | 5 +---- build/midl.py | 5 +++-- build/pgo/genpgocert.py | 6 +++--- build/pgo/profileserver.py | 8 ++++---- build/rust/mozbuild/generate_buildconfig.py | 5 +++-- build/unix/rewrite_sanitizer_dylib.py | 5 +++-- build/upload.py | 2 +- build/upload_generated_sources.py | 17 +++++++++-------- build/valgrind/mach_commands.py | 13 ++++--------- build/vs/generate_yaml.py | 8 ++++---- build/vs/pack_vs.py | 12 +++++------- build/win32/autowinchecksec.py | 3 ++- build/win32/dummy_libs.py | 1 + 23 files changed, 63 insertions(+), 67 deletions(-) diff --git a/build/RunCbindgen.py b/build/RunCbindgen.py index 4c5bc8ce6edb..835bea5621c6 100644 --- a/build/RunCbindgen.py +++ b/build/RunCbindgen.py @@ -3,12 +3,14 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
from __future__ import print_function + +import os +import subprocess + import buildconfig import mozpack.path as mozpath -import os -import six -import subprocess import pytoml +import six # Try to read the package name or otherwise assume same name as the crate path. diff --git a/build/build-clang/build-clang.py b/build/build-clang/build-clang.py index ec22b3057856..0e856e9574fb 100755 --- a/build/build-clang/build-clang.py +++ b/build/build-clang/build-clang.py @@ -6,21 +6,20 @@ # Only necessary for flake8 to be happy... from __future__ import print_function -import os -import os.path -import shutil -import subprocess -import platform -import json import argparse +import errno import fnmatch import glob -import errno +import json +import os +import os.path +import platform import re +import shutil +import subprocess import sys import tarfile from contextlib import contextmanager - from shutil import which import zstandard diff --git a/build/buildconfig.py b/build/buildconfig.py index 3fcc82ffdb5d..527690c2b0e8 100644 --- a/build/buildconfig.py +++ b/build/buildconfig.py @@ -3,8 +3,9 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
import sys -from mozbuild.base import MozbuildObject + from mozbuild.backend.configenvironment import PartialConfigEnvironment +from mozbuild.base import MozbuildObject config = MozbuildObject.from_environment() partial_config = PartialConfigEnvironment(config.topobjdir) diff --git a/build/cargo-linker b/build/cargo-linker index e7546365c381..94b05f821394 100755 --- a/build/cargo-linker +++ b/build/cargo-linker @@ -27,7 +27,6 @@ import os import sys - SANITIZERS = { "asan": "address", "hwasan": "hwaddress", diff --git a/build/checksums.py b/build/checksums.py index d82c7211175f..970e44d80323 100755 --- a/build/checksums.py +++ b/build/checksums.py @@ -5,10 +5,10 @@ from __future__ import with_statement -from optparse import OptionParser import hashlib import logging import os +from optparse import OptionParser logger = logging.getLogger("checksums.py") diff --git a/build/clang-plugin/ThreadAllows.py b/build/clang-plugin/ThreadAllows.py index 782f32c006fe..f3e1ee894c8e 100644 --- a/build/clang-plugin/ThreadAllows.py +++ b/build/clang-plugin/ThreadAllows.py @@ -4,7 +4,6 @@ import json import os import posixpath - from os import PathLike # `typing.Literal` not available until Python 3.8; diff --git a/build/clang-plugin/import_mozilla_checks.py b/build/clang-plugin/import_mozilla_checks.py index f18abfae647f..d573dafcf1bf 100755 --- a/build/clang-plugin/import_mozilla_checks.py +++ b/build/clang-plugin/import_mozilla_checks.py @@ -3,10 +3,10 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-import os -import glob -import shutil import errno +import glob +import os +import shutil import ThirdPartyPaths import ThreadAllows diff --git a/build/compare-mozconfig/compare-mozconfigs.py b/build/compare-mozconfig/compare-mozconfigs.py index 7e39d9c071c5..7a46d61d7623 100644 --- a/build/compare-mozconfig/compare-mozconfigs.py +++ b/build/compare-mozconfig/compare-mozconfigs.py @@ -7,9 +7,9 @@ from __future__ import unicode_literals +import difflib import logging import os -import difflib import unittest import buildconfig diff --git a/build/gen_symverscript.py b/build/gen_symverscript.py index f32554abc800..d1a5abd07d4e 100644 --- a/build/gen_symverscript.py +++ b/build/gen_symverscript.py @@ -5,6 +5,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. import sys + from mozbuild.preprocessor import Preprocessor diff --git a/build/gen_test_packages_manifest.py b/build/gen_test_packages_manifest.py index e57bff3dd206..162f4e84209a 100644 --- a/build/gen_test_packages_manifest.py +++ b/build/gen_test_packages_manifest.py @@ -5,7 +5,6 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. import json - from argparse import ArgumentParser ALL_HARNESSES = [ diff --git a/build/mach_initialize.py b/build/mach_initialize.py index 84da39f6fb29..13d5cac1483a 100644 --- a/build/mach_initialize.py +++ b/build/mach_initialize.py @@ -20,10 +20,8 @@ if sys.version_info[0] < 3: else: from importlib.abc import MetaPathFinder - from types import ModuleType - STATE_DIR_FIRST_RUN = """ Mach and the build system store shared state in a common directory on the filesystem. 
The following directory will be created: @@ -145,7 +143,7 @@ def initialize(topsrcdir): ) ] - from mach.util import setenv, get_state_dir + from mach.util import get_state_dir, setenv state_dir = _create_state_dir() @@ -157,7 +155,6 @@ def initialize(topsrcdir): import mach.base import mach.main - from mach.main import MachCommandReference # Centralized registry of available mach commands diff --git a/build/midl.py b/build/midl.py index 3480e8df3ad1..463a161113ad 100644 --- a/build/midl.py +++ b/build/midl.py @@ -2,12 +2,13 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -import buildconfig +import os import shutil import subprocess -import os import sys +import buildconfig + def relativize(path, base=None): # For absolute path in Unix builds, we need relative paths because diff --git a/build/pgo/genpgocert.py b/build/pgo/genpgocert.py index 98b05355bfb6..e3d2c4c88f63 100644 --- a/build/pgo/genpgocert.py +++ b/build/pgo/genpgocert.py @@ -7,18 +7,18 @@ # certificates used for SSL testing in Mochitest. The already generated # certs are located at $topsrcdir/build/pgo/certs/ . -import mozinfo import os import random import re import shutil import subprocess import sys +from distutils.spawn import find_executable -from mozbuild.base import MozbuildObject, BinaryNotFoundException +import mozinfo +from mozbuild.base import BinaryNotFoundException, MozbuildObject from mozfile import NamedTemporaryFile, TemporaryDirectory from mozprofile.permissions import ServerLocations -from distutils.spawn import find_executable dbFiles = [ re.compile("^cert[0-9]+\.db$"), diff --git a/build/pgo/profileserver.py b/build/pgo/profileserver.py index 3b112562ad03..94f54cbd1736 100755 --- a/build/pgo/profileserver.py +++ b/build/pgo/profileserver.py @@ -4,19 +4,19 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+import glob import json import os -import sys -import glob import subprocess +import sys import mozcrash -from mozbuild.base import MozbuildObject, BinaryNotFoundException +from mozbuild.base import BinaryNotFoundException, MozbuildObject from mozfile import TemporaryDirectory from mozhttpd import MozHttpd from mozprofile import FirefoxProfile, Preferences from mozprofile.permissions import ServerLocations -from mozrunner import FirefoxRunner, CLI +from mozrunner import CLI, FirefoxRunner from six import string_types PORT = 8888 diff --git a/build/rust/mozbuild/generate_buildconfig.py b/build/rust/mozbuild/generate_buildconfig.py index c5f8cce668f5..255135ae81a2 100644 --- a/build/rust/mozbuild/generate_buildconfig.py +++ b/build/rust/mozbuild/generate_buildconfig.py @@ -2,9 +2,10 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -import buildconfig -import textwrap import string +import textwrap + +import buildconfig def generate_bool(name): diff --git a/build/unix/rewrite_sanitizer_dylib.py b/build/unix/rewrite_sanitizer_dylib.py index 405b3a97c9aa..2fa8a92b27f2 100644 --- a/build/unix/rewrite_sanitizer_dylib.py +++ b/build/unix/rewrite_sanitizer_dylib.py @@ -2,13 +2,14 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -from argparse import ArgumentParser import os -from pathlib import Path import re import shutil import subprocess import sys +from argparse import ArgumentParser +from pathlib import Path + from buildconfig import substs """ diff --git a/build/upload.py b/build/upload.py index 1a9a69c971fb..75eb163b24f0 100644 --- a/build/upload.py +++ b/build/upload.py @@ -15,9 +15,9 @@ # to indicate that files should be uploaded including their paths relative # to the base path. 
-import sys import os import shutil +import sys from optparse import OptionParser diff --git a/build/upload_generated_sources.py b/build/upload_generated_sources.py index 15439565d91c..abfef92e3cec 100644 --- a/build/upload_generated_sources.py +++ b/build/upload_generated_sources.py @@ -6,22 +6,23 @@ from __future__ import absolute_import, print_function, unicode_literals import argparse -from contextlib import contextmanager import gzip import io import logging +import os +import sys +import tarfile +import time +from contextlib import contextmanager +from threading import Event, Thread + +import requests from mozbuild.generated_sources import ( get_filename_with_digest, get_s3_region_and_bucket, ) -import os -from six.moves.queue import Queue -import requests -import sys -import tarfile from requests.packages.urllib3.util.retry import Retry -from threading import Event, Thread -import time +from six.moves.queue import Queue # Arbitrary, should probably measure this. NUM_WORKER_THREADS = 10 diff --git a/build/valgrind/mach_commands.py b/build/valgrind/mach_commands.py index 74d5b05b5396..c3b8eab49db6 100644 --- a/build/valgrind/mach_commands.py +++ b/build/valgrind/mach_commands.py @@ -6,18 +6,13 @@ from __future__ import absolute_import, unicode_literals import json import logging -import mozinfo import os import time -from mach.decorators import ( - Command, - CommandArgument, -) -from mozbuild.base import ( - MachCommandConditions as conditions, - BinaryNotFoundException, -) +import mozinfo +from mach.decorators import Command, CommandArgument +from mozbuild.base import BinaryNotFoundException +from mozbuild.base import MachCommandConditions as conditions def is_valgrind_build(cls): diff --git a/build/vs/generate_yaml.py b/build/vs/generate_yaml.py index c2c5af24bde5..2d6a429d3ead 100755 --- a/build/vs/generate_yaml.py +++ b/build/vs/generate_yaml.py @@ -3,17 +3,17 @@ # License, v. 2.0. 
If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. +import sys + +import yaml from vsdownload import ( getArgsParser, getManifest, getPackages, getSelectedPackages, - setPackageSelection, lowercaseIgnores, + setPackageSelection, ) -import sys -import yaml - if __name__ == "__main__": parser = getArgsParser() diff --git a/build/vs/pack_vs.py b/build/vs/pack_vs.py index 9ad7f0a36d30..978b3840175b 100755 --- a/build/vs/pack_vs.py +++ b/build/vs/pack_vs.py @@ -3,17 +3,15 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -from vsdownload import ( - downloadPackages, - extractPackages, -) -from pathlib import Path -from tempfile import TemporaryDirectory -from zstandard import ZstdCompressor import argparse import os import tarfile +from pathlib import Path +from tempfile import TemporaryDirectory + import yaml +from vsdownload import downloadPackages, extractPackages +from zstandard import ZstdCompressor def tzstd_path(path): diff --git a/build/win32/autowinchecksec.py b/build/win32/autowinchecksec.py index 1a394977dc14..5038dc56a68d 100644 --- a/build/win32/autowinchecksec.py +++ b/build/win32/autowinchecksec.py @@ -7,11 +7,12 @@ # run the Winchecksec tool (https://github.com/trailofbits/winchecksec) # against a given Windows binary. 
-import buildconfig import json import subprocess import sys +import buildconfig + # usage if len(sys.argv) != 2: print("""usage : autowinchecksec.by path_to_binary""") diff --git a/build/win32/dummy_libs.py b/build/win32/dummy_libs.py index 93faeef429b3..16947c6c846c 100644 --- a/build/win32/dummy_libs.py +++ b/build/win32/dummy_libs.py @@ -4,6 +4,7 @@ import os import subprocess + from buildconfig import substs From ad9df8b8db5de3ab96b12746336a44e36d66b29d Mon Sep 17 00:00:00 2001 From: Geoff Brown Date: Thu, 24 Nov 2022 17:44:44 +0000 Subject: [PATCH 15/37] Bug 1802337 - Bump mozcrash version to 2.2.0; r=releng-reviewers,jlorenzo Note that version 2.1.0 was released to pypi.org, but there is no record of that bump in mercurial or bugzilla. Differential Revision: https://phabricator.services.mozilla.com/D163006 --- testing/mozbase/mozcrash/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/testing/mozbase/mozcrash/setup.py b/testing/mozbase/mozcrash/setup.py index 4df75e5a9f7d..f3fb252a2039 100644 --- a/testing/mozbase/mozcrash/setup.py +++ b/testing/mozbase/mozcrash/setup.py @@ -7,7 +7,7 @@ from __future__ import absolute_import from setuptools import setup PACKAGE_NAME = "mozcrash" -PACKAGE_VERSION = "2.0.0" +PACKAGE_VERSION = "2.2.0" # dependencies deps = ["mozfile >= 1.0", "mozlog >= 6.0"] From c2bbc1bf264135cfa41edd17f5e8253dcd17d5d5 Mon Sep 17 00:00:00 2001 From: Barret Rennie Date: Thu, 24 Nov 2022 17:50:02 +0000 Subject: [PATCH 16/37] Bug 1798732 - Port osfile.jsm usage to IOUtils in FirefoxProfileMigrator.sys.mjs r=Gijs Differential Revision: https://phabricator.services.mozilla.com/D162948 --- .../migration/FirefoxProfileMigrator.sys.mjs | 21 +++++++------------ 1 file changed, 8 insertions(+), 13 deletions(-) diff --git a/browser/components/migration/FirefoxProfileMigrator.sys.mjs b/browser/components/migration/FirefoxProfileMigrator.sys.mjs index d387ba102a3c..7b39d68bc238 100644 --- 
a/browser/components/migration/FirefoxProfileMigrator.sys.mjs +++ b/browser/components/migration/FirefoxProfileMigrator.sys.mjs @@ -28,7 +28,6 @@ ChromeUtils.defineModuleGetter( "SessionMigration", "resource:///modules/sessionstore/SessionMigration.jsm" ); -ChromeUtils.defineModuleGetter(lazy, "OS", "resource://gre/modules/osfile.jsm"); export function FirefoxProfileMigrator() { this.wrappedJSObject = this; // for testing... @@ -243,32 +242,28 @@ FirefoxProfileMigrator.prototype._getResourcesInternal = function( // if we can, copy it to the new profile and set sync's username pref // (which acts as a de-facto flag to indicate if sync is configured) try { - let oldPath = lazy.OS.Path.join( + let oldPath = PathUtils.join( sourceProfileDir.path, "signedInUser.json" ); - let exists = await lazy.OS.File.exists(oldPath); + let exists = await IOUtils.exists(oldPath); if (exists) { - let raw = await lazy.OS.File.read(oldPath, { encoding: "utf-8" }); - let data = JSON.parse(raw); + let data = await IOUtils.readJSON(oldPath); if (data && data.accountData && data.accountData.email) { let username = data.accountData.email; // copy the file itself. - await lazy.OS.File.copy( + await IOUtils.copy( oldPath, - lazy.OS.Path.join(currentProfileDir.path, "signedInUser.json") + PathUtils.join(currentProfileDir.path, "signedInUser.json") ); // Now we need to know whether Sync is actually configured for this // user. The only way we know is by looking at the prefs file from // the old profile. We avoid trying to do a full parse of the prefs // file and even avoid parsing the single string value we care // about. 
- let prefsPath = lazy.OS.Path.join( - sourceProfileDir.path, - "prefs.js" - ); - if (await lazy.OS.File.exists(oldPath)) { - let rawPrefs = await lazy.OS.File.read(prefsPath, { + let prefsPath = PathUtils.join(sourceProfileDir.path, "prefs.js"); + if (await IOUtils.exists(oldPath)) { + let rawPrefs = await IOUtils.readUTF8(prefsPath, { encoding: "utf-8", }); if (/^user_pref\("services\.sync\.username"/m.test(rawPrefs)) { From f567bfbd5b9af74a208d8122d39a5579ab7f4dc1 Mon Sep 17 00:00:00 2001 From: Barret Rennie Date: Thu, 24 Nov 2022 17:50:19 +0000 Subject: [PATCH 17/37] Bug 1798731 - Port osfile.jsm usage to IOUtils in EdgeProfileMigrator.sys.mjs r=Gijs Differential Revision: https://phabricator.services.mozilla.com/D162949 --- .../components/migration/EdgeProfileMigrator.sys.mjs | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/browser/components/migration/EdgeProfileMigrator.sys.mjs b/browser/components/migration/EdgeProfileMigrator.sys.mjs index 0644dcf73921..e0d47eddf5ac 100644 --- a/browser/components/migration/EdgeProfileMigrator.sys.mjs +++ b/browser/components/migration/EdgeProfileMigrator.sys.mjs @@ -4,7 +4,6 @@ import { AppConstants } from "resource://gre/modules/AppConstants.sys.mjs"; -const { OS } = ChromeUtils.import("resource://gre/modules/osfile.jsm"); import { XPCOMUtils } from "resource://gre/modules/XPCOMUtils.sys.mjs"; import { @@ -506,7 +505,7 @@ EdgeProfileMigrator.prototype.getLastUsedDate = async function() { if (sourceProfiles !== null || !lazy.gEdgeDatabase) { return Promise.resolve(new Date(0)); } - let logFilePath = OS.Path.join( + let logFilePath = PathUtils.join( lazy.gEdgeDatabase.parent.path, "LogFiles", "edb.log" @@ -517,11 +516,9 @@ EdgeProfileMigrator.prototype.getLastUsedDate = async function() { ); let cookiePaths = cookieMigrator._cookiesFolders.map(f => f.path); let datePromises = [logFilePath, dbPath, ...cookiePaths].map(path => { - return OS.File.stat(path) - .catch(() => null) - .then(info => { - 
return info ? info.lastModificationDate : 0; - }); + return IOUtils.stat(path) + .then(info => info.lastModified) + .catch(() => 0); }); datePromises.push( new Promise(resolve => { From a31caf082579f50e488714c4e9cf314f37de79df Mon Sep 17 00:00:00 2001 From: Chris H-C Date: Thu, 24 Nov 2022 17:59:21 +0000 Subject: [PATCH 18/37] Bug 1802207 - Wait to send `component_init`-reason "newtab" ping until newtab has been categorized r=nanj Differential Revision: https://phabricator.services.mozilla.com/D162906 --- browser/components/newtab/lib/TelemetryFeed.jsm | 9 +++------ browser/components/newtab/pings.yaml | 3 ++- .../newtab/test/browser/browser_newtab_ping.js | 4 ++-- .../newtab/test/unit/lib/TelemetryFeed.test.js | 14 +++++++------- 4 files changed, 14 insertions(+), 16 deletions(-) diff --git a/browser/components/newtab/lib/TelemetryFeed.jsm b/browser/components/newtab/lib/TelemetryFeed.jsm index 39daa4af5847..20206e4ac5cf 100644 --- a/browser/components/newtab/lib/TelemetryFeed.jsm +++ b/browser/components/newtab/lib/TelemetryFeed.jsm @@ -1059,6 +1059,9 @@ class TelemetryFeed { } Glean.newtab.newtabCategory.set(newtabCategory); Glean.newtab.homepageCategory.set(homePageCategory); + if (lazy.NimbusFeatures.glean.getVariable("newtabPingEnabled") ?? true) { + GleanPings.newtab.submit("component_init"); + } } } @@ -1275,12 +1278,6 @@ class TelemetryFeed { } setNewtabPrefMetrics(); Glean.pocket.isSignedIn.set(lazy.pktApi.isUserLoggedIn()); - if ( - this.telemetryEnabled && - (lazy.NimbusFeatures.glean.getVariable("newtabPingEnabled") ?? true) - ) { - GleanPings.newtab.submit("component_init"); - } } uninit() { diff --git a/browser/components/newtab/pings.yaml b/browser/components/newtab/pings.yaml index 80b690097e1b..d9cf6151a492 100644 --- a/browser/components/newtab/pings.yaml +++ b/browser/components/newtab/pings.yaml @@ -15,7 +15,8 @@ newtab: The newtab visit ended. Could be by navigation, being closed, etc. component_init: | - The newtab component init'd. 
+ The newtab component init'd, + and the newtab and homepage settings have been categorized. This is mostly to ensure we hear at least once from clients configured to not show a newtab UI. include_client_id: true diff --git a/browser/components/newtab/test/browser/browser_newtab_ping.js b/browser/components/newtab/test/browser/browser_newtab_ping.js index 039bcb7a514b..f8d642a2a8c2 100644 --- a/browser/components/newtab/test/browser/browser_newtab_ping.js +++ b/browser/components/newtab/test/browser/browser_newtab_ping.js @@ -199,7 +199,7 @@ add_task(async function test_newtab_doesnt_send_nimbus() { await SpecialPowers.popPrefEnv(); }); -add_task(async function test_newtab_init_sends_ping() { +add_task(async function test_newtab_categorization_sends_ping() { await SpecialPowers.pushPrefEnv({ set: [["browser.newtabpage.activity-stream.telemetry", true]], }); @@ -214,7 +214,7 @@ add_task(async function test_newtab_init_sends_ping() { pingSent = true; Assert.equal(reason, "component_init"); }); - TelemetryFeed.init(); // INIT action doesn't happen by default. 
+ await TelemetryFeed.sendPageTakeoverData(); Assert.ok(pingSent, "ping was sent"); await SpecialPowers.popPrefEnv(); diff --git a/browser/components/newtab/test/unit/lib/TelemetryFeed.test.js b/browser/components/newtab/test/unit/lib/TelemetryFeed.test.js index 544b784ac1d6..aa381cbbdd99 100644 --- a/browser/components/newtab/test/unit/lib/TelemetryFeed.test.js +++ b/browser/components/newtab/test/unit/lib/TelemetryFeed.test.js @@ -181,13 +181,6 @@ describe("TelemetryFeed", () => { assert.calledWithExactly(stub, "unload", instance.handleEvent); assert.calledWithExactly(stub, "TabPinned", instance.handleEvent); }); - it("should send a 'newtab' ping", () => { - instance._prefs.set(TELEMETRY_PREF, true); - sandbox.spy(GleanPings.newtab, "submit"); - instance.init(); - assert.calledOnce(GleanPings.newtab.submit); - assert.calledWithExactly(GleanPings.newtab.submit, "component_init"); - }); describe("telemetry pref changes from false to true", () => { beforeEach(() => { FakePrefs.prototype.prefs = {}; @@ -1725,6 +1718,13 @@ describe("TelemetryFeed", () => { assert.calledOnce(Glean.newtab.homepageCategory.set); assert.calledWith(Glean.newtab.homepageCategory.set, "disabled"); }); + it("should send a 'newtab' ping", async () => { + instance._prefs.set(TELEMETRY_PREF, true); + sandbox.spy(GleanPings.newtab, "submit"); + await instance.sendPageTakeoverData(); + assert.calledOnce(GleanPings.newtab.submit); + assert.calledWithExactly(GleanPings.newtab.submit, "component_init"); + }); }); describe("#sendDiscoveryStreamImpressions", () => { it("should not send impression pings if there is no impression data", () => { From 2af8b20ef59bcad84185ef7c9ab90ad7e1fd9f7c Mon Sep 17 00:00:00 2001 From: Chris H-C Date: Thu, 24 Nov 2022 18:00:15 +0000 Subject: [PATCH 19/37] Bug 1800081 - Report the legacy telemetry client_id with application lifetime r=perry.mcmanis Having the value present in more than just the first ping of an app session should help cross-system analyses. 
Differential Revision: https://phabricator.services.mozilla.com/D162883 --- toolkit/components/telemetry/metrics.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/toolkit/components/telemetry/metrics.yaml b/toolkit/components/telemetry/metrics.yaml index 4b92b6ff1738..a03170e266b0 100644 --- a/toolkit/components/telemetry/metrics.yaml +++ b/toolkit/components/telemetry/metrics.yaml @@ -14,6 +14,7 @@ $tags: legacy.telemetry: client_id: type: uuid + lifetime: application description: | The client_id according to Telemetry. Might not always have a value due to being too early for it to have From ed5352a0d88458b62cb7e9dc4599c3c14b1c3289 Mon Sep 17 00:00:00 2001 From: Andrew Halberstadt Date: Thu, 24 Nov 2022 18:02:09 +0000 Subject: [PATCH 20/37] Bug 1795042 - [ci] Migrate 'updatebot' tasks from AWS -> GCP, r=MasterWayZ Differential Revision: https://phabricator.services.mozilla.com/D162996 --- taskcluster/ci/updatebot/kind.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/taskcluster/ci/updatebot/kind.yml b/taskcluster/ci/updatebot/kind.yml index f4809297d66b..1cbdbaaa77c1 100644 --- a/taskcluster/ci/updatebot/kind.yml +++ b/taskcluster/ci/updatebot/kind.yml @@ -22,7 +22,7 @@ jobs: platform: updatebot/all symbol: cron tier: 1 - worker-type: b-linux + worker-type: b-linux-gcp worker: docker-image: {in-tree: updatebot} max-run-time: 3600 From 161a4eb3e856c5ad95a19c8d3d46d102f4ec752f Mon Sep 17 00:00:00 2001 From: Andrew Osmond Date: Thu, 24 Nov 2022 18:20:20 +0000 Subject: [PATCH 21/37] Bug 1798494. 
r=jfkthame Differential Revision: https://phabricator.services.mozilla.com/D162616 --- gfx/thebes/gfxUserFontSet.cpp | 131 +++++++++++++++++++++------------- gfx/thebes/gfxUserFontSet.h | 28 +++++--- layout/style/FontFaceImpl.cpp | 64 +++++++++++++++-- layout/style/FontFaceImpl.h | 30 ++++++-- 4 files changed, 183 insertions(+), 70 deletions(-) diff --git a/gfx/thebes/gfxUserFontSet.cpp b/gfx/thebes/gfxUserFontSet.cpp index a0981d991f0e..f9f12aeb1d6e 100644 --- a/gfx/thebes/gfxUserFontSet.cpp +++ b/gfx/thebes/gfxUserFontSet.cpp @@ -38,8 +38,8 @@ mozilla::LogModule* gfxUserFontSet::GetUserFontsLog() { static Atomic sFontSetGeneration(0); gfxUserFontEntry::gfxUserFontEntry( - gfxUserFontSet* aFontSet, const nsTArray& aFontFaceSrcList, - WeightRange aWeight, StretchRange aStretch, SlantStyleRange aStyle, + const nsTArray& aFontFaceSrcList, WeightRange aWeight, + StretchRange aStretch, SlantStyleRange aStyle, const nsTArray& aFeatureSettings, const nsTArray& aVariationSettings, uint32_t aLanguageOverride, gfxCharacterMap* aUnicodeRanges, @@ -52,8 +52,7 @@ gfxUserFontEntry::gfxUserFontEntry( mSeenLocalSource(false), mUnsupportedFormat(false), mFontDisplay(aFontDisplay), - mLoader(nullptr), - mFontSet(aFontSet) { + mLoader(nullptr) { mIsUserFontContainer = true; mSrcList = aFontFaceSrcList.Clone(); mCurrentSrcIndex = 0; @@ -410,6 +409,15 @@ static bool IgnorePrincipal(gfxFontSrcURI* aURI) { } void gfxUserFontEntry::DoLoadNextSrc(bool aForceAsync) { + RefPtr fontSet = GetUserFontSet(); + if (NS_WARN_IF(!fontSet)) { + LOG(("userfonts (%p) failed expired font set for (%s)\n", fontSet.get(), + mFamilyName.get())); + mFontDataLoadingState = LOADING_FAILED; + SetLoadState(STATUS_FAILED); + return; + } + uint32_t numSrc = mSrcList.Length(); // load each src entry in turn, until a local face is found @@ -425,7 +433,7 @@ void gfxUserFontEntry::DoLoadNextSrc(bool aForceAsync) { gfxFontEntry* fe = nullptr; if (!pfl->IsFontFamilyWhitelistActive()) { fe = 
gfxPlatform::GetPlatform()->LookupLocalFont( - mFontSet->GetPresContext(), currSrc.mLocalName, Weight(), Stretch(), + fontSet->GetPresContext(), currSrc.mLocalName, Weight(), Stretch(), SlantStyle()); // Note that we've attempted a local lookup, even if it failed, // as this means we are dependent on any updates to the font list. @@ -440,8 +448,8 @@ void gfxUserFontEntry::DoLoadNextSrc(bool aForceAsync) { } if (fe) { LOG(("userfonts (%p) [src %d] loaded local: (%s) for (%s) gen: %8.8x\n", - mFontSet, mCurrentSrcIndex, currSrc.mLocalName.get(), - mFamilyName.get(), uint32_t(mFontSet->mGeneration))); + fontSet.get(), mCurrentSrcIndex, currSrc.mLocalName.get(), + mFamilyName.get(), uint32_t(fontSet->mGeneration))); fe->mFeatureSettings.AppendElements(mFeatureSettings); fe->mVariationSettings.AppendElements(mVariationSettings); fe->mLanguageOverride = mLanguageOverride; @@ -462,8 +470,9 @@ void gfxUserFontEntry::DoLoadNextSrc(bool aForceAsync) { currSrc.mSourceType + 1); return; } else { - LOG(("userfonts (%p) [src %d] failed local: (%s) for (%s)\n", mFontSet, - mCurrentSrcIndex, currSrc.mLocalName.get(), mFamilyName.get())); + LOG(("userfonts (%p) [src %d] failed local: (%s) for (%s)\n", + fontSet.get(), mCurrentSrcIndex, currSrc.mLocalName.get(), + mFamilyName.get())); } } @@ -494,7 +503,7 @@ void gfxUserFontEntry::DoLoadNextSrc(bool aForceAsync) { LOG( ("userfonts (%p) [src %d] " "loaded uri from cache: (%s) for (%s)\n", - mFontSet, mCurrentSrcIndex, + fontSet.get(), mCurrentSrcIndex, currSrc.mURI->GetSpecOrDefault().get(), mFamilyName.get())); } return; @@ -510,7 +519,7 @@ void gfxUserFontEntry::DoLoadNextSrc(bool aForceAsync) { // record the principal we should use for the load for use when // creating a channel and when caching the loaded entry. 
- mPrincipal = currSrc.LoadPrincipal(*mFontSet); + mPrincipal = currSrc.LoadPrincipal(*fontSet); bool loadDoesntSpin = !aForceAsync && currSrc.mURI->SyncLoadIsOK(); @@ -520,7 +529,7 @@ void gfxUserFontEntry::DoLoadNextSrc(bool aForceAsync) { // sync load font immediately nsresult rv = - mFontSet->SyncLoadFontData(this, &currSrc, buffer, bufferLength); + fontSet->SyncLoadFontData(this, &currSrc, buffer, bufferLength); if (NS_SUCCEEDED(rv) && LoadPlatformFontSync(mCurrentSrcIndex, buffer, bufferLength)) { @@ -529,26 +538,26 @@ void gfxUserFontEntry::DoLoadNextSrc(bool aForceAsync) { currSrc.mSourceType + 1); return; } else { - mFontSet->LogMessage(this, mCurrentSrcIndex, "font load failed", - nsIScriptError::errorFlag, rv); + fontSet->LogMessage(this, mCurrentSrcIndex, "font load failed", + nsIScriptError::errorFlag, rv); } } else { // otherwise load font async - nsresult rv = mFontSet->StartLoad(this, mCurrentSrcIndex); + nsresult rv = fontSet->StartLoad(this, mCurrentSrcIndex); bool loadOK = NS_SUCCEEDED(rv); if (loadOK) { if (LOG_ENABLED()) { LOG(("userfonts (%p) [src %d] loading uri: (%s) for (%s)\n", - mFontSet, mCurrentSrcIndex, + fontSet.get(), mCurrentSrcIndex, currSrc.mURI->GetSpecOrDefault().get(), mFamilyName.get())); } return; } else { - mFontSet->LogMessage(this, mCurrentSrcIndex, - "failed to start download", - nsIScriptError::errorFlag, rv); + fontSet->LogMessage(this, mCurrentSrcIndex, + "failed to start download", + nsIScriptError::errorFlag, rv); } } } else { @@ -577,8 +586,8 @@ void gfxUserFontEntry::DoLoadNextSrc(bool aForceAsync) { currSrc.mSourceType + 1); return; } else { - mFontSet->LogMessage(this, mCurrentSrcIndex, "font load failed", - nsIScriptError::errorFlag); + fontSet->LogMessage(this, mCurrentSrcIndex, "font load failed", + nsIScriptError::errorFlag); } } @@ -586,12 +595,12 @@ void gfxUserFontEntry::DoLoadNextSrc(bool aForceAsync) { } if (mUnsupportedFormat) { - mFontSet->LogMessage(this, mCurrentSrcIndex, "no supported format found", 
- nsIScriptError::warningFlag); + fontSet->LogMessage(this, mCurrentSrcIndex, "no supported format found", + nsIScriptError::warningFlag); } // all src's failed; mark this entry as unusable (so fallback will occur) - LOG(("userfonts (%p) failed all src for (%s)\n", mFontSet, + LOG(("userfonts (%p) failed all src for (%s)\n", fontSet.get(), mFamilyName.get())); mFontDataLoadingState = LOADING_FAILED; SetLoadState(STATUS_FAILED); @@ -658,22 +667,27 @@ bool gfxUserFontEntry::LoadPlatformFont(uint32_t aSrcIndex, uint32_t aSanitizedLength, nsTArray&& aMessages) { MOZ_ASSERT(NS_IsMainThread()); + RefPtr fontSet = GetUserFontSet(); + if (NS_WARN_IF(!fontSet)) { + free((void*)aOriginalFontData); + return false; + } for (const auto& msg : aMessages) { - mFontSet->LogMessage(this, aSrcIndex, msg.mMessage.get(), - msg.mLevel > 0 ? nsIScriptError::warningFlag - : nsIScriptError::errorFlag); + fontSet->LogMessage(this, aSrcIndex, msg.mMessage.get(), + msg.mLevel > 0 ? nsIScriptError::warningFlag + : nsIScriptError::errorFlag); } if (!aSanitizedFontData) { - mFontSet->LogMessage(this, aSrcIndex, "rejected by sanitizer"); + fontSet->LogMessage(this, aSrcIndex, "rejected by sanitizer"); } else { // Check whether aSanitizedFontData is a known OpenType format; it might be // a TrueType Collection, which OTS would accept but we don't yet // know how to handle. If so, discard. 
if (gfxFontUtils::DetermineFontDataType( aSanitizedFontData, aSanitizedLength) != GFX_USERFONT_OPENTYPE) { - mFontSet->LogMessage(this, aSrcIndex, "not a supported OpenType format"); + fontSet->LogMessage(this, aSrcIndex, "not a supported OpenType format"); free((void*)aSanitizedFontData); aSanitizedFontData = nullptr; } @@ -721,7 +735,7 @@ bool gfxUserFontEntry::LoadPlatformFont(uint32_t aSrcIndex, mName, Weight(), Stretch(), SlantStyle(), aSanitizedFontData, aSanitizedLength); if (!fe) { - mFontSet->LogMessage(this, aSrcIndex, "not usable by platform"); + fontSet->LogMessage(this, aSrcIndex, "not usable by platform"); } } @@ -755,15 +769,15 @@ bool gfxUserFontEntry::LoadPlatformFont(uint32_t aSrcIndex, fe->mDescentOverride = mDescentOverride; fe->mLineGapOverride = mLineGapOverride; fe->mSizeAdjust = mSizeAdjust; - StoreUserFontData(fe, aSrcIndex, mFontSet->GetPrivateBrowsing(), + StoreUserFontData(fe, aSrcIndex, fontSet->GetPrivateBrowsing(), originalFullName, &metadata, metaOrigLen, compression); if (LOG_ENABLED()) { LOG(( "userfonts (%p) [src %d] loaded uri: (%s) for (%s) " "(%p) gen: %8.8x compress: %d%%\n", - mFontSet, aSrcIndex, + fontSet.get(), aSrcIndex, mSrcList[aSrcIndex].mURI->GetSpecOrDefault().get(), mFamilyName.get(), - this, uint32_t(mFontSet->mGeneration), fontCompressionRatio)); + this, uint32_t(fontSet->mGeneration), fontCompressionRatio)); } mPlatformFontEntry = fe; SetLoadState(STATUS_LOADED); @@ -773,7 +787,7 @@ bool gfxUserFontEntry::LoadPlatformFont(uint32_t aSrcIndex, LOG( ("userfonts (%p) [src %d] failed uri: (%s) for (%s)" " error making platform font\n", - mFontSet, aSrcIndex, + fontSet.get(), aSrcIndex, mSrcList[aSrcIndex].mURI->GetSpecOrDefault().get(), mFamilyName.get())); } @@ -830,14 +844,17 @@ void gfxUserFontEntry::FontDataDownloadComplete( return; } - // download failed or font-display timeout passed - if (mFontDataLoadingState == LOADING_TIMED_OUT) { - mFontSet->LogMessage(this, aSrcIndex, - "font-display timeout, webfont not 
used", - nsIScriptError::infoFlag, aDownloadStatus); - } else { - mFontSet->LogMessage(this, aSrcIndex, "download failed", - nsIScriptError::errorFlag, aDownloadStatus); + RefPtr fontSet = GetUserFontSet(); + if (fontSet) { + // download failed or font-display timeout passed + if (mFontDataLoadingState == LOADING_TIMED_OUT) { + fontSet->LogMessage(this, aSrcIndex, + "font-display timeout, webfont not used", + nsIScriptError::infoFlag, aDownloadStatus); + } else { + fontSet->LogMessage(this, aSrcIndex, "download failed", + nsIScriptError::errorFlag, aDownloadStatus); + } } if (aFontData) { @@ -860,8 +877,12 @@ void gfxUserFontEntry::LoadPlatformFontAsync( // We hold a strong reference to the gfxUserFontSet during this work, since // the document might be closed while we are OMT, and release it at the end // of ContinuePlatformFontLoadOnMainThread. + // + // If the set has already been freed, then the loading will fail when we + // resume on the main thread. - mFontSet->AddRef(); + MOZ_ASSERT(!mLoadingFontSet); + mLoadingFontSet = GetUserFontSet(); nsCOMPtr event = NewRunnableMethodRelease(); // for the AddRef in LoadPlatformFontAsync + // Set in LoadPlatformFontAsync. If it is null, then the font set should have + // already been freed and we would not succeed in loading the font. 
+ MOZ_ASSERT_IF(loaded, mLoadingFontSet); + mLoadingFontSet = nullptr; } void gfxUserFontEntry::FontLoadFailed(nsIFontLoadCompleteCallback* aCallback) { @@ -919,7 +943,10 @@ void gfxUserFontEntry::FontLoadFailed(nsIFontLoadCompleteCallback* aCallback) { void gfxUserFontEntry::GetUserFontSets( nsTArray>& aResult) { aResult.Clear(); - aResult.AppendElement(mFontSet); + RefPtr fontSet = GetUserFontSet(); + if (fontSet) { + aResult.AppendElement(std::move(fontSet)); + } } gfxUserFontSet::gfxUserFontSet() @@ -1260,19 +1287,23 @@ void gfxUserFontSet::UserFontCache::ForgetFont(gfxFontEntry* aFontEntry) { gfxFontEntry* gfxUserFontSet::UserFontCache::GetFont( const gfxFontFaceSrc& aSrc, const gfxUserFontEntry& aUserFontEntry) { - if (!sUserFonts || aUserFontEntry.mFontSet->BypassCache() || + if (!sUserFonts || Preferences::GetBool("gfx.downloadable_fonts.disable_cache")) { return nullptr; } + RefPtr srcFontSet = aUserFontEntry.GetUserFontSet(); + if (NS_WARN_IF(!srcFontSet) || srcFontSet->BypassCache()) { + return nullptr; + } + // Ignore principal when looking up a data: URI. RefPtr principal = - IgnorePrincipal(aSrc.mURI) ? nullptr - : aSrc.LoadPrincipal(*aUserFontEntry.mFontSet); + IgnorePrincipal(aSrc.mURI) ? nullptr : aSrc.LoadPrincipal(*srcFontSet); Entry* entry = sUserFonts->GetEntry( Key(aSrc.mURI, principal, const_cast(&aUserFontEntry), - aUserFontEntry.mFontSet->GetPrivateBrowsing())); + srcFontSet->GetPrivateBrowsing())); if (!entry) { return nullptr; } @@ -1280,7 +1311,7 @@ gfxFontEntry* gfxUserFontSet::UserFontCache::GetFont( // We have to perform another content policy check here to prevent // cache poisoning. E.g. a.com loads a font into the cache but // b.com has a CSP not allowing any fonts to be loaded. 
- if (!aUserFontEntry.mFontSet->IsFontLoadAllowed(aSrc)) { + if (!srcFontSet->IsFontLoadAllowed(aSrc)) { return nullptr; } diff --git a/gfx/thebes/gfxUserFontSet.h b/gfx/thebes/gfxUserFontSet.h index 43b5063b78e7..c7b12c002768 100644 --- a/gfx/thebes/gfxUserFontSet.h +++ b/gfx/thebes/gfxUserFontSet.h @@ -56,7 +56,7 @@ enum class StyleFontDisplay : uint8_t; } // namespace mozilla class nsFontFaceLoader; -//#define DEBUG_USERFONT_CACHE +// #define DEBUG_USERFONT_CACHE class gfxFontFaceBufferSource { NS_INLINE_DECL_THREADSAFE_REFCOUNTING(gfxFontFaceBufferSource) @@ -561,7 +561,6 @@ class gfxUserFontEntry : public gfxFontEntry { }; gfxUserFontEntry( - gfxUserFontSet* aFontSet, const nsTArray& aFontFaceSrcList, WeightRange aWeight, StretchRange aStretch, SlantStyleRange aStyle, const nsTArray& aFeatureSettings, @@ -603,6 +602,8 @@ class gfxUserFontEntry : public gfxFontEntry { UserFontLoadState LoadState() const { return mUserFontLoadState; } void LoadCanceled() { + MOZ_ASSERT(NS_IsMainThread()); + mUserFontLoadState = STATUS_NOT_LOADED; mFontDataLoadingState = NOT_LOADING; mLoader = nullptr; @@ -647,8 +648,16 @@ class gfxUserFontEntry : public gfxFontEntry { // methods to expose some information to FontFaceSet::UserFontSet // since we can't make that class a friend - void SetLoader(nsFontFaceLoader* aLoader) { mLoader = aLoader; } - nsFontFaceLoader* GetLoader() const { return mLoader; } + void SetLoader(nsFontFaceLoader* aLoader) { + MOZ_ASSERT(NS_IsMainThread()); + mLoader = aLoader; + } + + nsFontFaceLoader* GetLoader() const { + MOZ_ASSERT(NS_IsMainThread()); + return mLoader; + } + gfxFontSrcPrincipal* GetPrincipal() const { return mPrincipal; } void GetFamilyNameAndURIForLogging(uint32_t aSrcIndex, nsACString& aFamilyName, nsACString& aURI); @@ -658,9 +667,7 @@ class gfxUserFontEntry : public gfxFontEntry { return nullptr; } -#ifdef DEBUG - gfxUserFontSet* GetUserFontSet() const { return mFontSet; } -#endif + virtual already_AddRefed GetUserFontSet() const = 0; 
const nsTArray& SourceList() const { return mSrcList; } @@ -752,8 +759,8 @@ class gfxUserFontEntry : public gfxFontEntry { uint32_t aMetaOrigLen, uint8_t aCompression); // Clears and then adds to aResult all of the user font sets that this user - // font entry has been added to. This will at least include mFontSet, the - // owner of this user font entry. + // font entry has been added to. This will at least include the owner of this + // user font entry. virtual void GetUserFontSets(nsTArray>& aResult); // Calls IncrementGeneration() on all user font sets that contain this @@ -789,8 +796,7 @@ class gfxUserFontEntry : public gfxFontEntry { // Cancel() methods of nsFontFaceLoader this reference is nulled out. nsFontFaceLoader* MOZ_NON_OWNING_REF mLoader; // current loader for this entry, if any - gfxUserFontSet* MOZ_NON_OWNING_REF - mFontSet; // font-set which owns this userfont entry + RefPtr mLoadingFontSet; RefPtr mPrincipal; }; diff --git a/layout/style/FontFaceImpl.cpp b/layout/style/FontFaceImpl.cpp index 87686e11f1b8..7b9d44bd73a1 100644 --- a/layout/style/FontFaceImpl.cpp +++ b/layout/style/FontFaceImpl.cpp @@ -519,14 +519,12 @@ void FontFaceImpl::SetUserFontEntry(gfxUserFontEntry* aEntry) { } if (mUserFontEntry) { - MutexAutoLock lock(mUserFontEntry->mMutex); - mUserFontEntry->mFontFaces.RemoveElement(this); + mUserFontEntry->RemoveFontFace(this); } auto* entry = static_cast(aEntry); if (entry) { - MutexAutoLock lock(entry->mMutex); - entry->mFontFaces.AppendElement(this); + entry->AddFontFace(this); } mUserFontEntry = entry; @@ -535,7 +533,7 @@ void FontFaceImpl::SetUserFontEntry(gfxUserFontEntry* aEntry) { return; } - MOZ_ASSERT(mUserFontEntry->GetUserFontSet() == mFontFaceSet, + MOZ_ASSERT(mUserFontEntry->HasUserFontSet(mFontFaceSet), "user font entry must be associated with the same user font set " "as the FontFace"); @@ -700,6 +698,11 @@ void FontFaceImpl::RemoveFontFaceSet(FontFaceSetImpl* aFontFaceSet) { } else { 
mOtherFontFaceSets.RemoveElement(aFontFaceSet); } + + // The caller should be holding a strong reference to the FontFaceSetImpl. + if (mUserFontEntry) { + mUserFontEntry->CheckUserFontSet(); + } } gfxCharacterMap* FontFaceImpl::GetUnicodeRangeAsCharacterMap() { @@ -766,6 +769,11 @@ void FontFaceImpl::Entry::GetUserFontSets( MutexAutoLock lock(mMutex); aResult.Clear(); + + if (mFontSet) { + aResult.AppendElement(mFontSet); + } + for (FontFaceImpl* f : mFontFaces) { if (f->mInFontFaceSet) { aResult.AppendElement(f->mFontFaceSet); @@ -781,6 +789,40 @@ void FontFaceImpl::Entry::GetUserFontSets( aResult.TruncateLength(it - aResult.begin()); } +/* virtual */ already_AddRefed +FontFaceImpl::Entry::GetUserFontSet() const { + MutexAutoLock lock(mMutex); + if (mFontSet) { + return do_AddRef(mFontSet); + } + if (NS_IsMainThread() && mLoadingFontSet) { + return do_AddRef(mLoadingFontSet); + } + return nullptr; +} + +void FontFaceImpl::Entry::CheckUserFontSetLocked() { + // If this is the last font containing a strong reference to the set, we need + // to clear the reference as there is no longer anything guaranteeing the set + // will be kept alive. + if (mFontSet) { + auto* set = static_cast(mFontSet); + for (FontFaceImpl* f : mFontFaces) { + if (f->mFontFaceSet == set || f->mOtherFontFaceSets.Contains(set)) { + return; + } + } + } + + // If possible, promote the most recently added FontFace and its owning + // FontFaceSetImpl as the primary set. 
+ if (!mFontFaces.IsEmpty()) { + mFontSet = mFontFaces.LastElement()->mFontFaceSet; + } else { + mFontSet = nullptr; + } +} + void FontFaceImpl::Entry::FindFontFaceOwners(nsTHashSet& aOwners) { MutexAutoLock lock(mMutex); for (FontFaceImpl* f : mFontFaces) { @@ -790,5 +832,17 @@ void FontFaceImpl::Entry::FindFontFaceOwners(nsTHashSet& aOwners) { } } +void FontFaceImpl::Entry::AddFontFace(FontFaceImpl* aFontFace) { + MutexAutoLock lock(mMutex); + mFontFaces.AppendElement(aFontFace); + CheckUserFontSetLocked(); +} + +void FontFaceImpl::Entry::RemoveFontFace(FontFaceImpl* aFontFace) { + MutexAutoLock lock(mMutex); + mFontFaces.RemoveElement(aFontFace); + CheckUserFontSetLocked(); +} + } // namespace dom } // namespace mozilla diff --git a/layout/style/FontFaceImpl.h b/layout/style/FontFaceImpl.h index bb97a46cc1ec..eb5bd52395c6 100644 --- a/layout/style/FontFaceImpl.h +++ b/layout/style/FontFaceImpl.h @@ -56,19 +56,41 @@ class FontFaceImpl final { StyleFontDisplay aFontDisplay, RangeFlags aRangeFlags, float aAscentOverride, float aDescentOverride, float aLineGapOverride, float aSizeAdjust) - : gfxUserFontEntry(aFontSet, aFontFaceSrcList, aWeight, aStretch, - aStyle, aFeatureSettings, aVariationSettings, + : gfxUserFontEntry(aFontFaceSrcList, aWeight, aStretch, aStyle, + aFeatureSettings, aVariationSettings, aLanguageOverride, aUnicodeRanges, aFontDisplay, aRangeFlags, aAscentOverride, aDescentOverride, aLineGapOverride, aSizeAdjust), - mMutex("FontFaceImpl::Entry::mMutex") {} + mMutex("FontFaceImpl::Entry::mMutex"), + mFontSet(aFontSet) {} void SetLoadState(UserFontLoadState aLoadState) override; void GetUserFontSets(nsTArray>& aResult) override; + already_AddRefed GetUserFontSet() const override; + + void CheckUserFontSet() { + MutexAutoLock lock(mMutex); + CheckUserFontSetLocked(); + } + +#ifdef DEBUG + bool HasUserFontSet(gfxUserFontSet* aFontSet) const { + MutexAutoLock lock(mMutex); + return mFontSet == aFontSet; + } +#endif + + void AddFontFace(FontFaceImpl* 
aOwner); + void RemoveFontFace(FontFaceImpl* aOwner); void FindFontFaceOwners(nsTHashSet& aOwners); protected: - Mutex mMutex; + void CheckUserFontSetLocked() MOZ_REQUIRES(mMutex); + + mutable Mutex mMutex; + + // Font set which owns this entry; + gfxUserFontSet* MOZ_NON_OWNING_REF mFontSet; // The FontFace objects that use this user font entry. We need to store // an array of these, not just a single pointer, since the user font From bc3e16afed1059d12733051293b46c4c3be20ab0 Mon Sep 17 00:00:00 2001 From: Mike Conley Date: Thu, 24 Nov 2022 18:51:18 +0000 Subject: [PATCH 22/37] Bug 1800967 - Update internal OverflowableToolbar state when unpinning an extension button from the toolbar when overflowed. r=willdurand,Gijs We have some internal bookkeeping within OverflowableToolbar to remember the state of things that have overflowed, like how wide the window needs to be before they can be moved back, etc. When an item is removed from an overflowable toolbar while overflowed, we update that internal bookkeeping so that OverflowableToolbar doesn't accidentally try to move those items back into the toolbar when the window becomes wide enough again. We've added a new overflow list for extension buttons, but we weren't updating our internal accounting when items had been overflowed into that list. This patch fixes that. 
Differential Revision: https://phabricator.services.mozilla.com/D162434 --- .../customizableui/CustomizableUI.jsm | 55 +++++++++++-------- ...unified_extensions_overflowable_toolbar.js | 44 ++++++++++++++- 2 files changed, 74 insertions(+), 25 deletions(-) diff --git a/browser/components/customizableui/CustomizableUI.jsm b/browser/components/customizableui/CustomizableUI.jsm index b3f76d8acfbf..aa809760c503 100644 --- a/browser/components/customizableui/CustomizableUI.jsm +++ b/browser/components/customizableui/CustomizableUI.jsm @@ -5614,7 +5614,8 @@ class OverflowableToolbar { /** * Allows callers to query for the current parent of a toolbar item that may - * or may not be overflowed. That parent will either be #defaultList or #target. + * or may not be overflowed. That parent will either be #defaultList, + * #webExtList (if it's an extension button) or #target. * * Note: It is assumed that the caller has verified that aNode is placed * within the toolbar customizable area according to CustomizableUI. @@ -5625,7 +5626,9 @@ class OverflowableToolbar { */ getContainerFor(aNode) { if (aNode.getAttribute("overflowedItem") == "true") { - return this.#defaultList; + return CustomizableUI.isWebExtensionWidget(aNode.id) + ? this.#webExtList + : this.#defaultList; } return this.#target; } @@ -6003,6 +6006,17 @@ class OverflowableToolbar { return this.#webExtList; } + /** + * Returns true if aNode is not null and is one of either this.#webExtList or + * this.#defaultList. + * + * @param {DOMElement} aNode The node to test. + * @returns {boolean} + */ + #isOverflowList(aNode) { + return aNode && (aNode == this.#defaultList || aNode == this.#webExtList); + } + /** * Private event handlers start here. */ @@ -6075,30 +6089,25 @@ class OverflowableToolbar { // moved or removed from an area via the CustomizableUI API while // overflowed. It reorganizes the internal state of this OverflowableToolbar // to handle that change. 
- if ( - !this.#enabled || - (aContainer != this.#target && aContainer != this.#defaultList) - ) { + if (!this.#enabled || !this.#isOverflowList(aContainer)) { return; } // When we (re)move an item, update all the items that come after it in the list // with the minsize *of the item before the to-be-removed node*. This way, we // ensure that we try to move items back as soon as that's possible. - if (aNode.parentNode == this.#defaultList) { - let updatedMinSize; - if (aNode.previousElementSibling) { - updatedMinSize = this.#overflowedInfo.get( - aNode.previousElementSibling.id - ); - } else { - // Force (these) items to try to flow back into the bar: - updatedMinSize = 1; - } - let nextItem = aNode.nextElementSibling; - while (nextItem) { - this.#overflowedInfo.set(nextItem.id, updatedMinSize); - nextItem = nextItem.nextElementSibling; - } + let updatedMinSize; + if (aNode.previousElementSibling) { + updatedMinSize = this.#overflowedInfo.get( + aNode.previousElementSibling.id + ); + } else { + // Force (these) items to try to flow back into the bar: + updatedMinSize = 1; + } + let nextItem = aNode.nextElementSibling; + while (nextItem) { + this.#overflowedInfo.set(nextItem.id, updatedMinSize); + nextItem = nextItem.nextElementSibling; } } @@ -6109,12 +6118,12 @@ class OverflowableToolbar { // causes overflow or underflow of the toolbar. if ( !this.#enabled || - (aContainer != this.#target && aContainer != this.#defaultList) + (aContainer != this.#target && !this.#isOverflowList(aContainer)) ) { return; } - let nowOverflowed = aNode.parentNode == this.#defaultList; + let nowOverflowed = this.#isOverflowList(aNode.parentNode); let wasOverflowed = this.#overflowedInfo.has(aNode.id); // If this wasn't overflowed before... 
diff --git a/browser/components/extensions/test/browser/browser_unified_extensions_overflowable_toolbar.js b/browser/components/extensions/test/browser/browser_unified_extensions_overflowable_toolbar.js index 662d25df9d61..f89553613b02 100644 --- a/browser/components/extensions/test/browser/browser_unified_extensions_overflowable_toolbar.js +++ b/browser/components/extensions/test/browser/browser_unified_extensions_overflowable_toolbar.js @@ -10,6 +10,10 @@ loadTestSubscript("head_unified_extensions.js"); +const { ExtensionCommon } = ChromeUtils.import( + "resource://gre/modules/ExtensionCommon.jsm" +); + const NUM_EXTENSIONS = 5; const OVERFLOW_WINDOW_WIDTH_PX = 450; const DEFAULT_WIDGET_IDS = [ @@ -85,11 +89,13 @@ function getVisibleMenuItems(popup) { * {Element} unifiedExtensionList: The DOM element that holds overflowed * WebExtension browser_actions when Unified Extensions is enabled. * {string[]} extensionIDs: The IDs of the test WebExtensions. + * @param {Function} afterUnderflowFn An optional async function that will be run + * once the toolbar underflows, before the extensions are removed. * * The function is expected to return a Promise that does not resolve * with anything. 
*/ -async function withWindowOverflowed(win, taskFn) { +async function withWindowOverflowed(win, taskFn, afterUnderflowFn) { const doc = win.document; doc.documentElement.removeAttribute("persist"); const navbar = doc.getElementById(CustomizableUI.AREA_NAVBAR); @@ -277,7 +283,13 @@ async function withWindowOverflowed(win, taskFn) { .hasAttribute("overflowedItem"); }); - await Promise.all(extensions.map(extension => extension.unload())); + try { + if (afterUnderflowFn) { + await afterUnderflowFn(); + } + } finally { + await Promise.all(extensions.map(extension => extension.unload())); + } } } @@ -394,6 +406,7 @@ async function verifyExtensionWidget(win, widget, unifiedExtensionsEnabled) { */ add_task(async function test_overflowable_toolbar() { let win = await promiseEnableUnifiedExtensions(); + let movedNode; await withWindowOverflowed( win, @@ -423,6 +436,33 @@ add_task(async function test_overflowable_toolbar() { ); await verifyExtensionWidget(win, child, true); } + + let extensionWidgetID = `${ExtensionCommon.makeWidgetId( + extensionIDs.at(-1) + )}-browser-action`; + movedNode = CustomizableUI.getWidget(extensionWidgetID).forWindow(win) + .node; + Assert.equal(movedNode.getAttribute("cui-areatype"), "toolbar"); + + CustomizableUI.addWidgetToArea( + extensionWidgetID, + CustomizableUI.AREA_ADDONS + ); + + Assert.equal( + movedNode.getAttribute("cui-areatype"), + "panel", + "The moved browser action button should have the right cui-areatype set." + ); + }, + async () => { + // Ensure that the moved node's parent is still the add-ons panel. 
+ Assert.equal( + movedNode.parentElement.id, + CustomizableUI.AREA_ADDONS, + "The browser action should still be in the addons panel" + ); + CustomizableUI.addWidgetToArea(movedNode.id, CustomizableUI.AREA_NAVBAR); } ); From 5c458d3f99e99f4a7c62d5e45e33d72163b6e1f2 Mon Sep 17 00:00:00 2001 From: Mike Conley Date: Thu, 24 Nov 2022 18:51:18 +0000 Subject: [PATCH 23/37] Bug 1798377 - Don't display the overflow button unless there's at least 1 item overflowed with non-zero width. r=Gijs Differential Revision: https://phabricator.services.mozilla.com/D162576 --- .../customizableui/CustomizableUI.jsm | 23 +++- .../customizableui/test/browser.ini | 1 + .../test/browser_hidden_widget_overflow.js | 122 ++++++++++++++++++ 3 files changed, 143 insertions(+), 3 deletions(-) create mode 100644 browser/components/customizableui/test/browser_hidden_widget_overflow.js diff --git a/browser/components/customizableui/CustomizableUI.jsm b/browser/components/customizableui/CustomizableUI.jsm index aa809760c503..1aa07872b33a 100644 --- a/browser/components/customizableui/CustomizableUI.jsm +++ b/browser/components/customizableui/CustomizableUI.jsm @@ -5267,6 +5267,11 @@ class OverflowableToolbar { */ #overflowedInfo = new Map(); + /** + * The set of overflowed DOM nodes that were hidden at the time of overflowing. + */ + #hiddenOverflowedNodes = new WeakSet(); + /** * True if the overflowable toolbar is actively handling overflows and * underflows. 
This value is set internally by the private #enable() and @@ -5668,6 +5673,13 @@ class OverflowableToolbar { if (child.getAttribute("overflows") != "false") { this.#overflowedInfo.set(child.id, targetContentWidth); + let { width: childWidth } = win.windowUtils.getBoundsWithoutFlushing( + child + ); + if (!childWidth) { + this.#hiddenOverflowedNodes.add(child); + } + child.setAttribute("overflowedItem", true); CustomizableUIInternal.ensureButtonContextMenu( child, @@ -5693,7 +5705,7 @@ class OverflowableToolbar { child, this.#defaultList.firstElementChild ); - if (!CustomizableUI.isSpecialWidget(child.id)) { + if (!CustomizableUI.isSpecialWidget(child.id) && childWidth) { this.#toolbar.setAttribute("overflowing", "true"); } } @@ -5889,8 +5901,13 @@ class OverflowableToolbar { win.UpdateUrlbarSearchSplitterState(); let defaultListItems = Array.from(this.#defaultList.children); - let collapsedWidgetIds = defaultListItems.map(item => item.id); - if (collapsedWidgetIds.every(w => CustomizableUI.isSpecialWidget(w))) { + if ( + defaultListItems.every( + item => + CustomizableUI.isSpecialWidget(item.id) || + this.#hiddenOverflowedNodes.has(item) + ) + ) { this.#toolbar.removeAttribute("overflowing"); } } diff --git a/browser/components/customizableui/test/browser.ini b/browser/components/customizableui/test/browser.ini index 299f8855c373..5a132dcc1a1f 100644 --- a/browser/components/customizableui/test/browser.ini +++ b/browser/components/customizableui/test/browser.ini @@ -141,6 +141,7 @@ tags = overflowable-toolbar https_first_disabled = true [browser_flexible_space_area.js] [browser_help_panel_cloning.js] +[browser_hidden_widget_overflow.js] [browser_history_after_appMenu.js] [browser_history_recently_closed.js] [browser_history_recently_closed_middleclick.js] diff --git a/browser/components/customizableui/test/browser_hidden_widget_overflow.js b/browser/components/customizableui/test/browser_hidden_widget_overflow.js new file mode 100644 index 
000000000000..c4adced590be --- /dev/null +++ b/browser/components/customizableui/test/browser_hidden_widget_overflow.js @@ -0,0 +1,122 @@ +/* Any copyright is dedicated to the Public Domain. + http://creativecommons.org/publicdomain/zero/1.0/ */ + +"use strict"; + +/** + * Tests that if only hidden widgets are overflowed that the + * OverflowableToolbar won't show the overflow panel anchor. + */ + +const kHiddenButtonID = "fake-hidden-button"; +const kDisplayNoneButtonID = "display-none-button"; +const kWebExtensionButtonID1 = "fake-webextension-button-1"; +const kWebExtensionButtonID2 = "fake-webextension-button-2"; +let gWin = null; + +add_setup(async function() { + await SpecialPowers.pushPrefEnv({ + set: [["extensions.unifiedExtensions.enabled", true]], + }); + + gWin = await BrowserTestUtils.openNewBrowserWindow(); + + // To make it easier to write a test where we can control overflowing + // for a test that can run in a bunch of environments with slightly + // different rules on when things will overflow, we'll go ahead and + // just remove everything removable from the nav-bar by default. Then + // we'll add our hidden item, and a single WebExtension item, and + // force toolbar overflow. + let widgetIDs = CustomizableUI.getWidgetIdsInArea(CustomizableUI.AREA_NAVBAR); + for (let widgetID of widgetIDs) { + if (CustomizableUI.isWidgetRemovable(widgetID)) { + CustomizableUI.removeWidgetFromArea(widgetID); + } + } + + CustomizableUI.createWidget({ + id: kWebExtensionButtonID1, + label: "Test WebExtension widget 1", + defaultArea: CustomizableUI.AREA_NAVBAR, + webExtension: true, + }); + + CustomizableUI.createWidget({ + id: kWebExtensionButtonID2, + label: "Test WebExtension widget 2", + defaultArea: CustomizableUI.AREA_NAVBAR, + webExtension: true, + }); + + // Let's force the WebExtension widgets to be significantly wider. This + // just makes it easier to ensure that both of these (which are to the left + // of the hidden widget) get overflowed. 
+ for (let webExtID of [kWebExtensionButtonID1, kWebExtensionButtonID2]) { + let webExtNode = CustomizableUI.getWidget(webExtID).forWindow(gWin).node; + webExtNode.style.width = "100px"; + } + + CustomizableUI.createWidget({ + id: kHiddenButtonID, + label: "Test hidden=true widget", + defaultArea: CustomizableUI.AREA_NAVBAR, + }); + + // Now hide the button with hidden=true so that it has no dimensions. + let hiddenButtonNode = CustomizableUI.getWidget(kHiddenButtonID).forWindow( + gWin + ).node; + hiddenButtonNode.hidden = true; + + CustomizableUI.createWidget({ + id: kDisplayNoneButtonID, + label: "Test display:none widget", + defaultArea: CustomizableUI.AREA_NAVBAR, + }); + + // Now hide the button with display: none so that it has no dimensions. + let displayNoneButtonNode = CustomizableUI.getWidget( + kDisplayNoneButtonID + ).forWindow(gWin).node; + displayNoneButtonNode.style.display = "none"; + + registerCleanupFunction(async () => { + CustomizableUI.destroyWidget(kWebExtensionButtonID1); + CustomizableUI.destroyWidget(kWebExtensionButtonID2); + CustomizableUI.destroyWidget(kHiddenButtonID); + CustomizableUI.destroyWidget(kDisplayNoneButtonID); + await BrowserTestUtils.closeWindow(gWin); + await CustomizableUI.reset(); + }); +}); + +add_task(async function test_hidden_widget_overflow() { + gWin.resizeTo(kForceOverflowWidthPx, window.outerHeight); + + // Wait until the left-most fake WebExtension button is overflowing. + let webExtNode = CustomizableUI.getWidget(kWebExtensionButtonID1).forWindow( + gWin + ).node; + await BrowserTestUtils.waitForMutationCondition( + webExtNode, + { attributes: true }, + () => { + return webExtNode.hasAttribute("overflowedItem"); + } + ); + + let hiddenButtonNode = CustomizableUI.getWidget(kHiddenButtonID).forWindow( + gWin + ).node; + Assert.ok( + hiddenButtonNode.hasAttribute("overflowedItem"), + "Hidden button should be overflowed." 
+ ); + + let overflowButton = gWin.document.getElementById("nav-bar-overflow-button"); + + Assert.ok( + !BrowserTestUtils.is_visible(overflowButton), + "Overflow panel button should be hidden." + ); +}); From 62b8b0976b94e7bbf30a7b9f3cc7d4f3b10e2704 Mon Sep 17 00:00:00 2001 From: Mike Conley Date: Thu, 24 Nov 2022 18:51:19 +0000 Subject: [PATCH 24/37] Bug 1801678 - Refactor OverflowableToolbar to turn #webExtList into a memoizing getter. r=Gijs The WebExtension overflow list is created lazily because the panel itself is lazy, and only gets inserted the first time it is needed. Originally, I had written a separate function `#getWebExtList()` to do the work of forcing the WebExtension overflow list to de-lazify, and then hold a reference to it in #webExtList. It turns out, that's a bit of a footgun. More than once, I've used #webExtList directly, forgetting that the value might turn out to be `null` because the list hasn't delazified yet. In order to smooth out that rough edge, this patch makes `#webExtList` a memoizing (evalutes once, caches the return value for subsequent calls) getter so that anything that uses it doesn't have to worry about the lazification. The reference member is now called #webExtListRef, and I've added documentation about how it's probably best not to use it directly. 
Differential Revision: https://phabricator.services.mozilla.com/D162585 --- .../customizableui/CustomizableUI.jsm | 29 +++++++++++-------- .../customizableui/test/browser.ini | 2 ++ .../components/customizableui/test/head.js | 2 ++ 3 files changed, 21 insertions(+), 12 deletions(-) diff --git a/browser/components/customizableui/CustomizableUI.jsm b/browser/components/customizableui/CustomizableUI.jsm index 1aa07872b33a..043f6296622d 100644 --- a/browser/components/customizableui/CustomizableUI.jsm +++ b/browser/components/customizableui/CustomizableUI.jsm @@ -5308,11 +5308,12 @@ class OverflowableToolbar { /** * A reference to the the element that overflowed extension browser action * toolbar items will be appended to as children upon overflow if the - * Unified Extension UI is enabled. + * Unified Extension UI is enabled. This is created lazily and might be null, + * so you should use the #webExtList memoizing getter instead to get this. * - * @type {Element} + * @type {Element|null} */ - #webExtList = null; + #webExtListRef = null; /** * An empty object that is created in #checkOverflow to identify individual @@ -5665,7 +5666,7 @@ class OverflowableToolbar { return; } - let webExtList = this.#getWebExtList(); + let webExtList = this.#webExtList; let child = this.#target.lastElementChild; while (child && isOverflowing) { @@ -6011,16 +6012,20 @@ class OverflowableToolbar { * buttons should go to if the Unified Extensions UI is enabled, or null * if no such list exists. 
*/ - #getWebExtList() { - if (!this.#webExtList) { + get #webExtList() { + if (!this.#webExtListRef) { let targetID = this.#toolbar.getAttribute("addon-webext-overflowtarget"); - if (targetID) { - let win = this.#toolbar.ownerGlobal; - let { panel } = win.gUnifiedExtensions; - this.#webExtList = panel.querySelector(`#${targetID}`); + if (!targetID) { + throw new Error( + "addon-webext-overflowtarget was not defined on the " + + `overflowable toolbar with id: ${this.#toolbar.id}` + ); } + let win = this.#toolbar.ownerGlobal; + let { panel } = win.gUnifiedExtensions; + this.#webExtListRef = panel.querySelector(`#${targetID}`); } - return this.#webExtList; + return this.#webExtListRef; } /** @@ -6031,7 +6036,7 @@ class OverflowableToolbar { * @returns {boolean} */ #isOverflowList(aNode) { - return aNode && (aNode == this.#defaultList || aNode == this.#webExtList); + return aNode == this.#defaultList || aNode == this.#webExtList; } /** diff --git a/browser/components/customizableui/test/browser.ini b/browser/components/customizableui/test/browser.ini index 5a132dcc1a1f..1c1d856e7af6 100644 --- a/browser/components/customizableui/test/browser.ini +++ b/browser/components/customizableui/test/browser.ini @@ -83,10 +83,12 @@ tags = overflowable-toolbar skip-if = verify [browser_972267_customizationchange_events.js] [browser_976792_insertNodeInWindow.js] +tags = overflowable-toolbar skip-if = os == "linux" [browser_978084_dragEnd_after_move.js] skip-if = verify [browser_980155_add_overflow_toolbar.js] +tags = overflowable-toolbar skip-if = verify [browser_981305_separator_insertion.js] [browser_981418-widget-onbeforecreated-handler.js] diff --git a/browser/components/customizableui/test/head.js b/browser/components/customizableui/test/head.js index 9d5c52a41334..3f606abe6af3 100644 --- a/browser/components/customizableui/test/head.js +++ b/browser/components/customizableui/test/head.js @@ -94,6 +94,8 @@ function createOverflowableToolbarWithPlacements(id, placements) { 
tb.setAttribute("default-overflowpanel", overflowPanel.id); tb.setAttribute("default-overflowtarget", overflowList.id); tb.setAttribute("default-overflowbutton", chevron.id); + tb.setAttribute("addon-webext-overflowbutton", "unified-extensions-button"); + tb.setAttribute("addon-webext-overflowtarget", "overflowed-extensions-list"); gNavToolbox.appendChild(tb); CustomizableUI.registerToolbarNode(tb); From 1dea2f808d1792d1823fe65acd868cba3daacf3b Mon Sep 17 00:00:00 2001 From: Mike Conley Date: Thu, 24 Nov 2022 18:51:19 +0000 Subject: [PATCH 25/37] Bug 1801678 - Compute the right insertion point for a browser action widget when the toolbar is overflowed. r=Gijs We have a function, OverflowableToolbar.findOverflowedInsertionPoints, whose job it is to compute the right insertion point for a widget if the widget is being inserted into an overflowable toolbar. That function failed to account for the possibility that the widget was a browser action, and would return the #defaultList instead of the #webExtList if the item was overflowing. This patch checks the ID of the widget first, and then supplies the correct list. Differential Revision: https://phabricator.services.mozilla.com/D162586 --- browser/components/customizableui/CustomizableUI.jsm | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/browser/components/customizableui/CustomizableUI.jsm b/browser/components/customizableui/CustomizableUI.jsm index 043f6296622d..d43758e27586 100644 --- a/browser/components/customizableui/CustomizableUI.jsm +++ b/browser/components/customizableui/CustomizableUI.jsm @@ -5611,9 +5611,13 @@ class OverflowableToolbar { } } + let overflowList = CustomizableUI.isWebExtensionWidget(aNode.id) + ? this.#webExtList + : this.#defaultList; + let containerForAppending = this.#overflowedInfo.size && newNodeCanOverflow - ? this.#defaultList + ? 
overflowList : this.#target; return [containerForAppending, null]; } From 2fbb503a24361e0763892a3594d672ae81742483 Mon Sep 17 00:00:00 2001 From: Mike Conley Date: Thu, 24 Nov 2022 18:51:20 +0000 Subject: [PATCH 26/37] Bug 1801678 - Refactor the Unified Extensions overflowable toolbar test helper with more flexible lifetime functions. r=willdurand,extension-reviewers This is helpful when we need to insert optional async functions to run at particular times within withWindowOverflowed. Differential Revision: https://phabricator.services.mozilla.com/D162587 --- .../extensions/test/AppUiTestDelegate.jsm | 1 + ...unified_extensions_overflowable_toolbar.js | 206 ++++++++++-------- 2 files changed, 117 insertions(+), 90 deletions(-) diff --git a/browser/components/extensions/test/AppUiTestDelegate.jsm b/browser/components/extensions/test/AppUiTestDelegate.jsm index 710b4e03733d..f33669176def 100644 --- a/browser/components/extensions/test/AppUiTestDelegate.jsm +++ b/browser/components/extensions/test/AppUiTestDelegate.jsm @@ -218,6 +218,7 @@ async function removeTab(tab) { var AppUiTestInternals = { awaitBrowserLoaded, getBrowserActionWidget, + getBrowserActionWidgetId, getPageActionButton, getPageActionPopup, getPanelForNode, diff --git a/browser/components/extensions/test/browser/browser_unified_extensions_overflowable_toolbar.js b/browser/components/extensions/test/browser/browser_unified_extensions_overflowable_toolbar.js index f89553613b02..e9e4b0b0857d 100644 --- a/browser/components/extensions/test/browser/browser_unified_extensions_overflowable_toolbar.js +++ b/browser/components/extensions/test/browser/browser_unified_extensions_overflowable_toolbar.js @@ -10,10 +10,6 @@ loadTestSubscript("head_unified_extensions.js"); -const { ExtensionCommon } = ChromeUtils.import( - "resource://gre/modules/ExtensionCommon.jsm" -); - const NUM_EXTENSIONS = 5; const OVERFLOW_WINDOW_WIDTH_PX = 450; const DEFAULT_WIDGET_IDS = [ @@ -81,21 +77,40 @@ function 
getVisibleMenuItems(popup) { * 5. Unloads all of the test WebExtensions * * @param {DOMWindow} win The browser window to perform the test on. - * @param {Function} taskFn The async function to run once the window is in - * the overflow state. The function is called with the following arguments: + * @param {object} options Additional options when running this test. + * @param {Function} options.beforeOverflowed This optional async function will + * be run after the extensions are created and added to the toolbar, but + * before the toolbar overflows. The function is called with the following + * arguments: + * + * {string[]} extensionIDs: The IDs of the test WebExtensions. + * + * The return value of the function is ignored. + * @param {Function} options.whenOverflowed This optional async function will + * run once the window is in the overflow state. The function is called + * with the following arguments: * * {Element} defaultList: The DOM element that holds overflowed default * items. * {Element} unifiedExtensionList: The DOM element that holds overflowed * WebExtension browser_actions when Unified Extensions is enabled. * {string[]} extensionIDs: The IDs of the test WebExtensions. - * @param {Function} afterUnderflowFn An optional async function that will be run - * once the toolbar underflows, before the extensions are removed. * - * The function is expected to return a Promise that does not resolve - * with anything. + * The return value of the function is ignored. + * @param {Function} options.afterUnderflowed This optional async function will + * be run after the window is expanded and the toolbar has underflowed, but + * before the extensions are removed. This function is not passed any + * arguments. The return value of the function is ignored. 
+ * */ -async function withWindowOverflowed(win, taskFn, afterUnderflowFn) { +async function withWindowOverflowed( + win, + { + beforeOverflowed = async () => {}, + whenOverflowed = async () => {}, + afterUnderflowed = async () => {}, + } = {} +) { const doc = win.document; doc.documentElement.removeAttribute("persist"); const navbar = doc.getElementById(CustomizableUI.AREA_NAVBAR); @@ -229,66 +244,82 @@ async function withWindowOverflowed(win, taskFn, afterUnderflowFn) { await listener.promise; CustomizableUI.removeListener(listener); - const originalWindowWidth = win.outerWidth; - - let widgetOverflowListener = { - _remainingOverflowables: NUM_EXTENSIONS + DEFAULT_WIDGET_IDS.length, - _deferred: PromiseUtils.defer(), - - get promise() { - return this._deferred.promise; - }, - - onWidgetOverflow(widgetNode, areaNode) { - this._remainingOverflowables--; - if (!this._remainingOverflowables) { - this._deferred.resolve(); - } - }, - }; - CustomizableUI.addListener(widgetOverflowListener); - - win.resizeTo(OVERFLOW_WINDOW_WIDTH_PX, win.outerHeight); - await widgetOverflowListener.promise; - CustomizableUI.removeListener(widgetOverflowListener); - - Assert.ok( - navbar.hasAttribute("overflowing"), - "Should have an overflowing toolbar." - ); - - const defaultList = doc.getElementById( - navbar.getAttribute("default-overflowtarget") - ); - - const unifiedExtensionList = doc.getElementById( - navbar.getAttribute("addon-webext-overflowtarget") - ); - const extensionIDs = extensions.map(extension => extension.id); try { - await taskFn(defaultList, unifiedExtensionList, extensionIDs); + info("Running beforeOverflowed task"); + await beforeOverflowed(extensionIDs); } finally { - win.resizeTo(originalWindowWidth, win.outerHeight); - await BrowserTestUtils.waitForEvent(win, "resize"); + const originalWindowWidth = win.outerWidth; - // Notably, we don't wait for the nav-bar to not have the "overflowing" - // attribute. 
This is because we might be running in an environment - // where the nav-bar was overflowing to begin with. Let's just hope that - // our sign-post widget has stopped overflowing. - await TestUtils.waitForCondition(() => { - return !doc - .getElementById(signpostWidgetID) - .hasAttribute("overflowedItem"); + // The beforeOverflowed task may have moved some items out from the navbar, + // so only listen for overflows for items still in there. + const browserActionIDs = extensionIDs.map(id => + AppUiTestInternals.getBrowserActionWidgetId(id) + ); + const browserActionsInNavBar = browserActionIDs.filter(widgetID => { + let placement = CustomizableUI.getPlacementOfWidget(widgetID); + return placement.area == CustomizableUI.AREA_NAVBAR; }); + let widgetOverflowListener = { + _remainingOverflowables: + browserActionsInNavBar.length + DEFAULT_WIDGET_IDS.length, + _deferred: PromiseUtils.defer(), + + get promise() { + return this._deferred.promise; + }, + + onWidgetOverflow(widgetNode, areaNode) { + this._remainingOverflowables--; + if (!this._remainingOverflowables) { + this._deferred.resolve(); + } + }, + }; + CustomizableUI.addListener(widgetOverflowListener); + + win.resizeTo(OVERFLOW_WINDOW_WIDTH_PX, win.outerHeight); + await widgetOverflowListener.promise; + CustomizableUI.removeListener(widgetOverflowListener); + + Assert.ok( + navbar.hasAttribute("overflowing"), + "Should have an overflowing toolbar." 
+ ); + + const defaultList = doc.getElementById( + navbar.getAttribute("default-overflowtarget") + ); + + const unifiedExtensionList = doc.getElementById( + navbar.getAttribute("addon-webext-overflowtarget") + ); + try { - if (afterUnderflowFn) { - await afterUnderflowFn(); - } + info("Running whenOverflowed task"); + await whenOverflowed(defaultList, unifiedExtensionList, extensionIDs); } finally { - await Promise.all(extensions.map(extension => extension.unload())); + win.resizeTo(originalWindowWidth, win.outerHeight); + await BrowserTestUtils.waitForEvent(win, "resize"); + + // Notably, we don't wait for the nav-bar to not have the "overflowing" + // attribute. This is because we might be running in an environment + // where the nav-bar was overflowing to begin with. Let's just hope that + // our sign-post widget has stopped overflowing. + await TestUtils.waitForCondition(() => { + return !doc + .getElementById(signpostWidgetID) + .hasAttribute("overflowedItem"); + }); + + try { + info("Running afterUnderflowed task"); + await afterUnderflowed(); + } finally { + await Promise.all(extensions.map(extension => extension.unload())); + } } } } @@ -408,9 +439,8 @@ add_task(async function test_overflowable_toolbar() { let win = await promiseEnableUnifiedExtensions(); let movedNode; - await withWindowOverflowed( - win, - async (defaultList, unifiedExtensionList, extensionIDs) => { + await withWindowOverflowed(win, { + whenOverflowed: async (defaultList, unifiedExtensionList, extensionIDs) => { // Ensure that there are 5 items in the Unified Extensions overflow // list, and the default widgets should all be in the default overflow // list (though there might be more items from the nav-bar in there that @@ -437,9 +467,9 @@ add_task(async function test_overflowable_toolbar() { await verifyExtensionWidget(win, child, true); } - let extensionWidgetID = `${ExtensionCommon.makeWidgetId( + let extensionWidgetID = AppUiTestInternals.getBrowserActionWidgetId( extensionIDs.at(-1) 
- )}-browser-action`; + ); movedNode = CustomizableUI.getWidget(extensionWidgetID).forWindow(win) .node; Assert.equal(movedNode.getAttribute("cui-areatype"), "toolbar"); @@ -455,7 +485,7 @@ add_task(async function test_overflowable_toolbar() { "The moved browser action button should have the right cui-areatype set." ); }, - async () => { + afterUnderflowed: async () => { // Ensure that the moved node's parent is still the add-ons panel. Assert.equal( movedNode.parentElement.id, @@ -463,8 +493,8 @@ add_task(async function test_overflowable_toolbar() { "The browser action should still be in the addons panel" ); CustomizableUI.addWidgetToArea(movedNode.id, CustomizableUI.AREA_NAVBAR); - } - ); + }, + }); await BrowserTestUtils.closeWindow(win); }); @@ -476,9 +506,8 @@ add_task(async function test_overflowable_toolbar() { add_task(async function test_overflowable_toolbar_legacy() { let win = await promiseDisableUnifiedExtensions(); - await withWindowOverflowed( - win, - async (defaultList, unifiedExtensionList, extensionIDs) => { + await withWindowOverflowed(win, { + whenOverflowed: async (defaultList, unifiedExtensionList, extensionIDs) => { // First, ensure that all default items are in the default overflow list. // (though there might be more items from the nav-bar in there that // already existed in the nav-bar before we put the default widgets in @@ -505,8 +534,8 @@ add_task(async function test_overflowable_toolbar_legacy() { 0, "Unified Extension overflow list should be empty." 
); - } - ); + }, + }); await BrowserTestUtils.closeWindow(win); await SpecialPowers.popPrefEnv(); @@ -515,9 +544,8 @@ add_task(async function test_overflowable_toolbar_legacy() { add_task(async function test_menu_button() { let win = await promiseEnableUnifiedExtensions(); - await withWindowOverflowed( - win, - async (defaultList, unifiedExtensionList, extensionIDs) => { + await withWindowOverflowed(win, { + whenOverflowed: async (defaultList, unifiedExtensionList, extensionIDs) => { Assert.ok( unifiedExtensionList.children.length, "Should have items in the Unified Extension list." @@ -671,8 +699,8 @@ add_task(async function test_menu_button() { ); await closeExtensionsPanel(win); - } - ); + }, + }); await BrowserTestUtils.closeWindow(win); }); @@ -680,9 +708,8 @@ add_task(async function test_menu_button() { add_task(async function test_context_menu() { let win = await promiseEnableUnifiedExtensions(); - await withWindowOverflowed( - win, - async (defaultList, unifiedExtensionList, extensionIDs) => { + await withWindowOverflowed(win, { + whenOverflowed: async (defaultList, unifiedExtensionList, extensionIDs) => { Assert.ok( unifiedExtensionList.children.length, "Should have items in the Unified Extension list." @@ -770,8 +797,8 @@ add_task(async function test_context_menu() { // We can close the unified extensions panel now. await closeExtensionsPanel(win); - } - ); + }, + }); await BrowserTestUtils.closeWindow(win); }); @@ -779,9 +806,8 @@ add_task(async function test_context_menu() { add_task(async function test_action_button() { let win = await promiseEnableUnifiedExtensions(); - await withWindowOverflowed( - win, - async (defaultList, unifiedExtensionList, extensionIDs) => { + await withWindowOverflowed(win, { + whenOverflowed: async (defaultList, unifiedExtensionList, extensionIDs) => { Assert.ok( unifiedExtensionList.children.length, "Should have items in the Unified Extension list." 
@@ -916,8 +942,8 @@ add_task(async function test_action_button() { await closeExtensionsPanel(win); } ); - } - ); + }, + }); await BrowserTestUtils.closeWindow(win); }); From 4de19bba6007d9eebbdf702ae824151dec8f4493 Mon Sep 17 00:00:00 2001 From: Mike Conley Date: Thu, 24 Nov 2022 18:51:20 +0000 Subject: [PATCH 27/37] Bug 1801678 - Test that pinning a browser action to an overflowed toolbar puts the widget in the right area. r=willdurand Differential Revision: https://phabricator.services.mozilla.com/D162588 --- ...unified_extensions_overflowable_toolbar.js | 48 +++++++++++++++++++ 1 file changed, 48 insertions(+) diff --git a/browser/components/extensions/test/browser/browser_unified_extensions_overflowable_toolbar.js b/browser/components/extensions/test/browser/browser_unified_extensions_overflowable_toolbar.js index e9e4b0b0857d..d2e890d90685 100644 --- a/browser/components/extensions/test/browser/browser_unified_extensions_overflowable_toolbar.js +++ b/browser/components/extensions/test/browser/browser_unified_extensions_overflowable_toolbar.js @@ -947,3 +947,51 @@ add_task(async function test_action_button() { await BrowserTestUtils.closeWindow(win); }); + +/** + * Tests that if we pin a browser action button listed in the addons panel + * to the toolbar when that button would immediately overflow, that the + * button is put into the addons panel overflow list. + */ +add_task(async function test_pinning_to_toolbar_when_overflowed() { + let win = await promiseEnableUnifiedExtensions(); + let movedNode; + let extensionWidgetID; + + await withWindowOverflowed(win, { + beforeOverflowed: async extensionIDs => { + // Before we overflow the toolbar, let's move the last item to the addons + // panel. 
+ extensionWidgetID = AppUiTestInternals.getBrowserActionWidgetId( + extensionIDs.at(-1) + ); + + movedNode = CustomizableUI.getWidget(extensionWidgetID).forWindow(win) + .node; + + CustomizableUI.addWidgetToArea( + extensionWidgetID, + CustomizableUI.AREA_ADDONS + ); + }, + whenOverflowed: async (defaultList, unifiedExtensionList, extensionIDs) => { + // Now that the window is overflowed, let's move the widget in the addons + // panel back to the navbar. This should cause the widget to overflow back + // into the addons panel. + CustomizableUI.addWidgetToArea( + extensionWidgetID, + CustomizableUI.AREA_NAVBAR + ); + await TestUtils.waitForCondition(() => { + return movedNode.hasAttribute("overflowedItem"); + }); + Assert.equal( + movedNode.parentElement, + unifiedExtensionList, + "Should have overflowed the extension button to the right list." + ); + }, + }); + + await BrowserTestUtils.closeWindow(win); +}); From b3d763f3c3ec9be7571dca6699ed8ba8c7214532 Mon Sep 17 00:00:00 2001 From: Mozilla Releng Treescript Date: Thu, 24 Nov 2022 19:04:56 +0000 Subject: [PATCH 28/37] no bug - Bumping Firefox l10n changesets r=release a=l10n-bump DONTBUILD CLOSED TREE es-AR -> 51b4794b186d52193f1d647e7399e2557c113dae fr -> cae65157bbcebc26625586b1920d1f34d9683653 hu -> 9432435c581d8839ad71ce11acf5098090c6e9e5 is -> 5f4dedafbdf70c69d98352064b3a3cf4c58a94a9 pt-PT -> ffb56c5ea7907321595a1308c6232a26f4660a40 tr -> 8d24e72d9f81161e2207a68f56cdc169b8c8110c zh-CN -> 574722005ef12d41efe4b2261e91fb21974fee7c --- browser/locales/l10n-changesets.json | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/browser/locales/l10n-changesets.json b/browser/locales/l10n-changesets.json index 79b5ebc65c69..5f0b604d5715 100644 --- a/browser/locales/l10n-changesets.json +++ b/browser/locales/l10n-changesets.json @@ -483,7 +483,7 @@ "win64-aarch64-devedition", "win64-devedition" ], - "revision": "9884a10915a5d27531be1848bb3056bfb541661e" + "revision": 
"51b4794b186d52193f1d647e7399e2557c113dae" }, "es-CL": { "pin": false, @@ -645,7 +645,7 @@ "win64-aarch64-devedition", "win64-devedition" ], - "revision": "e270b824dd33e62364be110b6d15723af68e1b1e" + "revision": "cae65157bbcebc26625586b1920d1f34d9683653" }, "fy-NL": { "pin": false, @@ -843,7 +843,7 @@ "win64-aarch64-devedition", "win64-devedition" ], - "revision": "268da803b62cbe9b579100ade7c3735dcf42baef" + "revision": "9432435c581d8839ad71ce11acf5098090c6e9e5" }, "hy-AM": { "pin": false, @@ -933,7 +933,7 @@ "win64-aarch64-devedition", "win64-devedition" ], - "revision": "4d395af1ad99bbef4e8e631e415d3ece59910cb5" + "revision": "5f4dedafbdf70c69d98352064b3a3cf4c58a94a9" }, "it": { "pin": false, @@ -1425,7 +1425,7 @@ "win64-aarch64-devedition", "win64-devedition" ], - "revision": "e2004dd62601de421adba549123e28cf12667900" + "revision": "ffb56c5ea7907321595a1308c6232a26f4660a40" }, "rm": { "pin": false, @@ -1803,7 +1803,7 @@ "win64-aarch64-devedition", "win64-devedition" ], - "revision": "8b56fdc7d226a359747095054358cc0808438b8e" + "revision": "8d24e72d9f81161e2207a68f56cdc169b8c8110c" }, "trs": { "pin": false, @@ -1947,7 +1947,7 @@ "win64-aarch64-devedition", "win64-devedition" ], - "revision": "edededa35a86a31ec0472037da9581f588eedf47" + "revision": "574722005ef12d41efe4b2261e91fb21974fee7c" }, "zh-TW": { "pin": false, From 649e436ec787b3adbb33e118d8a95c45ceb3ef43 Mon Sep 17 00:00:00 2001 From: Nick Alexander Date: Thu, 24 Nov 2022 19:22:28 +0000 Subject: [PATCH 29/37] Bug 1683278 - Support viewing PDF files in the macOS "Open With" context menu. 
r=mstange Differential Revision: https://phabricator.services.mozilla.com/D162132 --- browser/app/macbuild/Contents/Info.plist.in | 22 +++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/browser/app/macbuild/Contents/Info.plist.in b/browser/app/macbuild/Contents/Info.plist.in index 9ceaf88f15c1..99849c6f06c6 100644 --- a/browser/app/macbuild/Contents/Info.plist.in +++ b/browser/app/macbuild/Contents/Info.plist.in @@ -46,6 +46,28 @@ CFBundleTypeRole Viewer + + CFBundleTypeExtensions + + pdf + + CFBundleTypeIconFile + document.icns + CFBundleTypeMIMETypes + + application/pdf + + CFBundleTypeName + PDF document + CFBundleTypeOSTypes + + TEXT + + CFBundleTypeRole + Viewer + LSHandlerRank + Alternate + CFBundleTypeExtensions From 61beafe8e2064b284e61ea6cbc01cdd020077e21 Mon Sep 17 00:00:00 2001 From: scott Date: Thu, 24 Nov 2022 20:03:53 +0000 Subject: [PATCH 30/37] Bug 1802265 - Pocket newtab topsite spoc use title instead of sponsor. r=nanj Differential Revision: https://phabricator.services.mozilla.com/D162947 --- .../DiscoveryStreamComponents/TopSites/TopSites.jsx | 4 ++-- .../components/newtab/data/content/activity-stream.bundle.js | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/browser/components/newtab/content-src/components/DiscoveryStreamComponents/TopSites/TopSites.jsx b/browser/components/newtab/content-src/components/DiscoveryStreamComponents/TopSites/TopSites.jsx index e1a562d4d4cb..e1bb0cd50d87 100644 --- a/browser/components/newtab/content-src/components/DiscoveryStreamComponents/TopSites/TopSites.jsx +++ b/browser/components/newtab/content-src/components/DiscoveryStreamComponents/TopSites/TopSites.jsx @@ -51,8 +51,8 @@ export class _TopSites extends React.PureComponent { const link = { customScreenshotURL: topSiteSpoc.image_src, type: "SPOC", - label: topSiteSpoc.sponsor, - title: topSiteSpoc.sponsor, + label: topSiteSpoc.title || topSiteSpoc.sponsor, + title: topSiteSpoc.title || topSiteSpoc.sponsor, url: 
topSiteSpoc.url, flightId: topSiteSpoc.flight_id, id: topSiteSpoc.id, diff --git a/browser/components/newtab/data/content/activity-stream.bundle.js b/browser/components/newtab/data/content/activity-stream.bundle.js index bd8a0613500a..2df9082ff645 100644 --- a/browser/components/newtab/data/content/activity-stream.bundle.js +++ b/browser/components/newtab/data/content/activity-stream.bundle.js @@ -13660,8 +13660,8 @@ class TopSites_TopSites_TopSites extends (external_React_default()).PureComponen const link = { customScreenshotURL: topSiteSpoc.image_src, type: "SPOC", - label: topSiteSpoc.sponsor, - title: topSiteSpoc.sponsor, + label: topSiteSpoc.title || topSiteSpoc.sponsor, + title: topSiteSpoc.title || topSiteSpoc.sponsor, url: topSiteSpoc.url, flightId: topSiteSpoc.flight_id, id: topSiteSpoc.id, From 85edb2cce4ce1c2b10380d4cd974157d9c6f77f0 Mon Sep 17 00:00:00 2001 From: Nicolas Chevobbe Date: Thu, 24 Nov 2022 20:06:34 +0000 Subject: [PATCH 31/37] Bug 1802176 - Fix intermittent on browser_html_sitepermission_addons. r=rpl. Set `midi.testing` and `dom.webmidi.enabled` prefs in browser.ini for the test. 
Differential Revision: https://phabricator.services.mozilla.com/D162991 --- toolkit/mozapps/extensions/test/browser/browser.ini | 4 ++++ .../test/browser/browser_html_sitepermission_addons.js | 7 ------- 2 files changed, 4 insertions(+), 7 deletions(-) diff --git a/toolkit/mozapps/extensions/test/browser/browser.ini b/toolkit/mozapps/extensions/test/browser/browser.ini index 61c1dbecfcb4..483935e2892f 100644 --- a/toolkit/mozapps/extensions/test/browser/browser.ini +++ b/toolkit/mozapps/extensions/test/browser/browser.ini @@ -46,6 +46,10 @@ skip-if = fission && os == "linux" && asan # Bug 1713895 - new Fission platform triage os == "win" && os_version == "6.1" # Bug 1717250 +prefs = + dom.webmidi.enabled=true + midi.testing=true + [browser_about_debugging_link.js] [browser_addon_list_reordering.js] [browser_bug572561.js] diff --git a/toolkit/mozapps/extensions/test/browser/browser_html_sitepermission_addons.js b/toolkit/mozapps/extensions/test/browser/browser_html_sitepermission_addons.js index 02bf8c911305..7652048961a9 100644 --- a/toolkit/mozapps/extensions/test/browser/browser_html_sitepermission_addons.js +++ b/toolkit/mozapps/extensions/test/browser/browser_html_sitepermission_addons.js @@ -25,13 +25,6 @@ async function uninstallAllSitePermissionAddons() { } add_setup(async () => { - await SpecialPowers.pushPrefEnv({ - set: [ - ["midi.prompt.testing", false], - ["midi.testing", true], - ], - }); - registerCleanupFunction(uninstallAllSitePermissionAddons); }); From a71d007dfda29af788f00a03f4ce46ea7da65792 Mon Sep 17 00:00:00 2001 From: William Durand Date: Thu, 24 Nov 2022 21:32:48 +0000 Subject: [PATCH 32/37] Bug 1793743 - Resize window to avoid overflowed extensions in the unified extensions panel. r=rpl Depending on the test chunks on Firefox CI, we might have a test file executed before this one that resizes the window and does not reset it. 
In most cases, this isn't an issue except when we have logic to overflow extension buttons pinned in the toolbar into the unified extensions panel. This is what's happening here, at least for some configurations. By making sure that the window is large enough, we should not have overflowed extension buttons in the panel anymore. That will make the test more robust. Differential Revision: https://phabricator.services.mozilla.com/D162961 --- .../browser/browser_unified_extensions.js | 5 +++++ ...unified_extensions_overflowable_toolbar.js | 11 +--------- .../test/browser/head_unified_extensions.js | 22 +++++++++++++++++++ 3 files changed, 28 insertions(+), 10 deletions(-) diff --git a/browser/components/extensions/test/browser/browser_unified_extensions.js b/browser/components/extensions/test/browser/browser_unified_extensions.js index e07184e84ed1..171275393ac4 100644 --- a/browser/components/extensions/test/browser/browser_unified_extensions.js +++ b/browser/components/extensions/test/browser/browser_unified_extensions.js @@ -50,6 +50,11 @@ let win; add_setup(async function() { win = await promiseEnableUnifiedExtensions(); + // Make sure extension buttons added to the navbar will not overflow in the + // panel, which could happen when a previous test file resizes the current + // window. 
+ await ensureMaximizedWindow(win); + registerCleanupFunction(async () => { await BrowserTestUtils.closeWindow(win); }); diff --git a/browser/components/extensions/test/browser/browser_unified_extensions_overflowable_toolbar.js b/browser/components/extensions/test/browser/browser_unified_extensions_overflowable_toolbar.js index d2e890d90685..6cd2ec4d97fb 100644 --- a/browser/components/extensions/test/browser/browser_unified_extensions_overflowable_toolbar.js +++ b/browser/components/extensions/test/browser/browser_unified_extensions_overflowable_toolbar.js @@ -115,16 +115,7 @@ async function withWindowOverflowed( doc.documentElement.removeAttribute("persist"); const navbar = doc.getElementById(CustomizableUI.AREA_NAVBAR); - win.moveTo(0, 0); - - const widthDiff = win.screen.availWidth - win.outerWidth; - const heightDiff = win.screen.availHeight - win.outerHeight; - - if (widthDiff || heightDiff) { - let resizeDone = BrowserTestUtils.waitForEvent(win, "resize", false); - win.resizeBy(widthDiff, heightDiff); - await resizeDone; - } + await ensureMaximizedWindow(win); // The OverflowableToolbar operates asynchronously at times, so we will // poll a widget's overflowedItem attribute to detect whether or not the diff --git a/browser/components/extensions/test/browser/head_unified_extensions.js b/browser/components/extensions/test/browser/head_unified_extensions.js index d2cbe8d8f5a8..efd0c3395774 100644 --- a/browser/components/extensions/test/browser/head_unified_extensions.js +++ b/browser/components/extensions/test/browser/head_unified_extensions.js @@ -6,6 +6,7 @@ /* exported clickUnifiedExtensionsItem, closeExtensionsPanel, createExtensions, + ensureMaximizedWindow, getUnifiedExtensionsItem, openExtensionsPanel, openUnifiedExtensionsContextMenu, @@ -139,3 +140,24 @@ const createExtensions = ( }) ); }; + +/** + * Given a window, this test helper resizes it so that the window takes most of + * the available screen size (unless the window is already maximized). 
+ */ +const ensureMaximizedWindow = async win => { + let resizeDone = Promise.resolve(); + + win.moveTo(0, 0); + + const widthDiff = win.screen.availWidth - win.outerWidth; + const heightDiff = win.screen.availHeight - win.outerHeight; + + if (widthDiff || heightDiff) { + resizeDone = BrowserTestUtils.waitForEvent(win, "resize", false); + win.windowUtils.ensureDirtyRootFrame(); + win.resizeBy(widthDiff, heightDiff); + } + + return resizeDone; +}; From 4fd50df0b578fbfbc60fb794ed583890ab080c68 Mon Sep 17 00:00:00 2001 From: Itiel Date: Thu, 24 Nov 2022 22:00:10 +0000 Subject: [PATCH 33/37] Bug 1799836 - Move attention dot to action button. r=willdurand,sfoster Differential Revision: https://phabricator.services.mozilla.com/D162724 --- .../shared/addons/unified-extensions.css | 90 +++++++++++-------- 1 file changed, 54 insertions(+), 36 deletions(-) diff --git a/browser/themes/shared/addons/unified-extensions.css b/browser/themes/shared/addons/unified-extensions.css index a3cfec04f066..108cd9dc9499 100644 --- a/browser/themes/shared/addons/unified-extensions.css +++ b/browser/themes/shared/addons/unified-extensions.css @@ -5,45 +5,79 @@ :root { /* uei = unified extensions item */ --uei-icon-size: 32px; - --uei-dot-position: calc(var(--uei-icon-size) / 2 + var(--arrowpanel-menuitem-margin-inline) + var(--arrowpanel-menuitem-padding-inline) - 4px); + --uei-attention-dot-size: 8px; --uei-button-hover-bgcolor: var(--panel-item-hover-bgcolor); --uei-button-hover-color: inherit; --uei-button-active-bgcolor: var(--panel-item-active-bgcolor); --uei-button-active-color: inherit; + --uei-button-attention-dot-color: var(--tab-attention-icon-color); } :root[uidensity="compact"] { --uei-icon-size: 24px; } -/* Align extensions rendered with custom elements. 
*/ -unified-extensions-item { - align-items: center; - display: flex; +#unified-extensions-panel { + --uei-dot-horizontal-position-in-panel: calc(var(--uei-icon-size) / 2 + var(--arrowpanel-menuitem-padding-inline) - var(--uei-attention-dot-size) / 2); + --uei-dot-vertical-position-in-panel: max(0px, calc(var(--arrowpanel-menuitem-padding-block) / 2 - var(--uei-attention-dot-size) / 2)); } -.unified-extensions-item { +/* Align extensions rendered with custom elements. */ +unified-extensions-item { + display: flex; + align-items: center; +} + +#unified-extensions-panel .unified-extensions-item { + /* Have some spacing between items in the panel; mainly useful for when HCM is enabled. */ padding-block: 2px; } -/* This is based on the attention UI defined in: +/* The "attention UI" for the unified extensions is based on: * https://searchfox.org/mozilla-central/rev/560b7b1b17/browser/themes/shared/tabs.css#624 */ -#unified-extensions-button[attention], -.unified-extensions-item[attention] { - background-image: radial-gradient(circle, var(--tab-attention-icon-color), var(--tab-attention-icon-color) 2px, transparent 2px); - background-position: center bottom max(0px, calc(var(--arrowpanel-menuitem-padding-block) - 4px)); - background-size: 8px 8px; + +/* On the main unified extensions button, we draw the attention on the icon element. */ +#unified-extensions-button[attention] > .toolbarbutton-icon, +/* For extension widgets placed in a toolbar, we use the stack element (containing the icon) + * of the action button to draw the attention dot. + * Otherwise (in the extensions panel), we use the action button itself. 
*/ +toolbar .unified-extensions-item[attention] > .unified-extensions-item-action > .toolbarbutton-badge-stack, +#unified-extensions-panel .unified-extensions-item[attention] > .unified-extensions-item-action, +.widget-overflow-list .unified-extensions-item[attention][unified-extensions="false"] > .unified-extensions-item-action { + background-image: radial-gradient(circle, var(--uei-button-attention-dot-color), var(--uei-button-attention-dot-color) 2px, transparent 2px); + background-size: var(--uei-attention-dot-size) var(--uei-attention-dot-size); background-repeat: no-repeat; } -/* Adjust attention dots for the custom elements. */ -.unified-extensions-list > unified-extensions-item[attention] { - background-position: left var(--uei-dot-position) bottom 0px; +/* Adjust attention dots position in the toolbar. */ +#unified-extensions-button[attention] > .toolbarbutton-icon, +toolbar .unified-extensions-item[attention] > .unified-extensions-item-action > .toolbarbutton-badge-stack { + background-position: center bottom calc(var(--toolbarbutton-inner-padding) / 2 - var(--uei-attention-dot-size) / 2); } -/* Adjust attention dots for the custom elements. */ -.unified-extensions-list > unified-extensions-item[attention]:-moz-locale-dir(rtl) { - background-position-x: right var(--uei-dot-position); +/* Adjust attention dots position in the unified extensions panel. */ +#unified-extensions-panel .unified-extensions-item[attention] > .unified-extensions-item-action { + background-position: left var(--uei-dot-horizontal-position-in-panel) bottom var(--uei-dot-vertical-position-in-panel); +} + +/* Adjust attention dots position in the unified extensions panel for RTL. */ +#unified-extensions-panel .unified-extensions-item[attention] > .unified-extensions-item-action:-moz-locale-dir(rtl) { + background-position-x: right var(--uei-dot-horizontal-position-in-panel); +} + +/* Adjust attention dots position in the overflow panel. 
*/ +.widget-overflow-list .unified-extensions-item[attention][unified-extensions="false"] > .unified-extensions-item-action { + background-position-x: left calc(16px / 2 + var(--arrowpanel-menuitem-padding-inline) - var(--uei-attention-dot-size) / 2); + background-position-y: bottom calc(var(--arrowpanel-menuitem-padding-block) / 2 - var(--uei-attention-dot-size) / 2); +} + +:root[uidensity="compact"] .widget-overflow-list .unified-extensions-item[attention][unified-extensions="false"] > .unified-extensions-item-action { + background-position-y: bottom -2px; +} + +/* Adjust attention dots position in the overflow panel for RTL. */ +.widget-overflow-list toolbaritem.unified-extensions-item[attention][unified-extensions="false"] > .unified-extensions-item-action:-moz-locale-dir(rtl) { + background-position-x: right calc(16px / 2 + var(--arrowpanel-menuitem-padding-inline) - var(--uei-attention-dot-size) / 2); } .unified-extensions-item-action { @@ -165,25 +199,9 @@ toolbaritem.unified-extensions-item[unified-extensions="true"] .unified-extensio display: block; } -:is(#unified-extensions-panel, .widget-overflow-list) toolbaritem.unified-extensions-item[attention] { - background-position: left calc(12px + var(--arrowpanel-menuitem-margin-inline)) bottom; -} - -:is(#unified-extensions-panel, .widget-overflow-list) toolbaritem.unified-extensions-item[attention]:-moz-locale-dir(rtl) { - background-position-x: right calc(12px + var(--arrowpanel-menuitem-margin-inline)); -} - -.widget-overflow-list toolbaritem.unified-extensions-item[attention][unified-extensions="false"] { - background-position-x: left 12px; -} - -.widget-overflow-list toolbaritem.unified-extensions-item[attention][unified-extensions="false"]:-moz-locale-dir(rtl) { - background-position-x: right 12px; -} - @media (prefers-contrast) { - .unified-extensions-item[attention] { - background-image: radial-gradient(circle, ButtonText, ButtonText 2px, transparent 2px); + :root { + --uei-button-attention-dot-color: 
ButtonText; } .unified-extensions-item-action:not([disabled]).subviewbutton, From a740257932938bfa7ca9c82c0bc0b3e137cb6a6a Mon Sep 17 00:00:00 2001 From: Ryan VanderMeulen Date: Thu, 24 Nov 2022 22:00:48 +0000 Subject: [PATCH 34/37] Bug 1787515 - Update libjpeg-turbo to 2.1.4. r=aosmond Differential Revision: https://phabricator.services.mozilla.com/D161871 --- media/libjpeg/ChangeLog.md | 35 ++++++++++++++++++++++++++ media/libjpeg/MOZCHANGES | 4 +++ media/libjpeg/jdapistd.c | 13 ++++++++-- media/libjpeg/jdcoefct.c | 6 ++--- media/libjpeg/jerror.c | 4 +-- media/libjpeg/jinclude.h | 12 +++++++++ media/libjpeg/jmemmgr.c | 13 ++++++---- media/libjpeg/simd/arm/aarch32/jsimd.c | 2 -- media/libjpeg/simd/arm/aarch64/jsimd.c | 2 -- media/libjpeg/simd/mips/jsimd.c | 2 -- media/libjpeg/simd/mips64/jsimd.c | 2 -- media/libjpeg/simd/powerpc/jsimd.c | 15 +++++++---- 12 files changed, 85 insertions(+), 25 deletions(-) diff --git a/media/libjpeg/ChangeLog.md b/media/libjpeg/ChangeLog.md index e6700c3c271c..b0d166ea1011 100644 --- a/media/libjpeg/ChangeLog.md +++ b/media/libjpeg/ChangeLog.md @@ -1,3 +1,38 @@ +2.1.4 +===== + +### Significant changes relative to 2.1.3 + +1. Fixed a regression introduced in 2.1.3 that caused build failures with +Visual Studio 2010. + +2. The `tjDecompressHeader3()` function in the TurboJPEG C API and the +`TJDecompressor.setSourceImage()` method in the TurboJPEG Java API now accept +"abbreviated table specification" (AKA "tables-only") datastreams, which can be +used to prime the decompressor with quantization and Huffman tables that can be +used when decompressing subsequent "abbreviated image" datastreams. + +3. libjpeg-turbo now performs run-time detection of AltiVec instructions on +OS X/PowerPC systems if AltiVec instructions are not enabled at compile time. +This allows both AltiVec-equipped (PowerPC G4 and G5) and non-AltiVec-equipped +(PowerPC G3) CPUs to be supported using the same build of libjpeg-turbo. + +4. 
Fixed an error ("Bogus virtual array access") that occurred when attempting +to decompress a progressive JPEG image with a height less than or equal to one +iMCU (8 * the vertical sampling factor) using buffered-image mode with +interblock smoothing enabled. This was a regression introduced by +2.1 beta1[6(b)]. + +5. Fixed two issues that prevented partial image decompression from working +properly with buffered-image mode: + + - Attempting to call `jpeg_crop_scanline()` after +`jpeg_start_decompress()` but before `jpeg_start_output()` resulted in an error +("Improper call to JPEG library in state 207".) + - Attempting to use `jpeg_skip_scanlines()` resulted in an error ("Bogus +virtual array access") under certain circumstances. + + 2.1.3 ===== diff --git a/media/libjpeg/MOZCHANGES b/media/libjpeg/MOZCHANGES index 4e65df22222e..1014df1341ee 100644 --- a/media/libjpeg/MOZCHANGES +++ b/media/libjpeg/MOZCHANGES @@ -48,6 +48,10 @@ To upgrade to a new revision of libjpeg-turbo, do the following: $ hg addremove +== November 10, 2022 (libjpeg-turbo v2.1.4 8162eddf041e0be26f5c671bb6528723c55fed9d 2022-08-12) == + +* Updated to v2.1.4 release. + == February 28, 2022 (libjpeg-turbo v2.1.3 c5f269eb9665435271c05fbcaf8721fa58e9eafa 2022-02-25) == * Updated to v2.1.3 release. 
diff --git a/media/libjpeg/jdapistd.c b/media/libjpeg/jdapistd.c index 8827d8abf5c5..02cd0cb93a85 100644 --- a/media/libjpeg/jdapistd.c +++ b/media/libjpeg/jdapistd.c @@ -159,9 +159,12 @@ jpeg_crop_scanline(j_decompress_ptr cinfo, JDIMENSION *xoffset, JDIMENSION input_xoffset; boolean reinit_upsampler = FALSE; jpeg_component_info *compptr; +#ifdef UPSAMPLE_MERGING_SUPPORTED my_master_ptr master = (my_master_ptr)cinfo->master; +#endif - if (cinfo->global_state != DSTATE_SCANNING || cinfo->output_scanline != 0) + if ((cinfo->global_state != DSTATE_SCANNING && + cinfo->global_state != DSTATE_BUFIMAGE) || cinfo->output_scanline != 0) ERREXIT1(cinfo, JERR_BAD_STATE, cinfo->global_state); if (!xoffset || !width) @@ -209,11 +212,13 @@ jpeg_crop_scanline(j_decompress_ptr cinfo, JDIMENSION *xoffset, */ *width = *width + input_xoffset - *xoffset; cinfo->output_width = *width; +#ifdef UPSAMPLE_MERGING_SUPPORTED if (master->using_merged_upsample && cinfo->max_v_samp_factor == 2) { my_merged_upsample_ptr upsample = (my_merged_upsample_ptr)cinfo->upsample; upsample->out_row_width = cinfo->output_width * cinfo->out_color_components; } +#endif /* Set the first and last iMCU columns that we must decompress. These values * will be used in single-scan decompressions. 
@@ -324,7 +329,9 @@ LOCAL(void) read_and_discard_scanlines(j_decompress_ptr cinfo, JDIMENSION num_lines) { JDIMENSION n; +#ifdef UPSAMPLE_MERGING_SUPPORTED my_master_ptr master = (my_master_ptr)cinfo->master; +#endif JSAMPLE dummy_sample[1] = { 0 }; JSAMPROW dummy_row = dummy_sample; JSAMPARRAY scanlines = NULL; @@ -348,10 +355,12 @@ read_and_discard_scanlines(j_decompress_ptr cinfo, JDIMENSION num_lines) cinfo->cquantize->color_quantize = noop_quantize; } +#ifdef UPSAMPLE_MERGING_SUPPORTED if (master->using_merged_upsample && cinfo->max_v_samp_factor == 2) { my_merged_upsample_ptr upsample = (my_merged_upsample_ptr)cinfo->upsample; scanlines = &upsample->spare_row; } +#endif for (n = 0; n < num_lines; n++) jpeg_read_scanlines(cinfo, scanlines, 1); @@ -517,7 +526,7 @@ jpeg_skip_scanlines(j_decompress_ptr cinfo, JDIMENSION num_lines) * all of the entropy decoding occurs in jpeg_start_decompress(), assuming * that the input data source is non-suspending. This makes skipping easy. */ - if (cinfo->inputctl->has_multiple_scans) { + if (cinfo->inputctl->has_multiple_scans || cinfo->buffered_image) { if (cinfo->upsample->need_context_rows) { cinfo->output_scanline += lines_to_skip; cinfo->output_iMCU_row += lines_to_skip / lines_per_iMCU_row; diff --git a/media/libjpeg/jdcoefct.c b/media/libjpeg/jdcoefct.c index 15e6cded628e..88e10c08cb62 100644 --- a/media/libjpeg/jdcoefct.c +++ b/media/libjpeg/jdcoefct.c @@ -5,7 +5,7 @@ * Copyright (C) 1994-1997, Thomas G. Lane. * libjpeg-turbo Modifications: * Copyright 2009 Pierre Ossman for Cendio AB - * Copyright (C) 2010, 2015-2016, 2019-2020, D. R. Commander. + * Copyright (C) 2010, 2015-2016, 2019-2020, 2022, D. R. Commander. * Copyright (C) 2015, 2020, Google, Inc. * For conditions of distribution and use, see the accompanying README.ijg * file. @@ -475,7 +475,7 @@ decompress_smooth_data(j_decompress_ptr cinfo, JSAMPIMAGE output_buf) if (!compptr->component_needed) continue; /* Count non-dummy DCT block rows in this iMCU row. 
*/ - if (cinfo->output_iMCU_row < last_iMCU_row - 1) { + if (cinfo->output_iMCU_row + 1 < last_iMCU_row) { block_rows = compptr->v_samp_factor; access_rows = block_rows * 3; /* this and next two iMCU rows */ } else if (cinfo->output_iMCU_row < last_iMCU_row) { @@ -560,7 +560,7 @@ decompress_smooth_data(j_decompress_ptr cinfo, JSAMPIMAGE output_buf) next_block_row = buffer_ptr; if (block_row < block_rows - 2 || - cinfo->output_iMCU_row < last_iMCU_row - 1) + cinfo->output_iMCU_row + 1 < last_iMCU_row) next_next_block_row = buffer[block_row + 2] + cinfo->master->first_MCU_col[ci]; else diff --git a/media/libjpeg/jerror.c b/media/libjpeg/jerror.c index d54470293758..d0ab5b88b0c7 100644 --- a/media/libjpeg/jerror.c +++ b/media/libjpeg/jerror.c @@ -189,9 +189,9 @@ format_message(j_common_ptr cinfo, char *buffer) /* Format the message into the passed buffer */ if (isstring) - snprintf(buffer, JMSG_LENGTH_MAX, msgtext, err->msg_parm.s); + SNPRINTF(buffer, JMSG_LENGTH_MAX, msgtext, err->msg_parm.s); else - snprintf(buffer, JMSG_LENGTH_MAX, msgtext, + SNPRINTF(buffer, JMSG_LENGTH_MAX, msgtext, err->msg_parm.i[0], err->msg_parm.i[1], err->msg_parm.i[2], err->msg_parm.i[3], err->msg_parm.i[4], err->msg_parm.i[5], diff --git a/media/libjpeg/jinclude.h b/media/libjpeg/jinclude.h index 120614b25cf3..e8d983ac171f 100644 --- a/media/libjpeg/jinclude.h +++ b/media/libjpeg/jinclude.h @@ -45,6 +45,18 @@ */ +#ifdef _MSC_VER + +#define SNPRINTF(str, n, format, ...) \ + _snprintf_s(str, n, _TRUNCATE, format, ##__VA_ARGS__) + +#else + +#define SNPRINTF snprintf + +#endif + + #ifndef NO_GETENV #ifdef _MSC_VER diff --git a/media/libjpeg/jmemmgr.c b/media/libjpeg/jmemmgr.c index 8f5a4ab1c78b..a40446f6ac36 100644 --- a/media/libjpeg/jmemmgr.c +++ b/media/libjpeg/jmemmgr.c @@ -68,10 +68,13 @@ round_up_pow2(size_t a, size_t b) * There isn't any really portable way to determine the worst-case alignment * requirement. 
This module assumes that the alignment requirement is * multiples of ALIGN_SIZE. - * By default, we define ALIGN_SIZE as sizeof(double). This is necessary on - * some workstations (where doubles really do need 8-byte alignment) and will - * work fine on nearly everything. If your machine has lesser alignment needs, - * you can save a few bytes by making ALIGN_SIZE smaller. + * By default, we define ALIGN_SIZE as the maximum of sizeof(double) and + * sizeof(void *). This is necessary on some workstations (where doubles + * really do need 8-byte alignment) and will work fine on nearly everything. + * We use the maximum of sizeof(double) and sizeof(void *) since sizeof(double) + * may be insufficient, for example, on CHERI-enabled platforms with 16-byte + * pointers and a 16-byte alignment requirement. If your machine has lesser + * alignment needs, you can save a few bytes by making ALIGN_SIZE smaller. * The only place I know of where this will NOT work is certain Macintosh * 680x0 compilers that define double as a 10-byte IEEE extended float. 
* Doing 10-byte alignment is counterproductive because longwords won't be @@ -81,7 +84,7 @@ round_up_pow2(size_t a, size_t b) #ifndef ALIGN_SIZE /* so can override from jconfig.h */ #ifndef WITH_SIMD -#define ALIGN_SIZE sizeof(double) +#define ALIGN_SIZE MAX(sizeof(void *), sizeof(double)) #else #define ALIGN_SIZE 32 /* Most of the SIMD instructions we support require 16-byte (128-bit) alignment, but AVX2 requires diff --git a/media/libjpeg/simd/arm/aarch32/jsimd.c b/media/libjpeg/simd/arm/aarch32/jsimd.c index e3adf23d5013..920f7656ebfe 100644 --- a/media/libjpeg/simd/arm/aarch32/jsimd.c +++ b/media/libjpeg/simd/arm/aarch32/jsimd.c @@ -25,8 +25,6 @@ #include "../../../jsimddct.h" #include "../../jsimd.h" -#include -#include #include static unsigned int simd_support = ~0; diff --git a/media/libjpeg/simd/arm/aarch64/jsimd.c b/media/libjpeg/simd/arm/aarch64/jsimd.c index 604d5472f6a6..41c06d318010 100644 --- a/media/libjpeg/simd/arm/aarch64/jsimd.c +++ b/media/libjpeg/simd/arm/aarch64/jsimd.c @@ -25,8 +25,6 @@ #include "../../jsimd.h" #include "jconfigint.h" -#include -#include #include #define JSIMD_FASTLD3 1 diff --git a/media/libjpeg/simd/mips/jsimd.c b/media/libjpeg/simd/mips/jsimd.c index d2546eed3289..36ea865d41e4 100644 --- a/media/libjpeg/simd/mips/jsimd.c +++ b/media/libjpeg/simd/mips/jsimd.c @@ -23,8 +23,6 @@ #include "../../jsimddct.h" #include "../jsimd.h" -#include -#include #include static unsigned int simd_support = ~0; diff --git a/media/libjpeg/simd/mips64/jsimd.c b/media/libjpeg/simd/mips64/jsimd.c index e8f1af562bab..2e626b2d3d97 100644 --- a/media/libjpeg/simd/mips64/jsimd.c +++ b/media/libjpeg/simd/mips64/jsimd.c @@ -24,8 +24,6 @@ #include "../../jsimddct.h" #include "../jsimd.h" -#include -#include #include static unsigned int simd_support = ~0; diff --git a/media/libjpeg/simd/powerpc/jsimd.c b/media/libjpeg/simd/powerpc/jsimd.c index b9e86dcfac26..9a452a309074 100644 --- a/media/libjpeg/simd/powerpc/jsimd.c +++ 
b/media/libjpeg/simd/powerpc/jsimd.c @@ -27,11 +27,12 @@ #include "../../jsimddct.h" #include "../jsimd.h" -#include -#include #include -#if defined(__OpenBSD__) +#if defined(__APPLE__) +#include +#include +#elif defined(__OpenBSD__) #include #include #include @@ -121,6 +122,10 @@ init_simd(void) int bufsize = 1024; /* an initial guess for the line buffer size limit */ #elif defined(__amigaos4__) uint32 altivec = 0; +#elif defined(__APPLE__) + int mib[2] = { CTL_HW, HW_VECTORUNIT }; + int altivec; + size_t len = sizeof(altivec); #elif defined(__OpenBSD__) int mib[2] = { CTL_MACHDEP, CPU_ALTIVEC }; int altivec; @@ -134,7 +139,7 @@ init_simd(void) simd_support = 0; -#if defined(__ALTIVEC__) || defined(__APPLE__) +#if defined(__ALTIVEC__) simd_support |= JSIMD_ALTIVEC; #elif defined(__linux__) || defined(ANDROID) || defined(__ANDROID__) while (!parse_proc_cpuinfo(bufsize)) { @@ -146,7 +151,7 @@ init_simd(void) IExec->GetCPUInfoTags(GCIT_VectorUnit, &altivec, TAG_DONE); if (altivec == VECTORTYPE_ALTIVEC) simd_support |= JSIMD_ALTIVEC; -#elif defined(__OpenBSD__) +#elif defined(__APPLE__) || defined(__OpenBSD__) if (sysctl(mib, 2, &altivec, &len, NULL, 0) == 0 && altivec != 0) simd_support |= JSIMD_ALTIVEC; #elif defined(__FreeBSD__) From 161b2d82610e1c6682ac19937c38b48adfb4c8eb Mon Sep 17 00:00:00 2001 From: Rob Wu Date: Thu, 24 Nov 2022 22:02:51 +0000 Subject: [PATCH 35/37] Bug 1745761 - Connect DNR to network stack r=rpl This patch implements the "block", "upgradeScheme" and "redirect" DNR actions, plus a comprehensive set of unit tests that exercise relevant scenarios and edge cases. 
Differential Revision: https://phabricator.services.mozilla.com/D161535 --- .../extensions/ExtensionDNR.sys.mjs | 221 +++++- .../test/mochitest/mochitest-common.ini | 1 + .../mochitest/test_ext_dnr_upgradeScheme.html | 120 +++ .../xpcshell/test_ext_dnr_allowAllRequests.js | 96 +++ .../test_ext_dnr_system_restrictions.js | 66 ++ .../test/xpcshell/test_ext_dnr_webrequest.js | 205 +++++ .../test_ext_dnr_without_webrequest.js | 720 ++++++++++++++++++ .../test/xpcshell/xpcshell-common.ini | 4 + .../extensions/webrequest/WebRequest.jsm | 29 +- 9 files changed, 1456 insertions(+), 6 deletions(-) create mode 100644 toolkit/components/extensions/test/mochitest/test_ext_dnr_upgradeScheme.html create mode 100644 toolkit/components/extensions/test/xpcshell/test_ext_dnr_allowAllRequests.js create mode 100644 toolkit/components/extensions/test/xpcshell/test_ext_dnr_system_restrictions.js create mode 100644 toolkit/components/extensions/test/xpcshell/test_ext_dnr_webrequest.js create mode 100644 toolkit/components/extensions/test/xpcshell/test_ext_dnr_without_webrequest.js diff --git a/toolkit/components/extensions/ExtensionDNR.sys.mjs b/toolkit/components/extensions/ExtensionDNR.sys.mjs index cd63d7839682..33dccf410089 100644 --- a/toolkit/components/extensions/ExtensionDNR.sys.mjs +++ b/toolkit/components/extensions/ExtensionDNR.sys.mjs @@ -69,6 +69,13 @@ const gRuleManagers = []; * - allow / allowAllRequests */ +const lazy = {}; +ChromeUtils.defineModuleGetter( + lazy, + "WebRequest", + "resource://gre/modules/WebRequest.jsm" +); + // The RuleCondition class represents a rule's "condition" type as described in // schemas/declarative_net_request.json. This class exists to allow the JS // engine to use one Shape for all Rule instances. @@ -307,8 +314,8 @@ class RuleValidator { // http(s) URLs can (regardless of extension permissions). // data:-URLs are currently blocked due to bug 1622986. 
- // TODO bug 1745761: With the redirect action, add schema definitions + - // implement rule.action.redirect.transform / regexSubstitution. + // TODO bug 1801870: Implement rule.action.redirect.transform. + // TODO bug 1745760: With regexFilter support, implement regexSubstitution. return true; } @@ -439,6 +446,17 @@ class RequestDetails { : null; } + static fromChannelWrapper(channel) { + return new RequestDetails({ + requestURI: channel.finalURI, + // Note: originURI may be null, if missing or null principal, as desired. + initiatorURI: channel.originURI, + type: channel.type, + method: channel.method.toLowerCase(), + tabId: null, // TODO: use getBrowserData to populate. + }); + } + canExtensionModify(extension) { const policy = extension.policy; return ( @@ -646,7 +664,15 @@ class RequestEvaluator { // Check this.req.requestURI: if (cond.urlFilter) { - // TODO bug 1745759: Check cond.urlFilter + isUrlFilterCaseSensitive + if ( + !this.#matchesUrlFilter( + this.req.requestURI, + cond.urlFilter, + cond.isUrlFilterCaseSensitive + ) + ) { + return false; + } } else if (cond.regexFilter) { // TODO bug 1745760: check cond.regexFilter + isUrlFilterCaseSensitive } @@ -703,6 +729,22 @@ class RequestEvaluator { return true; } + /** + * @param {nsIURI} uri - The request URI. + * @param {string} urlFilter + * @param {boolean} [isUrlFilterCaseSensitive] + * @returns {boolean} Whether urlFilter matches the given uri. + */ + #matchesUrlFilter(uri, urlFilter, isUrlFilterCaseSensitive) { + // TODO bug 1745759: Check cond.urlFilter + isUrlFilterCaseSensitive + // Placeholder for unit test until we have a complete implementation. + if (urlFilter === "|https:*") { + return uri.schemeIs("https"); + } + throw new Error(`urlFilter not implemented yet: ${urlFilter}`); + // return true; after all other checks passed. + } + /** * @param {string[]} domains - A list of canonicalized domain patterns. 
* Canonical means punycode, no ports, and IPv6 without brackets, and not @@ -746,6 +788,125 @@ class RequestEvaluator { } } +const NetworkIntegration = { + register() { + // We register via WebRequest.jsm to ensure predictable ordering of DNR and + // WebRequest behavior. + lazy.WebRequest.setDNRHandlingEnabled(true); + }, + unregister() { + lazy.WebRequest.setDNRHandlingEnabled(false); + }, + + startDNREvaluation(channel) { + let ruleManagers = gRuleManagers; + if (!channel.canModify) { + ruleManagers = []; + } + let matchedRules; + if (ruleManagers.length) { + const request = RequestDetails.fromChannelWrapper(channel); + matchedRules = RequestEvaluator.evaluateRequest(request, ruleManagers); + } + // Cache for later. In case of redirects, _dnrMatchedRules may exist for + // the pre-redirect HTTP channel, and is overwritten here again. + channel._dnrMatchedRules = matchedRules; + }, + + /** + * Applies the actions of the DNR rules. + * + * @param {ChannelWrapper} channel + * @returns {boolean} Whether to ignore any responses from the webRequest API. + */ + onBeforeRequest(channel) { + let matchedRules = channel._dnrMatchedRules; + if (!matchedRules?.length) { + return false; + } + // If a matched rule closes the channel, it is the sole match. + const finalMatch = matchedRules[0]; + switch (finalMatch.rule.action.type) { + case "block": + this.applyBlock(channel, finalMatch); + return true; + case "redirect": + this.applyRedirect(channel, finalMatch); + return true; + case "upgradeScheme": + this.applyUpgradeScheme(channel, finalMatch); + return true; + } + // If there are multiple rules, then it may be a combination of allow, + // allowAllRequests and/or modifyHeaders. + + // TODO bug 1797403: Apply allowAllRequests actions. + + return false; + }, + + onBeforeSendHeaders(channel) { + // TODO bug 1797404: apply modifyHeaders actions (requestHeaders). + }, + + onHeadersReceived(channel) { + // TODO bug 1797404: apply modifyHeaders actions (responseHeaders). 
+ }, + + applyBlock(channel, matchedRule) { + // TODO bug 1802259: Consider a DNR-specific reason. + channel.cancel( + Cr.NS_ERROR_ABORT, + Ci.nsILoadInfo.BLOCKING_REASON_EXTENSION_WEBREQUEST + ); + const addonId = matchedRule.ruleManager.extension.id; + let properties = channel.channel.QueryInterface(Ci.nsIWritablePropertyBag); + properties.setProperty("cancelledByExtension", addonId); + }, + + applyUpgradeScheme(channel, matchedRule) { + // Request upgrade. No-op if already secure (i.e. https). + channel.upgradeToSecure(); + }, + + applyRedirect(channel, matchedRule) { + // Ambiguity resolution order of redirect dict keys, consistent with Chrome: + // - url > extensionPath > transform > regexSubstitution + const redirect = matchedRule.rule.action.redirect; + const extension = matchedRule.ruleManager.extension; + let redirectUri; + if (redirect.url) { + // redirect.url already validated by checkActionRedirect. + redirectUri = Services.io.newURI(redirect.url); + } else if (redirect.extensionPath) { + redirectUri = extension.baseURI + .mutate() + .setPathQueryRef(redirect.extensionPath) + .finalize(); + } else if (redirect.transform) { + // TODO bug 1801870: Implement transform. + throw new Error("transform not implemented"); + } else if (redirect.regexSubstitution) { + // TODO bug 1745760: Implement along with regexFilter support. + throw new Error("regexSubstitution not implemented"); + } else { + // #checkActionRedirect ensures that the redirect action is non-empty. 
+ } + + channel.redirectTo(redirectUri); + + let properties = channel.channel.QueryInterface(Ci.nsIWritablePropertyBag); + properties.setProperty("redirectedByExtension", extension.id); + + let origin = channel.getRequestHeader("Origin"); + if (origin) { + channel.setResponseHeader("Access-Control-Allow-Origin", origin); + channel.setResponseHeader("Access-Control-Allow-Credentials", "true"); + channel.setResponseHeader("Access-Control-Max-Age", "0"); + } + }, +}; + class RuleManager { constructor(extension) { this.extension = extension; @@ -781,6 +942,10 @@ function getRuleManager(extension, createIfMissing = true) { // instantiate a RuleManager claims the highest priority. // TODO bug 1786059: order extensions by "installation time". gRuleManagers.unshift(ruleManager); + if (gRuleManagers.length === 1) { + // The first DNR registration. + NetworkIntegration.register(); + } } return ruleManager; } @@ -789,6 +954,10 @@ function clearRuleManager(extension) { let i = gRuleManagers.findIndex(rm => rm.extension === extension); if (i !== -1) { gRuleManagers.splice(i, 1); + if (gRuleManagers.length === 0) { + // The last DNR registration. + NetworkIntegration.unregister(); + } } } @@ -809,9 +978,55 @@ function getMatchedRulesForRequest(request, extension) { return RequestEvaluator.evaluateRequest(requestDetails, ruleManagers); } +/** + * Runs before any webRequest event is notified. Headers may be modified, but + * the request should not be canceled (see handleRequest instead). + * + * @param {ChannelWrapper} channel + * @param {string} kind - The name of the webRequest event. 
+ */ +function beforeWebRequestEvent(channel, kind) { + try { + switch (kind) { + case "onBeforeRequest": + NetworkIntegration.startDNREvaluation(channel); + break; + case "onBeforeSendHeaders": + NetworkIntegration.onBeforeSendHeaders(channel); + break; + case "onHeadersReceived": + NetworkIntegration.onHeadersReceived(channel); + break; + } + } catch (e) { + Cu.reportError(e); + } +} + +/** + * Applies matching DNR rules, some of which may potentially cancel the request. + * + * @param {ChannelWrapper} channel + * @param {string} kind - The name of the webRequest event. + * @returns {boolean} Whether to ignore any responses from the webRequest API. + */ +function handleRequest(channel, kind) { + try { + if (kind === "onBeforeRequest") { + return NetworkIntegration.onBeforeRequest(channel); + } + } catch (e) { + Cu.reportError(e); + } + return false; +} + export const ExtensionDNR = { RuleValidator, getRuleManager, clearRuleManager, getMatchedRulesForRequest, + + beforeWebRequestEvent, + handleRequest, }; diff --git a/toolkit/components/extensions/test/mochitest/mochitest-common.ini b/toolkit/components/extensions/test/mochitest/mochitest-common.ini index 77a8631bcb0c..99471f216287 100644 --- a/toolkit/components/extensions/test/mochitest/mochitest-common.ini +++ b/toolkit/components/extensions/test/mochitest/mochitest-common.ini @@ -131,6 +131,7 @@ skip-if = os == 'android' || tsan # Times out on TSan intermittently, bug 161518 skip-if = os == 'android' # Bug 1513544 Android does not support multiple windows. 
[test_ext_cookies_permissions_bad.html] [test_ext_cookies_permissions_good.html] +[test_ext_dnr_upgradeScheme.html] [test_ext_downloads_download.html] [test_ext_embeddedimg_iframe_frameAncestors.html] [test_ext_exclude_include_globs.html] diff --git a/toolkit/components/extensions/test/mochitest/test_ext_dnr_upgradeScheme.html b/toolkit/components/extensions/test/mochitest/test_ext_dnr_upgradeScheme.html new file mode 100644 index 000000000000..4c53c7c86e22 --- /dev/null +++ b/toolkit/components/extensions/test/mochitest/test_ext_dnr_upgradeScheme.html @@ -0,0 +1,120 @@ + + + + + DNR with upgradeScheme action + + + + + + + + + diff --git a/toolkit/components/extensions/test/xpcshell/test_ext_dnr_allowAllRequests.js b/toolkit/components/extensions/test/xpcshell/test_ext_dnr_allowAllRequests.js new file mode 100644 index 000000000000..b98807b7dde3 --- /dev/null +++ b/toolkit/components/extensions/test/xpcshell/test_ext_dnr_allowAllRequests.js @@ -0,0 +1,96 @@ +"use strict"; + +add_setup(() => { + Services.prefs.setBoolPref("extensions.manifestV3.enabled", true); + Services.prefs.setBoolPref("extensions.dnr.enabled", true); +}); + +const server = createHttpServer({ + hosts: ["example.com", "example.net", "example.org"], +}); +server.registerPathHandler("/never_reached", (req, res) => { + Assert.ok(false, "Server should never have been reached"); +}); +server.registerPathHandler("/allowed", (req, res) => { + res.setHeader("Access-Control-Allow-Origin", "*"); + res.setHeader("Access-Control-Max-Age", "0"); + res.write("allowed"); +}); +server.registerPathHandler("/", (req, res) => { + res.write("Dummy page"); +}); + +add_task(async function allowAllRequests_allows_request() { + async function background() { + await browser.declarativeNetRequest.updateSessionRules({ + addRules: [ + // allowAllRequests should take precedence over block. 
+ { + id: 1, + condition: { resourceTypes: ["main_frame", "xmlhttprequest"] }, + action: { type: "block" }, + }, + { + id: 2, + condition: { resourceTypes: ["main_frame"] }, + action: { type: "allowAllRequests" }, + }, + { + id: 3, + priority: 2, + // Note: when not specified, main_frame is excluded by default. So + // when a main_frame request is triggered, only rules 1 and 2 match. + condition: { requestDomains: ["example.com"] }, + action: { type: "block" }, + }, + ], + }); + browser.test.sendMessage("dnr_registered"); + } + let extension = ExtensionTestUtils.loadExtension({ + background, + manifest: { + manifest_version: 3, + permissions: ["declarativeNetRequest"], + }, + }); + await extension.startup(); + await extension.awaitMessage("dnr_registered"); + + let contentPage = await ExtensionTestUtils.loadContentPage( + "http://example.com/" + ); + Assert.equal( + await contentPage.spawn(null, () => content.document.URL), + "http://example.com/", + "main_frame request should have been allowed by allowAllRequests" + ); + + async function checkCanFetch(url) { + return contentPage.spawn(url, async url => { + try { + await (await content.fetch(url)).text(); + return true; + } catch (e) { + return false; // NetworkError: blocked + } + }); + } + + Assert.equal( + await checkCanFetch("http://example.com/never_reached"), + false, + "should be blocked by DNR rule 3" + ); + Assert.equal( + await checkCanFetch("http://example.net/"), + // TODO bug 1797403: Fix expectation once allowAllRequests is implemented: + // true, + // "should not be blocked by block rule due to allowAllRequests rule" + false, + "is blocked because persistency of allowAllRequests is not yet implemented" + ); + + await contentPage.close(); + await extension.unload(); +}); diff --git a/toolkit/components/extensions/test/xpcshell/test_ext_dnr_system_restrictions.js b/toolkit/components/extensions/test/xpcshell/test_ext_dnr_system_restrictions.js new file mode 100644 index 000000000000..e2f6da072a2d --- 
/dev/null +++ b/toolkit/components/extensions/test/xpcshell/test_ext_dnr_system_restrictions.js @@ -0,0 +1,66 @@ +"use strict"; + +const server = createHttpServer({ hosts: ["example.com", "restricted"] }); +server.registerPathHandler("/", (req, res) => { + res.setHeader("Access-Control-Allow-Origin", "*"); + res.write("response from server"); +}); + +add_setup(() => { + Services.prefs.setBoolPref("extensions.manifestV3.enabled", true); + Services.prefs.setBoolPref("extensions.dnr.enabled", true); + // The restrictedDomains pref should be set early, because the pref is read + // only once (on first use) by WebExtensionPolicy::IsRestrictedURI. + Services.prefs.setCharPref( + "extensions.webextensions.restrictedDomains", + "restricted" + ); +}); + +async function startDNRExtension() { + let extension = ExtensionTestUtils.loadExtension({ + async background() { + await browser.declarativeNetRequest.updateSessionRules({ + addRules: [{ id: 1, condition: {}, action: { type: "block" } }], + }); + browser.test.sendMessage("dnr_registered"); + }, + manifest: { + manifest_version: 3, + permissions: ["declarativeNetRequest"], + }, + }); + await extension.startup(); + await extension.awaitMessage("dnr_registered"); + return extension; +} + +add_task(async function dnr_ignores_system_requests() { + let extension = await startDNRExtension(); + Assert.equal( + await (await fetch("http://example.com/")).text(), + "response from server", + "DNR should not block requests from system principal" + ); + await extension.unload(); +}); + +add_task(async function dnr_ignores_requests_to_restrictedDomains() { + let extension = await startDNRExtension(); + Assert.equal( + await ExtensionTestUtils.fetch("http://example.com/", "http://restricted/"), + "response from server", + "DNR should not block destination in restrictedDomains" + ); + await extension.unload(); +}); + +add_task(async function dnr_ignores_initiator_from_restrictedDomains() { + let extension = await startDNRExtension(); + 
Assert.equal( + await ExtensionTestUtils.fetch("http://restricted/", "http://example.com/"), + "response from server", + "DNR should not block requests initiated from a page in restrictedDomains" + ); + await extension.unload(); +}); diff --git a/toolkit/components/extensions/test/xpcshell/test_ext_dnr_webrequest.js b/toolkit/components/extensions/test/xpcshell/test_ext_dnr_webrequest.js new file mode 100644 index 000000000000..15dd11b14d29 --- /dev/null +++ b/toolkit/components/extensions/test/xpcshell/test_ext_dnr_webrequest.js @@ -0,0 +1,205 @@ +"use strict"; + +add_setup(() => { + Services.prefs.setBoolPref("extensions.manifestV3.enabled", true); + Services.prefs.setBoolPref("extensions.dnr.enabled", true); +}); + +const server = createHttpServer({ + hosts: ["example.com", "redir"], +}); +server.registerPathHandler("/never_reached", (req, res) => { + Assert.ok(false, "Server should never have been reached"); +}); + +add_task(async function block_request_with_dnr() { + async function background() { + let onBeforeRequestPromise = new Promise(resolve => { + browser.webRequest.onBeforeRequest.addListener(resolve, { + urls: ["*://example.com/*"], + }); + }); + await browser.declarativeNetRequest.updateSessionRules({ + addRules: [ + { + id: 1, + condition: { requestDomains: ["example.com"] }, + action: { type: "block" }, + }, + ], + }); + + await browser.test.assertRejects( + fetch("http://example.com/never_reached"), + "NetworkError when attempting to fetch resource.", + "blocked by DNR rule" + ); + // DNR is documented to take precedence over webRequest. We should still + // receive the webRequest event, however. 
+ browser.test.log("Waiting for webRequest.onBeforeRequest..."); + await onBeforeRequestPromise; + browser.test.log("Seen webRequest.onBeforeRequest!"); + + browser.test.notifyPass(); + } + let extension = ExtensionTestUtils.loadExtension({ + background, + temporarilyInstalled: true, // Needed for granted_host_permissions + manifest: { + manifest_version: 3, + granted_host_permissions: true, + host_permissions: ["*://example.com/*"], + permissions: ["declarativeNetRequest", "webRequest"], + }, + }); + await extension.startup(); + await extension.awaitFinish(); + await extension.unload(); +}); + +add_task(async function upgradeScheme_and_redirect_request_with_dnr() { + async function background() { + let onBeforeRequestSeen = []; + browser.webRequest.onBeforeRequest.addListener( + d => { + onBeforeRequestSeen.push(d.url); + // webRequest cancels, but DNR should actually be taking precedence. + return { cancel: true }; + }, + { urls: ["*://example.com/*", "http://redir/here"] }, + ["blocking"] + ); + await browser.declarativeNetRequest.updateSessionRules({ + addRules: [ + { + id: 1, + condition: { requestDomains: ["example.com"] }, + action: { type: "upgradeScheme" }, + }, + { + id: 2, + condition: { requestDomains: ["example.com"], urlFilter: "|https:*" }, + action: { type: "redirect", redirect: { url: "http://redir/here" } }, + // The upgradeScheme and redirect actions have equal precedence. To + // make sure that the redirect action is executed when both rules + // match, we assign a higher priority to the redirect action. + priority: 2, + }, + ], + }); + + await browser.test.assertRejects( + fetch("http://example.com/never_reached"), + "NetworkError when attempting to fetch resource.", + "although initially redirected by DNR, ultimately blocked by webRequest" + ); + // DNR is documented to take precedence over webRequest. 
+ // So we should actually see redirects according to the DNR rules, and + // the webRequest listener should still be able to observe all requests. + browser.test.assertDeepEq( + [ + "http://example.com/never_reached", + "https://example.com/never_reached", + "http://redir/here", + ], + onBeforeRequestSeen, + "Expected onBeforeRequest events" + ); + + browser.test.notifyPass(); + } + let extension = ExtensionTestUtils.loadExtension({ + background, + temporarilyInstalled: true, // Needed for granted_host_permissions + manifest: { + manifest_version: 3, + granted_host_permissions: true, + host_permissions: ["*://example.com/*", "*://redir/*"], + permissions: [ + "declarativeNetRequest", + "webRequest", + "webRequestBlocking", + ], + }, + }); + await extension.startup(); + await extension.awaitFinish(); + await extension.unload(); +}); + +add_task(async function block_request_with_webRequest_after_allow_with_dnr() { + async function background() { + let onBeforeRequestSeen = []; + browser.webRequest.onBeforeRequest.addListener( + d => { + onBeforeRequestSeen.push(d.url); + return { cancel: !d.url.includes("webRequestNoCancel") }; + }, + { urls: ["*://example.com/*"] }, + ["blocking"] + ); + // All DNR actions that do not end up canceling/redirecting the request: + await browser.declarativeNetRequest.updateSessionRules({ + addRules: [ + { + id: 1, + condition: { requestMethods: ["get"] }, + action: { type: "allow" }, + }, + { + id: 2, + condition: { requestMethods: ["put"] }, + action: { + type: "modifyHeaders", + requestHeaders: [{ operation: "set", header: "x", value: "y" }], + }, + }, + ], + }); + + await browser.test.assertRejects( + fetch("http://example.com/never_reached?1", { method: "get" }), + "NetworkError when attempting to fetch resource.", + "despite DNR 'allow' rule, still blocked by webRequest" + ); + await browser.test.assertRejects( + fetch("http://example.com/never_reached?2", { method: "put" }), + "NetworkError when attempting to fetch resource.", + 
"despite DNR 'modifyHeaders' rule, still blocked by webRequest" + ); + // Just to rule out the request having been canceled by DNR instead of + // webRequest, repeat the requests and verify that they succeed. + await fetch("http://example.com/?webRequestNoCancel1", { method: "get" }); + await fetch("http://example.com/?webRequestNoCancel2", { method: "put" }); + + browser.test.assertDeepEq( + [ + "http://example.com/never_reached?1", + "http://example.com/never_reached?2", + "http://example.com/?webRequestNoCancel1", + "http://example.com/?webRequestNoCancel2", + ], + onBeforeRequestSeen, + "Expected onBeforeRequest events" + ); + + browser.test.notifyPass(); + } + let extension = ExtensionTestUtils.loadExtension({ + background, + temporarilyInstalled: true, // Needed for granted_host_permissions + manifest: { + manifest_version: 3, + granted_host_permissions: true, + host_permissions: ["*://example.com/*"], + permissions: [ + "declarativeNetRequest", + "webRequest", + "webRequestBlocking", + ], + }, + }); + await extension.startup(); + await extension.awaitFinish(); + await extension.unload(); +}); diff --git a/toolkit/components/extensions/test/xpcshell/test_ext_dnr_without_webrequest.js b/toolkit/components/extensions/test/xpcshell/test_ext_dnr_without_webrequest.js new file mode 100644 index 000000000000..7550e5f375cb --- /dev/null +++ b/toolkit/components/extensions/test/xpcshell/test_ext_dnr_without_webrequest.js @@ -0,0 +1,720 @@ +"use strict"; + +// This test file verifies that the declarativeNetRequest API can modify +// network requests as expected without the presence of the webRequest API. See +// test_ext_dnr_webRequest.js for the interaction between webRequest and DNR. 
+ +add_setup(() => { + Services.prefs.setBoolPref("extensions.manifestV3.enabled", true); + Services.prefs.setBoolPref("extensions.dnr.enabled", true); +}); + +const server = createHttpServer({ + hosts: ["example.com", "example.net", "example.org", "redir", "dummy"], +}); +server.registerPathHandler("/cors_202", (req, res) => { + res.setStatusLine(req.httpVersion, 202, "Accepted"); + // The extensions in this test have minimal permissions, so grant CORS to + // allow them to read the response without host permissions. + res.setHeader("Access-Control-Allow-Origin", "*"); + res.setHeader("Access-Control-Max-Age", "0"); + res.write("cors_response"); +}); +server.registerPathHandler("/never_reached", (req, res) => { + Assert.ok(false, "Server should never have been reached"); + res.setHeader("Access-Control-Allow-Origin", "*"); + res.setHeader("Access-Control-Max-Age", "0"); +}); +let gPreflightCount = 0; +server.registerPathHandler("/preflight_count", (req, res) => { + res.setHeader("Access-Control-Allow-Origin", "*"); + res.setHeader("Access-Control-Max-Age", "0"); + res.setHeader("Access-Control-Allow-Methods", "NONSIMPLE"); + if (req.method === "OPTIONS") { + ++gPreflightCount; + } else { + // CORS Preflight considers 2xx to be successful. To rule out inadvertent + // server opt-in to CORS, respond with a non-2xx response. + res.setStatusLine(req.httpVersion, 418, "I'm a teapot"); + res.write(`count=${gPreflightCount}`); + } +}); +server.registerPathHandler("/", (req, res) => { + res.setHeader("Access-Control-Allow-Origin", "*"); + res.setHeader("Access-Control-Max-Age", "0"); + res.write("Dummy page"); +}); + +async function contentFetch(initiatorURL, url, options) { + let contentPage = await ExtensionTestUtils.loadContentPage(initiatorURL); + // Sanity check: that the initiator is as specified, and not redirected. 
+ Assert.equal( + await contentPage.spawn(null, () => content.document.URL), + initiatorURL, + `Expected document load at: ${initiatorURL}` + ); + let result = await contentPage.spawn({ url, options }, async args => { + try { + let req = await content.fetch(args.url, args.options); + return { + status: req.status, + url: req.url, + body: await req.text(), + }; + } catch (e) { + return { error: e.message }; + } + }); + await contentPage.close(); + return result; +} + +add_task(async function block_request_with_dnr() { + async function background() { + await browser.declarativeNetRequest.updateSessionRules({ + addRules: [ + { + id: 1, + condition: { requestMethods: ["get"] }, + action: { type: "block" }, + }, + { + id: 2, + condition: { requestMethods: ["head"] }, + action: { type: "allow" }, + }, + ], + }); + { + // Request not matching DNR. + let req = await fetch("http://example.com/cors_202", { method: "post" }); + browser.test.assertEq(202, req.status, "allowed without DNR rule"); + browser.test.assertEq("cors_response", await req.text()); + } + { + // Request with "allow" DNR action. + let req = await fetch("http://example.com/cors_202", { method: "head" }); + browser.test.assertEq(202, req.status, "allowed by DNR rule"); + browser.test.assertEq("", await req.text(), "no response for HEAD"); + } + + // Request with "block" DNR action. + await browser.test.assertRejects( + fetch("http://example.com/never_reached", { method: "get" }), + "NetworkError when attempting to fetch resource.", + "blocked by DNR rule" + ); + + browser.test.sendMessage("tested_dnr_block"); + } + let extension = ExtensionTestUtils.loadExtension({ + background, + manifest: { + manifest_version: 3, + permissions: ["declarativeNetRequest"], + }, + }); + await extension.startup(); + await extension.awaitMessage("tested_dnr_block"); + + // DNR should not only work with requests within the extension, but also from + // web pages. 
+  Assert.deepEqual(
+    await contentFetch("http://dummy/", "http://example.com/never_reached"),
+    { error: "NetworkError when attempting to fetch resource." },
+    "Blocked by DNR with declarativeNetRequestWithHostAccess"
+  );
+
+  // The declarativeNetRequest permission grants the ability to block requests
+  // from other extensions. (The declarativeNetRequestWithHostAccess permission
+  // does not; see test task block_with_declarativeNetRequestWithHostAccess.)
+  let otherExtension = ExtensionTestUtils.loadExtension({
+    async background() {
+      await browser.test.assertRejects(
+        fetch("http://example.com/never_reached", { method: "get" }),
+        "NetworkError when attempting to fetch resource.",
+        "blocked by different extension with declarativeNetRequest permission"
+      );
+      browser.test.sendMessage("other_extension_done");
+    },
+  });
+  await otherExtension.startup();
+  await otherExtension.awaitMessage("other_extension_done");
+  await otherExtension.unload();
+
+  await extension.unload();
+});
+
+// Verifies that the "declarativeNetRequestWithHostAccess" permission can only
+// block if it has permission for the initiator.
+add_task(async function block_with_declarativeNetRequestWithHostAccess() {
+  let extension = ExtensionTestUtils.loadExtension({
+    async background() {
+      await browser.declarativeNetRequest.updateSessionRules({
+        addRules: [{ id: 1, condition: {}, action: { type: "block" } }],
+      });
+      browser.test.sendMessage("dnr_registered");
+    },
+    temporarilyInstalled: true, // Needed for granted_host_permissions
+    manifest: {
+      manifest_version: 3,
+      granted_host_permissions: true,
+      host_permissions: ["<all_urls>"],
+      permissions: ["declarativeNetRequestWithHostAccess"],
+    },
+  });
+  await extension.startup();
+  await extension.awaitMessage("dnr_registered");
+
+  // Initiator "http://dummy" does match "<all_urls>", so DNR rule should apply.
+ Assert.deepEqual( + await contentFetch("http://dummy/", "http://example.com/never_reached"), + { error: "NetworkError when attempting to fetch resource." }, + "Blocked by DNR with declarativeNetRequestWithHostAccess" + ); + + // Extensions cannot have permissions for another extension and therefore the + // DNR rule never applies. + let otherExtension = ExtensionTestUtils.loadExtension({ + async background() { + let req = await fetch("http://example.com/cors_202", { method: "get" }); + browser.test.assertEq(202, req.status, "not blocked by other extension"); + browser.test.assertEq("cors_response", await req.text()); + browser.test.sendMessage("other_extension_done"); + }, + }); + await otherExtension.startup(); + await otherExtension.awaitMessage("other_extension_done"); + await otherExtension.unload(); + + await extension.unload(); +}); + +// Verifies that upgradeScheme works. +// The HttpServer helper does not support https (bug 1742061), so in this +// test we just verify whether the upgrade has been attempted. Coverage that +// verifies that the upgraded request completes is in: +// toolkit/components/extensions/test/mochitest/test_ext_dnr_upgradeScheme.html +add_task(async function upgradeScheme_declarativeNetRequestWithHostAccess() { + let extension = ExtensionTestUtils.loadExtension({ + async background() { + await browser.declarativeNetRequest.updateSessionRules({ + addRules: [ + { + id: 1, + condition: { excludedRequestDomains: ["dummy"] }, + action: { type: "upgradeScheme" }, + }, + { + id: 2, + // HttpServer does not support https (bug 1742061). + // As a work-around, we just redirect the https:-request to http. + condition: { urlFilter: "|https:*" }, + action: { + type: "redirect", + redirect: { url: "http://dummy/cors_202?from_https" }, + }, + // The upgradeScheme and redirect actions have equal precedence. To + // make sure that the redirect action is executed when both rules + // match, we assign a higher priority to the redirect action. 
+ priority: 2, + }, + ], + }); + + let req = await fetch("http://redir/never_reached"); + browser.test.assertEq( + "http://dummy/cors_202?from_https", + req.url, + "upgradeScheme upgraded to https" + ); + browser.test.assertEq("cors_response", await req.text()); + + browser.test.sendMessage("tested_dnr_upgradeScheme"); + }, + temporarilyInstalled: true, // Needed for granted_host_permissions. + manifest: { + manifest_version: 3, + granted_host_permissions: true, + host_permissions: ["*://dummy/*", "*://redir/*"], + permissions: ["declarativeNetRequestWithHostAccess"], + }, + }); + await extension.startup(); + await extension.awaitMessage("tested_dnr_upgradeScheme"); + + // Request to same-origin subresource, which should be upgraded. + Assert.equal( + (await contentFetch("http://redir/", "http://redir/never_reached")).url, + "http://dummy/cors_202?from_https", + "upgradeScheme + host access should upgrade (same-origin request)" + ); + + // Request to cross-origin subresource, which should be upgraded. + // Note: after the upgrade, a cross-origin redirect happens. Internally, we + // reflect the Origin request header in the Access-Control-Allow-Origin (ACAO) + // response header, to ensure that the request is accepted by CORS. See + // https://github.com/w3c/webappsec-upgrade-insecure-requests/issues/32 + Assert.equal( + (await contentFetch("http://dummy/", "http://redir/never_reached")).url, + // TODO bug 1800990: despite the mirrored Origin in ACAO, the CORS check + // fails after a request is upgraded. Once fixed, update this expectation: + undefined, // Should be: "http://dummy/cors_202?from_https", + "TODO 1800990: upgradeScheme + host access should upgrade (cross-origin request)" + ); + + // The DNR extension does not have example.net in host_permissions. 
+ const urlNoHostPerms = "http://example.net/cors_202?missing_host_permission"; + Assert.equal( + (await contentFetch("http://dummy/", urlNoHostPerms)).url, + urlNoHostPerms, + "upgradeScheme not matched when extension lacks host access" + ); + + await extension.unload(); +}); + +add_task(async function redirect_request_with_dnr() { + async function background() { + await browser.declarativeNetRequest.updateSessionRules({ + addRules: [ + { + id: 1, + condition: { + requestDomains: ["example.com"], + requestMethods: ["get"], + }, + action: { + type: "redirect", + redirect: { + url: "http://example.net/cors_202?1", + }, + }, + }, + { + id: 2, + // Note: extension does not have example.org host permission. + condition: { requestDomains: ["example.org"] }, + action: { + type: "redirect", + redirect: { + url: "http://example.net/cors_202?2", + }, + }, + }, + ], + }); + // The extension only has example.com permission, but the redirects to + // example.net are still due to the CORS headers from the server. + { + // Simple GET request. + let req = await fetch("http://example.com/never_reached"); + browser.test.assertEq(202, req.status, "redirected by DNR (simple)"); + browser.test.assertEq("http://example.net/cors_202?1", req.url); + browser.test.assertEq("cors_response", await req.text()); + } + { + // GeT request should be matched despite having a different case. + let req = await fetch("http://example.com/never_reached", { + method: "GeT", + }); + browser.test.assertEq(202, req.status, "redirected by DNR (GeT)"); + browser.test.assertEq("http://example.net/cors_202?1", req.url); + browser.test.assertEq("cors_response", await req.text()); + } + { + // Host permission missing for request, request not redirected by DNR. + // Response is readable due to the CORS response headers from the server. 
+ let req = await fetch("http://example.org/cors_202?noredir"); + browser.test.assertEq(202, req.status, "not redirected by DNR"); + browser.test.assertEq("http://example.org/cors_202?noredir", req.url); + browser.test.assertEq("cors_response", await req.text()); + } + + browser.test.notifyPass(); + } + let extension = ExtensionTestUtils.loadExtension({ + background, + temporarilyInstalled: true, // Needed for granted_host_permissions + manifest: { + manifest_version: 3, + granted_host_permissions: true, + host_permissions: ["*://example.com/*"], + permissions: ["declarativeNetRequest"], + }, + }); + await extension.startup(); + await extension.awaitFinish(); + + let otherExtension = ExtensionTestUtils.loadExtension({ + async background() { + // The DNR extension has permissions for example.com, but not for this + // extension. Therefore the "redirect" action should not apply. + let req = await fetch("http://example.com/cors_202?other_ext"); + browser.test.assertEq(202, req.status, "not redirected by DNR"); + browser.test.assertEq("http://example.com/cors_202?other_ext", req.url); + browser.test.assertEq("cors_response", await req.text()); + browser.test.sendMessage("other_extension_done"); + }, + }); + await otherExtension.startup(); + await otherExtension.awaitMessage("other_extension_done"); + await otherExtension.unload(); + + await extension.unload(); +}); + +// Verifies that DNR redirects requiring a CORS preflight behave as expected. +add_task(async function redirect_request_with_dnr_cors_preflight() { + // Most other test tasks only test requests within the test extension. This + // test intentionally triggers requests outside the extension, to make sure + // that the usual CORS mechanisms is triggered (instead of exceptions from + // host permissions). 
+ async function background() { + await browser.declarativeNetRequest.updateSessionRules({ + addRules: [ + { + id: 1, + condition: { + requestDomains: ["redir"], + excludedRequestMethods: ["options"], + }, + action: { + type: "redirect", + redirect: { + url: "http://example.com/preflight_count", + }, + }, + }, + { + id: 2, + condition: { + requestDomains: ["example.net"], + excludedRequestMethods: ["nonsimple"], // note: redirects "options" + }, + action: { + type: "redirect", + redirect: { + url: "http://example.com/preflight_count", + }, + }, + }, + ], + }); + let req = await fetch("http://redir/never_reached", { + method: "NONSIMPLE", + }); + // Extension has permission for "redir", but not for the redirect target. + // The request is non-simple (see below for explanation of non-simple), so + // a preflight (OPTIONS) request to /preflight_count is expected before the + // redirection target is requested. + browser.test.assertEq( + "count=1", + await req.text(), + "Got preflight before redirect target because of missing host_permissions" + ); + + browser.test.sendMessage("continue_preflight_tests"); + } + + let extension = ExtensionTestUtils.loadExtension({ + background, + temporarilyInstalled: true, // Needed for granted_host_permissions + manifest: { + manifest_version: 3, + granted_host_permissions: true, + // "redir" and "example.net" are needed to allow redirection of these. + // "dummy" is needed to redirect requests initiated from http://dummy. + host_permissions: ["*://redir/*", "*://example.net/*", "*://dummy/*"], + permissions: ["declarativeNetRequest"], + }, + }); + gPreflightCount = 0; + await extension.startup(); + await extension.awaitMessage("continue_preflight_tests"); + gPreflightCount = 0; // value already checked before continue_preflight_tests. + + // Simple request (i.e. without preflight requirement), that's redirected to + // another URL by the DNR rule. 
The redirect should be accepted, and in
+  // particular not be blocked by the same-origin policy. The redirect target
+  // (/preflight_count) is readable due to the CORS headers from the server.
+  Assert.deepEqual(
+    await contentFetch("http://dummy/", "http://redir/never_reached"),
+    // count=0: A simple request does not trigger a preflight (OPTIONS) request.
+    { status: 418, url: "http://example.com/preflight_count", body: "count=0" },
+    "Simple request should not have a preflight."
+  );
+
+  // Any request method other than "GET", "HEAD" and "POST" (e.g. "NONSIMPLE")
+  // is a non-simple request that triggers a preflight request ("OPTIONS").
+  //
+  // Usually, this happens (without extension-triggered redirects):
+  // 1. NONSIMPLE /never_reached : is started, but does NOT hit the server yet.
+  // 2. OPTIONS /never_reached + Access-Control-Request-Method: NONSIMPLE
+  // 3. NONSIMPLE /never_reached : reaches the server if allowed by OPTIONS.
+  //
+  // With an extension-initiated redirect to /preflight_count:
+  // 1. NONSIMPLE /never_reached : is started, but does not hit the server yet.
+  // 2. extension redirects to /preflight_count
+  // 3. OPTIONS /preflight_count + Access-Control-Request-Method: NONSIMPLE
+  //    - This is because the redirect preserves the request method/body/etc.
+  // 4. NONSIMPLE /preflight_count : reaches the server if allowed by OPTIONS.
+  Assert.deepEqual(
+    await contentFetch("http://dummy/", "http://redir/never_reached", {
+      method: "NONSIMPLE",
+    }),
+    // Due to excludedRequestMethods: ["options"], the preflight for the
+    // redirect target is not intercepted, so the server sees a preflight.
+    { status: 418, url: "http://example.com/preflight_count", body: "count=1" },
+    "Initial URL redirected, redirection target has preflight"
+  );
+  gPreflightCount = 0;
+
+  // The "example.net" rule has "excludedRequestMethods": ["nonsimple"], so the
+  // initial "NONSIMPLE" request is not immediately redirected.
Therefore the + // preflight request happens. This OPTIONS request is matched by the DNR rule + // and redirected to /preflight_count. While preflight_count offers a very + // permissive preflight response, it is not even fetched: + // Only a 2xx HTTP status is considered a valid response to a pre-flight. + // A redirect is like a 3xx HTTP status, so the whole request is rejected, + // and the redirect is not followed for the OPTIONS request. + Assert.deepEqual( + await contentFetch("http://dummy/", "http://example.net/never_reached", { + method: "NONSIMPLE", + }), + { error: "NetworkError when attempting to fetch resource." }, + "Redirect of preflight request (OPTIONS) should be a CORS failure" + ); + + Assert.equal(gPreflightCount, 0, "Preflight OPTIONS has been intercepted"); + + await extension.unload(); +}); + +// Tests that DNR redirect rules can be chained. +add_task(async function redirect_request_with_dnr_multiple_hops() { + async function background() { + // Set up redirects from example.com up until dummy. + let hosts = ["example.com", "example.net", "example.org", "redir", "dummy"]; + let rules = []; + for (let i = 1; i < hosts.length; ++i) { + const from = hosts[i - 1]; + const to = hosts[i]; + const end = hosts.length - 1 === i; + rules.push({ + id: i, + condition: { requestDomains: [from] }, + action: { + type: "redirect", + redirect: { + // All intermediate redirects should never hit the server, but the + // last one should.. + url: end ? 
`http://${to}/?end` : `http://${to}/never_reached`, + }, + }, + }); + } + await browser.declarativeNetRequest.updateSessionRules({ addRules: rules }); + let req = await fetch("http://example.com/never_reached"); + browser.test.assertEq(200, req.status, "redirected by DNR (multiple)"); + browser.test.assertEq("http://dummy/?end", req.url, "Last URL in chain"); + browser.test.assertEq("Dummy page", await req.text()); + + browser.test.notifyPass(); + } + let extension = ExtensionTestUtils.loadExtension({ + background, + temporarilyInstalled: true, // Needed for granted_host_permissions + manifest: { + manifest_version: 3, + granted_host_permissions: true, + host_permissions: ["*://*/*"], // matches all in the |hosts| list. + permissions: ["declarativeNetRequest"], + }, + }); + await extension.startup(); + await extension.awaitFinish(); + + // Test again, but without special extension permissions to verify that DNR + // redirects pass CORS checks. + Assert.deepEqual( + await contentFetch("http://dummy/", "http://redir/never_reached"), + { status: 200, url: "http://dummy/?end", body: "Dummy page" }, + "Multiple redirects by DNR, requested from web origin." + ); + + await extension.unload(); +}); + +add_task(async function redirect_request_with_dnr_with_redirect_loop() { + async function background() { + await browser.declarativeNetRequest.updateSessionRules({ + addRules: [ + { + id: 1, + condition: { requestDomains: ["redir"] }, + action: { + type: "redirect", + redirect: { + url: "http://redir/cors_202?loop", + }, + }, + }, + ], + }); + + // Redirect with initially a different URL. + await browser.test.assertRejects( + fetch("http://redir/never_reached?"), + "NetworkError when attempting to fetch resource.", + "Redirect loop caught (initially different URL)" + ); + + // Redirect where redirect is exactly the same URL as requested. 
+ await browser.test.assertRejects( + fetch("http://redir/cors_202?loop"), + "NetworkError when attempting to fetch resource.", + "Redirect loop caught (redirect target same as initial URL)" + ); + + browser.test.notifyPass(); + } + let extension = ExtensionTestUtils.loadExtension({ + background, + temporarilyInstalled: true, // Needed for granted_host_permissions + manifest: { + manifest_version: 3, + granted_host_permissions: true, + host_permissions: ["*://redir/*"], + permissions: ["declarativeNetRequest"], + }, + }); + await extension.startup(); + await extension.awaitFinish(); + await extension.unload(); +}); + +// Tests that redirect to extensionPath works, provided that the initiator is +// either the extension itself, or in host_permissions. Moreover, the requested +// resource must match a web_accessible_resources entry for both the initiator +// AND the pre-redirect URL. +add_task(async function redirect_request_with_dnr_to_extensionPath() { + async function background() { + await browser.declarativeNetRequest.updateSessionRules({ + addRules: [ + { + id: 1, + condition: { requestDomains: ["redir"], requestMethods: ["post"] }, + action: { + type: "redirect", + redirect: { + extensionPath: "/war.txt?1", + }, + }, + }, + { + id: 2, + condition: { requestDomains: ["redir"], requestMethods: ["put"] }, + action: { + type: "redirect", + redirect: { + extensionPath: "/nonwar.txt?2", + }, + }, + }, + ], + }); + { + let req = await fetch("http://redir/never_reached", { method: "post" }); + browser.test.assertEq(200, req.status, "redirected to extensionPath"); + browser.test.assertEq(`${location.origin}/war.txt?1`, req.url); + browser.test.assertEq("war_ext_res", await req.text()); + } + // Redirects to extensionPath that is not in web_accessible_resources. 
+ // While the initiator (extension) would be allowed to read the resource + // due to it being same-origin, the pre-redirect URL (http://redir) is not + // matching web_accessible_resources[].matches, so the load is rejected. + // + // This behavior differs from Chrome (e.g. at least in Chrome 109) that + // does allow the load to complete. Extensions who really care about + // exposing a web-accessible resource to the world can just put an all_urls + // pattern in web_accessible_resources[].matches. + await browser.test.assertRejects( + fetch("http://redir/never_reached", { method: "put" }), + "NetworkError when attempting to fetch resource.", + "Redirect to nowar.txt, but pre-redirect host is not in web_accessible_resources[].matches" + ); + + browser.test.notifyPass(); + } + let extension = ExtensionTestUtils.loadExtension({ + background, + temporarilyInstalled: true, // Needed for granted_host_permissions + manifest: { + manifest_version: 3, + granted_host_permissions: true, + host_permissions: ["*://redir/*", "*://dummy/*"], + permissions: ["declarativeNetRequest"], + web_accessible_resources: [ + // *://redir/* is in matches, because that is the pre-redirect host. + // *://dummy/* is in matches, because that is an initiator below. + { resources: ["war.txt"], matches: ["*://redir/*", "*://dummy/*"] }, + // without "matches", this is almost equivalent to not being listed in + // web_accessible_resources at all. This entry is listed here to verify + // that the presence of extension_ids does not somehow allow a request + // with an extension initiator to complete. + { resources: ["nonwar.txt"], extension_ids: ["*"] }, + ], + }, + files: { + "war.txt": "war_ext_res", + "nonwar.txt": "non_war_ext_res", + }, + }); + await extension.startup(); + await extension.awaitFinish(); + const extPrefix = `moz-extension://${extension.uuid}`; + + // Request from origin in host_permissions, for web-accessible resource. 
+ Assert.deepEqual( + await contentFetch( + "http://dummy/", // <-- Matching web_accessible_resources[].matches + "http://redir/never_reached", // <-- With matching host_permissions + { method: "post" } + ), + { status: 200, url: `${extPrefix}/war.txt?1`, body: "war_ext_res" }, + "Should have got redirect to web_accessible_resources (war.txt)" + ); + + // Request from origin in host_permissions, for non-web-accessible resource. + let { messages } = await promiseConsoleOutput(async () => { + Assert.deepEqual( + await contentFetch( + "http://dummy/", // <-- Matching web_accessible_resources[].matches + "http://redir/never_reached", // <-- With matching host_permissions + { method: "put" } + ), + { error: "NetworkError when attempting to fetch resource." }, + "Redirect to nowar.txt, without matching web_accessible_resources[].matches" + ); + }); + const EXPECTED_SECURITY_ERROR = `Content at http://redir/never_reached may not load or link to ${extPrefix}/nonwar.txt?2.`; + Assert.equal( + messages.filter(m => m.message.includes(EXPECTED_SECURITY_ERROR)).length, + 1, + `Should log SecurityError: ${EXPECTED_SECURITY_ERROR}` + ); + + // Request from origin not in host_permissions. DNR rule should not apply. 
+ Assert.deepEqual( + await contentFetch( + "http://dummy/", // <-- Matching web_accessible_resources[].matches + "http://example.com/cors_202", // <-- NOT in host_permissions + { method: "post" } + ), + { status: 202, url: "http://example.com/cors_202", body: "cors_response" }, + "Extension should not have redirected, due to lack of host permissions" + ); + + await extension.unload(); +}); diff --git a/toolkit/components/extensions/test/xpcshell/xpcshell-common.ini b/toolkit/components/extensions/test/xpcshell/xpcshell-common.ini index f7406f338ddb..64a41dc59f6e 100644 --- a/toolkit/components/extensions/test/xpcshell/xpcshell-common.ini +++ b/toolkit/components/extensions/test/xpcshell/xpcshell-common.ini @@ -114,9 +114,13 @@ skip-if = [test_ext_cors_mozextension.js] [test_ext_csp_frame_ancestors.js] [test_ext_debugging_utils.js] +[test_ext_dnr_allowAllRequests.js] [test_ext_dnr_api.js] [test_ext_dnr_session_rules.js] +[test_ext_dnr_system_restrictions.js] [test_ext_dnr_testMatchOutcome.js] +[test_ext_dnr_webrequest.js] +[test_ext_dnr_without_webrequest.js] [test_ext_dns.js] skip-if = os == "android" # Android needs alternative for proxy.settings - bug 1723523 [test_ext_downloads.js] diff --git a/toolkit/components/extensions/webrequest/WebRequest.jsm b/toolkit/components/extensions/webrequest/WebRequest.jsm index 9b9982c42cb1..9de07fb4e0ee 100644 --- a/toolkit/components/extensions/webrequest/WebRequest.jsm +++ b/toolkit/components/extensions/webrequest/WebRequest.jsm @@ -19,6 +19,7 @@ const { XPCOMUtils } = ChromeUtils.importESModule( const lazy = {}; XPCOMUtils.defineLazyModuleGetters(lazy, { + ExtensionDNR: "resource://gre/modules/ExtensionDNR.jsm", ExtensionParent: "resource://gre/modules/ExtensionParent.jsm", ExtensionUtils: "resource://gre/modules/ExtensionUtils.jsm", WebRequestUpload: "resource://gre/modules/WebRequestUpload.jsm", @@ -619,6 +620,9 @@ HttpObserverManager = { onErrorOccurred: new Map(), onCompleted: new Map(), }, + // Whether there are any 
registered declarativeNetRequest rules. These DNR + // rules may match new requests and result in request modifications. + dnrActive: false, openingInitialized: false, beforeConnectInitialized: false, @@ -660,10 +664,11 @@ HttpObserverManager = { // webRequest listeners and removing those that are no longer needed if // there are no more listeners for corresponding webRequest events. addOrRemove() { - let needOpening = this.listeners.onBeforeRequest.size; + let needOpening = this.listeners.onBeforeRequest.size || this.dnrActive; let needBeforeConnect = this.listeners.onBeforeSendHeaders.size || - this.listeners.onSendHeaders.size; + this.listeners.onSendHeaders.size || + this.dnrActive; if (needOpening && !this.openingInitialized) { this.openingInitialized = true; Services.obs.addObserver(this, "http-on-modify-request"); @@ -692,7 +697,8 @@ HttpObserverManager = { let needExamine = this.needTracing || this.listeners.onHeadersReceived.size || - this.listeners.onAuthRequired.size; + this.listeners.onAuthRequired.size || + this.dnrActive; if (needExamine && !this.examineInitialized) { this.examineInitialized = true; @@ -740,6 +746,11 @@ HttpObserverManager = { this.addOrRemove(); }, + setDNRHandlingEnabled(dnrActive) { + this.dnrActive = dnrActive; + this.addOrRemove(); + }, + observe(subject, topic, data) { let channel = this.getWrapper(subject); switch (topic) { @@ -917,6 +928,10 @@ HttpObserverManager = { if (kind !== "onErrorOccurred" && channel.errorString) { return; } + if (this.dnrActive) { + // DNR may modify (but not cancel) the request at this stage. 
+ lazy.ExtensionDNR.beforeWebRequestEvent(channel, kind); + } let registerFilter = this.FILTER_TYPES.has(kind); let commonData = null; @@ -1012,6 +1027,10 @@ HttpObserverManager = { Cu.reportError(e); } + if (this.dnrActive && lazy.ExtensionDNR.handleRequest(channel, kind)) { + return; + } + return this.applyChanges( kind, channel, @@ -1287,6 +1306,10 @@ var onCompleted = new HttpEvent("onCompleted", ["responseHeaders"]); var onErrorOccurred = new HttpEvent("onErrorOccurred"); var WebRequest = { + setDNRHandlingEnabled: dnrActive => { + HttpObserverManager.setDNRHandlingEnabled(dnrActive); + }, + onBeforeRequest, onBeforeSendHeaders, onSendHeaders, From 3dab68ed84be24a16b1525127d0239f90df3217a Mon Sep 17 00:00:00 2001 From: Rob Wu Date: Thu, 24 Nov 2022 22:02:52 +0000 Subject: [PATCH 36/37] Bug 1745761 - Enforce privateBrowsingAllowed for DNR r=rpl Differential Revision: https://phabricator.services.mozilla.com/D162574 --- .../extensions/ExtensionDNR.sys.mjs | 8 +- .../xpcshell/test_ext_dnr_private_browsing.js | 130 ++++++++++++++++++ .../test/xpcshell/xpcshell-common.ini | 1 + 3 files changed, 136 insertions(+), 3 deletions(-) create mode 100644 toolkit/components/extensions/test/xpcshell/test_ext_dnr_private_browsing.js diff --git a/toolkit/components/extensions/ExtensionDNR.sys.mjs b/toolkit/components/extensions/ExtensionDNR.sys.mjs index 33dccf410089..6deb3b763009 100644 --- a/toolkit/components/extensions/ExtensionDNR.sys.mjs +++ b/toolkit/components/extensions/ExtensionDNR.sys.mjs @@ -572,9 +572,6 @@ class RequestEvaluator { return; } - // TODO bug 1745761: when the channel/originAttributes is chosen, use - // ruleManager.extension to exclude private requests if needed. 
- this.#collectMatchInRuleset(this.ruleManager.sessionRules); this.#collectMatchInRuleset(this.ruleManager.dynamicRules); for (let ruleset of this.ruleManager.enabledStaticRules) { @@ -803,6 +800,11 @@ const NetworkIntegration = { if (!channel.canModify) { ruleManagers = []; } + if (channel.loadInfo.originAttributes.privateBrowsingId > 0) { + ruleManagers = ruleManagers.filter( + rm => rm.extension.privateBrowsingAllowed + ); + } let matchedRules; if (ruleManagers.length) { const request = RequestDetails.fromChannelWrapper(channel); diff --git a/toolkit/components/extensions/test/xpcshell/test_ext_dnr_private_browsing.js b/toolkit/components/extensions/test/xpcshell/test_ext_dnr_private_browsing.js new file mode 100644 index 000000000000..d94c31c858c8 --- /dev/null +++ b/toolkit/components/extensions/test/xpcshell/test_ext_dnr_private_browsing.js @@ -0,0 +1,130 @@ +"use strict"; + +const server = createHttpServer({ hosts: ["example.com"] }); +server.registerPathHandler("/", (req, res) => { + res.setHeader("Access-Control-Allow-Origin", "*"); + res.setHeader("Access-Control-Max-Age", "0"); +}); + +add_setup(() => { + Services.prefs.setBoolPref("extensions.manifestV3.enabled", true); + Services.prefs.setBoolPref("extensions.dnr.enabled", true); +}); + +async function startDNRExtension({ privateBrowsingAllowed }) { + let extension = ExtensionTestUtils.loadExtension({ + incognitoOverride: privateBrowsingAllowed ? 
"spanning" : undefined, + async background() { + await browser.declarativeNetRequest.updateSessionRules({ + addRules: [{ id: 1, condition: {}, action: { type: "block" } }], + }); + browser.test.sendMessage("dnr_registered"); + }, + manifest: { + manifest_version: 3, + permissions: ["declarativeNetRequest"], + browser_specific_settings: { gecko: { id: "@dnr-ext" } }, + }, + }); + await extension.startup(); + await extension.awaitMessage("dnr_registered"); + return extension; +} + +async function testMatchedByDNR(privateBrowsing) { + let contentPage = await ExtensionTestUtils.loadContentPage( + "http://example.com/?page", + { privateBrowsing } + ); + let wasRequestBlocked = await contentPage.spawn(null, async () => { + try { + await content.fetch("http://example.com/?fetch"); + return false; + } catch (e) { + // Request blocked by DNR rule from startDNRExtension(). + return true; + } + }); + await contentPage.close(); + return wasRequestBlocked; +} + +add_task(async function private_browsing_not_allowed_by_default() { + let extension = await startDNRExtension({ privateBrowsingAllowed: false }); + Assert.equal( + await testMatchedByDNR(false), + true, + "DNR applies to non-private browsing requests by default" + ); + Assert.equal( + await testMatchedByDNR(true), + false, + "DNR not applied to private browsing requests by default" + ); + await extension.unload(); +}); + +add_task(async function private_browsing_allowed() { + let extension = await startDNRExtension({ privateBrowsingAllowed: true }); + Assert.equal( + await testMatchedByDNR(false), + true, + "DNR applies to non-private requests regardless of privateBrowsingAllowed" + ); + Assert.equal( + await testMatchedByDNR(true), + true, + "DNR applied to private browsing requests when privateBrowsingAllowed" + ); + await extension.unload(); +}); + +add_task( + { pref_set: [["extensions.dnr.feedback", true]] }, + async function testMatchOutcome_unaffected_by_privateBrowsing() { + let 
extensionWithoutPrivateBrowsingAllowed = await startDNRExtension({}); + let extension = ExtensionTestUtils.loadExtension({ + incognitoOverride: "spanning", + manifest: { + manifest_version: 3, + permissions: ["declarativeNetRequest", "declarativeNetRequestFeedback"], + }, + files: { + "page.html": ``, + "page.js": async () => { + browser.test.assertTrue( + browser.extension.inIncognitoContext, + "Extension page is opened in a private browsing context" + ); + browser.test.assertDeepEq( + { + matchedRules: [ + { ruleId: 1, rulesetId: "_session", extensionId: "@dnr-ext" }, + ], + }, + // testMatchOutcome does not offer a way to specify the private + // browsing mode of a request. Confirm that testMatchOutcome always + // simulates requests in normal private browsing mode, even if the + // testMatchOutcome method itself is called from an extension page + // in private browsing mode. + await browser.declarativeNetRequest.testMatchOutcome( + { url: "http://example.com/?simulated_request", type: "image" }, + { includeOtherExtensions: true } + ), + "testMatchOutcome includes DNR from extensions without pbm access" + ); + browser.test.sendMessage("done"); + }, + }, + }); + await extension.startup(); + let contentPage = await ExtensionTestUtils.loadContentPage( + `moz-extension://${extension.uuid}/page.html`, + { privateBrowsing: true } + ); + await extension.awaitMessage("done"); + await contentPage.close(); + await extension.unload(); + await extensionWithoutPrivateBrowsingAllowed.unload(); + } +); diff --git a/toolkit/components/extensions/test/xpcshell/xpcshell-common.ini b/toolkit/components/extensions/test/xpcshell/xpcshell-common.ini index 64a41dc59f6e..cdbfad6e2956 100644 --- a/toolkit/components/extensions/test/xpcshell/xpcshell-common.ini +++ b/toolkit/components/extensions/test/xpcshell/xpcshell-common.ini @@ -116,6 +116,7 @@ skip-if = [test_ext_debugging_utils.js] [test_ext_dnr_allowAllRequests.js] [test_ext_dnr_api.js] +[test_ext_dnr_private_browsing.js] 
[test_ext_dnr_session_rules.js] [test_ext_dnr_system_restrictions.js] [test_ext_dnr_testMatchOutcome.js] From 58a7f7a38f083fbc271c30330a0e41a895b55d1b Mon Sep 17 00:00:00 2001 From: Csoregi Natalia Date: Fri, 25 Nov 2022 01:13:40 +0200 Subject: [PATCH 37/37] Backed out 2 changesets (bug 1745761) for failures on test_ext_dnr_without_webrequest.js. CLOSED TREE Backed out changeset 74b33f61c4d3 (bug 1745761) Backed out changeset 5abe72701c13 (bug 1745761) --- .../extensions/ExtensionDNR.sys.mjs | 229 +----- .../test/mochitest/mochitest-common.ini | 1 - .../mochitest/test_ext_dnr_upgradeScheme.html | 120 --- .../xpcshell/test_ext_dnr_allowAllRequests.js | 96 --- .../xpcshell/test_ext_dnr_private_browsing.js | 130 ---- .../test_ext_dnr_system_restrictions.js | 66 -- .../test/xpcshell/test_ext_dnr_webrequest.js | 205 ----- .../test_ext_dnr_without_webrequest.js | 720 ------------------ .../test/xpcshell/xpcshell-common.ini | 5 - .../extensions/webrequest/WebRequest.jsm | 29 +- 10 files changed, 9 insertions(+), 1592 deletions(-) delete mode 100644 toolkit/components/extensions/test/mochitest/test_ext_dnr_upgradeScheme.html delete mode 100644 toolkit/components/extensions/test/xpcshell/test_ext_dnr_allowAllRequests.js delete mode 100644 toolkit/components/extensions/test/xpcshell/test_ext_dnr_private_browsing.js delete mode 100644 toolkit/components/extensions/test/xpcshell/test_ext_dnr_system_restrictions.js delete mode 100644 toolkit/components/extensions/test/xpcshell/test_ext_dnr_webrequest.js delete mode 100644 toolkit/components/extensions/test/xpcshell/test_ext_dnr_without_webrequest.js diff --git a/toolkit/components/extensions/ExtensionDNR.sys.mjs b/toolkit/components/extensions/ExtensionDNR.sys.mjs index 6deb3b763009..cd63d7839682 100644 --- a/toolkit/components/extensions/ExtensionDNR.sys.mjs +++ b/toolkit/components/extensions/ExtensionDNR.sys.mjs @@ -69,13 +69,6 @@ const gRuleManagers = []; * - allow / allowAllRequests */ -const lazy = {}; 
-ChromeUtils.defineModuleGetter( - lazy, - "WebRequest", - "resource://gre/modules/WebRequest.jsm" -); - // The RuleCondition class represents a rule's "condition" type as described in // schemas/declarative_net_request.json. This class exists to allow the JS // engine to use one Shape for all Rule instances. @@ -314,8 +307,8 @@ class RuleValidator { // http(s) URLs can (regardless of extension permissions). // data:-URLs are currently blocked due to bug 1622986. - // TODO bug 1801870: Implement rule.action.redirect.transform. - // TODO bug 1745760: With regexFilter support, implement regexSubstitution. + // TODO bug 1745761: With the redirect action, add schema definitions + + // implement rule.action.redirect.transform / regexSubstitution. return true; } @@ -446,17 +439,6 @@ class RequestDetails { : null; } - static fromChannelWrapper(channel) { - return new RequestDetails({ - requestURI: channel.finalURI, - // Note: originURI may be null, if missing or null principal, as desired. - initiatorURI: channel.originURI, - type: channel.type, - method: channel.method.toLowerCase(), - tabId: null, // TODO: use getBrowserData to populate. - }); - } - canExtensionModify(extension) { const policy = extension.policy; return ( @@ -572,6 +554,9 @@ class RequestEvaluator { return; } + // TODO bug 1745761: when the channel/originAttributes is chosen, use + // ruleManager.extension to exclude private requests if needed. 
+ this.#collectMatchInRuleset(this.ruleManager.sessionRules); this.#collectMatchInRuleset(this.ruleManager.dynamicRules); for (let ruleset of this.ruleManager.enabledStaticRules) { @@ -661,15 +646,7 @@ class RequestEvaluator { // Check this.req.requestURI: if (cond.urlFilter) { - if ( - !this.#matchesUrlFilter( - this.req.requestURI, - cond.urlFilter, - cond.isUrlFilterCaseSensitive - ) - ) { - return false; - } + // TODO bug 1745759: Check cond.urlFilter + isUrlFilterCaseSensitive } else if (cond.regexFilter) { // TODO bug 1745760: check cond.regexFilter + isUrlFilterCaseSensitive } @@ -726,22 +703,6 @@ class RequestEvaluator { return true; } - /** - * @param {nsIURI} uri - The request URI. - * @param {string} urlFilter - * @param {boolean} [isUrlFilterCaseSensitive] - * @returns {boolean} Whether urlFilter matches the given uri. - */ - #matchesUrlFilter(uri, urlFilter, isUrlFilterCaseSensitive) { - // TODO bug 1745759: Check cond.urlFilter + isUrlFilterCaseSensitive - // Placeholder for unit test until we have a complete implementation. - if (urlFilter === "|https:*") { - return uri.schemeIs("https"); - } - throw new Error(`urlFilter not implemented yet: ${urlFilter}`); - // return true; after all other checks passed. - } - /** * @param {string[]} domains - A list of canonicalized domain patterns. * Canonical means punycode, no ports, and IPv6 without brackets, and not @@ -785,130 +746,6 @@ class RequestEvaluator { } } -const NetworkIntegration = { - register() { - // We register via WebRequest.jsm to ensure predictable ordering of DNR and - // WebRequest behavior. 
- lazy.WebRequest.setDNRHandlingEnabled(true); - }, - unregister() { - lazy.WebRequest.setDNRHandlingEnabled(false); - }, - - startDNREvaluation(channel) { - let ruleManagers = gRuleManagers; - if (!channel.canModify) { - ruleManagers = []; - } - if (channel.loadInfo.originAttributes.privateBrowsingId > 0) { - ruleManagers = ruleManagers.filter( - rm => rm.extension.privateBrowsingAllowed - ); - } - let matchedRules; - if (ruleManagers.length) { - const request = RequestDetails.fromChannelWrapper(channel); - matchedRules = RequestEvaluator.evaluateRequest(request, ruleManagers); - } - // Cache for later. In case of redirects, _dnrMatchedRules may exist for - // the pre-redirect HTTP channel, and is overwritten here again. - channel._dnrMatchedRules = matchedRules; - }, - - /** - * Applies the actions of the DNR rules. - * - * @param {ChannelWrapper} channel - * @returns {boolean} Whether to ignore any responses from the webRequest API. - */ - onBeforeRequest(channel) { - let matchedRules = channel._dnrMatchedRules; - if (!matchedRules?.length) { - return false; - } - // If a matched rule closes the channel, it is the sole match. - const finalMatch = matchedRules[0]; - switch (finalMatch.rule.action.type) { - case "block": - this.applyBlock(channel, finalMatch); - return true; - case "redirect": - this.applyRedirect(channel, finalMatch); - return true; - case "upgradeScheme": - this.applyUpgradeScheme(channel, finalMatch); - return true; - } - // If there are multiple rules, then it may be a combination of allow, - // allowAllRequests and/or modifyHeaders. - - // TODO bug 1797403: Apply allowAllRequests actions. - - return false; - }, - - onBeforeSendHeaders(channel) { - // TODO bug 1797404: apply modifyHeaders actions (requestHeaders). - }, - - onHeadersReceived(channel) { - // TODO bug 1797404: apply modifyHeaders actions (responseHeaders). - }, - - applyBlock(channel, matchedRule) { - // TODO bug 1802259: Consider a DNR-specific reason. 
- channel.cancel( - Cr.NS_ERROR_ABORT, - Ci.nsILoadInfo.BLOCKING_REASON_EXTENSION_WEBREQUEST - ); - const addonId = matchedRule.ruleManager.extension.id; - let properties = channel.channel.QueryInterface(Ci.nsIWritablePropertyBag); - properties.setProperty("cancelledByExtension", addonId); - }, - - applyUpgradeScheme(channel, matchedRule) { - // Request upgrade. No-op if already secure (i.e. https). - channel.upgradeToSecure(); - }, - - applyRedirect(channel, matchedRule) { - // Ambiguity resolution order of redirect dict keys, consistent with Chrome: - // - url > extensionPath > transform > regexSubstitution - const redirect = matchedRule.rule.action.redirect; - const extension = matchedRule.ruleManager.extension; - let redirectUri; - if (redirect.url) { - // redirect.url already validated by checkActionRedirect. - redirectUri = Services.io.newURI(redirect.url); - } else if (redirect.extensionPath) { - redirectUri = extension.baseURI - .mutate() - .setPathQueryRef(redirect.extensionPath) - .finalize(); - } else if (redirect.transform) { - // TODO bug 1801870: Implement transform. - throw new Error("transform not implemented"); - } else if (redirect.regexSubstitution) { - // TODO bug 1745760: Implement along with regexFilter support. - throw new Error("regexSubstitution not implemented"); - } else { - // #checkActionRedirect ensures that the redirect action is non-empty. 
- } - - channel.redirectTo(redirectUri); - - let properties = channel.channel.QueryInterface(Ci.nsIWritablePropertyBag); - properties.setProperty("redirectedByExtension", extension.id); - - let origin = channel.getRequestHeader("Origin"); - if (origin) { - channel.setResponseHeader("Access-Control-Allow-Origin", origin); - channel.setResponseHeader("Access-Control-Allow-Credentials", "true"); - channel.setResponseHeader("Access-Control-Max-Age", "0"); - } - }, -}; - class RuleManager { constructor(extension) { this.extension = extension; @@ -944,10 +781,6 @@ function getRuleManager(extension, createIfMissing = true) { // instantiate a RuleManager claims the highest priority. // TODO bug 1786059: order extensions by "installation time". gRuleManagers.unshift(ruleManager); - if (gRuleManagers.length === 1) { - // The first DNR registration. - NetworkIntegration.register(); - } } return ruleManager; } @@ -956,10 +789,6 @@ function clearRuleManager(extension) { let i = gRuleManagers.findIndex(rm => rm.extension === extension); if (i !== -1) { gRuleManagers.splice(i, 1); - if (gRuleManagers.length === 0) { - // The last DNR registration. - NetworkIntegration.unregister(); - } } } @@ -980,55 +809,9 @@ function getMatchedRulesForRequest(request, extension) { return RequestEvaluator.evaluateRequest(requestDetails, ruleManagers); } -/** - * Runs before any webRequest event is notified. Headers may be modified, but - * the request should not be canceled (see handleRequest instead). - * - * @param {ChannelWrapper} channel - * @param {string} kind - The name of the webRequest event. 
- */ -function beforeWebRequestEvent(channel, kind) { - try { - switch (kind) { - case "onBeforeRequest": - NetworkIntegration.startDNREvaluation(channel); - break; - case "onBeforeSendHeaders": - NetworkIntegration.onBeforeSendHeaders(channel); - break; - case "onHeadersReceived": - NetworkIntegration.onHeadersReceived(channel); - break; - } - } catch (e) { - Cu.reportError(e); - } -} - -/** - * Applies matching DNR rules, some of which may potentially cancel the request. - * - * @param {ChannelWrapper} channel - * @param {string} kind - The name of the webRequest event. - * @returns {boolean} Whether to ignore any responses from the webRequest API. - */ -function handleRequest(channel, kind) { - try { - if (kind === "onBeforeRequest") { - return NetworkIntegration.onBeforeRequest(channel); - } - } catch (e) { - Cu.reportError(e); - } - return false; -} - export const ExtensionDNR = { RuleValidator, getRuleManager, clearRuleManager, getMatchedRulesForRequest, - - beforeWebRequestEvent, - handleRequest, }; diff --git a/toolkit/components/extensions/test/mochitest/mochitest-common.ini b/toolkit/components/extensions/test/mochitest/mochitest-common.ini index 99471f216287..77a8631bcb0c 100644 --- a/toolkit/components/extensions/test/mochitest/mochitest-common.ini +++ b/toolkit/components/extensions/test/mochitest/mochitest-common.ini @@ -131,7 +131,6 @@ skip-if = os == 'android' || tsan # Times out on TSan intermittently, bug 161518 skip-if = os == 'android' # Bug 1513544 Android does not support multiple windows. 
[test_ext_cookies_permissions_bad.html] [test_ext_cookies_permissions_good.html] -[test_ext_dnr_upgradeScheme.html] [test_ext_downloads_download.html] [test_ext_embeddedimg_iframe_frameAncestors.html] [test_ext_exclude_include_globs.html] diff --git a/toolkit/components/extensions/test/mochitest/test_ext_dnr_upgradeScheme.html b/toolkit/components/extensions/test/mochitest/test_ext_dnr_upgradeScheme.html deleted file mode 100644 index 4c53c7c86e22..000000000000 --- a/toolkit/components/extensions/test/mochitest/test_ext_dnr_upgradeScheme.html +++ /dev/null @@ -1,120 +0,0 @@ - - - - - DNR with upgradeScheme action - - - - - - - - - diff --git a/toolkit/components/extensions/test/xpcshell/test_ext_dnr_allowAllRequests.js b/toolkit/components/extensions/test/xpcshell/test_ext_dnr_allowAllRequests.js deleted file mode 100644 index b98807b7dde3..000000000000 --- a/toolkit/components/extensions/test/xpcshell/test_ext_dnr_allowAllRequests.js +++ /dev/null @@ -1,96 +0,0 @@ -"use strict"; - -add_setup(() => { - Services.prefs.setBoolPref("extensions.manifestV3.enabled", true); - Services.prefs.setBoolPref("extensions.dnr.enabled", true); -}); - -const server = createHttpServer({ - hosts: ["example.com", "example.net", "example.org"], -}); -server.registerPathHandler("/never_reached", (req, res) => { - Assert.ok(false, "Server should never have been reached"); -}); -server.registerPathHandler("/allowed", (req, res) => { - res.setHeader("Access-Control-Allow-Origin", "*"); - res.setHeader("Access-Control-Max-Age", "0"); - res.write("allowed"); -}); -server.registerPathHandler("/", (req, res) => { - res.write("Dummy page"); -}); - -add_task(async function allowAllRequests_allows_request() { - async function background() { - await browser.declarativeNetRequest.updateSessionRules({ - addRules: [ - // allowAllRequests should take precedence over block. 
- { - id: 1, - condition: { resourceTypes: ["main_frame", "xmlhttprequest"] }, - action: { type: "block" }, - }, - { - id: 2, - condition: { resourceTypes: ["main_frame"] }, - action: { type: "allowAllRequests" }, - }, - { - id: 3, - priority: 2, - // Note: when not specified, main_frame is excluded by default. So - // when a main_frame request is triggered, only rules 1 and 2 match. - condition: { requestDomains: ["example.com"] }, - action: { type: "block" }, - }, - ], - }); - browser.test.sendMessage("dnr_registered"); - } - let extension = ExtensionTestUtils.loadExtension({ - background, - manifest: { - manifest_version: 3, - permissions: ["declarativeNetRequest"], - }, - }); - await extension.startup(); - await extension.awaitMessage("dnr_registered"); - - let contentPage = await ExtensionTestUtils.loadContentPage( - "http://example.com/" - ); - Assert.equal( - await contentPage.spawn(null, () => content.document.URL), - "http://example.com/", - "main_frame request should have been allowed by allowAllRequests" - ); - - async function checkCanFetch(url) { - return contentPage.spawn(url, async url => { - try { - await (await content.fetch(url)).text(); - return true; - } catch (e) { - return false; // NetworkError: blocked - } - }); - } - - Assert.equal( - await checkCanFetch("http://example.com/never_reached"), - false, - "should be blocked by DNR rule 3" - ); - Assert.equal( - await checkCanFetch("http://example.net/"), - // TODO bug 1797403: Fix expectation once allowAllRequests is implemented: - // true, - // "should not be blocked by block rule due to allowAllRequests rule" - false, - "is blocked because persistency of allowAllRequests is not yet implemented" - ); - - await contentPage.close(); - await extension.unload(); -}); diff --git a/toolkit/components/extensions/test/xpcshell/test_ext_dnr_private_browsing.js b/toolkit/components/extensions/test/xpcshell/test_ext_dnr_private_browsing.js deleted file mode 100644 index d94c31c858c8..000000000000 --- 
a/toolkit/components/extensions/test/xpcshell/test_ext_dnr_private_browsing.js +++ /dev/null @@ -1,130 +0,0 @@ -"use strict"; - -const server = createHttpServer({ hosts: ["example.com"] }); -server.registerPathHandler("/", (req, res) => { - res.setHeader("Access-Control-Allow-Origin", "*"); - res.setHeader("Access-Control-Max-Age", "0"); -}); - -add_setup(() => { - Services.prefs.setBoolPref("extensions.manifestV3.enabled", true); - Services.prefs.setBoolPref("extensions.dnr.enabled", true); -}); - -async function startDNRExtension({ privateBrowsingAllowed }) { - let extension = ExtensionTestUtils.loadExtension({ - incognitoOverride: privateBrowsingAllowed ? "spanning" : undefined, - async background() { - await browser.declarativeNetRequest.updateSessionRules({ - addRules: [{ id: 1, condition: {}, action: { type: "block" } }], - }); - browser.test.sendMessage("dnr_registered"); - }, - manifest: { - manifest_version: 3, - permissions: ["declarativeNetRequest"], - browser_specific_settings: { gecko: { id: "@dnr-ext" } }, - }, - }); - await extension.startup(); - await extension.awaitMessage("dnr_registered"); - return extension; -} - -async function testMatchedByDNR(privateBrowsing) { - let contentPage = await ExtensionTestUtils.loadContentPage( - "http://example.com/?page", - { privateBrowsing } - ); - let wasRequestBlocked = await contentPage.spawn(null, async () => { - try { - await content.fetch("http://example.com/?fetch"); - return false; - } catch (e) { - // Request blocked by DNR rule from startDNRExtension(). 
- return true; - } - }); - await contentPage.close(); - return wasRequestBlocked; -} - -add_task(async function private_browsing_not_allowed_by_default() { - let extension = await startDNRExtension({ privateBrowsingAllowed: false }); - Assert.equal( - await testMatchedByDNR(false), - true, - "DNR applies to non-private browsing requests by default" - ); - Assert.equal( - await testMatchedByDNR(true), - false, - "DNR not applied to private browsing requests by default" - ); - await extension.unload(); -}); - -add_task(async function private_browsing_allowed() { - let extension = await startDNRExtension({ privateBrowsingAllowed: true }); - Assert.equal( - await testMatchedByDNR(false), - true, - "DNR applies to non-private requests regardless of privateBrowsingAllowed" - ); - Assert.equal( - await testMatchedByDNR(true), - true, - "DNR applied to private browsing requests when privateBrowsingAllowed" - ); - await extension.unload(); -}); - -add_task( - { pref_set: [["extensions.dnr.feedback", true]] }, - async function testMatchOutcome_unaffected_by_privateBrowsing() { - let extensionWithoutPrivateBrowsingAllowed = await startDNRExtension({}); - let extension = ExtensionTestUtils.loadExtension({ - incognitoOverride: "spanning", - manifest: { - manifest_version: 3, - permissions: ["declarativeNetRequest", "declarativeNetRequestFeedback"], - }, - files: { - "page.html": ``, - "page.js": async () => { - browser.test.assertTrue( - browser.extension.inIncognitoContext, - "Extension page is opened in a private browsing context" - ); - browser.test.assertDeepEq( - { - matchedRules: [ - { ruleId: 1, rulesetId: "_session", extensionId: "@dnr-ext" }, - ], - }, - // testMatchOutcome does not offer a way to specify the private - // browsing mode of a request. Confirm that testMatchOutcome always - // simulates requests in normal private browsing mode, even if the - // testMatchOutcome method itself is called from an extension page - // in private browsing mode. 
- await browser.declarativeNetRequest.testMatchOutcome( - { url: "http://example.com/?simulated_request", type: "image" }, - { includeOtherExtensions: true } - ), - "testMatchOutcome includes DNR from extensions without pbm access" - ); - browser.test.sendMessage("done"); - }, - }, - }); - await extension.startup(); - let contentPage = await ExtensionTestUtils.loadContentPage( - `moz-extension://${extension.uuid}/page.html`, - { privateBrowsing: true } - ); - await extension.awaitMessage("done"); - await contentPage.close(); - await extension.unload(); - await extensionWithoutPrivateBrowsingAllowed.unload(); - } -); diff --git a/toolkit/components/extensions/test/xpcshell/test_ext_dnr_system_restrictions.js b/toolkit/components/extensions/test/xpcshell/test_ext_dnr_system_restrictions.js deleted file mode 100644 index e2f6da072a2d..000000000000 --- a/toolkit/components/extensions/test/xpcshell/test_ext_dnr_system_restrictions.js +++ /dev/null @@ -1,66 +0,0 @@ -"use strict"; - -const server = createHttpServer({ hosts: ["example.com", "restricted"] }); -server.registerPathHandler("/", (req, res) => { - res.setHeader("Access-Control-Allow-Origin", "*"); - res.write("response from server"); -}); - -add_setup(() => { - Services.prefs.setBoolPref("extensions.manifestV3.enabled", true); - Services.prefs.setBoolPref("extensions.dnr.enabled", true); - // The restrictedDomains pref should be set early, because the pref is read - // only once (on first use) by WebExtensionPolicy::IsRestrictedURI. 
- Services.prefs.setCharPref( - "extensions.webextensions.restrictedDomains", - "restricted" - ); -}); - -async function startDNRExtension() { - let extension = ExtensionTestUtils.loadExtension({ - async background() { - await browser.declarativeNetRequest.updateSessionRules({ - addRules: [{ id: 1, condition: {}, action: { type: "block" } }], - }); - browser.test.sendMessage("dnr_registered"); - }, - manifest: { - manifest_version: 3, - permissions: ["declarativeNetRequest"], - }, - }); - await extension.startup(); - await extension.awaitMessage("dnr_registered"); - return extension; -} - -add_task(async function dnr_ignores_system_requests() { - let extension = await startDNRExtension(); - Assert.equal( - await (await fetch("http://example.com/")).text(), - "response from server", - "DNR should not block requests from system principal" - ); - await extension.unload(); -}); - -add_task(async function dnr_ignores_requests_to_restrictedDomains() { - let extension = await startDNRExtension(); - Assert.equal( - await ExtensionTestUtils.fetch("http://example.com/", "http://restricted/"), - "response from server", - "DNR should not block destination in restrictedDomains" - ); - await extension.unload(); -}); - -add_task(async function dnr_ignores_initiator_from_restrictedDomains() { - let extension = await startDNRExtension(); - Assert.equal( - await ExtensionTestUtils.fetch("http://restricted/", "http://example.com/"), - "response from server", - "DNR should not block requests initiated from a page in restrictedDomains" - ); - await extension.unload(); -}); diff --git a/toolkit/components/extensions/test/xpcshell/test_ext_dnr_webrequest.js b/toolkit/components/extensions/test/xpcshell/test_ext_dnr_webrequest.js deleted file mode 100644 index 15dd11b14d29..000000000000 --- a/toolkit/components/extensions/test/xpcshell/test_ext_dnr_webrequest.js +++ /dev/null @@ -1,205 +0,0 @@ -"use strict"; - -add_setup(() => { - 
Services.prefs.setBoolPref("extensions.manifestV3.enabled", true); - Services.prefs.setBoolPref("extensions.dnr.enabled", true); -}); - -const server = createHttpServer({ - hosts: ["example.com", "redir"], -}); -server.registerPathHandler("/never_reached", (req, res) => { - Assert.ok(false, "Server should never have been reached"); -}); - -add_task(async function block_request_with_dnr() { - async function background() { - let onBeforeRequestPromise = new Promise(resolve => { - browser.webRequest.onBeforeRequest.addListener(resolve, { - urls: ["*://example.com/*"], - }); - }); - await browser.declarativeNetRequest.updateSessionRules({ - addRules: [ - { - id: 1, - condition: { requestDomains: ["example.com"] }, - action: { type: "block" }, - }, - ], - }); - - await browser.test.assertRejects( - fetch("http://example.com/never_reached"), - "NetworkError when attempting to fetch resource.", - "blocked by DNR rule" - ); - // DNR is documented to take precedence over webRequest. We should still - // receive the webRequest event, however. - browser.test.log("Waiting for webRequest.onBeforeRequest..."); - await onBeforeRequestPromise; - browser.test.log("Seen webRequest.onBeforeRequest!"); - - browser.test.notifyPass(); - } - let extension = ExtensionTestUtils.loadExtension({ - background, - temporarilyInstalled: true, // Needed for granted_host_permissions - manifest: { - manifest_version: 3, - granted_host_permissions: true, - host_permissions: ["*://example.com/*"], - permissions: ["declarativeNetRequest", "webRequest"], - }, - }); - await extension.startup(); - await extension.awaitFinish(); - await extension.unload(); -}); - -add_task(async function upgradeScheme_and_redirect_request_with_dnr() { - async function background() { - let onBeforeRequestSeen = []; - browser.webRequest.onBeforeRequest.addListener( - d => { - onBeforeRequestSeen.push(d.url); - // webRequest cancels, but DNR should actually be taking precedence. 
- return { cancel: true }; - }, - { urls: ["*://example.com/*", "http://redir/here"] }, - ["blocking"] - ); - await browser.declarativeNetRequest.updateSessionRules({ - addRules: [ - { - id: 1, - condition: { requestDomains: ["example.com"] }, - action: { type: "upgradeScheme" }, - }, - { - id: 2, - condition: { requestDomains: ["example.com"], urlFilter: "|https:*" }, - action: { type: "redirect", redirect: { url: "http://redir/here" } }, - // The upgradeScheme and redirect actions have equal precedence. To - // make sure that the redirect action is executed when both rules - // match, we assign a higher priority to the redirect action. - priority: 2, - }, - ], - }); - - await browser.test.assertRejects( - fetch("http://example.com/never_reached"), - "NetworkError when attempting to fetch resource.", - "although initially redirected by DNR, ultimately blocked by webRequest" - ); - // DNR is documented to take precedence over webRequest. - // So we should actually see redirects according to the DNR rules, and - // the webRequest listener should still be able to observe all requests. 
- browser.test.assertDeepEq( - [ - "http://example.com/never_reached", - "https://example.com/never_reached", - "http://redir/here", - ], - onBeforeRequestSeen, - "Expected onBeforeRequest events" - ); - - browser.test.notifyPass(); - } - let extension = ExtensionTestUtils.loadExtension({ - background, - temporarilyInstalled: true, // Needed for granted_host_permissions - manifest: { - manifest_version: 3, - granted_host_permissions: true, - host_permissions: ["*://example.com/*", "*://redir/*"], - permissions: [ - "declarativeNetRequest", - "webRequest", - "webRequestBlocking", - ], - }, - }); - await extension.startup(); - await extension.awaitFinish(); - await extension.unload(); -}); - -add_task(async function block_request_with_webRequest_after_allow_with_dnr() { - async function background() { - let onBeforeRequestSeen = []; - browser.webRequest.onBeforeRequest.addListener( - d => { - onBeforeRequestSeen.push(d.url); - return { cancel: !d.url.includes("webRequestNoCancel") }; - }, - { urls: ["*://example.com/*"] }, - ["blocking"] - ); - // All DNR actions that do not end up canceling/redirecting the request: - await browser.declarativeNetRequest.updateSessionRules({ - addRules: [ - { - id: 1, - condition: { requestMethods: ["get"] }, - action: { type: "allow" }, - }, - { - id: 2, - condition: { requestMethods: ["put"] }, - action: { - type: "modifyHeaders", - requestHeaders: [{ operation: "set", header: "x", value: "y" }], - }, - }, - ], - }); - - await browser.test.assertRejects( - fetch("http://example.com/never_reached?1", { method: "get" }), - "NetworkError when attempting to fetch resource.", - "despite DNR 'allow' rule, still blocked by webRequest" - ); - await browser.test.assertRejects( - fetch("http://example.com/never_reached?2", { method: "put" }), - "NetworkError when attempting to fetch resource.", - "despite DNR 'modifyHeaders' rule, still blocked by webRequest" - ); - // Just to rule out the request having been canceled by DNR instead of - // 
webRequest, repeat the requests and verify that they succeed. - await fetch("http://example.com/?webRequestNoCancel1", { method: "get" }); - await fetch("http://example.com/?webRequestNoCancel2", { method: "put" }); - - browser.test.assertDeepEq( - [ - "http://example.com/never_reached?1", - "http://example.com/never_reached?2", - "http://example.com/?webRequestNoCancel1", - "http://example.com/?webRequestNoCancel2", - ], - onBeforeRequestSeen, - "Expected onBeforeRequest events" - ); - - browser.test.notifyPass(); - } - let extension = ExtensionTestUtils.loadExtension({ - background, - temporarilyInstalled: true, // Needed for granted_host_permissions - manifest: { - manifest_version: 3, - granted_host_permissions: true, - host_permissions: ["*://example.com/*"], - permissions: [ - "declarativeNetRequest", - "webRequest", - "webRequestBlocking", - ], - }, - }); - await extension.startup(); - await extension.awaitFinish(); - await extension.unload(); -}); diff --git a/toolkit/components/extensions/test/xpcshell/test_ext_dnr_without_webrequest.js b/toolkit/components/extensions/test/xpcshell/test_ext_dnr_without_webrequest.js deleted file mode 100644 index 7550e5f375cb..000000000000 --- a/toolkit/components/extensions/test/xpcshell/test_ext_dnr_without_webrequest.js +++ /dev/null @@ -1,720 +0,0 @@ -"use strict"; - -// This test file verifies that the declarativeNetRequest API can modify -// network requests as expected without the presence of the webRequest API. See -// test_ext_dnr_webRequest.js for the interaction between webRequest and DNR. 
- -add_setup(() => { - Services.prefs.setBoolPref("extensions.manifestV3.enabled", true); - Services.prefs.setBoolPref("extensions.dnr.enabled", true); -}); - -const server = createHttpServer({ - hosts: ["example.com", "example.net", "example.org", "redir", "dummy"], -}); -server.registerPathHandler("/cors_202", (req, res) => { - res.setStatusLine(req.httpVersion, 202, "Accepted"); - // The extensions in this test have minimal permissions, so grant CORS to - // allow them to read the response without host permissions. - res.setHeader("Access-Control-Allow-Origin", "*"); - res.setHeader("Access-Control-Max-Age", "0"); - res.write("cors_response"); -}); -server.registerPathHandler("/never_reached", (req, res) => { - Assert.ok(false, "Server should never have been reached"); - res.setHeader("Access-Control-Allow-Origin", "*"); - res.setHeader("Access-Control-Max-Age", "0"); -}); -let gPreflightCount = 0; -server.registerPathHandler("/preflight_count", (req, res) => { - res.setHeader("Access-Control-Allow-Origin", "*"); - res.setHeader("Access-Control-Max-Age", "0"); - res.setHeader("Access-Control-Allow-Methods", "NONSIMPLE"); - if (req.method === "OPTIONS") { - ++gPreflightCount; - } else { - // CORS Preflight considers 2xx to be successful. To rule out inadvertent - // server opt-in to CORS, respond with a non-2xx response. - res.setStatusLine(req.httpVersion, 418, "I'm a teapot"); - res.write(`count=${gPreflightCount}`); - } -}); -server.registerPathHandler("/", (req, res) => { - res.setHeader("Access-Control-Allow-Origin", "*"); - res.setHeader("Access-Control-Max-Age", "0"); - res.write("Dummy page"); -}); - -async function contentFetch(initiatorURL, url, options) { - let contentPage = await ExtensionTestUtils.loadContentPage(initiatorURL); - // Sanity check: that the initiator is as specified, and not redirected. 
- Assert.equal( - await contentPage.spawn(null, () => content.document.URL), - initiatorURL, - `Expected document load at: ${initiatorURL}` - ); - let result = await contentPage.spawn({ url, options }, async args => { - try { - let req = await content.fetch(args.url, args.options); - return { - status: req.status, - url: req.url, - body: await req.text(), - }; - } catch (e) { - return { error: e.message }; - } - }); - await contentPage.close(); - return result; -} - -add_task(async function block_request_with_dnr() { - async function background() { - await browser.declarativeNetRequest.updateSessionRules({ - addRules: [ - { - id: 1, - condition: { requestMethods: ["get"] }, - action: { type: "block" }, - }, - { - id: 2, - condition: { requestMethods: ["head"] }, - action: { type: "allow" }, - }, - ], - }); - { - // Request not matching DNR. - let req = await fetch("http://example.com/cors_202", { method: "post" }); - browser.test.assertEq(202, req.status, "allowed without DNR rule"); - browser.test.assertEq("cors_response", await req.text()); - } - { - // Request with "allow" DNR action. - let req = await fetch("http://example.com/cors_202", { method: "head" }); - browser.test.assertEq(202, req.status, "allowed by DNR rule"); - browser.test.assertEq("", await req.text(), "no response for HEAD"); - } - - // Request with "block" DNR action. - await browser.test.assertRejects( - fetch("http://example.com/never_reached", { method: "get" }), - "NetworkError when attempting to fetch resource.", - "blocked by DNR rule" - ); - - browser.test.sendMessage("tested_dnr_block"); - } - let extension = ExtensionTestUtils.loadExtension({ - background, - manifest: { - manifest_version: 3, - permissions: ["declarativeNetRequest"], - }, - }); - await extension.startup(); - await extension.awaitMessage("tested_dnr_block"); - - // DNR should not only work with requests within the extension, but also from - // web pages. 
- Assert.deepEqual( - await contentFetch("http://dummy/", "http://example.com/never_reached"), - { error: "NetworkError when attempting to fetch resource." }, - "Blocked by DNR with declarativeNetRequestWithHostAccess" - ); - - // The declarativeNetRequest permission grants the ability to block requests - // from other extensions. (The declarativeNetRequestWithHostAccess permission - // does not; see test task block_with_declarativeNetRequestWithHostAccess.) - let otherExtension = ExtensionTestUtils.loadExtension({ - async background() { - await browser.test.assertRejects( - fetch("http://example.com/never_reached", { method: "get" }), - "NetworkError when attempting to fetch resource.", - "blocked by different extension with declarativeNetRequest permission" - ); - browser.test.sendMessage("other_extension_done"); - }, - }); - await otherExtension.startup(); - await otherExtension.awaitMessage("other_extension_done"); - await otherExtension.unload(); - - await extension.unload(); -}); - -// Verifies that the "declarativeNetRequestWithHostAccess" permission can only -// block if it has permission for the initiator. -add_task(async function block_with_declarativeNetRequestWithHostAccess() { - let extension = ExtensionTestUtils.loadExtension({ - async background() { - await browser.declarativeNetRequest.updateSessionRules({ - addRules: [{ id: 1, condition: {}, action: { type: "block" } }], - }); - browser.test.sendMessage("dnr_registered"); - }, - temporarilyInstalled: true, // Needed for granted_host_permissions - manifest: { - manifest_version: 3, - granted_host_permissions: true, - host_permissions: [""], - permissions: ["declarativeNetRequestWithHostAccess"], - }, - }); - await extension.startup(); - await extension.awaitMessage("dnr_registered"); - - // Initiator "http://dummy" does match "", so DNR rule should apply. 
- Assert.deepEqual( - await contentFetch("http://dummy/", "http://example.com/never_reached"), - { error: "NetworkError when attempting to fetch resource." }, - "Blocked by DNR with declarativeNetRequestWithHostAccess" - ); - - // Extensions cannot have permissions for another extension and therefore the - // DNR rule never applies. - let otherExtension = ExtensionTestUtils.loadExtension({ - async background() { - let req = await fetch("http://example.com/cors_202", { method: "get" }); - browser.test.assertEq(202, req.status, "not blocked by other extension"); - browser.test.assertEq("cors_response", await req.text()); - browser.test.sendMessage("other_extension_done"); - }, - }); - await otherExtension.startup(); - await otherExtension.awaitMessage("other_extension_done"); - await otherExtension.unload(); - - await extension.unload(); -}); - -// Verifies that upgradeScheme works. -// The HttpServer helper does not support https (bug 1742061), so in this -// test we just verify whether the upgrade has been attempted. Coverage that -// verifies that the upgraded request completes is in: -// toolkit/components/extensions/test/mochitest/test_ext_dnr_upgradeScheme.html -add_task(async function upgradeScheme_declarativeNetRequestWithHostAccess() { - let extension = ExtensionTestUtils.loadExtension({ - async background() { - await browser.declarativeNetRequest.updateSessionRules({ - addRules: [ - { - id: 1, - condition: { excludedRequestDomains: ["dummy"] }, - action: { type: "upgradeScheme" }, - }, - { - id: 2, - // HttpServer does not support https (bug 1742061). - // As a work-around, we just redirect the https:-request to http. - condition: { urlFilter: "|https:*" }, - action: { - type: "redirect", - redirect: { url: "http://dummy/cors_202?from_https" }, - }, - // The upgradeScheme and redirect actions have equal precedence. To - // make sure that the redirect action is executed when both rules - // match, we assign a higher priority to the redirect action. 
- priority: 2, - }, - ], - }); - - let req = await fetch("http://redir/never_reached"); - browser.test.assertEq( - "http://dummy/cors_202?from_https", - req.url, - "upgradeScheme upgraded to https" - ); - browser.test.assertEq("cors_response", await req.text()); - - browser.test.sendMessage("tested_dnr_upgradeScheme"); - }, - temporarilyInstalled: true, // Needed for granted_host_permissions. - manifest: { - manifest_version: 3, - granted_host_permissions: true, - host_permissions: ["*://dummy/*", "*://redir/*"], - permissions: ["declarativeNetRequestWithHostAccess"], - }, - }); - await extension.startup(); - await extension.awaitMessage("tested_dnr_upgradeScheme"); - - // Request to same-origin subresource, which should be upgraded. - Assert.equal( - (await contentFetch("http://redir/", "http://redir/never_reached")).url, - "http://dummy/cors_202?from_https", - "upgradeScheme + host access should upgrade (same-origin request)" - ); - - // Request to cross-origin subresource, which should be upgraded. - // Note: after the upgrade, a cross-origin redirect happens. Internally, we - // reflect the Origin request header in the Access-Control-Allow-Origin (ACAO) - // response header, to ensure that the request is accepted by CORS. See - // https://github.com/w3c/webappsec-upgrade-insecure-requests/issues/32 - Assert.equal( - (await contentFetch("http://dummy/", "http://redir/never_reached")).url, - // TODO bug 1800990: despite the mirrored Origin in ACAO, the CORS check - // fails after a request is upgraded. Once fixed, update this expectation: - undefined, // Should be: "http://dummy/cors_202?from_https", - "TODO 1800990: upgradeScheme + host access should upgrade (cross-origin request)" - ); - - // The DNR extension does not have example.net in host_permissions. 
- const urlNoHostPerms = "http://example.net/cors_202?missing_host_permission"; - Assert.equal( - (await contentFetch("http://dummy/", urlNoHostPerms)).url, - urlNoHostPerms, - "upgradeScheme not matched when extension lacks host access" - ); - - await extension.unload(); -}); - -add_task(async function redirect_request_with_dnr() { - async function background() { - await browser.declarativeNetRequest.updateSessionRules({ - addRules: [ - { - id: 1, - condition: { - requestDomains: ["example.com"], - requestMethods: ["get"], - }, - action: { - type: "redirect", - redirect: { - url: "http://example.net/cors_202?1", - }, - }, - }, - { - id: 2, - // Note: extension does not have example.org host permission. - condition: { requestDomains: ["example.org"] }, - action: { - type: "redirect", - redirect: { - url: "http://example.net/cors_202?2", - }, - }, - }, - ], - }); - // The extension only has example.com permission, but the redirects to - // example.net are still due to the CORS headers from the server. - { - // Simple GET request. - let req = await fetch("http://example.com/never_reached"); - browser.test.assertEq(202, req.status, "redirected by DNR (simple)"); - browser.test.assertEq("http://example.net/cors_202?1", req.url); - browser.test.assertEq("cors_response", await req.text()); - } - { - // GeT request should be matched despite having a different case. - let req = await fetch("http://example.com/never_reached", { - method: "GeT", - }); - browser.test.assertEq(202, req.status, "redirected by DNR (GeT)"); - browser.test.assertEq("http://example.net/cors_202?1", req.url); - browser.test.assertEq("cors_response", await req.text()); - } - { - // Host permission missing for request, request not redirected by DNR. - // Response is readable due to the CORS response headers from the server. 
- let req = await fetch("http://example.org/cors_202?noredir"); - browser.test.assertEq(202, req.status, "not redirected by DNR"); - browser.test.assertEq("http://example.org/cors_202?noredir", req.url); - browser.test.assertEq("cors_response", await req.text()); - } - - browser.test.notifyPass(); - } - let extension = ExtensionTestUtils.loadExtension({ - background, - temporarilyInstalled: true, // Needed for granted_host_permissions - manifest: { - manifest_version: 3, - granted_host_permissions: true, - host_permissions: ["*://example.com/*"], - permissions: ["declarativeNetRequest"], - }, - }); - await extension.startup(); - await extension.awaitFinish(); - - let otherExtension = ExtensionTestUtils.loadExtension({ - async background() { - // The DNR extension has permissions for example.com, but not for this - // extension. Therefore the "redirect" action should not apply. - let req = await fetch("http://example.com/cors_202?other_ext"); - browser.test.assertEq(202, req.status, "not redirected by DNR"); - browser.test.assertEq("http://example.com/cors_202?other_ext", req.url); - browser.test.assertEq("cors_response", await req.text()); - browser.test.sendMessage("other_extension_done"); - }, - }); - await otherExtension.startup(); - await otherExtension.awaitMessage("other_extension_done"); - await otherExtension.unload(); - - await extension.unload(); -}); - -// Verifies that DNR redirects requiring a CORS preflight behave as expected. -add_task(async function redirect_request_with_dnr_cors_preflight() { - // Most other test tasks only test requests within the test extension. This - // test intentionally triggers requests outside the extension, to make sure - // that the usual CORS mechanisms is triggered (instead of exceptions from - // host permissions). 
- async function background() { - await browser.declarativeNetRequest.updateSessionRules({ - addRules: [ - { - id: 1, - condition: { - requestDomains: ["redir"], - excludedRequestMethods: ["options"], - }, - action: { - type: "redirect", - redirect: { - url: "http://example.com/preflight_count", - }, - }, - }, - { - id: 2, - condition: { - requestDomains: ["example.net"], - excludedRequestMethods: ["nonsimple"], // note: redirects "options" - }, - action: { - type: "redirect", - redirect: { - url: "http://example.com/preflight_count", - }, - }, - }, - ], - }); - let req = await fetch("http://redir/never_reached", { - method: "NONSIMPLE", - }); - // Extension has permission for "redir", but not for the redirect target. - // The request is non-simple (see below for explanation of non-simple), so - // a preflight (OPTIONS) request to /preflight_count is expected before the - // redirection target is requested. - browser.test.assertEq( - "count=1", - await req.text(), - "Got preflight before redirect target because of missing host_permissions" - ); - - browser.test.sendMessage("continue_preflight_tests"); - } - - let extension = ExtensionTestUtils.loadExtension({ - background, - temporarilyInstalled: true, // Needed for granted_host_permissions - manifest: { - manifest_version: 3, - granted_host_permissions: true, - // "redir" and "example.net" are needed to allow redirection of these. - // "dummy" is needed to redirect requests initiated from http://dummy. - host_permissions: ["*://redir/*", "*://example.net/*", "*://dummy/*"], - permissions: ["declarativeNetRequest"], - }, - }); - gPreflightCount = 0; - await extension.startup(); - await extension.awaitMessage("continue_preflight_tests"); - gPreflightCount = 0; // value already checked before continue_preflight_tests. - - // Simple request (i.e. without preflight requirement), that's redirected to - // another URL by the DNR rule. 
The redirect should be accepted, and in - // particular not be blocked by the same-origin policy. The redirect target - // (/preflight_count) is readable due to the CORS headers from the server. - Assert.deepEqual( - await contentFetch("http://dummy/", "http://redir/never_reached"), - // count=0: A simple request does not trigger a preflight (OPTIONS) request. - { status: 418, url: "http://example.com/preflight_count", body: "count=0" }, - "Simple request should not have a preflight." - ); - - // Any request method other than "GET", "POST" and "PUT" (e.g "NONSIMPLE") is - // a non-simple request that triggers a preflight request ("OPTIONS"). - // - // Usually, this happens (without extension-triggered redirects): - // 1. NONSIMPLE /never_reached : is started, but does NOT hit the server yet. - // 2. OPTIONS /never_reached + Access-Control-Request-Method: NONSIMPLE - // 3. NONSIMPLE /never_reached : reaches the server if allowed by OPTIONS. - // - // With an extension-initiated redirect to /preflight_count: - // 1. NONSIMPLE /never_reached : is started, but does not hit the server yet. - // 2. extension redirects to /preflight_count - // 3. OPTIONS /preflight_count + Access-Control-Request-Method: NONSIMPLE - // - This is because the redirect preserves the request method/body/etc. - // 4. NONSIMPLE /preflight_count : reaches the server if allowed by OPTIONS. - Assert.deepEqual( - await contentFetch("http://dummy/", "http://redir/never_reached", { - method: "NONSIMPLE", - }), - // Due to excludedRequestMethods: ["options"], the preflight for the - // redirect target is not intercepted, so the server sees a preflight. - { status: 418, url: "http://example.com/preflight_count", body: "count=1" }, - "Initial URL redirected, redirection target has preflight" - ); - gPreflightCount = 0; - - // The "example.net" rule has "excludedRequestMethods": ["nonsimple"], so the - // initial "NONSIMPLE" request is not immediately redirected. 
Therefore the - // preflight request happens. This OPTIONS request is matched by the DNR rule - // and redirected to /preflight_count. While preflight_count offers a very - // permissive preflight response, it is not even fetched: - // Only a 2xx HTTP status is considered a valid response to a pre-flight. - // A redirect is like a 3xx HTTP status, so the whole request is rejected, - // and the redirect is not followed for the OPTIONS request. - Assert.deepEqual( - await contentFetch("http://dummy/", "http://example.net/never_reached", { - method: "NONSIMPLE", - }), - { error: "NetworkError when attempting to fetch resource." }, - "Redirect of preflight request (OPTIONS) should be a CORS failure" - ); - - Assert.equal(gPreflightCount, 0, "Preflight OPTIONS has been intercepted"); - - await extension.unload(); -}); - -// Tests that DNR redirect rules can be chained. -add_task(async function redirect_request_with_dnr_multiple_hops() { - async function background() { - // Set up redirects from example.com up until dummy. - let hosts = ["example.com", "example.net", "example.org", "redir", "dummy"]; - let rules = []; - for (let i = 1; i < hosts.length; ++i) { - const from = hosts[i - 1]; - const to = hosts[i]; - const end = hosts.length - 1 === i; - rules.push({ - id: i, - condition: { requestDomains: [from] }, - action: { - type: "redirect", - redirect: { - // All intermediate redirects should never hit the server, but the - // last one should.. - url: end ? 
`http://${to}/?end` : `http://${to}/never_reached`, - }, - }, - }); - } - await browser.declarativeNetRequest.updateSessionRules({ addRules: rules }); - let req = await fetch("http://example.com/never_reached"); - browser.test.assertEq(200, req.status, "redirected by DNR (multiple)"); - browser.test.assertEq("http://dummy/?end", req.url, "Last URL in chain"); - browser.test.assertEq("Dummy page", await req.text()); - - browser.test.notifyPass(); - } - let extension = ExtensionTestUtils.loadExtension({ - background, - temporarilyInstalled: true, // Needed for granted_host_permissions - manifest: { - manifest_version: 3, - granted_host_permissions: true, - host_permissions: ["*://*/*"], // matches all in the |hosts| list. - permissions: ["declarativeNetRequest"], - }, - }); - await extension.startup(); - await extension.awaitFinish(); - - // Test again, but without special extension permissions to verify that DNR - // redirects pass CORS checks. - Assert.deepEqual( - await contentFetch("http://dummy/", "http://redir/never_reached"), - { status: 200, url: "http://dummy/?end", body: "Dummy page" }, - "Multiple redirects by DNR, requested from web origin." - ); - - await extension.unload(); -}); - -add_task(async function redirect_request_with_dnr_with_redirect_loop() { - async function background() { - await browser.declarativeNetRequest.updateSessionRules({ - addRules: [ - { - id: 1, - condition: { requestDomains: ["redir"] }, - action: { - type: "redirect", - redirect: { - url: "http://redir/cors_202?loop", - }, - }, - }, - ], - }); - - // Redirect with initially a different URL. - await browser.test.assertRejects( - fetch("http://redir/never_reached?"), - "NetworkError when attempting to fetch resource.", - "Redirect loop caught (initially different URL)" - ); - - // Redirect where redirect is exactly the same URL as requested. 
- await browser.test.assertRejects( - fetch("http://redir/cors_202?loop"), - "NetworkError when attempting to fetch resource.", - "Redirect loop caught (redirect target same as initial URL)" - ); - - browser.test.notifyPass(); - } - let extension = ExtensionTestUtils.loadExtension({ - background, - temporarilyInstalled: true, // Needed for granted_host_permissions - manifest: { - manifest_version: 3, - granted_host_permissions: true, - host_permissions: ["*://redir/*"], - permissions: ["declarativeNetRequest"], - }, - }); - await extension.startup(); - await extension.awaitFinish(); - await extension.unload(); -}); - -// Tests that redirect to extensionPath works, provided that the initiator is -// either the extension itself, or in host_permissions. Moreover, the requested -// resource must match a web_accessible_resources entry for both the initiator -// AND the pre-redirect URL. -add_task(async function redirect_request_with_dnr_to_extensionPath() { - async function background() { - await browser.declarativeNetRequest.updateSessionRules({ - addRules: [ - { - id: 1, - condition: { requestDomains: ["redir"], requestMethods: ["post"] }, - action: { - type: "redirect", - redirect: { - extensionPath: "/war.txt?1", - }, - }, - }, - { - id: 2, - condition: { requestDomains: ["redir"], requestMethods: ["put"] }, - action: { - type: "redirect", - redirect: { - extensionPath: "/nonwar.txt?2", - }, - }, - }, - ], - }); - { - let req = await fetch("http://redir/never_reached", { method: "post" }); - browser.test.assertEq(200, req.status, "redirected to extensionPath"); - browser.test.assertEq(`${location.origin}/war.txt?1`, req.url); - browser.test.assertEq("war_ext_res", await req.text()); - } - // Redirects to extensionPath that is not in web_accessible_resources. 
- // While the initiator (extension) would be allowed to read the resource - // due to it being same-origin, the pre-redirect URL (http://redir) is not - // matching web_accessible_resources[].matches, so the load is rejected. - // - // This behavior differs from Chrome (e.g. at least in Chrome 109) that - // does allow the load to complete. Extensions who really care about - // exposing a web-accessible resource to the world can just put an all_urls - // pattern in web_accessible_resources[].matches. - await browser.test.assertRejects( - fetch("http://redir/never_reached", { method: "put" }), - "NetworkError when attempting to fetch resource.", - "Redirect to nowar.txt, but pre-redirect host is not in web_accessible_resources[].matches" - ); - - browser.test.notifyPass(); - } - let extension = ExtensionTestUtils.loadExtension({ - background, - temporarilyInstalled: true, // Needed for granted_host_permissions - manifest: { - manifest_version: 3, - granted_host_permissions: true, - host_permissions: ["*://redir/*", "*://dummy/*"], - permissions: ["declarativeNetRequest"], - web_accessible_resources: [ - // *://redir/* is in matches, because that is the pre-redirect host. - // *://dummy/* is in matches, because that is an initiator below. - { resources: ["war.txt"], matches: ["*://redir/*", "*://dummy/*"] }, - // without "matches", this is almost equivalent to not being listed in - // web_accessible_resources at all. This entry is listed here to verify - // that the presence of extension_ids does not somehow allow a request - // with an extension initiator to complete. - { resources: ["nonwar.txt"], extension_ids: ["*"] }, - ], - }, - files: { - "war.txt": "war_ext_res", - "nonwar.txt": "non_war_ext_res", - }, - }); - await extension.startup(); - await extension.awaitFinish(); - const extPrefix = `moz-extension://${extension.uuid}`; - - // Request from origin in host_permissions, for web-accessible resource. 
- Assert.deepEqual( - await contentFetch( - "http://dummy/", // <-- Matching web_accessible_resources[].matches - "http://redir/never_reached", // <-- With matching host_permissions - { method: "post" } - ), - { status: 200, url: `${extPrefix}/war.txt?1`, body: "war_ext_res" }, - "Should have got redirect to web_accessible_resources (war.txt)" - ); - - // Request from origin in host_permissions, for non-web-accessible resource. - let { messages } = await promiseConsoleOutput(async () => { - Assert.deepEqual( - await contentFetch( - "http://dummy/", // <-- Matching web_accessible_resources[].matches - "http://redir/never_reached", // <-- With matching host_permissions - { method: "put" } - ), - { error: "NetworkError when attempting to fetch resource." }, - "Redirect to nowar.txt, without matching web_accessible_resources[].matches" - ); - }); - const EXPECTED_SECURITY_ERROR = `Content at http://redir/never_reached may not load or link to ${extPrefix}/nonwar.txt?2.`; - Assert.equal( - messages.filter(m => m.message.includes(EXPECTED_SECURITY_ERROR)).length, - 1, - `Should log SecurityError: ${EXPECTED_SECURITY_ERROR}` - ); - - // Request from origin not in host_permissions. DNR rule should not apply. 
- Assert.deepEqual( - await contentFetch( - "http://dummy/", // <-- Matching web_accessible_resources[].matches - "http://example.com/cors_202", // <-- NOT in host_permissions - { method: "post" } - ), - { status: 202, url: "http://example.com/cors_202", body: "cors_response" }, - "Extension should not have redirected, due to lack of host permissions" - ); - - await extension.unload(); -}); diff --git a/toolkit/components/extensions/test/xpcshell/xpcshell-common.ini b/toolkit/components/extensions/test/xpcshell/xpcshell-common.ini index cdbfad6e2956..f7406f338ddb 100644 --- a/toolkit/components/extensions/test/xpcshell/xpcshell-common.ini +++ b/toolkit/components/extensions/test/xpcshell/xpcshell-common.ini @@ -114,14 +114,9 @@ skip-if = [test_ext_cors_mozextension.js] [test_ext_csp_frame_ancestors.js] [test_ext_debugging_utils.js] -[test_ext_dnr_allowAllRequests.js] [test_ext_dnr_api.js] -[test_ext_dnr_private_browsing.js] [test_ext_dnr_session_rules.js] -[test_ext_dnr_system_restrictions.js] [test_ext_dnr_testMatchOutcome.js] -[test_ext_dnr_webrequest.js] -[test_ext_dnr_without_webrequest.js] [test_ext_dns.js] skip-if = os == "android" # Android needs alternative for proxy.settings - bug 1723523 [test_ext_downloads.js] diff --git a/toolkit/components/extensions/webrequest/WebRequest.jsm b/toolkit/components/extensions/webrequest/WebRequest.jsm index 9de07fb4e0ee..9b9982c42cb1 100644 --- a/toolkit/components/extensions/webrequest/WebRequest.jsm +++ b/toolkit/components/extensions/webrequest/WebRequest.jsm @@ -19,7 +19,6 @@ const { XPCOMUtils } = ChromeUtils.importESModule( const lazy = {}; XPCOMUtils.defineLazyModuleGetters(lazy, { - ExtensionDNR: "resource://gre/modules/ExtensionDNR.jsm", ExtensionParent: "resource://gre/modules/ExtensionParent.jsm", ExtensionUtils: "resource://gre/modules/ExtensionUtils.jsm", WebRequestUpload: "resource://gre/modules/WebRequestUpload.jsm", @@ -620,9 +619,6 @@ HttpObserverManager = { onErrorOccurred: new Map(), onCompleted: new 
Map(), }, - // Whether there are any registered declarativeNetRequest rules. These DNR - // rules may match new requests and result in request modifications. - dnrActive: false, openingInitialized: false, beforeConnectInitialized: false, @@ -664,11 +660,10 @@ HttpObserverManager = { // webRequest listeners and removing those that are no longer needed if // there are no more listeners for corresponding webRequest events. addOrRemove() { - let needOpening = this.listeners.onBeforeRequest.size || this.dnrActive; + let needOpening = this.listeners.onBeforeRequest.size; let needBeforeConnect = this.listeners.onBeforeSendHeaders.size || - this.listeners.onSendHeaders.size || - this.dnrActive; + this.listeners.onSendHeaders.size; if (needOpening && !this.openingInitialized) { this.openingInitialized = true; Services.obs.addObserver(this, "http-on-modify-request"); @@ -697,8 +692,7 @@ HttpObserverManager = { let needExamine = this.needTracing || this.listeners.onHeadersReceived.size || - this.listeners.onAuthRequired.size || - this.dnrActive; + this.listeners.onAuthRequired.size; if (needExamine && !this.examineInitialized) { this.examineInitialized = true; @@ -746,11 +740,6 @@ HttpObserverManager = { this.addOrRemove(); }, - setDNRHandlingEnabled(dnrActive) { - this.dnrActive = dnrActive; - this.addOrRemove(); - }, - observe(subject, topic, data) { let channel = this.getWrapper(subject); switch (topic) { @@ -928,10 +917,6 @@ HttpObserverManager = { if (kind !== "onErrorOccurred" && channel.errorString) { return; } - if (this.dnrActive) { - // DNR may modify (but not cancel) the request at this stage. 
- lazy.ExtensionDNR.beforeWebRequestEvent(channel, kind); - } let registerFilter = this.FILTER_TYPES.has(kind); let commonData = null; @@ -1027,10 +1012,6 @@ HttpObserverManager = { Cu.reportError(e); } - if (this.dnrActive && lazy.ExtensionDNR.handleRequest(channel, kind)) { - return; - } - return this.applyChanges( kind, channel, @@ -1306,10 +1287,6 @@ var onCompleted = new HttpEvent("onCompleted", ["responseHeaders"]); var onErrorOccurred = new HttpEvent("onErrorOccurred"); var WebRequest = { - setDNRHandlingEnabled: dnrActive => { - HttpObserverManager.setDNRHandlingEnabled(dnrActive); - }, - onBeforeRequest, onBeforeSendHeaders, onSendHeaders,