Bug 1690375 - Record markers with partial stacks for flushing pending notifications and marking a style flush as needed, r=gerald.

Differential Revision: https://phabricator.services.mozilla.com/D103844
Florian Quèze 2021-02-04 15:13:21 +00:00
parent 13a8322968
commit d520574f5c
9 changed files with 102 additions and 55 deletions

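For orientation, here is a minimal call-site sketch of the two capture modes this patch makes selectable. The function and marker names are hypothetical; StackCaptureOptions, the MarkerStack::Capture() overload, and PROFILER_MARKER_UNTYPED are the APIs touched below:

#include "GeckoProfiler.h"

void ExampleHotCodePath() {  // hypothetical call site
  // NonNative records only label and JS frames, skipping the native unwind,
  // which keeps the overhead low enough for hot paths like SetNeedStyleFlush.
  PROFILER_MARKER_UNTYPED(
      "ExampleHotCodePath", OTHER,
      MarkerOptions(MarkerStack::Capture(StackCaptureOptions::NonNative)));

  // The default is still a full stack, as used by DoFlushPendingNotifications.
  PROFILER_MARKER_UNTYPED("ExampleFullStack", OTHER,
                          MarkerOptions(MarkerStack::Capture()));
}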
layout/base/PresShell.cpp

@@ -4089,6 +4089,11 @@ void PresShell::DoFlushPendingNotifications(mozilla::ChangesToFlush aFlush) {
   MOZ_ASSERT(NeedFlush(flushType), "Why did we get called?");

 #ifdef MOZ_GECKO_PROFILER
+  AUTO_PROFILER_MARKER_TEXT(
+      "DoFlushPendingNotifications", LAYOUT,
+      MarkerOptions(MarkerStack::Capture(), MarkerInnerWindowIdFromDocShell(
+                                                mPresContext->GetDocShell())),
+      nsDependentCString(kFlushTypeNames[flushType]));
   AUTO_PROFILER_LABEL_DYNAMIC_CSTR_NONSENSITIVE(
       "PresShell::DoFlushPendingNotifications", LAYOUT,
       kFlushTypeNames[flushType]);

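AUTO_PROFILER_MARKER_TEXT records an interval marker spanning the enclosing scope, so the flush shows up with its duration, a text payload naming the flush type, and a full stack. A reduced sketch of the same pattern, with a hypothetical function and a hard-coded flush-type string:

void ExampleFlush(nsIDocShell* aDocShell) {  // hypothetical
  // Interval marker: starts here, ends when the scope exits.
  AUTO_PROFILER_MARKER_TEXT(
      "ExampleFlush", LAYOUT,
      MarkerOptions(MarkerStack::Capture(),
                    MarkerInnerWindowIdFromDocShell(aDocShell)),
      "InterruptibleLayout"_ns);
  // ... flush work happens here, timed by the marker ...
}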
layout/base/PresShellInlines.h

@@ -7,6 +7,7 @@
 #ifndef mozilla_PresShellInlines_h
 #define mozilla_PresShellInlines_h

+#include "nsDocShell.h"
 #include "GeckoProfiler.h"
 #include "mozilla/PresShell.h"
 #include "mozilla/dom/Document.h"
@@ -33,6 +34,13 @@ void PresShell::SetNeedLayoutFlush() {
 void PresShell::SetNeedStyleFlush() {
   mNeedStyleFlush = true;
+  PROFILER_MARKER_UNTYPED(
+      "SetNeedStyleFlush", LAYOUT,
+      MarkerOptions(MarkerStack::Capture(StackCaptureOptions::NonNative),
+                    mPresContext ? MarkerInnerWindowIdFromDocShell(
+                                       mPresContext->GetDocShell())
+                                 : MarkerInnerWindowId::NoId()));
+
   if (dom::Document* doc = mDocument->GetDisplayDocument()) {
     if (PresShell* presShell = doc->GetPresShell()) {
       presShell->mNeedStyleFlush = true;

mozglue/baseprofiler/core/platform.cpp

@@ -1667,10 +1667,10 @@ static void DoNativeBacktrace(PSLockRef aLock,
 //
 // The grammar for entry sequences is in a comment above
 // ProfileBuffer::StreamSamplesToJSON.
-static inline void DoSharedSample(PSLockRef aLock, bool aIsSynchronous,
-                                  RegisteredThread& aRegisteredThread,
-                                  const Registers& aRegs, uint64_t aSamplePos,
-                                  ProfileBuffer& aBuffer) {
+static inline void DoSharedSample(
+    PSLockRef aLock, bool aIsSynchronous, RegisteredThread& aRegisteredThread,
+    const Registers& aRegs, uint64_t aSamplePos, ProfileBuffer& aBuffer,
+    StackCaptureOptions aCaptureOptions = StackCaptureOptions::Full) {
   // WARNING: this function runs within the profiler's "critical section".
   MOZ_ASSERT(!aBuffer.IsThreadSafe(),
@@ -1681,7 +1681,8 @@ static inline void DoSharedSample(PSLockRef aLock, bool aIsSynchronous,
   ProfileBufferCollector collector(aBuffer, aSamplePos);
   NativeStack nativeStack;
 #if defined(HAVE_NATIVE_UNWIND)
-  if (ActivePS::FeatureStackWalk(aLock)) {
+  if (ActivePS::FeatureStackWalk(aLock) &&
+      aCaptureOptions == StackCaptureOptions::Full) {
     DoNativeBacktrace(aLock, aRegisteredThread, aRegs, nativeStack);

     MergeStacks(ActivePS::Features(aLock), aIsSynchronous, aRegisteredThread,
@@ -1693,7 +1694,8 @@ static inline void DoSharedSample(PSLockRef aLock, bool aIsSynchronous,
                 aRegs, nativeStack, collector);

     // We can't walk the whole native stack, but we can record the top frame.
-    if (ActivePS::FeatureLeaf(aLock)) {
+    if (ActivePS::FeatureLeaf(aLock) &&
+        aCaptureOptions == StackCaptureOptions::Full) {
       aBuffer.AddEntry(ProfileBufferEntry::NativeLeafAddr((void*)aRegs.mPC));
     }
   }
@@ -1702,9 +1704,13 @@ static inline void DoSharedSample(PSLockRef aLock, bool aIsSynchronous,
 // Writes the components of a synchronous sample to the given ProfileBuffer.
 static void DoSyncSample(PSLockRef aLock, RegisteredThread& aRegisteredThread,
                          const TimeStamp& aNow, const Registers& aRegs,
-                         ProfileBuffer& aBuffer) {
+                         ProfileBuffer& aBuffer,
+                         StackCaptureOptions aCaptureOptions) {
   // WARNING: this function runs within the profiler's "critical section".

+  MOZ_ASSERT(aCaptureOptions != StackCaptureOptions::NoStack,
+             "DoSyncSample should not be called when no capture is needed");
+
   uint64_t samplePos =
       aBuffer.AddThreadIdEntry(aRegisteredThread.Info()->ThreadId());
@@ -1712,7 +1718,7 @@ static void DoSyncSample(PSLockRef aLock, RegisteredThread& aRegisteredThread,
   aBuffer.AddEntry(ProfileBufferEntry::Time(delta.ToMilliseconds()));

   DoSharedSample(aLock, /* aIsSynchronous = */ true, aRegisteredThread, aRegs,
-                 samplePos, aBuffer);
+                 samplePos, aBuffer, aCaptureOptions);
 }

 // Writes the components of a periodic sample to ActivePS's ProfileBuffer.
@@ -3550,12 +3556,14 @@ double profiler_time() {
   return delta.ToMilliseconds();
 }

-bool profiler_capture_backtrace_into(ProfileChunkedBuffer& aChunkedBuffer) {
+bool profiler_capture_backtrace_into(ProfileChunkedBuffer& aChunkedBuffer,
+                                     StackCaptureOptions aCaptureOptions) {
   MOZ_RELEASE_ASSERT(CorePS::Exists());

   PSAutoLock lock;

-  if (!ActivePS::Exists(lock)) {
+  if (!ActivePS::Exists(lock) ||
+      aCaptureOptions == StackCaptureOptions::NoStack) {
     return false;
   }
@@ -3576,7 +3584,7 @@ bool profiler_capture_backtrace_into(ProfileChunkedBuffer& aChunkedBuffer) {
 #endif

   DoSyncSample(lock, *registeredThread, TimeStamp::NowUnfuzzed(), regs,
-               profileBuffer);
+               profileBuffer, aCaptureOptions);

   return true;
 }
@@ -3594,7 +3602,7 @@ UniquePtr<ProfileChunkedBuffer> profiler_capture_backtrace() {
       MakeUnique<ProfileBufferChunkManagerSingle>(
           ProfileBufferChunkManager::scExpectedMaximumStackSize));

-  if (!profiler_capture_backtrace_into(*buffer)) {
+  if (!profiler_capture_backtrace_into(*buffer, StackCaptureOptions::Full)) {
     return nullptr;
   }

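A sketch of a direct caller of the updated base profiler entry point, mirroring profiler_capture_backtrace() above but requesting a reduced stack; the wrapper function is hypothetical:

bool CaptureCheapBacktrace(mozilla::ProfileChunkedBuffer& aBuffer) {
  // Returns false if the profiler is inactive, or if NoStack were passed,
  // matching the new early return in profiler_capture_backtrace_into.
  return mozilla::baseprofiler::profiler_capture_backtrace_into(
      aBuffer, mozilla::StackCaptureOptions::NonNative);
}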
mozglue/baseprofiler/public/BaseProfiler.h

@@ -76,7 +76,7 @@ static inline UniqueProfilerBacktrace profiler_get_backtrace() {
 }

 static inline bool profiler_capture_backtrace_into(
-    ProfileChunkedBuffer& aChunkedBuffer) {
+    ProfileChunkedBuffer& aChunkedBuffer, StackCaptureOptions aCaptureOptions) {
   return false;
 }
@@ -108,6 +108,7 @@ namespace mozilla {
 class MallocAllocPolicy;
 class ProfileChunkedBuffer;
+enum class StackCaptureOptions;
 template <class T, size_t MinInlineCapacity, class AllocPolicy>
 class Vector;
@@ -554,9 +555,10 @@ using UniqueProfilerBacktrace =
 // Immediately capture the current thread's call stack, store it in the provided
 // buffer (usually to avoid allocations if you can construct the buffer on the
-// stack). Returns false if unsuccessful, or if the profiler is inactive.
+// stack). Returns false if unsuccessful, if the profiler is inactive, or if
+// aCaptureOptions is NoStack.
 MFBT_API bool profiler_capture_backtrace_into(
-    ProfileChunkedBuffer& aChunkedBuffer);
+    ProfileChunkedBuffer& aChunkedBuffer, StackCaptureOptions aCaptureOptions);

 // Immediately capture the current thread's call stack, and return it in a
 // ProfileChunkedBuffer (usually for later use in MarkerStack::TakeBacktrace()).

mozglue/baseprofiler/public/BaseProfilerMarkersDetail.h

@@ -251,7 +251,8 @@ static ProfileBufferBlockIndex AddMarkerWithOptionalStackToBuffer(
 // Pointer to a function that can capture a backtrace into the provided
 // `ProfileChunkedBuffer`, and returns true when successful.
-using BacktraceCaptureFunction = bool (*)(ProfileChunkedBuffer&);
+using BacktraceCaptureFunction = bool (*)(ProfileChunkedBuffer&,
+                                          StackCaptureOptions);

 // Add a marker with the given name, options, and arguments to the given buffer.
 // Because this may be called from either Base or Gecko Profiler functions, the
@@ -271,7 +272,8 @@ ProfileBufferBlockIndex AddMarkerToBuffer(
     aOptions.Set(MarkerTiming::InstantNow());
   }

-  if (aOptions.Stack().IsCaptureNeeded()) {
+  StackCaptureOptions captureOptions = aOptions.Stack().CaptureOptions();
+  if (captureOptions != StackCaptureOptions::NoStack) {
     // A capture was requested, let's attempt to do it here&now. This avoids a
     // lot of allocations that would be necessary if capturing a backtrace
     // separately.
@@ -282,7 +284,9 @@ ProfileBufferBlockIndex AddMarkerToBuffer(
     ProfileChunkedBuffer chunkedBuffer(
         ProfileChunkedBuffer::ThreadSafety::WithoutMutex, chunkManager);
     aOptions.StackRef().UseRequestedBacktrace(
-        aBacktraceCaptureFunction(chunkedBuffer) ? &chunkedBuffer : nullptr);
+        aBacktraceCaptureFunction(chunkedBuffer, captureOptions)
+            ? &chunkedBuffer
+            : nullptr);
     // This call must be made from here, while chunkedBuffer is in scope.
     return AddMarkerWithOptionalStackToBuffer<MarkerType>(
         aBuffer, aName, aCategory, std::move(aOptions), aTs...);

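The BacktraceCaptureFunction indirection lets this shared template serve both profilers, each passing its own capture routine; the updated profiler_capture_backtrace_into functions match the new two-argument type. A hedged sketch of how a Gecko-side helper would forward it (the helper name is invented; the forwarding pattern is the assumption):

template <typename MarkerType, typename... Ts>
mozilla::ProfileBufferBlockIndex ExampleAddMarker(  // hypothetical helper
    mozilla::ProfileChunkedBuffer& aBuffer,
    const mozilla::ProfilerString8View& aName,
    const mozilla::MarkerCategory& aCategory, mozilla::MarkerOptions&& aOptions,
    const Ts&... aTs) {
  // ::profiler_capture_backtrace_into now matches BacktraceCaptureFunction.
  return mozilla::base_profiler_markers_detail::AddMarkerToBuffer<MarkerType>(
      aBuffer, aName, aCategory, std::move(aOptions),
      ::profiler_capture_backtrace_into, aTs...);
}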
mozglue/baseprofiler/public/BaseProfilerMarkersPrerequisites.h

@@ -13,6 +13,17 @@
 #ifndef BaseProfilerMarkersPrerequisites_h
 #define BaseProfilerMarkersPrerequisites_h

+namespace mozilla {
+
+enum class StackCaptureOptions {
+  NoStack,    // No stack captured.
+  Full,       // Capture a full stack, including label frames, JS frames and
+              // native frames.
+  NonNative,  // Capture a stack without native frames for reduced overhead.
+};
+
+}
+
 #ifdef MOZ_GECKO_PROFILER
 #  include "BaseProfilingCategory.h"
@@ -427,7 +438,7 @@ class MarkerStack {
   // Allow move.
   MarkerStack(MarkerStack&& aOther)
-      : mIsCaptureRequested(aOther.mIsCaptureRequested),
+      : mCaptureOptions(aOther.mCaptureOptions),
         mOptionalChunkedBufferStorage(
            std::move(aOther.mOptionalChunkedBufferStorage)),
         mChunkedBuffer(aOther.mChunkedBuffer) {
@@ -435,7 +446,7 @@ class MarkerStack {
     aOther.Clear();
   }
   MarkerStack& operator=(MarkerStack&& aOther) {
-    mIsCaptureRequested = aOther.mIsCaptureRequested;
+    mCaptureOptions = aOther.mCaptureOptions;
     mOptionalChunkedBufferStorage =
         std::move(aOther.mOptionalChunkedBufferStorage);
     mChunkedBuffer = aOther.mChunkedBuffer;
@@ -446,8 +457,7 @@ class MarkerStack {
   // Take ownership of a backtrace. If null or empty, equivalent to NoStack().
   explicit MarkerStack(UniquePtr<ProfileChunkedBuffer>&& aExternalChunkedBuffer)
-      : mIsCaptureRequested(false),
-        mOptionalChunkedBufferStorage(
+      : mOptionalChunkedBufferStorage(
            (!aExternalChunkedBuffer || aExternalChunkedBuffer->IsEmpty())
                ? nullptr
                : std::move(aExternalChunkedBuffer)),
@@ -458,25 +468,27 @@ class MarkerStack {
   // Use an existing backtrace stored elsewhere, which the user must guarantee
   // is alive during the add-marker call. If empty, equivalent to NoStack().
   explicit MarkerStack(ProfileChunkedBuffer& aExternalChunkedBuffer)
-      : mIsCaptureRequested(false),
-        mChunkedBuffer(aExternalChunkedBuffer.IsEmpty()
+      : mChunkedBuffer(aExternalChunkedBuffer.IsEmpty()
                            ? nullptr
                            : &aExternalChunkedBuffer) {
     AssertInvariants();
   }

   // Don't capture a stack in this marker.
-  static MarkerStack NoStack() { return MarkerStack(false); }
+  static MarkerStack NoStack() {
+    return MarkerStack(StackCaptureOptions::NoStack);
+  }

   // Capture a stack when adding this marker.
-  static MarkerStack Capture() {
+  static MarkerStack Capture(
+      StackCaptureOptions aCaptureOptions = StackCaptureOptions::Full) {
     // Actual capture will be handled inside profiler_add_marker.
-    return MarkerStack(true);
+    return MarkerStack(aCaptureOptions);
   }

   // Optionally capture a stack, useful for avoiding long-winded ternaries.
   static MarkerStack MaybeCapture(bool aDoCapture) {
-    return MarkerStack(aDoCapture);
+    return aDoCapture ? Capture() : NoStack();
   }

   // Use an existing backtrace stored elsewhere, which the user must guarantee
@@ -493,18 +505,16 @@ class MarkerStack {
     return MarkerStack(std::move(aExternalChunkedBuffer));
   }

-  [[nodiscard]] bool IsCaptureNeeded() const {
-    // If the chunked buffer already contains something, consider the capture
-    // request already fulfilled.
-    return mIsCaptureRequested;
+  [[nodiscard]] StackCaptureOptions CaptureOptions() const {
+    return mCaptureOptions;
   }

   ProfileChunkedBuffer* GetChunkedBuffer() const { return mChunkedBuffer; }

   // Use backtrace after a request. If null, equivalent to NoStack().
   void UseRequestedBacktrace(ProfileChunkedBuffer* aExternalChunkedBuffer) {
-    MOZ_RELEASE_ASSERT(IsCaptureNeeded());
-    mIsCaptureRequested = false;
+    MOZ_RELEASE_ASSERT(mCaptureOptions != StackCaptureOptions::NoStack);
+    mCaptureOptions = StackCaptureOptions::NoStack;
     if (aExternalChunkedBuffer && !aExternalChunkedBuffer->IsEmpty()) {
       // We only need to use the provided buffer if it is not empty.
       mChunkedBuffer = aExternalChunkedBuffer;
@@ -513,22 +523,22 @@ class MarkerStack {
   }

   void Clear() {
-    mIsCaptureRequested = false;
+    mCaptureOptions = StackCaptureOptions::NoStack;
     mOptionalChunkedBufferStorage.reset();
     mChunkedBuffer = nullptr;
     AssertInvariants();
   }

  private:
-  explicit MarkerStack(bool aIsCaptureRequested)
-      : mIsCaptureRequested(aIsCaptureRequested) {
+  explicit MarkerStack(StackCaptureOptions aCaptureOptions)
+      : mCaptureOptions(aCaptureOptions) {
     AssertInvariants();
   }

   // This should be called after every constructor and non-const function.
   void AssertInvariants() const {
 #  ifdef DEBUG
-    if (mIsCaptureRequested) {
+    if (mCaptureOptions != StackCaptureOptions::NoStack) {
       MOZ_ASSERT(!mOptionalChunkedBufferStorage,
                  "We should not hold a buffer when capture is requested");
       MOZ_ASSERT(!mChunkedBuffer,
@@ -547,8 +557,7 @@ class MarkerStack {
 #  endif  // DEBUG
   }

-  // True if a capture is requested when marker is added to the profile buffer.
-  bool mIsCaptureRequested = false;
+  StackCaptureOptions mCaptureOptions = StackCaptureOptions::NoStack;

   // Optional storage for the backtrace, in case it was captured before the
   // add-marker call.

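With the boolean mIsCaptureRequested replaced by mCaptureOptions, the factory functions compose as follows (usage sketch; the wrapper function and variable names are illustrative):

void MarkerStackExamples(bool aWantStack) {  // hypothetical
  mozilla::MarkerStack none = mozilla::MarkerStack::NoStack();
  mozilla::MarkerStack full = mozilla::MarkerStack::Capture();  // Full default
  mozilla::MarkerStack cheap =
      mozilla::MarkerStack::Capture(mozilla::StackCaptureOptions::NonNative);
  // MaybeCapture() is now plain sugar over the two factories above.
  mozilla::MarkerStack maybe = mozilla::MarkerStack::MaybeCapture(aWantStack);
}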
tools/profiler/core/platform.cpp

@@ -2206,10 +2206,10 @@ static void DoNativeBacktrace(PSLockRef aLock,
 //
 // The grammar for entry sequences is in a comment above
 // ProfileBuffer::StreamSamplesToJSON.
-static inline void DoSharedSample(PSLockRef aLock, bool aIsSynchronous,
-                                  RegisteredThread& aRegisteredThread,
-                                  const Registers& aRegs, uint64_t aSamplePos,
-                                  ProfileBuffer& aBuffer) {
+static inline void DoSharedSample(
+    PSLockRef aLock, bool aIsSynchronous, RegisteredThread& aRegisteredThread,
+    const Registers& aRegs, uint64_t aSamplePos, ProfileBuffer& aBuffer,
+    StackCaptureOptions aCaptureOptions = StackCaptureOptions::Full) {
   // WARNING: this function runs within the profiler's "critical section".
   MOZ_ASSERT(!aBuffer.IsThreadSafe(),
@@ -2220,7 +2220,8 @@ static inline void DoSharedSample(PSLockRef aLock, bool aIsSynchronous,
   ProfileBufferCollector collector(aBuffer, aSamplePos);
   NativeStack nativeStack;
 #if defined(HAVE_NATIVE_UNWIND)
-  if (ActivePS::FeatureStackWalk(aLock)) {
+  if (ActivePS::FeatureStackWalk(aLock) &&
+      aCaptureOptions == StackCaptureOptions::Full) {
     DoNativeBacktrace(aLock, aRegisteredThread, aRegs, nativeStack);

     MergeStacks(ActivePS::Features(aLock), aIsSynchronous, aRegisteredThread,
@@ -2232,7 +2233,8 @@ static inline void DoSharedSample(PSLockRef aLock, bool aIsSynchronous,
                 aRegs, nativeStack, collector, CorePS::JsFrames(aLock));

     // We can't walk the whole native stack, but we can record the top frame.
-    if (ActivePS::FeatureLeaf(aLock)) {
+    if (ActivePS::FeatureLeaf(aLock) &&
+        aCaptureOptions == StackCaptureOptions::Full) {
       aBuffer.AddEntry(ProfileBufferEntry::NativeLeafAddr((void*)aRegs.mPC));
     }
   }
@@ -2241,9 +2243,13 @@ static inline void DoSharedSample(PSLockRef aLock, bool aIsSynchronous,
 // Writes the components of a synchronous sample to the given ProfileBuffer.
 static void DoSyncSample(PSLockRef aLock, RegisteredThread& aRegisteredThread,
                          const TimeStamp& aNow, const Registers& aRegs,
-                         ProfileBuffer& aBuffer) {
+                         ProfileBuffer& aBuffer,
+                         StackCaptureOptions aCaptureOptions) {
   // WARNING: this function runs within the profiler's "critical section".

+  MOZ_ASSERT(aCaptureOptions != StackCaptureOptions::NoStack,
+             "DoSyncSample should not be called when no capture is needed");
+
   uint64_t samplePos =
       aBuffer.AddThreadIdEntry(aRegisteredThread.Info()->ThreadId());
@@ -2251,7 +2257,7 @@ static void DoSyncSample(PSLockRef aLock, RegisteredThread& aRegisteredThread,
   aBuffer.AddEntry(ProfileBufferEntry::Time(delta.ToMilliseconds()));

   DoSharedSample(aLock, /* aIsSynchronous = */ true, aRegisteredThread, aRegs,
-                 samplePos, aBuffer);
+                 samplePos, aBuffer, aCaptureOptions);
 }

 // Writes the components of a periodic sample to ActivePS's ProfileBuffer.
@@ -5431,12 +5437,14 @@ double profiler_time() {
   return delta.ToMilliseconds();
 }

-bool profiler_capture_backtrace_into(ProfileChunkedBuffer& aChunkedBuffer) {
+bool profiler_capture_backtrace_into(ProfileChunkedBuffer& aChunkedBuffer,
+                                     StackCaptureOptions aCaptureOptions) {
   MOZ_RELEASE_ASSERT(CorePS::Exists());

   PSAutoLock lock(gPSMutex);

-  if (!ActivePS::Exists(lock)) {
+  if (!ActivePS::Exists(lock) ||
+      aCaptureOptions == StackCaptureOptions::NoStack) {
     return false;
   }
@@ -5460,7 +5468,7 @@ bool profiler_capture_backtrace_into(ProfileChunkedBuffer& aChunkedBuffer) {
 #endif

   DoSyncSample(lock, *registeredThread, TimeStamp::NowUnfuzzed(), regs,
-               profileBuffer);
+               profileBuffer, aCaptureOptions);

   return true;
 }
@@ -5478,7 +5486,7 @@ UniquePtr<ProfileChunkedBuffer> profiler_capture_backtrace() {
       MakeUnique<ProfileBufferChunkManagerSingle>(
           ProfileBufferChunkManager::scExpectedMaximumStackSize));

-  if (!profiler_capture_backtrace_into(*buffer)) {
+  if (!profiler_capture_backtrace_into(*buffer, StackCaptureOptions::Full)) {
     return nullptr;
   }

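Side by side, the two public entry points after this change: profiler_capture_backtrace() always requests a full stack, while callers that want a cheaper capture use the _into variant directly. The wrapper below is hypothetical; its buffer setup copies profiler_capture_backtrace() above:

mozilla::UniquePtr<mozilla::ProfileChunkedBuffer> CaptureForMarker(
    bool aCheapStack) {
  if (!aCheapStack) {
    return profiler_capture_backtrace();  // requests StackCaptureOptions::Full
  }
  auto buffer = mozilla::MakeUnique<mozilla::ProfileChunkedBuffer>(
      mozilla::ProfileChunkedBuffer::ThreadSafety::WithoutMutex,
      mozilla::MakeUnique<mozilla::ProfileBufferChunkManagerSingle>(
          mozilla::ProfileBufferChunkManager::scExpectedMaximumStackSize));
  if (!profiler_capture_backtrace_into(
          *buffer, mozilla::StackCaptureOptions::NonNative)) {
    return nullptr;
  }
  return buffer;
}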
tools/profiler/public/GeckoProfiler.h

@@ -79,7 +79,8 @@ static inline UniqueProfilerBacktrace profiler_get_backtrace() {
 }

 static inline bool profiler_capture_backtrace_into(
-    mozilla::ProfileChunkedBuffer& aChunkedBuffer) {
+    mozilla::ProfileChunkedBuffer& aChunkedBuffer,
+    StackCaptureOptions aCaptureOptions) {
   return false;
 }

 static inline mozilla::UniquePtr<mozilla::ProfileChunkedBuffer>
@@ -700,7 +701,8 @@ using UniqueProfilerBacktrace =
 // buffer (usually to avoid allocations if you can construct the buffer on the
 // stack). Returns false if unsuccessful, or if the profiler is inactive.
 bool profiler_capture_backtrace_into(
-    mozilla::ProfileChunkedBuffer& aChunkedBuffer);
+    mozilla::ProfileChunkedBuffer& aChunkedBuffer,
+    mozilla::StackCaptureOptions aCaptureOptions);

 // Immediately capture the current thread's call stack, and return it in a
 // ProfileChunkedBuffer (usually for later use in MarkerStack::TakeBacktrace()).

tools/profiler/public/ProfilerMarkers.h

@@ -53,7 +53,8 @@ class ProfileChunkedBuffer;
 bool profiler_can_accept_markers();

 bool profiler_capture_backtrace_into(
-    mozilla::ProfileChunkedBuffer& aChunkedBuffer);
+    mozilla::ProfileChunkedBuffer& aChunkedBuffer,
+    mozilla::StackCaptureOptions aCaptureOptions);

 // Bring category names from Base Profiler into the geckoprofiler::category
 // namespace, for consistency with other Gecko Profiler identifiers.