Description: UpdateDerivedObjectInStack crash in ASan build

Resolution: Fix a stack overflow that occurred when a stack address was stored in the heap

Issue:https://gitee.com/openharmony/arkcompiler_ets_runtime/issues/I5JPG4?from=project-issue

Signed-off-by: yingguofeng@huawei.com <yingguofeng@huawei.com>
Change-Id: I93869e94b91354030c4b299d683dfa7e0d78d702
This commit is contained in:
yingguofeng@huawei.com 2022-07-30 16:29:50 +08:00
parent 8205cd1b10
commit d37f9efec9
29 changed files with 254 additions and 281 deletions

View File

@ -243,7 +243,6 @@ config("ark_jsruntime_common_config") {
if (is_debug) {
cflags_cc += [
"-Og",
"-O0",
"-ggdb3",
]
@ -256,7 +255,6 @@ config("ark_jsruntime_common_config") {
cflags_cc += [
"-fno-inline-functions",
"-fno-inline",
"-O1",
"-fsanitize=address",
"-fsanitize-address-use-after-scope",
"-fno-omit-frame-pointer",

View File

@ -3763,6 +3763,10 @@ GateRef StubBuilder::AllocateInYoung(GateRef glue, GateRef size)
Label success(env);
Label callRuntime(env);
#if __has_feature(address_sanitizer) || defined(__SANITIZE_ADDRESS__)
DEFVARIABLE(result, VariableType::JS_ANY(), Undefined());
Jump(&callRuntime);
#else
auto topOffset = JSThread::GlueData::GetNewSpaceAllocationTopAddressOffset(env->Is32Bit());
auto endOffset = JSThread::GlueData::GetNewSpaceAllocationEndAddressOffset(env->Is32Bit());
auto topAddress = Load(VariableType::NATIVE_POINTER(), glue, IntPtr(topOffset));
@ -3781,6 +3785,7 @@ GateRef StubBuilder::AllocateInYoung(GateRef glue, GateRef size)
result = top;
Jump(&exit);
}
#endif
Bind(&callRuntime);
{
result = CallRuntime(glue, RTSTUB_ID(AllocateInYoung), {

View File

@ -20,11 +20,12 @@
namespace panda::ecmascript {
void HeapRootVisitor::VisitHeapRoots(JSThread *thread, const RootVisitor &visitor,
const RootRangeVisitor &range_visitor)
const RootRangeVisitor &rangeVisitor,
const RootBaseAndDerivedVisitor &derivedVisitor)
{
auto ecma_vm = GetVMInstance(thread);
ecma_vm->Iterate(visitor);
thread->Iterate(visitor, range_visitor);
thread->Iterate(visitor, rangeVisitor, derivedVisitor);
}
EcmaVM *HeapRootVisitor::GetVMInstance(JSThread *thread) const

View File

@ -28,7 +28,8 @@ public:
~HeapRootVisitor() = default;
NO_MOVE_SEMANTIC(HeapRootVisitor);
NO_COPY_SEMANTIC(HeapRootVisitor);
void VisitHeapRoots(JSThread *thread, const RootVisitor &visitor, const RootRangeVisitor &range_visitor);
void VisitHeapRoots(JSThread *thread, const RootVisitor &visitor, const RootRangeVisitor &rangeVisitor,
const RootBaseAndDerivedVisitor &derivedVisitor);
private:
EcmaVM *GetVMInstance(JSThread *thread) const;

View File

@ -961,6 +961,10 @@ void HeapSnapshot::AddSyntheticRoot()
RootVisitor rootEdgeBuilder = [this, syntheticRoot, &edgeOffset]([[maybe_unused]] Root type, ObjectSlot slot) {
ROOT_EDGE_BUILDER_CORE(type, slot);
};
RootBaseAndDerivedVisitor rootBaseEdgeBuilder = []
([[maybe_unused]] Root type, [[maybe_unused]]ObjectSlot base, [[maybe_unused]]ObjectSlot derived,
[[maybe_unused]]uintptr_t baseOldObject) {
};
RootRangeVisitor rootRangeEdgeBuilder = [this, syntheticRoot, &edgeOffset]([[maybe_unused]] Root type,
ObjectSlot start, ObjectSlot end) {
@ -969,7 +973,7 @@ void HeapSnapshot::AddSyntheticRoot()
}
};
#undef ROOT_EDGE_BUILDER_CORE
rootVisitor_.VisitHeapRoots(vm_->GetJSThread(), rootEdgeBuilder, rootRangeEdgeBuilder);
rootVisitor_.VisitHeapRoots(vm_->GetJSThread(), rootEdgeBuilder, rootRangeEdgeBuilder, rootBaseEdgeBuilder);
int reindex = 0;
for (Node *node : nodes_) {

View File

@ -16,8 +16,9 @@
#ifndef ECMASCRIPT_ECMA_GLOBAL_STORAGE_H
#define ECMASCRIPT_ECMA_GLOBAL_STORAGE_H
#include "ecmascript/js_tagged_value.h"
#if ECMASCRIPT_ENABLE_HANDLE_LEAK_CHECK
#include "ecmascript/dfx/native_dfx/backtrace.h"
#endif
#include "ecmascript/mem/c_containers.h"
#include "ecmascript/mem/chunk.h"
#include "ecmascript/js_thread.h"
@ -141,6 +142,14 @@ public:
ResetMarkCount();
if (isUsing) {
IncGlobalNumber();
// This value needs to be adjusted according to the specific scene.
static const int START_GLOBAL_NUMBER = 110000;
static const int GLOBAL_NUMBER_COUNT = 10000;
if (globalNumber_ > START_GLOBAL_NUMBER && globalNumber_ < START_GLOBAL_NUMBER + GLOBAL_NUMBER_COUNT
&& JSTaggedValue(value).IsHeapObject()) {
LOG_ECMA(ERROR) << "---------------Global Number:" << globalNumber_ << "-------------------";
PrintBacktrace(value);
}
}
#endif
}

View File

@ -554,12 +554,12 @@ void EcmaVM::PrintJSErrorInfo(const JSHandle<JSTaggedValue> &exceptionInfo)
LOG_ECMA(ERROR) << nameBuffer << ": " << msgBuffer << "\n" << stackBuffer;
}
void EcmaVM::ProcessNativeDelete(const WeakRootVisitor &v0)
void EcmaVM::ProcessNativeDelete(const WeakRootVisitor &visitor)
{
auto iter = nativePointerList_.begin();
while (iter != nativePointerList_.end()) {
JSNativePointer *object = *iter;
auto fwd = v0(reinterpret_cast<TaggedObject *>(object));
auto fwd = visitor(reinterpret_cast<TaggedObject *>(object));
if (fwd == nullptr) {
object->Destroy();
iter = nativePointerList_.erase(iter);
@ -568,7 +568,7 @@ void EcmaVM::ProcessNativeDelete(const WeakRootVisitor &v0)
}
}
}
void EcmaVM::ProcessReferences(const WeakRootVisitor &v0)
void EcmaVM::ProcessReferences(const WeakRootVisitor &visitor)
{
if (regExpParserCache_ != nullptr) {
regExpParserCache_->Clear();
@ -577,7 +577,7 @@ void EcmaVM::ProcessReferences(const WeakRootVisitor &v0)
// array buffer
for (auto iter = nativePointerList_.begin(); iter != nativePointerList_.end();) {
JSNativePointer *object = *iter;
auto fwd = v0(reinterpret_cast<TaggedObject *>(object));
auto fwd = visitor(reinterpret_cast<TaggedObject *>(object));
if (fwd == nullptr) {
object->Destroy();
iter = nativePointerList_.erase(iter);
@ -594,7 +594,7 @@ void EcmaVM::ProcessReferences(const WeakRootVisitor &v0)
auto object = iter->second;
if (object.IsHeapObject()) {
TaggedObject *obj = object.GetTaggedObject();
auto fwd = v0(obj);
auto fwd = visitor(obj);
if (fwd == nullptr) {
iter = cachedConstpools_.erase(iter);
continue;

View File

@ -225,8 +225,8 @@ public:
{
return const_cast<Chunk *>(&chunk_);
}
void ProcessNativeDelete(const WeakRootVisitor &v0);
void ProcessReferences(const WeakRootVisitor &v0);
void ProcessNativeDelete(const WeakRootVisitor &visitor);
void ProcessReferences(const WeakRootVisitor &visitor);
ModuleManager *GetModuleManager() const
{

View File

@ -259,30 +259,22 @@ uintptr_t FrameIterator::GetPrevFrame() const
return end;
}
bool FrameIterator::CollectGCSlots(std::set<uintptr_t> &baseSet, ChunkMap<DerivedDataKey, uintptr_t> *data,
[[maybe_unused]] bool isVerifying) const
bool FrameIterator::CollectGCSlots(const RootVisitor &visitor, const RootBaseAndDerivedVisitor &derivedVisitor) const
{
return stackmapParser_->CollectGCSlots(optimizedReturnAddr_, reinterpret_cast<uintptr_t>(current_),
baseSet, data, isVerifying, optimizedCallSiteSp_);
return stackmapParser_->CollectGCSlots(visitor, derivedVisitor, optimizedReturnAddr_,
reinterpret_cast<uintptr_t>(current_), optimizedCallSiteSp_);
}
ARK_INLINE void OptimizedFrame::GCIterate(const FrameIterator &it,
const RootVisitor &v0,
[[maybe_unused]] const RootRangeVisitor &v1,
ChunkMap<DerivedDataKey, uintptr_t> *derivedPointers,
bool isVerifying) const
[[maybe_unused]] const RootVisitor &visitor,
[[maybe_unused]] const RootRangeVisitor &rangeVisitor,
const RootBaseAndDerivedVisitor &derivedVisitor) const
{
std::set<uintptr_t> slotAddrs;
bool ret = it.CollectGCSlots(slotAddrs, derivedPointers, isVerifying);
bool ret = it.CollectGCSlots(visitor, derivedVisitor);
if (!ret) {
#ifndef NDEBUG
LOG_ECMA(DEBUG) << " stackmap don't found returnAddr " << it.GetOptimizedReturnAddr();
#endif
return;
}
for (const auto &slot : slotAddrs) {
v0(Root::ROOT_FRAME, ObjectSlot(slot));
}
}
@ -302,15 +294,14 @@ ARK_INLINE uintptr_t* OptimizedJSFunctionFrame::ComputePrevFrameSp(const FrameIt
}
ARK_INLINE void OptimizedJSFunctionFrame::GCIterate(const FrameIterator &it,
const RootVisitor &v0,
const RootRangeVisitor &v1,
ChunkMap<DerivedDataKey, uintptr_t> *derivedPointers,
bool isVerifying) const
const RootVisitor &visitor,
const RootRangeVisitor &rangeVisitor,
const RootBaseAndDerivedVisitor &derivedVisitor) const
{
OptimizedJSFunctionFrame *frame = OptimizedJSFunctionFrame::GetFrameFromSp(it.GetSp());
uintptr_t *envPtr = reinterpret_cast<uintptr_t *>(frame);
uintptr_t envslot = ToUintPtr(envPtr);
v0(Root::ROOT_FRAME, ObjectSlot(envslot));
visitor(Root::ROOT_FRAME, ObjectSlot(envslot));
uintptr_t *preFrameSp = frame->ComputePrevFrameSp(it);
@ -319,55 +310,44 @@ ARK_INLINE void OptimizedJSFunctionFrame::GCIterate(const FrameIterator &it,
if (argc > 0) {
uintptr_t start = ToUintPtr(argv); // argv
uintptr_t end = ToUintPtr(argv + argc);
v1(Root::ROOT_FRAME, ObjectSlot(start), ObjectSlot(end));
rangeVisitor(Root::ROOT_FRAME, ObjectSlot(start), ObjectSlot(end));
}
std::set<uintptr_t> slotAddrs;
bool ret = it.CollectGCSlots(slotAddrs, derivedPointers, isVerifying);
bool ret = it.CollectGCSlots(visitor, derivedVisitor);
if (!ret) {
#ifndef NDEBUG
LOG_ECMA(DEBUG) << " stackmap don't found returnAddr " << it.GetOptimizedReturnAddr();
#endif
return;
}
for (const auto &slot : slotAddrs) {
v0(Root::ROOT_FRAME, ObjectSlot(slot));
}
}
ARK_INLINE void AsmInterpretedFrame::GCIterate(const FrameIterator &it,
const RootVisitor &v0,
const RootRangeVisitor &v1,
ChunkMap<DerivedDataKey, uintptr_t> *derivedPointers,
bool isVerifying) const
const RootVisitor &visitor,
const RootRangeVisitor &rangeVisitor,
const RootBaseAndDerivedVisitor &derivedVisitor) const
{
AsmInterpretedFrame *frame = AsmInterpretedFrame::GetFrameFromSp(it.GetSp());
uintptr_t start = ToUintPtr(it.GetSp());
uintptr_t end = ToUintPtr(frame->GetCurrentFramePointer());
v1(Root::ROOT_FRAME, ObjectSlot(start), ObjectSlot(end));
v0(Root::ROOT_FRAME, ObjectSlot(ToUintPtr(&frame->function)));
rangeVisitor(Root::ROOT_FRAME, ObjectSlot(start), ObjectSlot(end));
visitor(Root::ROOT_FRAME, ObjectSlot(ToUintPtr(&frame->function)));
if (frame->pc != nullptr) {
v0(Root::ROOT_FRAME, ObjectSlot(ToUintPtr(&frame->acc)));
v0(Root::ROOT_FRAME, ObjectSlot(ToUintPtr(&frame->env)));
visitor(Root::ROOT_FRAME, ObjectSlot(ToUintPtr(&frame->acc)));
visitor(Root::ROOT_FRAME, ObjectSlot(ToUintPtr(&frame->env)));
}
std::set<uintptr_t> slotAddrs;
bool ret = it.CollectGCSlots(slotAddrs, derivedPointers, isVerifying);
bool ret = it.CollectGCSlots(visitor, derivedVisitor);
if (!ret) {
#ifndef NDEBUG
LOG_ECMA(DEBUG) << " stackmap don't found returnAddr " << it.GetOptimizedReturnAddr();
#endif
return;
}
for (auto slot : slotAddrs) {
v0(Root::ROOT_FRAME, ObjectSlot(slot));
}
}
ARK_INLINE void InterpretedFrame::GCIterate(const FrameIterator &it,
const RootVisitor &v0,
const RootRangeVisitor &v1) const
const RootVisitor &visitor,
const RootRangeVisitor &rangeVisitor) const
{
auto sp = it.GetSp();
InterpretedFrame *frame = InterpretedFrame::GetFrameFromSp(sp);
@ -381,21 +361,21 @@ ARK_INLINE void InterpretedFrame::GCIterate(const FrameIterator &it,
FrameIterator prevIt(prevSp, thread);
uintptr_t end = prevIt.GetPrevFrame();
v1(Root::ROOT_FRAME, ObjectSlot(start), ObjectSlot(end));
v0(Root::ROOT_FRAME, ObjectSlot(ToUintPtr(&frame->function)));
rangeVisitor(Root::ROOT_FRAME, ObjectSlot(start), ObjectSlot(end));
visitor(Root::ROOT_FRAME, ObjectSlot(ToUintPtr(&frame->function)));
// pc == nullptr, init InterpretedFrame & native InterpretedFrame.
if (frame->pc != nullptr) {
v0(Root::ROOT_FRAME, ObjectSlot(ToUintPtr(&frame->acc)));
v0(Root::ROOT_FRAME, ObjectSlot(ToUintPtr(&frame->constpool)));
v0(Root::ROOT_FRAME, ObjectSlot(ToUintPtr(&frame->env)));
v0(Root::ROOT_FRAME, ObjectSlot(ToUintPtr(&frame->profileTypeInfo)));
visitor(Root::ROOT_FRAME, ObjectSlot(ToUintPtr(&frame->acc)));
visitor(Root::ROOT_FRAME, ObjectSlot(ToUintPtr(&frame->constpool)));
visitor(Root::ROOT_FRAME, ObjectSlot(ToUintPtr(&frame->env)));
visitor(Root::ROOT_FRAME, ObjectSlot(ToUintPtr(&frame->profileTypeInfo)));
}
}
ARK_INLINE void InterpretedBuiltinFrame::GCIterate(const FrameIterator &it,
const RootVisitor &v0,
const RootRangeVisitor &v1) const
const RootVisitor &visitor,
const RootRangeVisitor &rangeVisitor) const
{
auto sp = it.GetSp();
InterpretedBuiltinFrame *frame = InterpretedBuiltinFrame::GetFrameFromSp(sp);
@ -405,15 +385,13 @@ ARK_INLINE void InterpretedBuiltinFrame::GCIterate(const FrameIterator &it,
uintptr_t start = ToUintPtr(sp + 2); // 2: numArgs & thread.
uintptr_t end = prevIt.GetPrevFrame();
v1(Root::ROOT_FRAME, ObjectSlot(start), ObjectSlot(end));
v0(Root::ROOT_FRAME, ObjectSlot(ToUintPtr(&frame->function)));
rangeVisitor(Root::ROOT_FRAME, ObjectSlot(start), ObjectSlot(end));
visitor(Root::ROOT_FRAME, ObjectSlot(ToUintPtr(&frame->function)));
}
ARK_INLINE void OptimizedLeaveFrame::GCIterate(const FrameIterator &it,
[[maybe_unused]] const RootVisitor &v0,
const RootRangeVisitor &v1,
[[maybe_unused]] ChunkMap<DerivedDataKey, uintptr_t> *derivedPointers,
[[maybe_unused]] bool isVerifying) const
[[maybe_unused]] const RootVisitor &visitor,
const RootRangeVisitor &rangeVisitor) const
{
const JSTaggedType *sp = it.GetSp();
OptimizedLeaveFrame *frame = OptimizedLeaveFrame::GetFrameFromSp(sp);
@ -421,15 +399,13 @@ ARK_INLINE void OptimizedLeaveFrame::GCIterate(const FrameIterator &it,
JSTaggedType *argv = reinterpret_cast<JSTaggedType *>(&frame->argc + 1);
uintptr_t start = ToUintPtr(argv); // argv
uintptr_t end = ToUintPtr(argv + frame->argc);
v1(Root::ROOT_FRAME, ObjectSlot(start), ObjectSlot(end));
rangeVisitor(Root::ROOT_FRAME, ObjectSlot(start), ObjectSlot(end));
}
}
ARK_INLINE void OptimizedWithArgvLeaveFrame::GCIterate(const FrameIterator &it,
[[maybe_unused]] const RootVisitor &v0,
const RootRangeVisitor &v1,
[[maybe_unused]] ChunkMap<DerivedDataKey, uintptr_t> *derivedPointers,
[[maybe_unused]] bool isVerifying) const
[[maybe_unused]] const RootVisitor &visitor,
const RootRangeVisitor &rangeVisitor) const
{
const JSTaggedType *sp = it.GetSp();
OptimizedWithArgvLeaveFrame *frame = OptimizedWithArgvLeaveFrame::GetFrameFromSp(sp);
@ -438,15 +414,13 @@ ARK_INLINE void OptimizedWithArgvLeaveFrame::GCIterate(const FrameIterator &it,
JSTaggedType *argv = reinterpret_cast<JSTaggedType *>(*argvPtr);
uintptr_t start = ToUintPtr(argv); // argv
uintptr_t end = ToUintPtr(argv + frame->argc);
v1(Root::ROOT_FRAME, ObjectSlot(start), ObjectSlot(end));
rangeVisitor(Root::ROOT_FRAME, ObjectSlot(start), ObjectSlot(end));
}
}
ARK_INLINE void OptimizedBuiltinLeaveFrame::GCIterate(const FrameIterator &it,
[[maybe_unused]] const RootVisitor &v0,
const RootRangeVisitor &v1,
[[maybe_unused]] ChunkMap<DerivedDataKey, uintptr_t> *derivedPointers,
[[maybe_unused]] bool isVerifying) const
[[maybe_unused]] const RootVisitor &visitor,
const RootRangeVisitor &rangeVisitor) const
{
const JSTaggedType *sp = it.GetSp();
OptimizedBuiltinLeaveFrame *frame = OptimizedBuiltinLeaveFrame::GetFrameFromSp(sp);
@ -454,15 +428,13 @@ ARK_INLINE void OptimizedBuiltinLeaveFrame::GCIterate(const FrameIterator &it,
JSTaggedType *argv = reinterpret_cast<JSTaggedType *>(&frame->argc + 1);
uintptr_t start = ToUintPtr(argv); // argv
uintptr_t end = ToUintPtr(argv + frame->argc);
v1(Root::ROOT_FRAME, ObjectSlot(start), ObjectSlot(end));
rangeVisitor(Root::ROOT_FRAME, ObjectSlot(start), ObjectSlot(end));
}
}
ARK_INLINE void BuiltinWithArgvFrame::GCIterate(const FrameIterator &it,
[[maybe_unused]] const RootVisitor &v0,
const RootRangeVisitor &v1,
[[maybe_unused]] ChunkMap<DerivedDataKey, uintptr_t> *derivedPointers,
[[maybe_unused]] bool isVerifying) const
[[maybe_unused]] const RootVisitor &visitor,
const RootRangeVisitor &rangeVisitor) const
{
const JSTaggedType *sp = it.GetSp();
auto frame = BuiltinWithArgvFrame::GetFrameFromSp(sp);
@ -470,33 +442,31 @@ ARK_INLINE void BuiltinWithArgvFrame::GCIterate(const FrameIterator &it,
JSTaggedType *argv = reinterpret_cast<JSTaggedType *>(frame->GetStackArgsAddress());
uintptr_t start = ToUintPtr(argv);
uintptr_t end = ToUintPtr(argv + argc);
v1(Root::ROOT_FRAME, ObjectSlot(start), ObjectSlot(end));
rangeVisitor(Root::ROOT_FRAME, ObjectSlot(start), ObjectSlot(end));
}
ARK_INLINE void BuiltinFrame::GCIterate(const FrameIterator &it,
const RootVisitor &v0,
const RootRangeVisitor &v1,
[[maybe_unused]] ChunkMap<DerivedDataKey, uintptr_t> *derivedPointers,
[[maybe_unused]] bool isVerifying) const
const RootVisitor &visitor,
const RootRangeVisitor &rangeVisitor) const
{
const JSTaggedType *sp = it.GetSp();
auto frame = BuiltinFrame::GetFrameFromSp(sp);
// no need to visit stack map for entry frame
if (frame->type == FrameType::BUILTIN_ENTRY_FRAME) {
// only visit function
v0(Root::ROOT_FRAME, ObjectSlot(frame->GetStackArgsAddress()));
visitor(Root::ROOT_FRAME, ObjectSlot(frame->GetStackArgsAddress()));
return;
}
JSTaggedType *argv = reinterpret_cast<JSTaggedType *>(frame->GetStackArgsAddress());
auto argc = frame->GetNumArgs();
uintptr_t start = ToUintPtr(argv);
uintptr_t end = ToUintPtr(argv + argc);
v1(Root::ROOT_FRAME, ObjectSlot(start), ObjectSlot(end));
rangeVisitor(Root::ROOT_FRAME, ObjectSlot(start), ObjectSlot(end));
}
ARK_INLINE void InterpretedEntryFrame::GCIterate(const FrameIterator &it,
[[maybe_unused]] const RootVisitor &v0,
const RootRangeVisitor &v1) const
[[maybe_unused]] const RootVisitor &visitor,
const RootRangeVisitor &rangeVisitor) const
{
const JSTaggedType* sp = it.GetSp();
InterpretedEntryFrame *frame = InterpretedEntryFrame::GetFrameFromSp(sp);
@ -509,6 +479,6 @@ ARK_INLINE void InterpretedEntryFrame::GCIterate(const FrameIterator &it,
FrameIterator prevIt(prevSp, thread);
uintptr_t start = ToUintPtr(sp + 2); // 2: numArgs & thread.
uintptr_t end = prevIt.GetPrevFrame();
v1(Root::ROOT_FRAME, ObjectSlot(start), ObjectSlot(end));
rangeVisitor(Root::ROOT_FRAME, ObjectSlot(start), ObjectSlot(end));
}
} // namespace panda::ecmascript

View File

@ -256,7 +256,6 @@ class FrameIterator;
namespace kungfu {
class LLVMStackMapParser;
};
using DerivedDataKey = std::pair<uintptr_t, uintptr_t>;
enum class FrameType: uintptr_t {
OPTIMIZED_FRAME = 0,
OPTIMIZED_ENTRY_FRAME = 1,
@ -314,11 +313,8 @@ struct OptimizedFrame : public base::AlignedStruct<base::AlignedPointer::Size(),
base::AlignedPointer,
base::AlignedPointer> {
public:
void GCIterate(const FrameIterator &it,
const RootVisitor &v0,
[[maybe_unused]] const RootRangeVisitor &v1,
ChunkMap<DerivedDataKey, uintptr_t> *derivedPointers,
bool isVerifying) const;
void GCIterate(const FrameIterator &it, const RootVisitor &visitor, const RootRangeVisitor &rangeVisitor,
const RootBaseAndDerivedVisitor &derivedVisitor) const;
private:
enum class Index : size_t {
TypeIndex = 0,
@ -456,9 +452,8 @@ public:
{
return returnAddr;
}
void GCIterate(
const FrameIterator &it, const RootVisitor &v0, const RootRangeVisitor &v1,
ChunkMap<DerivedDataKey, uintptr_t> *derivedPointers, bool isVerifying) const;
void GCIterate(const FrameIterator &it, const RootVisitor &visitor, const RootRangeVisitor &rangeVisitor,
const RootBaseAndDerivedVisitor &derivedVisitor) const;
inline JSTaggedValue GetEnv() const
{
@ -595,7 +590,7 @@ public:
{
return sizeof(InterpretedFrame) / JSTaggedValue::TaggedTypeSize();
}
void GCIterate(const FrameIterator &it, const RootVisitor &v0, const RootRangeVisitor &v1) const;
void GCIterate(const FrameIterator &it, const RootVisitor &visitor, const RootRangeVisitor &rangeVisitor) const;
alignas(EAS) JSTaggedValue constpool {JSTaggedValue::Hole()};
alignas(EAS) JSTaggedValue function {JSTaggedValue::Hole()};
@ -636,7 +631,7 @@ struct InterpretedBuiltinFrame : public base::AlignedStruct<JSTaggedValue::Tagge
return sizeof(InterpretedBuiltinFrame) / JSTaggedValue::TaggedTypeSize();
}
void GCIterate(const FrameIterator &it, const RootVisitor &v0, const RootRangeVisitor &v1) const;
void GCIterate(const FrameIterator &it, const RootVisitor &visitor, const RootRangeVisitor &rangeVisitor) const;
alignas(EAS) JSTaggedValue function {JSTaggedValue::Hole()};
alignas(EAS) const uint8_t *pc {nullptr};
@ -727,8 +722,8 @@ struct AsmInterpretedFrame : public base::AlignedStruct<JSTaggedValue::TaggedTyp
{
return sizeof(AsmInterpretedFrame) / JSTaggedValue::TaggedTypeSize();
}
void GCIterate(const FrameIterator &it, const RootVisitor &v0, const RootRangeVisitor &v1,
ChunkMap<DerivedDataKey, uintptr_t> *derivedPointers, bool isVerifying) const;
void GCIterate(const FrameIterator &it, const RootVisitor &visitor, const RootRangeVisitor &rangeVisitor,
const RootBaseAndDerivedVisitor &derivedVisitor) const;
JSTaggedValue GetEnv() const
{
@ -781,8 +776,8 @@ struct InterpretedEntryFrame : public base::AlignedStruct<JSTaggedValue::TaggedT
return sizeof(InterpretedEntryFrame) / JSTaggedValue::TaggedTypeSize();
}
void GCIterate(const FrameIterator &it, const RootVisitor &v0,
const RootRangeVisitor &v1) const;
void GCIterate(const FrameIterator &it, const RootVisitor &visitor,
const RootRangeVisitor &rangeVisitor) const;
alignas(EAS) const uint8_t *pc {nullptr};
alignas(EAS) InterpretedFrameBase base;
};
@ -875,9 +870,7 @@ struct OptimizedLeaveFrame {
{
return returnAddr;
}
void GCIterate(
const FrameIterator &it, const RootVisitor &v0, const RootRangeVisitor &v1,
ChunkMap<DerivedDataKey, uintptr_t> *derivedPointers, bool isVerifying) const;
void GCIterate(const FrameIterator &it, const RootVisitor &visitor, const RootRangeVisitor &rangeVisitor) const;
};
struct OptimizedWithArgvLeaveFrame {
@ -903,9 +896,7 @@ struct OptimizedWithArgvLeaveFrame {
{
return returnAddr;
}
void GCIterate(
const FrameIterator &it, const RootVisitor &v0, const RootRangeVisitor &v1,
ChunkMap<DerivedDataKey, uintptr_t> *derivedPointers, bool isVerifying) const;
void GCIterate(const FrameIterator &it, const RootVisitor &visitor, const RootRangeVisitor &rangeVisitor) const;
};
struct OptimizedBuiltinLeaveFrame {
@ -927,9 +918,7 @@ public:
{
return returnAddr;
}
void GCIterate(
const FrameIterator &it, const RootVisitor &v0, const RootRangeVisitor &v1,
ChunkMap<DerivedDataKey, uintptr_t> *derivedPointers, bool isVerifying) const;
void GCIterate(const FrameIterator &it, const RootVisitor &visitor, const RootRangeVisitor &rangeVisitor) const;
private:
[[maybe_unused]] FrameType type;
@ -1004,9 +993,7 @@ struct BuiltinFrame : public base::AlignedStruct<base::AlignedPointer::Size(),
{
return returnAddr;
}
void GCIterate(
const FrameIterator &it, const RootVisitor &v0, const RootRangeVisitor &v1,
ChunkMap<DerivedDataKey, uintptr_t> *derivedPointers, bool isVerifying) const;
void GCIterate(const FrameIterator &it, const RootVisitor &visitor, const RootRangeVisitor &rangeVisitor) const;
alignas(EAS) FrameType type;
alignas(EAS) JSTaggedType *prevFp;
alignas(EAS) uintptr_t returnAddr;
@ -1064,9 +1051,7 @@ struct BuiltinWithArgvFrame : public base::AlignedStruct<base::AlignedPointer::S
{
return returnAddr;
}
void GCIterate(
const FrameIterator &it, const RootVisitor &v0, const RootRangeVisitor &v1,
ChunkMap<DerivedDataKey, uintptr_t> *derivedPointers, bool isVerifying) const;
void GCIterate(const FrameIterator &it, const RootVisitor &visitor, const RootRangeVisitor &rangeVisitor) const;
// argv(... this, new.target, function)
// numargs
alignas(EAS) FrameType type;
@ -1125,8 +1110,7 @@ public:
{
return thread_;
}
bool CollectGCSlots(std::set<uintptr_t> &baseSet, ChunkMap<DerivedDataKey, uintptr_t> *data,
bool isVerifying) const;
bool CollectGCSlots(const RootVisitor &visitor, const RootBaseAndDerivedVisitor &derivedVisitor) const;
private:
JSTaggedType *current_ {nullptr};
const JSThread *thread_ {nullptr};
@ -1135,4 +1119,4 @@ private:
uintptr_t optimizedReturnAddr_ {0};
};
} // namespace panda::ecmascript
#endif // ECMASCRIPT_FRAMES_H
#endif // ECMASCRIPT_FRAMES_H

View File

@ -302,31 +302,33 @@ ARK_INLINE uintptr_t FrameHandler::GetInterpretedFrameEnd(JSTaggedType *prevSp)
return end;
}
void FrameHandler::IterateAssembleStack(const RootVisitor &v0, const RootRangeVisitor &v1)
void FrameHandler::IterateAssembleStack(const RootVisitor &visitor, const RootRangeVisitor &rangeVisitor,
const RootBaseAndDerivedVisitor &derivedVisitor)
{
JSTaggedType *current = const_cast<JSTaggedType *>(thread_->GetLastLeaveFrame());
IterateFrameChain(current, v0, v1);
IterateFrameChain(current, visitor, rangeVisitor, derivedVisitor);
}
// We separate InterpretedEntryFrame from the assemble stack when the asm interpreter is enabled.
// To protect EcmaRuntimeCallInfo on InterpretedEntryFrame, we iterate InterpretedEntryFrame on the thread sp individually.
// And only InterpretedEntryFrame is on the thread sp when the asm interpreter is enabled.
void FrameHandler::IterateEcmaRuntimeCallInfo(const RootVisitor &v0, const RootRangeVisitor &v1)
void FrameHandler::IterateEcmaRuntimeCallInfo(const RootVisitor &visitor, const RootRangeVisitor &rangeVisitor)
{
ASSERT(thread_->IsAsmInterpreter());
JSTaggedType *current = const_cast<JSTaggedType *>(thread_->GetCurrentSPFrame());
for (FrameIterator it(current, thread_); !it.Done(); it.Advance()) {
ASSERT(it.GetFrameType() == FrameType::INTERPRETER_ENTRY_FRAME);
auto frame = it.GetFrame<InterpretedEntryFrame>();
frame->GCIterate(it, v0, v1);
frame->GCIterate(it, visitor, rangeVisitor);
}
}
void FrameHandler::Iterate(const RootVisitor &v0, const RootRangeVisitor &v1)
void FrameHandler::Iterate(const RootVisitor &visitor, const RootRangeVisitor &rangeVisitor,
const RootBaseAndDerivedVisitor &derivedVisitor)
{
if (thread_->IsAsmInterpreter()) {
IterateEcmaRuntimeCallInfo(v0, v1);
IterateAssembleStack(v0, v1);
IterateEcmaRuntimeCallInfo(visitor, rangeVisitor);
IterateAssembleStack(visitor, rangeVisitor, derivedVisitor);
return;
}
JSTaggedType *current = const_cast<JSTaggedType *>(thread_->GetCurrentSPFrame());
@ -337,77 +339,72 @@ void FrameHandler::Iterate(const RootVisitor &v0, const RootRangeVisitor &v1)
current = leaveFrame;
}
}
IterateFrameChain(current, v0, v1);
IterateFrameChain(current, visitor, rangeVisitor, derivedVisitor);
}
void FrameHandler::IterateFrameChain(JSTaggedType *start, const RootVisitor &v0, const RootRangeVisitor &v1) const
void FrameHandler::IterateFrameChain(JSTaggedType *start, const RootVisitor &visitor,
const RootRangeVisitor &rangeVisitor, const RootBaseAndDerivedVisitor &derivedVisitor) const
{
ChunkMap<DerivedDataKey, uintptr_t> *derivedPointers = thread_->GetEcmaVM()->GetHeap()->GetDerivedPointers();
bool isVerifying = false;
#if ECMASCRIPT_ENABLE_HEAP_VERIFY
isVerifying = thread_->GetEcmaVM()->GetHeap()->IsVerifying();
#endif
JSTaggedType *current = start;
for (FrameIterator it(current, thread_); !it.Done(); it.Advance()) {
FrameType type = it.GetFrameType();
switch (type) {
case FrameType::OPTIMIZED_FRAME: {
auto frame = it.GetFrame<OptimizedFrame>();
frame->GCIterate(it, v0, v1, derivedPointers, isVerifying);
frame->GCIterate(it, visitor, rangeVisitor, derivedVisitor);
break;
}
case FrameType::OPTIMIZED_JS_FUNCTION_FRAME: {
auto frame = it.GetFrame<OptimizedJSFunctionFrame>();
frame->GCIterate(it, v0, v1, derivedPointers, isVerifying);
frame->GCIterate(it, visitor, rangeVisitor, derivedVisitor);
break;
}
case FrameType::ASM_INTERPRETER_FRAME:
case FrameType::INTERPRETER_CONSTRUCTOR_FRAME: {
auto frame = it.GetFrame<AsmInterpretedFrame>();
frame->GCIterate(it, v0, v1, derivedPointers, isVerifying);
frame->GCIterate(it, visitor, rangeVisitor, derivedVisitor);
break;
}
case FrameType::INTERPRETER_FRAME:
case FrameType::INTERPRETER_FAST_NEW_FRAME: {
auto frame = it.GetFrame<InterpretedFrame>();
frame->GCIterate(it, v0, v1);
frame->GCIterate(it, visitor, rangeVisitor);
break;
}
case FrameType::INTERPRETER_BUILTIN_FRAME: {
auto frame = it.GetFrame<InterpretedBuiltinFrame>();
frame->GCIterate(it, v0, v1);
frame->GCIterate(it, visitor, rangeVisitor);
break;
}
case FrameType::LEAVE_FRAME: {
auto frame = it.GetFrame<OptimizedLeaveFrame>();
frame->GCIterate(it, v0, v1, derivedPointers, isVerifying);
frame->GCIterate(it, visitor, rangeVisitor);
break;
}
case FrameType::LEAVE_FRAME_WITH_ARGV: {
auto frame = it.GetFrame<OptimizedWithArgvLeaveFrame>();
frame->GCIterate(it, v0, v1, derivedPointers, isVerifying);
frame->GCIterate(it, visitor, rangeVisitor);
break;
}
case FrameType::BUILTIN_CALL_LEAVE_FRAME: {
auto frame = it.GetFrame<OptimizedBuiltinLeaveFrame>();
frame->GCIterate(it, v0, v1, derivedPointers, isVerifying);
frame->GCIterate(it, visitor, rangeVisitor);
break;
}
case FrameType::BUILTIN_FRAME_WITH_ARGV: {
auto frame = it.GetFrame<BuiltinWithArgvFrame>();
frame->GCIterate(it, v0, v1, derivedPointers, isVerifying);
frame->GCIterate(it, visitor, rangeVisitor);
break;
}
case FrameType::BUILTIN_ENTRY_FRAME:
case FrameType::BUILTIN_FRAME: {
auto frame = it.GetFrame<BuiltinFrame>();
frame->GCIterate(it, v0, v1, derivedPointers, isVerifying);
frame->GCIterate(it, visitor, rangeVisitor);
break;
}
case FrameType::INTERPRETER_ENTRY_FRAME: {
auto frame = it.GetFrame<InterpretedEntryFrame>();
frame->GCIterate(it, v0, v1);
frame->GCIterate(it, visitor, rangeVisitor);
break;
}
case FrameType::OPTIMIZED_JS_FUNCTION_UNFOLD_ARGV_FRAME:
@ -484,4 +481,4 @@ void FrameHandler::CollectBCOffsetInfo()
}
}
}
} // namespace panda::ecmascript
} // namespace panda::ecmascript

View File

@ -163,10 +163,13 @@ public:
}
// for Frame GC.
void Iterate(const RootVisitor &v0, const RootRangeVisitor &v1);
void IterateFrameChain(JSTaggedType *start, const RootVisitor &v0, const RootRangeVisitor &v1) const;
void IterateAssembleStack(const RootVisitor &v0, const RootRangeVisitor &v1);
void IterateEcmaRuntimeCallInfo(const RootVisitor &v0, const RootRangeVisitor &v1);
void Iterate(const RootVisitor &visitor, const RootRangeVisitor &rangeVisitor,
const RootBaseAndDerivedVisitor &derivedVisitor);
void IterateFrameChain(JSTaggedType *start, const RootVisitor &visitor, const RootRangeVisitor &rangeVisitor,
const RootBaseAndDerivedVisitor &derivedVisitor) const;
void IterateAssembleStack(const RootVisitor &visitor, const RootRangeVisitor &rangeVisitor,
const RootBaseAndDerivedVisitor &derivedVisitor);
void IterateEcmaRuntimeCallInfo(const RootVisitor &visitor, const RootRangeVisitor &rangeVisitor);
// for collecting bc offset in aot
void CollectBCOffsetInfo();

View File

@ -71,17 +71,17 @@ using CommonStubCSigns = kungfu::CommonStubCSigns;
#define GOTO_NEXT() // NOLINT(clang-diagnostic-gnu-label-as-value, cppcoreguidelines-macro-usage)
// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define DISPATCH(format) \
do { \
ADVANCE_PC(BytecodeInstruction::Size(format)) \
opcode = READ_INST_OP(); goto * (*dispatchTable)[opcode]; \
#define DISPATCH(format) \
do { \
ADVANCE_PC(BytecodeInstruction::Size(format)) \
opcode = READ_INST_OP(); goto *dispatchTable[opcode]; \
} while (false)
// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define DISPATCH_OFFSET(offset) \
do { \
ADVANCE_PC(offset) \
opcode = READ_INST_OP(); goto * (*dispatchTable)[opcode]; \
#define DISPATCH_OFFSET(offset) \
do { \
ADVANCE_PC(offset) \
opcode = READ_INST_OP(); goto *dispatchTable[opcode]; \
} while (false)
// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
@ -114,7 +114,7 @@ using CommonStubCSigns = kungfu::CommonStubCSigns;
#define INTERPRETER_GOTO_EXCEPTION_HANDLER() \
do { \
SAVE_PC(); \
goto *(*dispatchTable)[EcmaOpcode::LAST_OPCODE]; \
goto *dispatchTable[EcmaOpcode::LAST_OPCODE]; \
} while (false)
// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
@ -125,9 +125,9 @@ using CommonStubCSigns = kungfu::CommonStubCSigns;
} while (false)
// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define CHECK_SWITCH_TO_DEBUGGER_TABLE() \
#define CHECK_SWITCH_TO_DEBUGGER_TABLE() \
if (ecmaVm->GetJsDebuggerManager()->IsDebugMode()) { \
dispatchTable = &debugDispatchTable; \
dispatchTable = debugDispatchTable.data(); \
}
// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
@ -713,9 +713,9 @@ NO_UB_SANITIZE void EcmaInterpreter::RunInternal(JSThread *thread, const uint8_t
#include "templates/debugger_instruction_dispatch.inl"
};
std::array<const void *, numOps> *dispatchTable = &instDispatchTable;
auto *dispatchTable = instDispatchTable.data();
CHECK_SWITCH_TO_DEBUGGER_TABLE();
goto *(*dispatchTable)[opcode];
goto *dispatchTable[opcode];
HANDLE_OPCODE(HANDLE_MOV_V4_V4) {
uint16_t vdst = READ_INST_4_0();

View File

@ -132,23 +132,24 @@ const JSTaggedType *JSThread::GetCurrentInterpretedFrame() const
return GetCurrentSPFrame();
}
void JSThread::Iterate(const RootVisitor &v0, const RootRangeVisitor &v1)
void JSThread::Iterate(const RootVisitor &visitor, const RootRangeVisitor &rangeVisitor,
const RootBaseAndDerivedVisitor &derivedVisitor)
{
if (propertiesCache_ != nullptr) {
propertiesCache_->Clear();
}
if (!glueData_.exception_.IsHole()) {
v0(Root::ROOT_VM, ObjectSlot(ToUintPtr(&glueData_.exception_)));
visitor(Root::ROOT_VM, ObjectSlot(ToUintPtr(&glueData_.exception_)));
}
// visit global Constant
glueData_.globalConst_.VisitRangeSlot(v1);
glueData_.globalConst_.VisitRangeSlot(rangeVisitor);
// visit stack roots
FrameHandler frameHandler(this);
frameHandler.Iterate(v0, v1);
frameHandler.Iterate(visitor, rangeVisitor, derivedVisitor);
// visit tagged handle storage roots
#if ECMASCRIPT_ENABLE_HANDLE_LEAK_CHECK
IterateHandleWithCheck(v0, v1);
IterateHandleWithCheck(visitor, rangeVisitor);
#else
if (currentHandleStorageIndex_ != -1) {
int32_t nid = currentHandleStorageIndex_;
@ -156,21 +157,21 @@ void JSThread::Iterate(const RootVisitor &v0, const RootRangeVisitor &v1)
auto node = handleStorageNodes_.at(i);
auto start = node->data();
auto end = (i != nid) ? &(node->data()[NODE_BLOCK_SIZE]) : handleScopeStorageNext_;
v1(ecmascript::Root::ROOT_HANDLE, ObjectSlot(ToUintPtr(start)), ObjectSlot(ToUintPtr(end)));
rangeVisitor(ecmascript::Root::ROOT_HANDLE, ObjectSlot(ToUintPtr(start)), ObjectSlot(ToUintPtr(end)));
}
}
globalStorage_->IterateUsageGlobal([v0](EcmaGlobalStorage::Node *node) {
globalStorage_->IterateUsageGlobal([visitor](EcmaGlobalStorage::Node *node) {
JSTaggedValue value(node->GetObject());
if (value.IsHeapObject()) {
v0(ecmascript::Root::ROOT_HANDLE, ecmascript::ObjectSlot(node->GetObjectAddress()));
visitor(ecmascript::Root::ROOT_HANDLE, ecmascript::ObjectSlot(node->GetObjectAddress()));
}
});
#endif
}
#if ECMASCRIPT_ENABLE_HANDLE_LEAK_CHECK
void JSThread::IterateHandleWithCheck(const RootVisitor &v0, const RootRangeVisitor &v1)
void JSThread::IterateHandleWithCheck(const RootVisitor &visitor, const RootRangeVisitor &rangeVisitor)
{
size_t handleCount = 0;
if (currentHandleStorageIndex_ != -1) {
@ -179,7 +180,7 @@ void JSThread::IterateHandleWithCheck(const RootVisitor &v0, const RootRangeVisi
auto node = handleStorageNodes_.at(i);
auto start = node->data();
auto end = (i != nid) ? &(node->data()[NODE_BLOCK_SIZE]) : handleScopeStorageNext_;
v1(ecmascript::Root::ROOT_HANDLE, ObjectSlot(ToUintPtr(start)), ObjectSlot(ToUintPtr(end)));
rangeVisitor(ecmascript::Root::ROOT_HANDLE, ObjectSlot(ToUintPtr(start)), ObjectSlot(ToUintPtr(end)));
handleCount += (ToUintPtr(end) - ToUintPtr(start)) / sizeof(JSTaggedType);
}
}
@ -188,11 +189,12 @@ void JSThread::IterateHandleWithCheck(const RootVisitor &v0, const RootRangeVisi
static const int JS_TYPE_LAST = static_cast<int>(JSType::TYPE_LAST);
int typeCount[JS_TYPE_LAST] = { 0 };
int primitiveCount = 0;
globalStorage_->IterateUsageGlobal([v0, &globalCount, &typeCount, &primitiveCount](EcmaGlobalStorage::Node *node) {
globalStorage_->IterateUsageGlobal(
[visitor, &globalCount, &typeCount, &primitiveCount](EcmaGlobalStorage::Node *node) {
node->MarkCount();
JSTaggedValue value(node->GetObject());
if (value.IsHeapObject()) {
v0(ecmascript::Root::ROOT_HANDLE, ecmascript::ObjectSlot(node->GetObjectAddress()));
visitor(ecmascript::Root::ROOT_HANDLE, ecmascript::ObjectSlot(node->GetObjectAddress()));
TaggedObject *object = value.GetTaggedObject();
MarkWord word(value.GetTaggedObject());
if (word.IsForwardingAddress()) {
@ -202,7 +204,7 @@ void JSThread::IterateHandleWithCheck(const RootVisitor &v0, const RootRangeVisi
// There are some reasonable framework-level global objects in the initial phase.
// The value can be adjusted as required.
static const int MIN_NUMBER_COUNT = 100000;
static const int MIN_NUMBER_COUNT = 110000;
static const int MARK_INTERVAL_TIMES = 10;
// Print global information about possible memory leaks.
// You can print the global new stack within the range of the leaked global number.

View File

@ -240,10 +240,11 @@ public:
glueData_.newSpaceAllocationEndAddress_ = end;
}
void Iterate(const RootVisitor &v0, const RootRangeVisitor &v1);
void Iterate(const RootVisitor &visitor, const RootRangeVisitor &rangeVisitor,
const RootBaseAndDerivedVisitor &derivedVisitor);
#if ECMASCRIPT_ENABLE_HANDLE_LEAK_CHECK
void IterateHandleWithCheck(const RootVisitor &v0, const RootRangeVisitor &v1);
void IterateHandleWithCheck(const RootVisitor &visitor, const RootRangeVisitor &rangeVisitor);
#endif
uintptr_t* PUBLIC_API ExpandHandleStorage();

View File

@ -103,12 +103,13 @@ uintptr_t LLVMStackMapParser::GetStackSlotAddress(const DwarfRegAndOffsetType in
return address;
}
void LLVMStackMapParser::CollectBaseAndDerivedPointers(const CallSiteInfo* infos, std::set<uintptr_t> &baseSet,
ChunkMap<DerivedDataKey, uintptr_t> *data, [[maybe_unused]] bool isVerifying,
uintptr_t callsiteFp, uintptr_t callSiteSp) const
void LLVMStackMapParser::CollectBaseAndDerivedPointers(const RootVisitor &visitor,
const RootBaseAndDerivedVisitor &derivedVisitor, const CallSiteInfo* infos, uintptr_t callsiteFp,
uintptr_t callSiteSp) const
{
bool flag = (infos->size() % 2 != 0);
size_t j = flag ? 1 : 0; // skip first element when size is odd number
std::map<uintptr_t, uintptr_t> baseSet;
for (; j < infos->size(); j += 2) { // 2: base and derived
const DwarfRegAndOffsetType& baseInfo = infos->at(j);
const DwarfRegAndOffsetType& derivedInfo = infos->at(j + 1);
@ -118,30 +119,29 @@ void LLVMStackMapParser::CollectBaseAndDerivedPointers(const CallSiteInfo* infos
base = derived;
}
if (*reinterpret_cast<uintptr_t *>(base) != 0) {
baseSet.emplace(base);
}
if (base != derived) {
#if ECMASCRIPT_ENABLE_HEAP_VERIFY
if (!isVerifying) {
#endif
(*data)[std::make_pair(base, derived)] = *reinterpret_cast<uintptr_t *>(base);
#if ECMASCRIPT_ENABLE_HEAP_VERIFY
}
#endif
// The base address may be marked repeatedly
if (baseSet.find(base) == baseSet.end()) {
baseSet.emplace(base, *reinterpret_cast<uintptr_t *>(base));
visitor(Root::ROOT_FRAME, ObjectSlot(base));
}
if (base != derived) {
derivedVisitor(Root::ROOT_FRAME, ObjectSlot(base), ObjectSlot(derived), baseSet[base]);
}
}
}
baseSet.clear();
}
bool LLVMStackMapParser::CollectGCSlots(uintptr_t callSiteAddr, uintptr_t callsiteFp,
std::set<uintptr_t> &baseSet, ChunkMap<DerivedDataKey, uintptr_t> *data, [[maybe_unused]] bool isVerifying,
uintptr_t callSiteSp) const
bool LLVMStackMapParser::CollectGCSlots(const RootVisitor &visitor, const RootBaseAndDerivedVisitor &derivedVisitor,
uintptr_t callSiteAddr, uintptr_t callsiteFp, uintptr_t callSiteSp) const
{
const CallSiteInfo *infos = GetCallSiteInfoByPc(callSiteAddr);
if (infos == nullptr) {
return false;
}
ASSERT(callsiteFp != callSiteSp);
CollectBaseAndDerivedPointers(infos, baseSet, data, isVerifying, callsiteFp, callSiteSp);
CollectBaseAndDerivedPointers(visitor, derivedVisitor, infos, callsiteFp, callSiteSp);
if (IsLogEnabled()) {
PrintCallSiteInfo(infos, callsiteFp, callSiteSp);

View File

@ -203,10 +203,8 @@ public:
}
}
const CallSiteInfo *GetCallSiteInfoByPc(uintptr_t funcAddr) const;
bool CollectGCSlots(uintptr_t callSiteAddr, uintptr_t callsiteFp,
std::set<uintptr_t> &baseSet, ChunkMap<DerivedDataKey, uintptr_t> *data,
[[maybe_unused]] bool isVerifying,
uintptr_t callSiteSp) const;
bool CollectGCSlots(const RootVisitor &visitor, const RootBaseAndDerivedVisitor &derivedVisitor,
uintptr_t callSiteAddr, uintptr_t callsiteFp, uintptr_t callSiteSp) const;
bool IsLogEnabled() const
{
return enableLog_;
@ -250,9 +248,8 @@ private:
int FindFpDelta(uintptr_t funcAddr, uintptr_t callsitePc) const;
inline uintptr_t GetStackSlotAddress(const DwarfRegAndOffsetType info,
uintptr_t callSiteSp, uintptr_t callsiteFp) const;
void CollectBaseAndDerivedPointers(const CallSiteInfo *infos, std::set<uintptr_t> &baseSet,
ChunkMap<DerivedDataKey, uintptr_t> *data, [[maybe_unused]] bool isVerifying,
uintptr_t callsiteFp, uintptr_t callSiteSp) const;
void CollectBaseAndDerivedPointers(const RootVisitor &visitor, const RootBaseAndDerivedVisitor &derivedVisitor,
const CallSiteInfo *infos, uintptr_t callsiteFp, uintptr_t callSiteSp) const;
void PrintCallSiteSlotAddr(const CallSiteInfo& callsiteInfo, uintptr_t callSiteSp,
uintptr_t callsiteFp) const;
@ -265,4 +262,4 @@ private:
std::vector<Pc2ConstInfo> pc2ConstInfoVec_;
};
} // namespace panda::ecmascript::kungfu
#endif // ECMASCRIPT_LLVM_STACKMAP_PARSER_H
#endif // ECMASCRIPT_LLVM_STACKMAP_PARSER_H

View File

@ -144,7 +144,6 @@ void FullGC::Sweep()
heap_->GetEcmaVM()->GetJSThread()->IterateWeakEcmaGlobalStorage(gcUpdateWeak);
heap_->GetEcmaVM()->ProcessReferences(gcUpdateWeak);
heap_->UpdateDerivedObjectInStack();
heap_->GetSweeper()->Sweep(true);
}

View File

@ -114,7 +114,6 @@ void Heap::Initialize()
stwYoungGC_ = new STWYoungGC(this, parallelGC_);
fullGC_ = new FullGC(this);
derivedPointers_ = new ChunkMap<DerivedDataKey, uintptr_t>(ecmaVm_->GetChunk());
partialGC_ = new PartialGC(this);
sweeper_ = new ConcurrentSweeper(this, ecmaVm_->GetJSOptions().EnableConcurrentSweep() ?
EnableConcurrentSweepType::ENABLE : EnableConcurrentSweepType::CONFIG_DISABLE);
@ -203,10 +202,6 @@ void Heap::Destroy()
delete sweeper_;
sweeper_ = nullptr;
}
if (derivedPointers_ != nullptr) {
delete derivedPointers_;
derivedPointers_ = nullptr;
}
if (concurrentMarker_ != nullptr) {
delete concurrentMarker_;
concurrentMarker_ = nullptr;
@ -327,7 +322,6 @@ void Heap::CollectGarbage(TriggerGCType gcType)
size_t originalNewSpaceSize = activeSemiSpace_->GetHeapObjectSize();
memController_->StartCalculationBeforeGC();
StatisticHeapObject(gcType);
GetDerivedPointers()->clear();
switch (gcType) {
case TriggerGCType::YOUNG_GC:
// Use partial GC for young generation.
@ -625,33 +619,6 @@ void Heap::TriggerConcurrentMarking()
}
}
void Heap::UpdateDerivedObjectInStack()
{
if (derivedPointers_->empty()) {
return;
}
for (auto derived : *derivedPointers_) {
auto baseAddr = reinterpret_cast<JSTaggedValue *>(derived.first.first);
JSTaggedValue base = *baseAddr;
if (base.IsHeapObject()) {
uintptr_t baseOldObject = derived.second;
uintptr_t *derivedAddr = reinterpret_cast<uintptr_t *>(derived.first.second);
#ifndef NDEBUG
LOG_GC(DEBUG) << std::hex << "fix base before:" << baseAddr << " base old Value: " << baseOldObject <<
" derived:" << derivedAddr << " old Value: " << *derivedAddr << std::endl;
#endif
// derived is always bigger than base
*derivedAddr = reinterpret_cast<uintptr_t>(base.GetTaggedObject()) + (*derivedAddr - baseOldObject);
#ifndef NDEBUG
LOG_GC(DEBUG) << std::hex << "fix base after:" << baseAddr <<
" base New Value: " << base.GetTaggedObject() <<
" derived:" << derivedAddr << " New Value: " << *derivedAddr << std::endl;
#endif
}
}
derivedPointers_->clear();
}
void Heap::WaitRunningTaskFinished()
{
os::memory::LockHolder holder(waitTaskFinishedMutex_);

View File

@ -353,13 +353,6 @@ public:
return maxEvacuateTaskCount_;
}
ChunkMap<DerivedDataKey, uintptr_t> *GetDerivedPointers() const
{
return derivedPointers_;
}
void UpdateDerivedObjectInStack();
static constexpr uint32_t STACK_MAP_DEFALUT_DERIVED_SIZE = 8U;
/*
* Heap tracking will be used by tools like heap profiler etc.
*/
@ -548,8 +541,6 @@ private:
NativeAreaAllocator *nativeAreaAllocator_ {nullptr};
HeapRegionAllocator *heapRegionAllocator_ {nullptr};
ChunkMap<DerivedDataKey, uintptr_t> *derivedPointers_ {nullptr};
// The tracker tracking heap object allocation and movement events.
HeapTracker *tracker_ {nullptr};

View File

@ -109,10 +109,11 @@ public:
explicit ObjectXRay(EcmaVM *ecmaVm) : ecmaVm_(ecmaVm) {}
~ObjectXRay() = default;
inline void VisitVMRoots(const RootVisitor &visitor, const RootRangeVisitor &range_visitor) const
inline void VisitVMRoots(const RootVisitor &visitor, const RootRangeVisitor &rangeVisitor,
const RootBaseAndDerivedVisitor &derivedVisitor) const
{
ecmaVm_->Iterate(visitor);
ecmaVm_->GetJSThread()->Iterate(visitor, range_visitor);
ecmaVm_->GetJSThread()->Iterate(visitor, rangeVisitor, derivedVisitor);
}
template<VisitType visitType>
inline void VisitObjectBody(TaggedObject *object, JSHClass *klass, const EcmaObjectRangeVisitor &visitor)

View File

@ -51,7 +51,6 @@ void ParallelEvacuator::Evacuate()
Initialize();
EvacuateSpace();
UpdateReference();
heap_->UpdateDerivedObjectInStack();
Finalize();
heap_->GetEcmaVM()->GetEcmaGCStats()->StatisticConcurrentEvacuate(clockScope.GetPauseTime());
}
@ -228,8 +227,18 @@ void ParallelEvacuator::UpdateRoot()
UpdateObjectSlot(slot);
}
};
RootBaseAndDerivedVisitor gcUpdateDerived =
[]([[maybe_unused]]Root type, ObjectSlot base, ObjectSlot derived, uintptr_t baseOldObject) {
if (JSTaggedValue(base.GetTaggedType()).IsHeapObject()) {
derived.Update(base.GetTaggedType() + derived.GetTaggedType() - baseOldObject);
LOG_GC(DEBUG) << std::hex << "fix base after:" << base.SlotAddress() << " base Old Value:"
<< baseOldObject << " base New Value:" << base.GetTaggedType()
<< " derived:" << derived.SlotAddress() << " derived New Value:"
<< derived.GetTaggedType();
}
};
objXRay_.VisitVMRoots(gcUpdateYoung, gcUpdateRangeYoung);
objXRay_.VisitVMRoots(gcUpdateYoung, gcUpdateRangeYoung, gcUpdateDerived);
}
void ParallelEvacuator::UpdateRecordWeakReference()

View File

@ -64,6 +64,14 @@ inline void NonMovableMarker::HandleRangeRoots(uint32_t threadId, [[maybe_unused
}
}
inline void NonMovableMarker::HandleDerivedRoots([[maybe_unused]] Root type, ObjectSlot base,
ObjectSlot derived, uintptr_t baseOldObject)
{
// It is only used to update the derived value. The mark of partial GC does not need to update slot
LOG_GC(DEBUG) << std::hex << "fix base before:" << base.SlotAddress() << " base old Value: " << baseOldObject
<< " derived:" << derived.SlotAddress() << " old Value: " << derived.GetTaggedType();
}
inline void NonMovableMarker::HandleOldToNewRSet(uint32_t threadId, Region *region)
{
region->IterateAllOldToNewBits([this, threadId, &region](void *mem) -> bool {
@ -113,6 +121,17 @@ inline void MovableMarker::HandleRangeRoots(uint32_t threadId, [[maybe_unused]]
}
}
inline void MovableMarker::HandleDerivedRoots([[maybe_unused]] Root type, ObjectSlot base,
ObjectSlot derived, uintptr_t baseOldObject)
{
if (JSTaggedValue(base.GetTaggedType()).IsHeapObject()) {
derived.Update(base.GetTaggedType() + derived.GetTaggedType() - baseOldObject);
LOG_GC(DEBUG) << std::hex << "fix base after:" << base.SlotAddress() << " base Old Value:"
<< baseOldObject << " base New Value:" << base.GetTaggedType()
<< " derived:" << derived.SlotAddress() << " derived New Value:" << derived.GetTaggedType();
}
}
inline void MovableMarker::HandleOldToNewRSet(uint32_t threadId, Region *region)
{
region->IterateAllOldToNewBits([this, threadId, &region](void *mem) -> bool {

View File

@ -24,7 +24,9 @@ void Marker::MarkRoots(uint32_t threadId)
objXRay_.VisitVMRoots(
std::bind(&Marker::HandleRoots, this, threadId, std::placeholders::_1, std::placeholders::_2),
std::bind(&Marker::HandleRangeRoots, this, threadId, std::placeholders::_1, std::placeholders::_2,
std::placeholders::_3));
std::placeholders::_3),
std::bind(&Marker::HandleDerivedRoots, this, std::placeholders::_1, std::placeholders::_2,
std::placeholders::_3, std::placeholders::_4));
workManager_->PushWorkNodeToGlobal(threadId, false);
}

View File

@ -67,6 +67,8 @@ protected:
virtual inline void HandleRoots(uint32_t threadId, [[maybe_unused]] Root type, ObjectSlot slot) = 0;
virtual inline void HandleRangeRoots(uint32_t threadId, [[maybe_unused]] Root type, ObjectSlot start,
ObjectSlot end) = 0;
virtual inline void HandleDerivedRoots(Root type, ObjectSlot base, ObjectSlot derived,
uintptr_t baseOldObject) = 0;
virtual inline void RecordWeakReference([[maybe_unused]] uint32_t threadId, [[maybe_unused]] JSTaggedType *ref,
[[maybe_unused]] Region *objectRegion)
{
@ -89,6 +91,8 @@ protected:
inline void HandleRoots(uint32_t threadId, [[maybe_unused]] Root type, ObjectSlot slot) override;
inline void HandleRangeRoots(uint32_t threadId, [[maybe_unused]] Root type, ObjectSlot start,
ObjectSlot end) override;
inline void HandleDerivedRoots(Root type, ObjectSlot base, ObjectSlot derived,
uintptr_t baseOldObject) override;
inline void HandleOldToNewRSet(uint32_t threadId, Region *region) override;
inline void RecordWeakReference(uint32_t threadId, JSTaggedType *ref, Region *objectRegion) override;
@ -103,6 +107,8 @@ protected:
inline void HandleRoots(uint32_t threadId, [[maybe_unused]] Root type, ObjectSlot slot) override;
inline void HandleRangeRoots(uint32_t threadId, [[maybe_unused]] Root type, ObjectSlot start,
ObjectSlot end) override;
inline void HandleDerivedRoots(Root type, ObjectSlot base, ObjectSlot derived,
uintptr_t baseOldObject) override;
virtual inline SlotStatus EvacuateObject(uint32_t threadId, TaggedObject *object, const MarkWord &markWord,
ObjectSlot slot) = 0;

View File

@ -135,7 +135,6 @@ void STWYoungGC::Sweep()
stringTable->SweepWeakReference(gcUpdateWeak);
heap_->GetEcmaVM()->GetJSThread()->IterateWeakEcmaGlobalStorage(gcUpdateWeak);
heap_->GetEcmaVM()->ProcessReferences(gcUpdateWeak);
heap_->UpdateDerivedObjectInStack();
}
void STWYoungGC::Finish()

View File

@ -54,7 +54,8 @@ void VerifyObjectVisitor::operator()(TaggedObject *obj, JSTaggedValue value)
<< slot.GetTaggedType();
return;
}
TaggedObject *object = value.GetTaggedObject();
TaggedObject *object = value.GetRawTaggedObject();
auto region = Region::ObjectAddressToRange(object);
if (!region->InYoungSpace()) {
LOG_GC(ERROR) << "Heap object(" << slot.GetTaggedType() << ") old to new rset fail: value("
@ -68,25 +69,19 @@ void VerifyObjectVisitor::operator()(TaggedObject *obj, JSTaggedValue value)
size_t Verification::VerifyRoot() const
{
size_t failCount = 0;
RootVisitor visit1 = [this, &failCount]([[maybe_unused]] Root type, ObjectSlot slot) {
JSTaggedValue value(slot.GetTaggedType());
if (value.IsWeak()) {
VerifyObjectVisitor(heap_, &failCount)(value.GetTaggedWeakRef());
} else if (value.IsHeapObject()) {
VerifyObjectVisitor(heap_, &failCount)(value.GetTaggedObject());
}
RootVisitor visitor = [this, &failCount]([[maybe_unused]] Root type, ObjectSlot slot) {
VerifyObjectSlot(slot, &failCount);
};
RootRangeVisitor visit2 = [this, &failCount]([[maybe_unused]] Root type, ObjectSlot start, ObjectSlot end) {
RootRangeVisitor rangeVisitor = [this, &failCount]([[maybe_unused]] Root type, ObjectSlot start, ObjectSlot end) {
for (ObjectSlot slot = start; slot < end; slot++) {
JSTaggedValue value(slot.GetTaggedType());
if (value.IsWeak()) {
VerifyObjectVisitor(heap_, &failCount)(value.GetTaggedWeakRef());
} else if (value.IsHeapObject()) {
VerifyObjectVisitor(heap_, &failCount)(value.GetTaggedObject());
}
VerifyObjectSlot(slot, &failCount);
}
};
objXRay_.VisitVMRoots(visit1, visit2);
RootBaseAndDerivedVisitor derivedVisitor =
[]([[maybe_unused]] Root type, [[maybe_unused]] ObjectSlot base, [[maybe_unused]] ObjectSlot derived,
[[maybe_unused]] uintptr_t baseOldObject) {
};
objXRay_.VisitVMRoots(visitor, rangeVisitor, derivedVisitor);
if (failCount > 0) {
LOG_GC(ERROR) << "VerifyRoot detects deadObject count is " << failCount;
}
@ -111,4 +106,14 @@ size_t Verification::VerifyOldToNewRSet() const
}
return failCount;
}
void Verification::VerifyObjectSlot(const ObjectSlot &slot, size_t *failCount) const
{
JSTaggedValue value(slot.GetTaggedType());
if (value.IsWeak()) {
VerifyObjectVisitor(heap_, failCount)(value.GetTaggedWeakRef());
} else if (value.IsHeapObject()) {
VerifyObjectVisitor(heap_, failCount)(value.GetTaggedObject());
}
}
} // namespace panda::ecmascript

View File

@ -71,6 +71,8 @@ public:
size_t VerifyHeap() const;
size_t VerifyOldToNewRSet() const;
private:
void VerifyObjectSlot(const ObjectSlot &slot, size_t *failCount) const;
NO_COPY_SEMANTIC(Verification);
NO_MOVE_SEMANTIC(Verification);

View File

@ -34,9 +34,10 @@ enum class VisitType : size_t { SEMI_GC_VISIT, OLD_GC_VISIT, SNAPSHOT_VISIT };
using RootVisitor = std::function<void(Root type, ObjectSlot p)>;
using RootRangeVisitor = std::function<void(Root type, ObjectSlot start, ObjectSlot end)>;
using EcmaObjectVisitor = std::function<void(TaggedObject *root, ObjectSlot p)>;
using RootBaseAndDerivedVisitor =
std::function<void(Root type, ObjectSlot base, ObjectSlot derived, uintptr_t baseOldObject)>;
using EcmaObjectRangeVisitor = std::function<void(TaggedObject *root, ObjectSlot start, ObjectSlot end,
bool isNative)>;
using WeakRootVisitor = std::function<TaggedObject *(TaggedObject *p)>;
} // namespace panda::ecmascript
#endif // ECMASCRIPT_MEM_VISITOR_H
#endif // ECMASCRIPT_MEM_VISITOR_H