Support multi-thread test for shared heap

Issue: https://gitee.com/openharmony/arkcompiler_ets_runtime/issues/I93UGW?from=project-issue
Fix some bugs and Support multi-thread test for shared heap

Signed-off-by: lukai <lukai25@huawei.com>
Change-Id: I64200f5e6d8b1b863cc63ac7068d10efa7260f88
This commit is contained in:
lukai 2024-02-26 17:20:39 +08:00
parent 6fb1530738
commit 2c83f10480
24 changed files with 141 additions and 64 deletions

View File

@ -96,6 +96,7 @@
#include "ecmascript/marker_cell.h"
#include "ecmascript/napi/include/jsnapi.h"
#include "ecmascript/object_factory.h"
#include "ecmascript/runtime.h"
#ifdef ARK_SUPPORT_INTL
#include "ecmascript/builtins/builtins_collator.h"
#include "ecmascript/builtins/builtins_date_time_format.h"
@ -325,7 +326,12 @@ void Builtins::Initialize(const JSHandle<GlobalEnv> &env, JSThread *thread, bool
InitializeBoolean(env, primRefObjHClass);
InitializeRegExp(env);
InitializeString(env, objFuncPrototypeVal);
InitializeSObjectAndSFunction(env);
auto runtimeGlobalEnv = Runtime::GetInstance()->GetGlobalEnv();
if (runtimeGlobalEnv.IsHole()) {
InitializeSObjectAndSFunction(env);
} else {
CopySObjectAndSFunction(env, runtimeGlobalEnv);
}
JSHandle<JSHClass> argumentsClass = factory_->CreateJSArguments(env);
env->SetArgumentsClass(thread_, argumentsClass);
SetArgumentsSharedAccessor(env);

View File

@ -346,6 +346,7 @@ private:
// For SharedObject/SharedFunction
void InitializeSObjectAndSFunction(const JSHandle<GlobalEnv> &env) const;
void CopySObjectAndSFunction(const JSHandle<GlobalEnv> &env, const JSTaggedValue &srcEnv) const;
void InitializeSObject(const JSHandle<GlobalEnv> &env, const JSHandle<JSHClass> &sObjIHClass,
const JSHandle<JSObject> &sObjFuncPrototype,
const JSHandle<JSFunction> &sFuncPrototype) const;

View File

@ -51,6 +51,17 @@ void Builtins::InitializeSObjectAndSFunction(const JSHandle<GlobalEnv> &env) con
env->SetSObjectFunctionPrototype(thread_, sObjFuncPrototype);
}
// Copies the shared-object/shared-function fields (GLOBAL_ENV_SHARED_FIELDS)
// from an already-initialized source global env into this context's env, so
// secondary VMs reuse the shared-heap builtins instead of re-creating them.
// srcEnv: the runtime-wide global env published by the first VM; must be a JSGlobalEnv.
void Builtins::CopySObjectAndSFunction(const JSHandle<GlobalEnv> &env, const JSTaggedValue &srcEnv) const
{
// Copy shareds.
ASSERT(srcEnv.IsJSGlobalEnv());
auto sGlobalEnv = reinterpret_cast<GlobalEnv*>(srcEnv.GetTaggedObject());
// For each shared field, read the value from the source env's getter and
// store it through this env's matching setter.
#define COPY_ENV_SHARED_FIELDS(Type, Name, INDEX) \
env->Set##Name(thread_, sGlobalEnv->Get##Name());
GLOBAL_ENV_SHARED_FIELDS(COPY_ENV_SHARED_FIELDS)
#undef COPY_ENV_SHARED_FIELDS
}
void Builtins::InitializeSObject(const JSHandle<GlobalEnv> &env, const JSHandle<JSHClass> &sObjIHClass,
const JSHandle<JSObject> &sObjFuncPrototype,
const JSHandle<JSFunction> &sFuncPrototype) const

View File

@ -745,8 +745,11 @@ void EcmaContext::ClearBufferData()
// Records this context's global env.
// In jsthread iteration, SwitchCurrentContext is called to iterate each context.
// If the target context is not fully initialized, the variable "global" will be
// nullptr, so the assignment is guarded instead of asserted: the rendered block
// contained both a contradictory ASSERT(global != nullptr) and an unconditional
// assignment ahead of the guard — both removed, keeping only the guarded store.
void EcmaContext::SetGlobalEnv(GlobalEnv *global)
{
    if (global != nullptr) {
        globalEnv_ = JSTaggedValue(global);
    }
}
void EcmaContext::SetMicroJobQueue(job::MicroJobQueue *queue)
@ -826,13 +829,27 @@ void EcmaContext::Iterate(const RootVisitor &v, const RootRangeVisitor &rv)
globalConst_.VisitRangeSlot(rv);
v(Root::ROOT_VM, ObjectSlot(reinterpret_cast<uintptr_t>(&globalEnv_)));
v(Root::ROOT_VM, ObjectSlot(reinterpret_cast<uintptr_t>(&regexpCache_)));
v(Root::ROOT_VM, ObjectSlot(reinterpret_cast<uintptr_t>(&regexpGlobal_)));
v(Root::ROOT_VM, ObjectSlot(reinterpret_cast<uintptr_t>(&numberToStringResultCache_)));
v(Root::ROOT_VM, ObjectSlot(reinterpret_cast<uintptr_t>(&stringSplitResultCache_)));
v(Root::ROOT_VM, ObjectSlot(reinterpret_cast<uintptr_t>(&stringToListResultCache_)));
v(Root::ROOT_VM, ObjectSlot(reinterpret_cast<uintptr_t>(&microJobQueue_)));
v(Root::ROOT_VM, ObjectSlot(reinterpret_cast<uintptr_t>(&pointerToIndexDictionary_)));
if (!regexpCache_.IsHole()) {
v(Root::ROOT_VM, ObjectSlot(reinterpret_cast<uintptr_t>(&regexpCache_)));
}
if (!regexpGlobal_.IsHole()) {
v(Root::ROOT_VM, ObjectSlot(reinterpret_cast<uintptr_t>(&regexpGlobal_)));
}
if (!numberToStringResultCache_.IsHole()) {
v(Root::ROOT_VM, ObjectSlot(reinterpret_cast<uintptr_t>(&numberToStringResultCache_)));
}
if (!stringSplitResultCache_.IsHole()) {
v(Root::ROOT_VM, ObjectSlot(reinterpret_cast<uintptr_t>(&stringSplitResultCache_)));
}
if (!stringToListResultCache_.IsHole()) {
v(Root::ROOT_VM, ObjectSlot(reinterpret_cast<uintptr_t>(&stringToListResultCache_)));
}
if (!microJobQueue_.IsHole()) {
v(Root::ROOT_VM, ObjectSlot(reinterpret_cast<uintptr_t>(&microJobQueue_)));
}
if (!pointerToIndexDictionary_.IsHole()) {
v(Root::ROOT_VM, ObjectSlot(reinterpret_cast<uintptr_t>(&pointerToIndexDictionary_)));
}
if (moduleManager_) {
moduleManager_->Iterate(v);

View File

@ -232,6 +232,7 @@ bool EcmaVM::Initialize()
auto context = new EcmaContext(thread_);
thread_->PushContext(context);
[[maybe_unused]] EcmaHandleScope scope(thread_);
thread_->SetReadyForGCIterating();
context->Initialize();
thread_->SetGlueGlobalEnv(reinterpret_cast<GlobalEnv *>(context->GetGlobalEnv().GetTaggedType()));
thread_->SetGlobalObject(GetGlobalEnv()->GetGlobalObject());
@ -533,10 +534,12 @@ void EcmaVM::Iterate(const RootVisitor &v, const RootRangeVisitor &rv)
{
rv(Root::ROOT_VM, ObjectSlot(ToUintPtr(&internalNativeMethods_.front())),
ObjectSlot(ToUintPtr(&internalNativeMethods_.back()) + JSTaggedValue::TaggedTypeSize()));
if (!WIN_OR_MAC_OR_IOS_PLATFORM) {
if (!WIN_OR_MAC_OR_IOS_PLATFORM && snapshotEnv_!= nullptr) {
snapshotEnv_->Iterate(v);
}
pgoProfiler_->Iterate(v);
if (pgoProfiler_ != nullptr) {
pgoProfiler_->Iterate(v);
}
}
#if defined(ECMASCRIPT_SUPPORT_HEAPPROFILER)

View File

@ -26,13 +26,8 @@
V(JSTaggedValue, ObjectFunctionClass, OBJECT_FUNCTION_CLASS_INDEX) \
V(JSTaggedValue, ObjectFunctionPrototype, OBJECT_FUNCTION_PROTOTYPE_INDEX) \
V(JSTaggedValue, ObjectFunctionPrototypeClass, OBJECT_FUNCTION_PROTOTYPE_CLASS_INDEX) \
V(JSTaggedValue, SObjectFunction, SHARED_OBJECT_FUNCTION_INDEX) \
V(JSTaggedValue, SObjectFunctionPrototype, SHARED_OBJECT_FUNCTION_PROTOTYPE_INDEX) \
V(JSTaggedValue, FunctionFunction, FUNCTION_FUNCTION_INDEX) \
V(JSTaggedValue, FunctionPrototype, FUNCTION_PROTOTYPE_INDEX) \
V(JSTaggedValue, SFunctionFunction, SHARED_FUNCTION_FUNCTION_INDEX) \
V(JSTaggedValue, SFunctionPrototype, SHARED_FUNCTION_PROTOTYPE_INDEX) \
V(JSTaggedValue, SConstructorClass, SHARED_CONSTRUCTOR_CLASS_INDEX) \
V(JSTaggedValue, NumberFunction, NUMBER_FUNCTION_INDEX) \
V(JSTaggedValue, NumberPrototype, NUMBER_PROTOTYPE_INDEX) \
V(JSTaggedValue, BigIntFunction, BIGINT_FUNCTION_INDEX) \
@ -188,8 +183,6 @@
V(JSTaggedValue, FunctionClassWithoutProto, FUNCTION_CLASS_WITHOUT_PROTO) \
V(JSTaggedValue, FunctionClassWithoutName, FUNCTION_CLASS_WITHOUT_NAME) \
V(JSTaggedValue, BoundFunctionClass, BOUND_FUNCTION_CLASS) \
V(JSTaggedValue, SFunctionClassWithoutProto, SHARED_FUNCTION_CLASS_WITHOUT_PROTO) \
V(JSTaggedValue, SFunctionClassWithoutAccessor, SHARED_FUNCTION_CLASS_WITHOUT_ACCESSOR) \
V(JSTaggedValue, ArgumentsClass, ARGUMENTS_CLASS) \
V(JSTaggedValue, ArgumentsCallerAccessor, ARGUMENTS_CALLER_ACCESSOR) \
V(JSTaggedValue, ArgumentsCalleeAccessor, ARGUMENTS_CALLEE_ACCESSOR) \
@ -215,7 +208,6 @@
V(JSTaggedValue, SpecificTypedArrayFunctionClass, SPERCIFIC_TYPED_ARRAY_FUNCTION_CLASS) \
V(JSTaggedValue, ConstructorFunctionClass, CONSTRUCTOR_FUNCTION_CLASS) \
V(JSTaggedValue, NormalFunctionClass, NORMAL_FUNCTION_CLASS) \
V(JSTaggedValue, SNormalFunctionClass, SHARED_NORMAL_FUNCTION_CLASS) \
V(JSTaggedValue, JSIntlBoundFunctionClass, JS_INTL_BOUND_FUNCTION_CLASS) \
V(JSTaggedValue, NumberFormatLocales, NUMBER_FORMAT_LOCALES_INDEX) \
V(JSTaggedValue, DateTimeFormatLocales, DATE_TIMEFORMAT_LOCALES_INDEX) \
@ -240,7 +232,18 @@
V(JSTaggedValue, IteratorSymbol, ITERATOR_SYMBOL_INDEX) \
V(JSTaggedValue, SpeciesSymbol, SPECIES_SYMBOL_INDEX)
#define GLOBAL_ENV_SHARED_FIELDS(V) \
V(JSTaggedValue, SObjectFunction, SHARED_OBJECT_FUNCTION_INDEX) \
V(JSTaggedValue, SObjectFunctionPrototype, SHARED_OBJECT_FUNCTION_PROTOTYPE_INDEX) \
V(JSTaggedValue, SFunctionFunction, SHARED_FUNCTION_FUNCTION_INDEX) \
V(JSTaggedValue, SFunctionPrototype, SHARED_FUNCTION_PROTOTYPE_INDEX) \
V(JSTaggedValue, SConstructorClass, SHARED_CONSTRUCTOR_CLASS_INDEX) \
V(JSTaggedValue, SFunctionClassWithoutProto, SHARED_FUNCTION_CLASS_WITHOUT_PROTO) \
V(JSTaggedValue, SFunctionClassWithoutAccessor, SHARED_FUNCTION_CLASS_WITHOUT_ACCESSOR) \
V(JSTaggedValue, SNormalFunctionClass, SHARED_NORMAL_FUNCTION_CLASS)
#define GLOBAL_ENV_FIELDS(V) \
GLOBAL_ENV_SHARED_FIELDS(V) \
GLOBAL_ENV_COMMON_FIELDS(V) \
GLOBAL_ENV_DETECTOR_SYMBOL_FIELDS(V) \
GLOBAL_ENV_DETECTOR_FIELDS(V)

View File

@ -638,7 +638,6 @@ JSTaggedValue EcmaInterpreter::ExecuteNative(EcmaRuntimeCallInfo *info)
JSTaggedValue tagged;
{
ASSERT(thread == JSThread::GetCurrent());
ThreadNativeScope nativeScope(thread);
tagged = reinterpret_cast<EcmaEntrypoint>(const_cast<void *>(method->GetNativePointer()))(info);
}
LOG_INST() << "Exit: Runtime Call.";

View File

@ -767,7 +767,11 @@ void JSThread::SwitchCurrentContext(EcmaContext *currentContext, bool isInIterat
glueData_.currentContext_->SetStackLimit(GetStackLimit());
glueData_.currentContext_->SetStackStart(GetStackStart());
glueData_.currentContext_->SetGlobalEnv(GetGlueGlobalEnv());
glueData_.currentContext_->GetGlobalEnv()->SetJSGlobalObject(this, glueData_.globalObject_);
// When the glueData_.currentContext_ is not fully initialized, glueData_.globalObject_ will be hole.
// Assigning hole to JSGlobalObject could cause a mistake at builtins initialization.
if (!glueData_.globalObject_.IsHole()) {
glueData_.currentContext_->GetGlobalEnv()->SetJSGlobalObject(this, glueData_.globalObject_);
}
SetCurrentSPFrame(currentContext->GetCurrentFrame());
SetLastLeaveFrame(currentContext->GetLeaveFrame());
@ -957,7 +961,11 @@ void JSThread::StoreState(ThreadState newState, bool lockMutatorLock)
oldStateAndFlags.asInt = stateAndFlags_.asInt;
if (lockMutatorLock && oldStateAndFlags.asStruct.flags != ThreadFlag::NO_FLAGS) {
// Someone requested smth from this thread. Go to safepoint
CheckSafepoint();
if (InRunningState()) {
CheckSafepoint();
} else {
WaitSuspension();
}
continue;
}
ThreadStateAndFlags newStateAndFlags;

View File

@ -520,6 +520,16 @@ public:
return enableLazyBuiltins_;
}
// Marks this thread as safe to visit during shared-GC root iteration
// (see Runtime::GCIterateThreadList); called once VM initialization has
// progressed far enough for the thread's roots to be iterated.
void SetReadyForGCIterating()
{
readyForGCIterating_ = true;
}
// Whether this thread may be visited by shared-GC iteration (false until
// SetReadyForGCIterating is called during VM initialization).
bool ReadyForGCIterating() const
{
return readyForGCIterating_;
}
static constexpr size_t GetGlueDataOffset()
{
return MEMBER_OFFSET(JSThread, glueData_);
@ -1054,8 +1064,8 @@ public:
alignas(EAS) JSTaggedType *frameBase_ {nullptr};
alignas(EAS) uint64_t stackStart_ {0};
alignas(EAS) uint64_t stackLimit_ {0};
alignas(EAS) GlobalEnv *glueGlobalEnv_;
alignas(EAS) GlobalEnvConstants *globalConst_;
alignas(EAS) GlobalEnv *glueGlobalEnv_ {nullptr};
alignas(EAS) GlobalEnvConstants *globalConst_ {nullptr};
alignas(EAS) bool allowCrossThreadExecution_ {false};
alignas(EAS) volatile uint64_t interruptVector_ {0};
alignas(EAS) JSTaggedValue isStartHeapSampling_ {JSTaggedValue::False()};
@ -1112,6 +1122,12 @@ public:
{
return ReadFlag(ThreadFlag::SUSPEND_REQUEST);
}
// True when the thread's current state is RUNNING (mutator executing).
bool InRunningState()
{
return GetState() == ThreadState::RUNNING;
}
ThreadState GetState()
{
uint32_t stateAndFlags = stateAndFlags_.asAtomicInt.load(std::memory_order_acquire);
@ -1198,7 +1214,7 @@ private:
VmThreadControl *vmThreadControl_ {nullptr};
bool enableStackSourceFile_ {true};
bool enableLazyBuiltins_ {false};
bool readyForGCIterating_ {false};
// CpuProfiler
bool isProfiling_ {false};
bool gcState_ {false};

View File

@ -43,10 +43,7 @@ static ARK_INLINE void WriteBarrier(const JSThread *thread, void *obj, size_t of
} else if (!objectRegion->InSharedHeap() && valueRegion->InSharedSweepableSpace()) {
objectRegion->AtomicInsertLocalToShareRSet(slotAddr);
}
// TODO(lukai) remove this check in future, when all references are allocated in sharedheap.
if (objectRegion->InSharedHeap() && !valueRegion->InSharedHeap()) {
LOG_FULL(ERROR) << "Shared space reference to " << valueRegion->GetSpaceTypeName();
}
ASSERT(!objectRegion->InSharedHeap() || valueRegion->InSharedHeap());
if (thread->IsConcurrentMarkingOrFinished()) {
Barriers::Update(thread, slotAddr, objectRegion, reinterpret_cast<TaggedObject *>(value),
valueRegion, writeType);

View File

@ -98,7 +98,6 @@ void ConcurrentSweeper::WaitAllTaskFinished()
void ConcurrentSweeper::EnsureAllTaskFinished()
{
CHECK_JS_THREAD(heap_->GetEcmaVM());
if (!isSweeping_) {
return;
}

View File

@ -77,6 +77,7 @@ void SharedHeap::Initialize(NativeAreaAllocator *nativeAreaAllocator, HeapRegion
{
nativeAreaAllocator_ = nativeAreaAllocator;
heapRegionAllocator_ = heapRegionAllocator;
shouldVerifyHeap_ = option.EnableHeapVerify();
parallelGC_ = option.EnableParallelGC();
size_t maxHeapSize = config_.GetMaxHeapSize();
size_t nonmovableSpaceCapacity = config_.GetDefaultNonMovableSpaceSize();
@ -140,9 +141,9 @@ bool SharedHeap::AsyncClearTask::Run([[maybe_unused]] uint32_t threadIndex)
void SharedHeap::CollectGarbage(JSThread *thread, [[maybe_unused]]TriggerGCType gcType, [[maybe_unused]]GCReason reason)
{
ASSERT(gcType == TriggerGCType::SHARED_GC);
Prepare();
{
SuspendAllScope scope(thread);
Prepare();
sharedGC_->RunPhases();
}
// Don't process weak node nativeFinalizeCallback here. These callbacks would be called after localGC.
@ -500,6 +501,7 @@ TriggerGCType Heap::SelectGCType() const
void Heap::CollectGarbage(TriggerGCType gcType, GCReason reason)
{
{
ASSERT(thread_->InRunningState());
RecursionScope recurScope(this);
if (thread_->IsCrossThreadExecutionEnable() || (InSensitiveStatus() && !ObjectExceedMaxHeapSize())) {
return;
@ -508,7 +510,6 @@ void Heap::CollectGarbage(TriggerGCType gcType, GCReason reason)
[[maybe_unused]] GcStateScope scope(thread_);
#endif
CHECK_NO_GC
if (UNLIKELY(ShouldVerifyHeap())) {
// pre gc heap verify
LOG_ECMA(DEBUG) << "pre gc heap verify";

View File

@ -32,6 +32,7 @@ LinearSpace::LinearSpace(Heap *heap, MemSpaceType type, size_t initialCapacity,
uintptr_t LinearSpace::Allocate(size_t size, bool isPromoted)
{
ASSERT(localHeap_->GetJSThread()->InRunningState());
auto object = allocator_.Allocate(size);
if (object != 0) {
#ifdef ECMASCRIPT_SUPPORT_HEAPSAMPLING

View File

@ -50,11 +50,10 @@ void SharedGC::Initialize()
void SharedGC::Mark()
{
ECMA_BYTRACE_NAME(HITRACE_TAG_ARK, "SharedGC::Mark");
Runtime::GetInstance()->IterateThreadList([&](JSThread *thread) {
Runtime::GetInstance()->GCIterateThreadList([&](JSThread *thread) {
ASSERT(!thread->InRunningState());
auto vm = thread->GetEcmaVM();
if (!vm->IsInitialized()) {
return;
}
vm->GetHeap()->GetSweeper()->EnsureAllTaskFinished();
sHeap_->GetSharedGCMarker()->MarkRoots(MAIN_THREAD_INDEX, vm);
sHeap_->GetSharedGCMarker()->ProcessLocalToShare(MAIN_THREAD_INDEX, const_cast<Heap*>(vm->GetHeap()));
});
@ -79,10 +78,8 @@ void SharedGC::Sweep()
};
Runtime::GetInstance()->GetEcmaStringTable()->SweepWeakReference(gcUpdateWeak);
Runtime::GetInstance()->IterateThreadList([&](JSThread *thread) {
if (!thread->GetEcmaVM()->IsInitialized()) {
return;
}
Runtime::GetInstance()->GCIterateThreadList([&](JSThread *thread) {
ASSERT(!thread->InRunningState());
thread->GetCurrentEcmaContext()->ProcessNativeDelete(gcUpdateWeak);
thread->IterateWeakEcmaGlobalStorage(gcUpdateWeak);
});

View File

@ -50,6 +50,7 @@ uintptr_t SharedSparseSpace::AllocateWithoutGC(size_t size)
uintptr_t SharedSparseSpace::Allocate(JSThread *thread, size_t size, bool allowGC)
{
ASSERT(thread->InRunningState());
uintptr_t object = TryAllocate(size);
CHECK_SOBJECT_AND_INC_OBJ_SIZE(size);
if (sweepState_ == SweepState::SWEEPING) {
@ -301,6 +302,7 @@ bool SharedReadOnlySpace::Expand(JSThread *thread)
uintptr_t SharedReadOnlySpace::Allocate(JSThread *thread, size_t size)
{
ASSERT(thread->InRunningState());
LockHolder holder(allocateLock_);
auto object = allocator_.Allocate(size);
if (object != 0) {
@ -321,6 +323,7 @@ SharedHugeObjectSpace::SharedHugeObjectSpace(BaseHeap *heap, HeapRegionAllocator
uintptr_t SharedHugeObjectSpace::Allocate(JSThread *thread, size_t objectSize)
{
ASSERT(thread->InRunningState());
LockHolder lock(allocateLock_);
// In HugeObject allocation, we have a reservation of 8 bytes for markBitSet in objectSize.
// In case Region is not aligned by 16 bytes, HUGE_OBJECT_BITSET_SIZE is 8 bytes more.

View File

@ -100,6 +100,7 @@ HugeMachineCodeSpace::HugeMachineCodeSpace(Heap *heap, HeapRegionAllocator *heap
uintptr_t HugeObjectSpace::Allocate(size_t objectSize, JSThread *thread)
{
ASSERT(thread->InRunningState());
// In HugeObject allocation, we have a reservation of 8 bytes for markBitSet in objectSize.
// In case Region is not aligned by 16 bytes, HUGE_OBJECT_BITSET_SIZE is 8 bytes more.
size_t alignedSize = AlignUp(objectSize + sizeof(Region) + HUGE_OBJECT_BITSET_SIZE, PANDA_POOL_ALIGNMENT_IN_BYTES);

View File

@ -57,6 +57,7 @@ void SparseSpace::ResetTopPointer(uintptr_t top)
uintptr_t SparseSpace::Allocate(size_t size, bool allowGC)
{
ASSERT(localHeap_->GetJSThread()->InRunningState());
auto object = allocator_->Allocate(size);
CHECK_OBJECT_AND_INC_OBJ_SIZE(size);

View File

@ -183,7 +183,7 @@ void VerifyObjectVisitor::VerifyMarkYoung(TaggedObject *object, ObjectSlot slot,
object, slot, value);
}
if (valueRegion->Test(value) && !(valueRegion->InYoungSpace() || valueRegion->InAppSpawnSpace() ||
valueRegion->InReadOnlySpace())) {
valueRegion->InReadOnlySpace() || valueRegion->InSharedHeap())) {
LogErrorForObjSlot(heap_, "Verify MarkYoung: Marked object, slot marked, but NOT in "
"Young/AppSpawn/ReadOnly Space.", object, slot, value);
}

View File

@ -1266,7 +1266,6 @@ public:
static bool DeleteWorker(EcmaVM *hostVm, EcmaVM *workerVm);
static void GetStackBeforeCallNapiSuccess(EcmaVM *vm, bool &getStackBeforeCallNapiSuccess);
static void GetStackAfterCallNapi(EcmaVM *vm);
static PatchErrorCode LoadPatch(EcmaVM *vm, const std::string &patchFileName, const std::string &baseFileName);
static PatchErrorCode LoadPatch(EcmaVM *vm,
const std::string &patchFileName, const void *patchBuffer, size_t patchSize,
@ -1305,7 +1304,8 @@ public:
int32_t triggerMode)> &cb);
static void SetSearchHapPathTracker(EcmaVM *vm, std::function<bool(const std::string moduleName,
std::string &hapPath)> cb);
// only for test
static void TransitionToWaitForTesting(EcmaVM *vm);
private:
static int vmCount_;
static bool initialize_;

View File

@ -3605,6 +3605,12 @@ void JSNApi::DeleteSerializationData(void *data)
#endif
}
// only for test
// Forces the VM's JS thread into ThreadState::WAIT so tests can simulate a
// non-running mutator (e.g. for the shared-heap multi-thread GC tests).
// Test-only API; must not be used in production code paths.
void JSNApi::TransitionToWaitForTesting(EcmaVM *vm)
{
vm->GetJSThread()->UpdateState(ecmascript::ThreadState::WAIT);
}
void HostPromiseRejectionTracker(const EcmaVM *vm,
const JSHandle<JSPromise> promise,
const JSHandle<JSTaggedValue> reason,

View File

@ -55,12 +55,15 @@ void Runtime::CreateIfFirstVm(const JSRuntimeOptions &options)
// Performs one-time runtime initialization on the first VM; later VMs only
// initialize themselves. The rendered block interleaved the old and new diff
// sides (leaving a dangling "} else {" and a duplicated lock) — reconstructed
// here as the coherent scoped-lock version: the creation lock guards only the
// vmCount_ bookkeeping and first-VM setup, so vm->Initialize() for subsequent
// VMs runs outside the lock and cannot serialize all VM creation.
void Runtime::InitializeIfFirstVm(EcmaVM *vm)
{
    {
        LockHolder lock(*vmCreationLock_);
        if (++vmCount_ == 1) {
            PreInitialization(vm);
            vm->Initialize();
            PostInitialization(vm);
        }
    }
    // Not the first VM (or first-VM init already done elsewhere): initialize
    // this VM outside the creation lock.
    if (!vm->IsInitialized()) {
        vm->Initialize();
    }
}
@ -80,6 +83,7 @@ void Runtime::PostInitialization(const EcmaVM *vm)
// Use the main thread's globalconst after it has initialized,
// and copy shared parts to other thread's later.
globalConstants_ = mainThread_->GlobalConstants();
globalEnv_ = vm->GetGlobalEnv().GetTaggedValue();
SharedHeap::GetInstance()->PostInitialization(globalConstants_, const_cast<EcmaVM*>(vm)->GetJSOptions());
// [[TODO::DaiHN]] need adding root iterate.
SharedModuleManager::GetInstance()->Initialize(vm);
@ -113,14 +117,14 @@ void Runtime::RegisterThread(JSThread* newThread)
void Runtime::UnregisterThread(JSThread* thread)
{
LockHolder lock(threadsLock_);
ASSERT(thread->GetState() != ThreadState::RUNNING);
ASSERT(!thread->InRunningState());
threads_.remove(thread);
}
void Runtime::SuspendAll(JSThread *current)
{
ASSERT(current != nullptr);
ASSERT(current->GetState() != ThreadState::RUNNING);
ASSERT(!current->InRunningState());
ASSERT(!mutatorLock_.HasLock());
SuspendAllThreadsImpl(current);
mutatorLock_.WriteLock();
@ -129,7 +133,7 @@ void Runtime::SuspendAll(JSThread *current)
void Runtime::ResumeAll(JSThread *current)
{
ASSERT(current != nullptr);
ASSERT(current->GetState() != ThreadState::RUNNING);
ASSERT(!current->InRunningState());
ASSERT(mutatorLock_.HasLock());
mutatorLock_.Unlock();
ResumeAllThreadsImpl(current);

View File

@ -56,11 +56,13 @@ public:
}
template<class Callback>
void IterateThreadList(const Callback &cb)
void GCIterateThreadList(const Callback &cb)
{
LockHolder lock(threadsLock_);
for (auto thread : threads_) {
cb(thread);
if (thread->ReadyForGCIterating()) {
cb(thread);
}
}
}
@ -69,6 +71,11 @@ public:
return globalConstants_;
}
// Returns the runtime-wide shared global env. Remains Hole (the member's
// default) until the first VM's PostInitialization publishes it; callers
// (e.g. Builtins::Initialize) check IsHole() to decide between initializing
// and copying the shared builtins.
JSTaggedValue GetGlobalEnv() const
{
return globalEnv_;
}
inline EcmaStringTable *GetEcmaStringTable() const
{
return stringTable_.get();
@ -89,6 +96,7 @@ private:
MutatorLock mutatorLock_;
const GlobalEnvConstants *globalConstants_ {nullptr};
JSTaggedValue globalEnv_ {JSTaggedValue::Hole()};
JSThread *mainThread_ {nullptr};
// for shared heap.
std::unique_ptr<NativeAreaAllocator> nativeAreaAllocator_;

View File

@ -19,11 +19,9 @@
namespace panda::ecmascript {
// RAII holder that acquires mtx for the lifetime of the object. The rendered
// block carried both the old conditional-lock member-init list and the new
// unconditional one — reconstructed as the new version, which always locks.
// The thread is transitioned to WAIT before blocking on the mutex so a
// suspend-all request (e.g. shared GC) is not stalled by this thread while it
// waits for the lock.
RuntimeLockHolder::RuntimeLockHolder(JSThread *thread, Mutex &mtx)
    : thread_(thread), mtx_(mtx)
{
    ThreadStateTransitionScope ts(thread_, ThreadState::WAIT);
    mtx.Lock();
}
} // namespace panda::ecmascript

View File

@ -25,15 +25,12 @@ public:
// Releases the mutex acquired by the constructor. The rendered block showed
// both the old guarded unlock and the new unconditional one, which as written
// would unlock twice — kept only the single unconditional unlock, matching
// the constructor that now always locks.
~RuntimeLockHolder()
{
    mtx_.Unlock();
}
private:
JSThread *thread_;
Mutex &mtx_;
bool should_lock_ {false};
NO_COPY_SEMANTIC(RuntimeLockHolder);
NO_MOVE_SEMANTIC(RuntimeLockHolder);