Mirror of https://gitee.com/openharmony/arkcompiler_ets_runtime (synced 2024-11-23 01:59:58 +00:00)
do not get region of derived pointer
Issue: https://gitee.com/openharmony/arkcompiler_ets_runtime/issues/IB2KJA
Signed-off-by: ZhouGuangyuan <zhouguangyuan1@huawei.com>
Change-Id: Ia1602a0d0ad2ac71ef00fa561f9b6f61b66bf68d
parent 80d5fba233
commit 94c0895cf0
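The point of the change: every batch-copy helper now receives the destination object's head pointer (dstObj) in addition to the derived destination address (dstAddr), and all GC region lookups and write-barrier bookkeeping are performed on the head pointer, never on a derived pointer into the middle of an object; the element copy itself still uses the derived source/destination addresses. A minimal plain-C++ sketch of the idea, with stand-in types and a hypothetical helper name rather than the runtime's actual API:

#include <cstddef>
#include <cstdint>
#include <cstring>

struct TaggedObject {};           // stand-in for the runtime's object header type
using JSTaggedType = uint64_t;    // stand-in for one tagged slot

// Hypothetical sketch: 'dstAddr' may point anywhere inside dstObj's payload
// (a derived pointer); 'dstObj' must be the head of the destination object.
void CopyObjectSketch(const TaggedObject* dstObj, JSTaggedType* dstAddr,
                      const JSTaggedType* srcAddr, size_t count)
{
    // step 1: the raw slot copy still works on the derived addresses.
    std::memmove(dstAddr, srcAddr, count * sizeof(JSTaggedType));

    // step 2: any region lookup / bit-set update would start from the object
    // head, e.g. Region::ObjectAddressToRange(ToUintPtr(dstObj)) in the real
    // runtime, never from the derived dstAddr.
    (void)dstObj;
}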
@@ -235,13 +235,13 @@ void BuiltinsArrayStubBuilder::Unshift(GateRef glue, GateRef thisValue, GateRef
     BRANCH_NO_WEIGHT(isIntOrNumberKind, &isIntOrNumber, &isTagged);
     Bind(&isIntOrNumber);
     {
-        ArrayCopy<MustOverlap>(glue, arrayStart, moveTo, TruncInt64ToInt32(thisLen),
+        ArrayCopy<MustOverlap>(glue, arrayStart, elements, moveTo, TruncInt64ToInt32(thisLen),
             MemoryAttribute::NoBarrier());
         Jump(&afterCopy);
     }
     Bind(&isTagged);
     {
-        ArrayCopy<MustOverlap>(glue, arrayStart, moveTo, TruncInt64ToInt32(thisLen));
+        ArrayCopy<MustOverlap>(glue, arrayStart, elements, moveTo, TruncInt64ToInt32(thisLen));
         Jump(&afterCopy);
     }
     Bind(&afterCopy);
@@ -3302,7 +3302,7 @@ void BuiltinsArrayStubBuilder::FastToSpliced(GateRef glue, GateRef thisValue, Ga
     {
         GateRef srcStart = GetDataPtrInTaggedArray(srcElements);
         GateRef dstStart = GetDataPtrInTaggedArray(dstElements);
-        ArrayCopyAndHoleToUndefined(glue, srcStart, dstStart, actualStart);
+        ArrayCopyAndHoleToUndefined(glue, srcStart, dstElements, dstStart, actualStart);
         Jump(&insertArg);
     }
     Bind(&insertArg);
@@ -3327,7 +3327,7 @@ void BuiltinsArrayStubBuilder::FastToSpliced(GateRef glue, GateRef thisValue, Ga
         GateRef srcStart = GetDataPtrInTaggedArray(srcElements, oldIndex);
         GateRef dstStart = GetDataPtrInTaggedArray(dstElements, newIndex);
         GateRef afterLength = Int32Sub(thisLength, oldIndex);
-        ArrayCopyAndHoleToUndefined(glue, srcStart, dstStart, afterLength);
+        ArrayCopyAndHoleToUndefined(glue, srcStart, dstElements, dstStart, afterLength);
         newIndex = Int32Add(newIndex, afterLength);
         Jump(&setLength);
     }
@@ -897,14 +897,14 @@ GateRef NewObjectStubBuilder::CopyArray(GateRef glue, GateRef elements, GateRef
     BRANCH(checkIsMutantTaggedArray, &copyToMutantTaggedArray, &copyToTaggedArray);
     Bind(&copyToTaggedArray);
     {
-        ArrayCopy<NotOverlap>(glue, GetDataPtrInTaggedArray(elements), GetDataPtrInTaggedArray(*array),
+        ArrayCopy<NotOverlap>(glue, GetDataPtrInTaggedArray(elements), *array, GetDataPtrInTaggedArray(*array),
             newLen);
         Jump(&afterCopy);
     }
     Bind(&copyToMutantTaggedArray);
     {
-        ArrayCopy<NotOverlap>(glue, GetDataPtrInTaggedArray(elements), GetDataPtrInTaggedArray(*array), newLen,
-            MemoryAttribute::NoBarrier());
+        ArrayCopy<NotOverlap>(glue, GetDataPtrInTaggedArray(elements), *array, GetDataPtrInTaggedArray(*array),
+            newLen, MemoryAttribute::NoBarrier());
         Jump(&afterCopy);
     }
     Bind(&afterCopy);
@@ -10506,8 +10506,8 @@ GateRef StubBuilder::GetArgumentsElements(GateRef glue, GateRef argvTaggedArray,
 using CopyKind = StubBuilder::OverlapKind;
 
 template <>
-void StubBuilder::ArrayCopy<CopyKind::NotOverlap>(GateRef glue, GateRef src, GateRef dst, GateRef length,
-                                                  MemoryAttribute mAttr)
+void StubBuilder::ArrayCopy<CopyKind::NotOverlap>(GateRef glue, GateRef srcAddr, GateRef dstObj, GateRef dstAddr,
+                                                  GateRef length, MemoryAttribute mAttr)
 {
     auto env = GetEnvironment();
     Label entry(env);
@@ -10520,7 +10520,7 @@ void StubBuilder::ArrayCopy<CopyKind::NotOverlap>(GateRef glue, GateRef src, Gat
     Label storeHead(env);
     Label enterLoop(env);
     DEFVARIABLE(offset, VariableType::INT32(), Int32(0));
 
+    GateRef dstOff = PtrSub(TaggedCastToIntPtr(dstAddr), TaggedCastToIntPtr(dstObj));
     const auto tSize = static_cast<int32_t>(JSTaggedValue::TaggedTypeSize());
     static_assert((tSize & (tSize - 1)) == 0 && "TaggedTypeSize must be power of 2");
     static_assert(LOOP_UNROLL_FACTOR == 2 && "changing LOOP_UNROLL_FACTOR also need fix the logic here");
@@ -10531,8 +10531,8 @@ void StubBuilder::ArrayCopy<CopyKind::NotOverlap>(GateRef glue, GateRef src, Gat
     {
         // Now use 2 as loop unroll factor, so only store once if reminder is not 0.
         // But if using other loop unroll factor, the store head should also be refactored.
-        GateRef value = Load(VariableType::JS_ANY(), src);
-        Store(VariableType::JS_ANY(), glue, dst, IntPtr(0), value, mAttr);
+        GateRef value = Load(VariableType::JS_ANY(), srcAddr);
+        Store(VariableType::JS_ANY(), glue, dstObj, dstOff, value, mAttr);
         offset = Int32(tSize);
         Jump(&enterLoop);
     }
@@ -10547,10 +10547,10 @@ void StubBuilder::ArrayCopy<CopyKind::NotOverlap>(GateRef glue, GateRef src, Gat
     {
         GateRef off1 = ZExtInt32ToPtr(*offset);
         GateRef off2 = PtrAdd(off1, IntPtr(tSize));
-        GateRef value1 = Load(VariableType::JS_ANY(), src, off1);
-        GateRef value2 = Load(VariableType::JS_ANY(), src, off2);
-        Store(VariableType::JS_ANY(), glue, dst, off1, value1, mAttr);
-        Store(VariableType::JS_ANY(), glue, dst, off2, value2, mAttr);
+        GateRef value1 = Load(VariableType::JS_ANY(), srcAddr, off1);
+        GateRef value2 = Load(VariableType::JS_ANY(), srcAddr, off2);
+        Store(VariableType::JS_ANY(), glue, dstObj, PtrAdd(dstOff, off1), value1, mAttr);
+        Store(VariableType::JS_ANY(), glue, dstObj, PtrAdd(dstOff, off2), value2, mAttr);
         offset = Int32Add(*offset, Int32(LOOP_UNROLL_FACTOR * tSize));
         Jump(&endLoop);
     }
@@ -10562,8 +10562,8 @@ void StubBuilder::ArrayCopy<CopyKind::NotOverlap>(GateRef glue, GateRef src, Gat
 }
 
 template <>
-void StubBuilder::ArrayCopy<CopyKind::MustOverlap>(GateRef glue, GateRef src, GateRef dst, GateRef length,
-                                                   MemoryAttribute mAttr)
+void StubBuilder::ArrayCopy<CopyKind::MustOverlap>(GateRef glue, GateRef srcAddr, GateRef dstObj, GateRef dstAddr,
+                                                   GateRef length, MemoryAttribute mAttr)
 {
     auto env = GetEnvironment();
     Label entry(env);
@@ -10578,7 +10578,7 @@ void StubBuilder::ArrayCopy<CopyKind::MustOverlap>(GateRef glue, GateRef src, Ga
     const auto tSize = static_cast<int32_t>(JSTaggedValue::TaggedTypeSize());
     static_assert((tSize & (tSize - 1)) == 0 && "TaggedTypeSize must be power of 2");
     static_assert(LOOP_UNROLL_FACTOR == 2 && "changing LOOP_UNROLL_FACTOR also need fix the logic here");
 
+    GateRef dstOff = PtrSub(TaggedCastToIntPtr(dstAddr), TaggedCastToIntPtr(dstObj));
     DEFVARIABLE(offset, VariableType::INT32(), Int32Mul(length, Int32(tSize)));
     GateRef remainder = Int32And(length, Int32(LOOP_UNROLL_FACTOR - 1));
     BRANCH_NO_WEIGHT(Int32NotEqual(remainder, Int32(0)), &storeEnd, &enterLoop);
@@ -10587,8 +10587,8 @@ void StubBuilder::ArrayCopy<CopyKind::MustOverlap>(GateRef glue, GateRef src, Ga
         // Now use 2 as loop unroll factor, so only store once if reminder is not 0.
         // But if using other loop unroll factor, the store head should also be refactored.
         offset = Int32Sub(*offset, Int32(tSize));
-        GateRef value = Load(VariableType::JS_ANY(), src, ZExtInt32ToPtr(*offset));
-        Store(VariableType::JS_ANY(), glue, dst, ZExtInt32ToPtr(*offset), value, mAttr);
+        GateRef value = Load(VariableType::JS_ANY(), srcAddr, ZExtInt32ToPtr(*offset));
+        Store(VariableType::JS_ANY(), glue, dstObj, PtrAdd(dstOff, *offset), value, mAttr);
         Jump(&enterLoop);
     }
     Bind(&enterLoop);
@@ -10603,10 +10603,10 @@ void StubBuilder::ArrayCopy<CopyKind::MustOverlap>(GateRef glue, GateRef src, Ga
         offset = Int32Sub(*offset, Int32(LOOP_UNROLL_FACTOR * tSize));
         GateRef off1 = ZExtInt32ToPtr(*offset);
         GateRef off2 = PtrAdd(off1, IntPtr(tSize));
-        GateRef value1 = Load(VariableType::JS_ANY(), src, off1);
-        GateRef value2 = Load(VariableType::JS_ANY(), src, off2);
-        Store(VariableType::JS_ANY(), glue, dst, off1, value1, mAttr);
-        Store(VariableType::JS_ANY(), glue, dst, off2, value2, mAttr);
+        GateRef value1 = Load(VariableType::JS_ANY(), srcAddr, off1);
+        GateRef value2 = Load(VariableType::JS_ANY(), srcAddr, off2);
+        Store(VariableType::JS_ANY(), glue, dstObj, PtrAdd(dstOff,off1), value1, mAttr);
+        Store(VariableType::JS_ANY(), glue, dstObj, PtrAdd(dstOff,off2), value2, mAttr);
         Jump(&endLoop);
     }
     Bind(&endLoop);
@@ -10617,35 +10617,36 @@ void StubBuilder::ArrayCopy<CopyKind::MustOverlap>(GateRef glue, GateRef src, Ga
 }
 
 template <>
-void StubBuilder::ArrayCopy<CopyKind::Unknown>(GateRef glue, GateRef src, GateRef dst, GateRef length,
-                                               MemoryAttribute mAttr)
+void StubBuilder::ArrayCopy<CopyKind::Unknown>(GateRef glue, GateRef srcAddr, GateRef dstObj, GateRef dstAddr,
+                                               GateRef length, MemoryAttribute mAttr)
 {
     auto env = GetEnvironment();
     Label entry(env);
     env->SubCfgEntry(&entry);
     Label exit(env);
     GateRef needRightToLeft = LogicAndBuilder(env)
-        .And(IntPtrGreaterThan(dst, src))
-        .And(IntPtrGreaterThan(PtrAdd(src, ZExtInt32ToPtr(length)), dst))
+        .And(IntPtrGreaterThan(dstAddr, srcAddr))
+        .And(IntPtrGreaterThan(PtrAdd(srcAddr, ZExtInt32ToPtr(length)), dstAddr))
         .Done();
     Label leftToRight(env);
     Label rightToLeft(env);
     BRANCH_NO_WEIGHT(needRightToLeft, &rightToLeft, &leftToRight);
     Bind(&rightToLeft);
     {
-        ArrayCopy<MustOverlap>(glue, src, dst, length, mAttr);
+        ArrayCopy<MustOverlap>(glue, srcAddr, dstObj, dstAddr, length, mAttr);
         Jump(&exit);
     }
     Bind(&leftToRight);
     {
-        ArrayCopy<NotOverlap>(glue, src, dst, length, mAttr);
+        ArrayCopy<NotOverlap>(glue, srcAddr, dstObj, dstAddr, length, mAttr);
         Jump(&exit);
     }
     Bind(&exit);
     env->SubCfgExit();
 }
 
-void StubBuilder::ArrayCopyAndHoleToUndefined(GateRef glue, GateRef src, GateRef dst, GateRef length, MemoryAttribute mAttr)
+void StubBuilder::ArrayCopyAndHoleToUndefined(GateRef glue, GateRef srcAddr, GateRef dstObj, GateRef dstAddr,
+                                              GateRef length, MemoryAttribute mAttr)
 {
     auto env = GetEnvironment();
     Label entry(env);
@@ -10654,7 +10655,7 @@ void StubBuilder::ArrayCopyAndHoleToUndefined(GateRef glue, GateRef src, GateRef
     Label begin(env);
     Label body(env);
     Label endLoop(env);
 
+    GateRef dstOff = PtrSub(TaggedCastToIntPtr(dstAddr), TaggedCastToIntPtr(dstObj));
     DEFVARIABLE(index, VariableType::INT32(), Int32(0));
     Jump(&begin);
     LoopBegin(&begin);
@@ -10663,18 +10664,19 @@ void StubBuilder::ArrayCopyAndHoleToUndefined(GateRef glue, GateRef src, GateRef
     Bind(&body);
     {
         GateRef offset = PtrMul(ZExtInt32ToPtr(*index), IntPtr(JSTaggedValue::TaggedTypeSize()));
-        GateRef value = Load(VariableType::JS_ANY(), src, offset);
+        GateRef value = Load(VariableType::JS_ANY(), srcAddr, offset);
 
         Label isHole(env);
         Label isNotHole(env);
         BRANCH_UNLIKELY(TaggedIsHole(value), &isHole, &isNotHole);
         Bind(&isHole);
         {
-            Store(VariableType::JS_ANY(), glue, dst, offset, Undefined(), MemoryAttribute::NoBarrier());
+            Store(VariableType::JS_ANY(), glue, dstObj, PtrAdd(dstOff, offset), Undefined(),
+                  MemoryAttribute::NoBarrier());
             Jump(&endLoop);
         }
         Bind(&isNotHole);
-        Store(VariableType::JS_ANY(), glue, dst, offset, value, mAttr);
+        Store(VariableType::JS_ANY(), glue, dstObj, PtrAdd(dstOff, offset), value, mAttr);
         Jump(&endLoop);
     }
 }
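In the rewritten stubs above, the destination slot is addressed relative to the object head: dstOff is computed once as dstAddr - dstObj, and every Store goes through (dstObj, dstOff + offset) instead of (dstAddr, offset), so the store and its write barrier are keyed to the object head while still hitting the same slot. A small plain-C++ sketch of that address arithmetic (hypothetical names, not StubBuilder IR):

#include <cstdint>

// Both functions return the absolute address of the slot being written; the
// results are equal, but only the second keeps the object head as the store base.
uintptr_t SlotViaDerivedPtr(uintptr_t dstAddr, uintptr_t offset)
{
    return dstAddr + offset;                 // old form: base is the derived pointer
}

uintptr_t SlotViaObjectHead(uintptr_t dstObj, uintptr_t dstAddr, uintptr_t offset)
{
    uintptr_t dstOff = dstAddr - dstObj;     // PtrSub(dstAddr, dstObj) in the stubs
    return dstObj + dstOff + offset;         // Store(..., dstObj, dstOff + offset, ...)
}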
@@ -917,8 +917,11 @@ public:
     GateRef ElementsKindIsNumOrHoleNum(GateRef kind);
     GateRef ElementsKindIsHeapKind(GateRef kind);
     GateRef ElementsKindHasHole(GateRef kind);
-    void ArrayCopyAndHoleToUndefined(GateRef glue, GateRef src, GateRef dst, GateRef length,
-                                     MemoryAttribute mAttr = MemoryAttribute::Default());
+    // dstAddr/srcAddr is the address will be copied to/from.
+    // It can be a derived pointer point to the middle of an object.
+    // Note: dstObj is the object address for dstAddr, it must point to the head of an object.
+    void ArrayCopyAndHoleToUndefined(GateRef glue, GateRef srcAddr, GateRef dstObj, GateRef dstAddr,
+                                     GateRef length, MemoryAttribute mAttr = MemoryAttribute::Default());
     void MigrateArrayWithKind(GateRef glue, GateRef object, GateRef oldKind, GateRef newKind);
     GateRef MigrateFromRawValueToHeapValues(GateRef glue, GateRef object, GateRef needCOW, GateRef isIntKind);
     GateRef MigrateFromHeapValueToRawValue(GateRef glue, GateRef object, GateRef needCOW, GateRef isIntKind);
@@ -1048,8 +1051,12 @@ public:
         // Unknown means all the kinds above are possible, it will select the suitable one in runtime.
         Unknown,
     };
+    // dstAddr/srcAddr is the address will be copied to/from.
+    // It can be a derived pointer point to the middle of an object.
+    //
+    // Note: dstObj is the object address for dstAddr, it must point to the head of an object.
     template <OverlapKind kind>
-    void ArrayCopy(GateRef glue, GateRef src, GateRef dst, GateRef length,
+    void ArrayCopy(GateRef glue, GateRef srcAddr, GateRef dstObj, GateRef dstAddr, GateRef length,
         MemoryAttribute mAttr = MemoryAttribute::Default());
 protected:
     static constexpr int LOOP_UNROLL_FACTOR = 2;
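The OverlapKind comment above notes that Unknown picks the suitable copy direction at runtime; per the ArrayCopy<CopyKind::Unknown> hunk earlier in this commit, a backward copy is needed only when the destination range starts inside the source range. A plain-C++ sketch of that selection (illustrative only, not the StubBuilder implementation; count is the number of tagged slots):

#include <cstddef>
#include <cstdint>

void CopyTaggedSlots(uint64_t* dstAddr, const uint64_t* srcAddr, size_t count)
{
    const uintptr_t dst = reinterpret_cast<uintptr_t>(dstAddr);
    const uintptr_t src = reinterpret_cast<uintptr_t>(srcAddr);
    // Backward copy is only required when the destination starts inside the source range.
    const bool needRightToLeft = dst > src && src + count * sizeof(uint64_t) > dst;
    if (needRightToLeft) {
        for (size_t i = count; i > 0; --i) {     // MustOverlap path: right to left
            dstAddr[i - 1] = srcAddr[i - 1];
        }
    } else {
        for (size_t i = 0; i < count; ++i) {     // NotOverlap path: left to right
            dstAddr[i] = srcAddr[i];
        }
    }
}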
@@ -116,26 +116,27 @@ template <Region::RegionSpaceKind kind>
 ARK_NOINLINE bool BatchBitSet(const JSThread* thread, Region* objectRegion, JSTaggedValue* dst, size_t count);
 
 template <bool needWriteBarrier, bool maybeOverlap>
-void Barriers::CopyObject(const JSThread* thread, JSTaggedValue* dst, JSTaggedValue* src, size_t count)
+void Barriers::CopyObject(const JSThread *thread, const TaggedObject *dstObj, JSTaggedValue *dstAddr,
+                          JSTaggedValue *srcAddr, size_t count)
 {
     // NOTE: The logic in CopyObject should be synced with WriteBarrier.
     // if any new feature/bugfix be added in CopyObject, it should also be added to WriteBarrier.
 
     // step 1. copy from src to dst directly.
-    CopyObjectPrimitive<maybeOverlap>(dst, src, count);
+    CopyObjectPrimitive<maybeOverlap>(dstAddr, srcAddr, count);
     if constexpr (!needWriteBarrier) {
         return;
     }
     // step 2. According to object region, update the corresponding bit set batch.
-    Region* objectRegion = Region::ObjectAddressToRange(ToUintPtr(dst));
+    Region* objectRegion = Region::ObjectAddressToRange(ToUintPtr(dstObj));
     if (!objectRegion->InSharedHeap()) {
         bool allValueNotHeap = false;
         if (objectRegion->InYoungSpace()) {
-            allValueNotHeap = BatchBitSet<Region::InYoung>(thread, objectRegion, dst, count);
+            allValueNotHeap = BatchBitSet<Region::InYoung>(thread, objectRegion, dstAddr, count);
         } else if (objectRegion->InGeneralOldSpace()) {
-            allValueNotHeap = BatchBitSet<Region::InGeneralOld>(thread, objectRegion, dst, count);
+            allValueNotHeap = BatchBitSet<Region::InGeneralOld>(thread, objectRegion, dstAddr, count);
         } else {
-            allValueNotHeap = BatchBitSet<Region::Other>(thread, objectRegion, dst, count);
+            allValueNotHeap = BatchBitSet<Region::Other>(thread, objectRegion, dstAddr, count);
         }
         if (allValueNotHeap) {
             return;
@@ -148,14 +149,14 @@ void Barriers::CopyObject(const JSThread* thread, JSTaggedValue* dst, JSTaggedVa
         return;
     }
     for (uint32_t i = 0; i < count; i++) {
-        JSTaggedValue taggedValue = *(dst + i);
+        JSTaggedValue taggedValue = *(dstAddr + i);
         if (!taggedValue.IsHeapObject()) {
            continue;
        }
        Region* valueRegion = Region::ObjectAddressToRange(taggedValue.GetTaggedObject());
        ASSERT(!objectRegion->InSharedHeap() || valueRegion->InSharedHeap());
        if (marking && !valueRegion->InSharedHeap()) {
-            const uintptr_t slotAddr = ToUintPtr(dst) + JSTaggedValue::TaggedTypeSize() * i;
+            const uintptr_t slotAddr = ToUintPtr(dstAddr) + JSTaggedValue::TaggedTypeSize() * i;
            Barriers::Update(thread, slotAddr, objectRegion, taggedValue.GetTaggedObject(), valueRegion);
            // NOTE: ConcurrentMarking and SharedConcurrentMarking can be enabled at the same time, but a specific
            // value can't be "not shared heap" and "in SharedSweepableSpace" at the same time. So using "if - else if"
@@ -48,9 +48,18 @@ public:
     template<bool needWriteBarrier = true>
     static void SetObject(const JSThread *thread, void *obj, size_t offset, JSTaggedType value);
 
+    // dstAddr/srcAddr is the address will be copied to/from.
+    // It can be a derived pointer point to the middle of an object.
+    //
+    // Note: dstObj is the object address for dstAddr, it must point to the head of an object.
     template<bool needWriteBarrier, bool maybeOverlap>
-    static void CopyObject(const JSThread *thread, JSTaggedValue* dst, JSTaggedValue* src, size_t count);
+    static void CopyObject(const JSThread *thread, const TaggedObject *dstObj, JSTaggedValue *dstAddr,
+                           JSTaggedValue *srcAddr, size_t count);
 
+    // dstAddr/srcAddr is the address will be copied to/from.
+    // It can be a derived pointer point to the middle of an object.
+    //
+    // Note: dstObj is the object address for dstAddr, it must point to the head of an object.
     template<bool maybeOverlap>
     static void CopyObjectPrimitive(JSTaggedValue* dst, JSTaggedValue* src, size_t count);
     static void SynchronizedSetClass(const JSThread *thread, void *obj, JSTaggedType value);
@@ -74,7 +74,7 @@ inline void TaggedArray::Copy(const JSThread* thread, uint32_t dstStart, uint32_
     size_t taggedTypeSize = JSTaggedValue::TaggedTypeSize();
     JSTaggedValue* to = reinterpret_cast<JSTaggedValue*>(ToUintPtr(GetData()) + taggedTypeSize * dstStart);
     JSTaggedValue* from = reinterpret_cast<JSTaggedValue*>(ToUintPtr(srcArray->GetData()) + taggedTypeSize * srcStart);
-    Barriers::CopyObject<needBarrier, false>(thread, to, from, count);
+    Barriers::CopyObject<needBarrier, false>(thread, this, to, from, count);
 }
 }  // namespace panda::ecmascript
 #endif  // ECMASCRIPT_TAGGED_ARRAY_INL_H
@@ -49,7 +49,7 @@ HWTEST_F_L0(BarrierTest, YoungToYoungBatchCopy)
 
     JSTaggedValue* to = reinterpret_cast<JSTaggedValue*>(ToUintPtr(dstArray->GetData()));
     JSTaggedValue* from = reinterpret_cast<JSTaggedValue*>(ToUintPtr(srcArray->GetData()));
-    Barriers::CopyObject<true, false>(thread, to, from, arrayLength);
+    Barriers::CopyObject<true, false>(thread, *dstArray, to, from, arrayLength);
 
     // young to young, all the bitset should not be changed.
     dstRegion->IterateAllNewToEdenBits([&NewToEdenBeforeCopy](void* mem) {
@@ -112,7 +112,7 @@ HWTEST_F_L0(BarrierTest, BatchCopyNoBarrier)
     JSTaggedValue* to2 = reinterpret_cast<JSTaggedValue*>(ToUintPtr(dstArray2->GetData()));
     JSTaggedValue* from2 = reinterpret_cast<JSTaggedValue*>(ToUintPtr(srcArray->GetData()));
     // barrier should also work for no heap value
-    Barriers::CopyObject<true, false>(thread, to2, from2, arrayLength);
+    Barriers::CopyObject<true, false>(thread, *dstArray, to2, from2, arrayLength);
     // check
     for (uint32_t i = 0; i < arrayLength; i++) {
         EXPECT_EQ(dstArray2->Get(thread, i), srcArray->Get(thread, i));
@@ -146,7 +146,7 @@ HWTEST_F_L0(BarrierTest, LocalToShareBatchCopy)
 
     JSTaggedValue* to = reinterpret_cast<JSTaggedValue*>(ToUintPtr(dstArray->GetData()));
     JSTaggedValue* from = reinterpret_cast<JSTaggedValue*>(ToUintPtr(srcArray->GetData()));
-    Barriers::CopyObject<true, false>(thread, to, from, arrayLength);
+    Barriers::CopyObject<true, false>(thread, *dstArray, to, from, arrayLength);
 
     std::set<uintptr_t> LocalToShareSlot;
     for (uint32_t i = 0; i < arrayLength; i++) {
@@ -202,7 +202,7 @@ HWTEST_F_L0(BarrierTest, LocalToReadOnlyShareBatchCopy)
 
     JSTaggedValue* to = reinterpret_cast<JSTaggedValue*>(ToUintPtr(dstArray->GetData()));
     JSTaggedValue* from = reinterpret_cast<JSTaggedValue*>(ToUintPtr(srcArray->GetData()));
-    Barriers::CopyObject<true, false>(thread, to, from, arrayLength);
+    Barriers::CopyObject<true, false>(thread, *dstArray, to, from, arrayLength);
 
     std::set<uintptr_t> LocalToShareSlot;
     for (uint32_t i = 0; i < arrayLength; i++) {
@@ -256,7 +256,7 @@ HWTEST_F_L0(BarrierTest, LocalToShareMixBatchCopy)
 
     JSTaggedValue* to = reinterpret_cast<JSTaggedValue*>(ToUintPtr(dstArray->GetData()));
     JSTaggedValue* from = reinterpret_cast<JSTaggedValue*>(ToUintPtr(srcArray->GetData()));
-    Barriers::CopyObject<true, false>(thread, to, from, arrayLength);
+    Barriers::CopyObject<true, false>(thread, *dstArray, to, from, arrayLength);
 
     std::set<uintptr_t> LocalToShareSlot;
     for (uint32_t i = 0; i < arrayLength; i++) {
@@ -324,7 +324,7 @@ HWTEST_F_L0(BarrierTest, OldToNewBatchCopy)
 
     JSTaggedValue* to = reinterpret_cast<JSTaggedValue*>(ToUintPtr(dstArray->GetData()));
     JSTaggedValue* from = reinterpret_cast<JSTaggedValue*>(ToUintPtr(srcArray->GetData()));
-    Barriers::CopyObject<true, false>(thread, to, from, arrayLength);
+    Barriers::CopyObject<true, false>(thread, *dstArray, to, from, arrayLength);
 
     // young to young, all the bitset should not be changed.
     dstRegion->IterateAllNewToEdenBits([&OldToNewSlot, &OldToNewBeforeCopy, &dstArray, arrayLength](void* mem) {