Reason: optimize fastcall

Description: optimize the fastcall path: record the optimized/fast-call state as IsOptimized/CanFastCall bits on the function's JSHClass so call stubs and lowered calls can branch on the hclass bitfield instead of reloading the Method call field.
Issue:https://gitee.com/openharmony/arkcompiler_ets_runtime/issues/I78093?from=project-issue

Signed-off-by: wupengyong <wupengyong@huawei.com>
Change-Id: I1685c6b1341c82161d18e827f5449f90f7b1b634
wupengyong 2023-06-01 19:50:59 +08:00
parent fe69c3eee3
commit efc0b12047
40 changed files with 834 additions and 495 deletions
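The core of the change, visible across the file diffs below: the "compiled by AOT" and "supports fast call" state that call paths used to read from the Method call field (MethodLiteral::IsAotCodeBit and the AOT_FASTCALL_BITS mask) is now recorded as IsOptimized/CanFastCall bits on the function's JSHClass, so a call site can branch after loading a single hclass bitfield instead of chasing function -> method -> callField. A minimal standalone C++ sketch of the two checks; the bit positions and struct layouts are illustrative only, not the engine's real layout:

#include <cstdint>
#include <iostream>

// Illustrative bit positions; the engine derives these from
// JSHClass::IsOptimizedBit / CanFastCallBit and MethodLiteral::IsAotCodeBit.
constexpr uint32_t IS_OPTIMIZED_BIT = 1u << 5;
constexpr uint32_t CAN_FAST_CALL_BIT = 1u << 6;
constexpr uint64_t IS_AOT_CODE_BIT = 1ull << 40;
constexpr uint64_t IS_FAST_CALL_BIT = 1ull << 42;

struct Method { uint64_t callField; };      // old source of truth
struct JSHClass { uint32_t bitField; };     // new source of truth
struct JSFunction { JSHClass *hclass; Method *method; };

// Before: two dependent loads (function -> method -> callField), then a bit test.
bool CanFastCallViaMethod(const JSFunction &f) {
    uint64_t cf = f.method->callField;
    return (cf & (IS_AOT_CODE_BIT | IS_FAST_CALL_BIT)) == (IS_AOT_CODE_BIT | IS_FAST_CALL_BIT);
}

// After: one load of the hclass bitfield, which the same stub can also reuse
// for the class-constructor and IsOptimized checks.
bool CanFastCallViaHClass(const JSFunction &f) {
    return (f.hclass->bitField & CAN_FAST_CALL_BIT) != 0;
}

int main() {
    JSHClass h{CAN_FAST_CALL_BIT | IS_OPTIMIZED_BIT};
    Method m{IS_AOT_CODE_BIT | IS_FAST_CALL_BIT};
    JSFunction f{&h, &m};
    std::cout << CanFastCallViaMethod(f) << " " << CanFastCallViaHClass(f) << "\n";  // 1 1
}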

View File

@ -355,6 +355,7 @@ void Builtins::Initialize(const JSHandle<GlobalEnv> &env, JSThread *thread, bool
InitializeCjsExports(env);
InitializeCjsRequire(env);
InitializeDefaultExportOfScript(env);
InitializeFunctionHclassForOptimized(env);
JSHandle<JSHClass> generatorFuncClass =
factory_->CreateFunctionClass(FunctionKind::GENERATOR_FUNCTION, JSFunction::SIZE, JSType::JS_GENERATOR_FUNCTION,
env->GetGeneratorFunctionPrototype());
@ -372,6 +373,29 @@ void Builtins::Initialize(const JSHandle<GlobalEnv> &env, JSThread *thread, bool
thread_->ResetGuardians();
}
void Builtins::InitializeFunctionHclassForOptimized(const JSHandle<GlobalEnv> &env) const
{
#define JSFUNCTION_JCLASS_LIST(V) \
V(FunctionClassWithProto, FunctionKind::BASE_CONSTRUCTOR, JSFunction::SIZE, FUNCTION, Function) \
V(FunctionClassWithoutProto, FunctionKind::NORMAL_FUNCTION, JSFunction::SIZE, FUNCTION, Function) \
V(GeneratorFunctionClass, FunctionKind::GENERATOR_FUNCTION, \
JSFunction::SIZE, GENERATOR_FUNCTION, GeneratorFunction) \
V(AsyncFunctionClass, FunctionKind::ASYNC_FUNCTION, JSAsyncFunction::SIZE, ASYNC_FUNCTION, AsyncFunction) \
V(AsyncGeneratorFunctionClass, FunctionKind::ASYNC_GENERATOR_FUNCTION, \
JSFunction::SIZE, ASYNC_GENERATOR_FUNCTION, AsyncGeneratorFunction) \
#define INITIALIZE_FUNCTION_HCLASS_FOR_OPTIMIZED(name, kind, size, type, prototype) \
JSHandle<JSHClass> name##Optimized = factory_->CreateFunctionClass(kind, size, \
JSType::JS_##type, env->Get##prototype##Prototype(), true, false); \
env->Set##name##Optimized(thread_, name##Optimized); \
JSHandle<JSHClass> name##FastCall = factory_->CreateFunctionClass(kind, size, \
JSType::JS_##type, env->Get##prototype##Prototype(), true, true); \
env->Set##name##OptimizedWithFastCall(thread_, name##FastCall);
JSFUNCTION_JCLASS_LIST(INITIALIZE_FUNCTION_HCLASS_FOR_OPTIMIZED)
#undef INITIALIZE_FUNCTION_HCLASS_FOR_OPTIMIZED
#undef JSFUNCTION_JCLASS_LIST
}
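InitializeFunctionHclassForOptimized is an X-macro: JSFUNCTION_JCLASS_LIST enumerates the function hclass variants, and INITIALIZE_FUNCTION_HCLASS_FOR_OPTIMIZED is expanded once per entry, creating both an "Optimized" and an "OptimizedWithFastCall" hclass and storing them in the global env. A small self-contained sketch of the same pattern; the registry map and names are hypothetical, only the expansion technique mirrors the code above:

#include <iostream>
#include <map>
#include <string>

// Hypothetical stand-in for the global environment slots.
std::map<std::string, std::string> registry;

void CreateClass(const std::string &name, bool fastCall) {
    registry[name] = fastCall ? "optimized+fastcall" : "optimized";
}

// The list macro names each entry; the per-entry macro is applied to all of them.
#define FUNCTION_CLASS_LIST(V) \
    V(FunctionClassWithProto)  \
    V(AsyncFunctionClass)      \
    V(GeneratorFunctionClass)

#define INIT_OPTIMIZED(name)                              \
    CreateClass(#name "Optimized", /* fastCall */ false); \
    CreateClass(#name "OptimizedWithFastCall", /* fastCall */ true);

int main() {
    FUNCTION_CLASS_LIST(INIT_OPTIMIZED)  // expands to six CreateClass calls
#undef INIT_OPTIMIZED
#undef FUNCTION_CLASS_LIST
    for (const auto &[key, value] : registry) {
        std::cout << key << " -> " << value << "\n";
    }
}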
void Builtins::SetLazyAccessor(const JSHandle<JSObject> &object, const JSHandle<JSTaggedValue> &key,
const JSHandle<AccessorData> &accessor) const
{

View File

@ -73,6 +73,8 @@ private:
void InitializeGlobalObject(const JSHandle<GlobalEnv> &env, const JSHandle<JSObject> &globalObject);
void InitializeFunctionHclassForOptimized(const JSHandle<GlobalEnv> &env) const;
void InitializeFunction(const JSHandle<GlobalEnv> &env, const JSHandle<JSHClass> &emptyFuncClass) const;
void InitializeObject(const JSHandle<GlobalEnv> &env, const JSHandle<JSObject> &objFuncPrototype,

View File

@ -219,7 +219,8 @@ void AOTFileManager::SetAOTMainFuncEntry(JSHandle<JSFunction> mainFunc, const JS
#endif
}
void AOTFileManager::SetAOTFuncEntry(const JSPandaFile *jsPandaFile, Method *method, uint32_t entryIndex)
void AOTFileManager::SetAOTFuncEntry(const JSPandaFile *jsPandaFile, Method *method,
uint32_t entryIndex, bool *canFastCall)
{
AnFileDataManager *anFileDataManager = AnFileDataManager::GetInstance();
uint32_t anFileInfoIndex = jsPandaFile->GetAOTFileInfoIndex();
@ -235,25 +236,8 @@ void AOTFileManager::SetAOTFuncEntry(const JSPandaFile *jsPandaFile, Method *met
method->SetDeoptThreshold(vm_->GetJSOptions().GetDeoptThreshold());
method->SetCodeEntryAndMarkAOT(codeEntry);
method->SetIsFastCall(entry.isFastCall_);
}
void AOTFileManager::SetAOTFuncEntryForLiteral(const JSPandaFile *jsPandaFile, const TaggedArray *literal,
const AOTLiteralInfo *entryIndexes)
{
size_t elementsLen = literal->GetLength();
JSTaggedValue value = JSTaggedValue::Undefined();
int pos = 0;
for (size_t i = 0; i < elementsLen; i++) {
value = literal->Get(i);
if (value.IsJSFunction()) {
JSTaggedValue index = entryIndexes->Get(pos++);
int entryIndex = index.GetInt();
// -1 : this jsfunction is a large function
if (entryIndex == -1) {
continue;
}
SetAOTFuncEntry(jsPandaFile, JSFunction::Cast(value)->GetCallTarget(), static_cast<uint32_t>(entryIndex));
}
if (canFastCall != nullptr) {
*canFastCall = entry.isFastCall_;
}
}

View File

@ -68,9 +68,8 @@ public:
uint32_t GetAnFileIndex(const JSPandaFile *jsPandaFile) const;
void SetAOTMainFuncEntry(JSHandle<JSFunction> mainFunc, const JSPandaFile *jsPandaFile,
std::string_view entryPoint);
void SetAOTFuncEntry(const JSPandaFile *jsPandaFile, Method *method, uint32_t entryIndex);
void SetAOTFuncEntryForLiteral(const JSPandaFile *jsPandaFile, const TaggedArray *literal,
const AOTLiteralInfo *entryIndexes);
void SetAOTFuncEntry(const JSPandaFile *jsPandaFile, Method *method,
uint32_t entryIndex, bool *canFastCall = nullptr);
bool LoadAiFile([[maybe_unused]] const std::string &filename);
void LoadAiFile(const JSPandaFile *jsPandaFile);
kungfu::ArkStackMapParser* GetStackMapParser() const;

View File

@ -572,25 +572,6 @@ void CircuitBuilder::StoreHClass(GateRef glue, GateRef object, GateRef hClass)
Store(VariableType::JS_POINTER(), glue, object, IntPtr(TaggedObject::HCLASS_OFFSET), hClass);
}
inline GateRef CircuitBuilder::HasAotCode(GateRef method)
{
GateRef callFieldOffset = IntPtr(Method::CALL_FIELD_OFFSET);
GateRef callfield = Load(VariableType::INT64(), method, callFieldOffset);
return Int64NotEqual(
Int64And(
Int64LSR(callfield, Int64(MethodLiteral::IsAotCodeBit::START_BIT)),
Int64((1LU << MethodLiteral::IsAotCodeBit::SIZE) - 1)),
Int64(0));
}
inline GateRef CircuitBuilder::HasAotCodeAndFastCall(GateRef method)
{
GateRef callFieldOffset = IntPtr(Method::CALL_FIELD_OFFSET);
GateRef callfield = Load(VariableType::INT64(), method, callFieldOffset);
return Int64Equal(Int64And(callfield, Int64(Method::AOT_FASTCALL_BITS << MethodLiteral::IsAotCodeBit::START_BIT)),
Int64(Method::AOT_FASTCALL_BITS << MethodLiteral::IsAotCodeBit::START_BIT));
}
inline GateRef CircuitBuilder::IsJSFunction(GateRef obj)
{
GateRef objectType = GetObjectType(LoadHClass(obj));
@ -601,6 +582,40 @@ inline GateRef CircuitBuilder::IsJSFunction(GateRef obj)
return BoolAnd(greater, less);
}
inline GateRef CircuitBuilder::IsJSFunctionWithBit(GateRef obj)
{
GateRef hClass = LoadHClass(obj);
GateRef bitfieldOffset = Int32(JSHClass::BIT_FIELD_OFFSET);
GateRef bitfield = Load(VariableType::INT32(), hClass, bitfieldOffset);
return NotEqual(Int32And(bitfield, Int32(1LU << JSHClass::IsJSFunctionBit::START_BIT)), Int32(0));
}
inline GateRef CircuitBuilder::IsOptimized(GateRef obj)
{
GateRef hClass = LoadHClass(obj);
GateRef bitfieldOffset = Int32(JSHClass::BIT_FIELD_OFFSET);
GateRef bitfield = Load(VariableType::INT32(), hClass, bitfieldOffset);
return NotEqual(Int32And(bitfield, Int32(1LU << JSHClass::IsOptimizedBit::START_BIT)), Int32(0));
}
inline GateRef CircuitBuilder::IsOptimizedWithBitField(GateRef bitfield)
{
return NotEqual(Int32And(bitfield, Int32(1LU << JSHClass::IsOptimizedBit::START_BIT)), Int32(0));
}
inline GateRef CircuitBuilder::CanFastCall(GateRef obj)
{
GateRef hClass = LoadHClass(obj);
GateRef bitfieldOffset = Int32(JSHClass::BIT_FIELD_OFFSET);
GateRef bitfield = Load(VariableType::INT32(), hClass, bitfieldOffset);
return NotEqual(Int32And(bitfield, Int32(1LU << JSHClass::CanFastCallBit::START_BIT)), Int32(0));
}
inline GateRef CircuitBuilder::CanFastCallWithBitField(GateRef bitfield)
{
return NotEqual(Int32And(bitfield, Int32(1LU << JSHClass::CanFastCallBit::START_BIT)), Int32(0));
}
GateRef CircuitBuilder::IsJsType(GateRef obj, JSType type)
{
GateRef objectType = GetObjectType(LoadHClass(obj));
@ -687,6 +702,11 @@ GateRef CircuitBuilder::IsClassConstructor(GateRef object)
Int32(0));
}
GateRef CircuitBuilder::IsClassConstructorWithBitField(GateRef bitfield)
{
return NotEqual(Int32And(bitfield, Int32(1LU << JSHClass::ClassConstructorBit::START_BIT)), Int32(0));
}
GateRef CircuitBuilder::IsConstructor(GateRef object)
{
GateRef hClass = LoadHClass(object);

View File

@ -396,6 +396,19 @@ GateRef CircuitBuilder::CallTargetCheck(GateRef function, GateRef id, GateRef pa
return ret;
}
GateRef CircuitBuilder::JSCallTargetFromDefineFuncCheck(GateType type, GateRef func)
{
auto currentLabel = env_->GetCurrentLabel();
auto currentControl = currentLabel->GetControl();
auto currentDepend = currentLabel->GetDepend();
auto frameState = acc_.FindNearestFrameState(currentDepend);
GateRef ret = GetCircuit()->NewGate(circuit_->JSCallTargetFromDefineFuncCheck(static_cast<size_t>(type.Value())),
MachineType::I1, {currentControl, currentDepend, func, frameState}, GateType::NJSValue());
currentLabel->SetControl(ret);
currentLabel->SetDepend(ret);
return ret;
}
GateRef CircuitBuilder::JSCallTargetTypeCheck(GateType type, GateRef func, GateRef methodIndex)
{
auto currentLabel = env_->GetCurrentLabel();

View File

@ -253,6 +253,7 @@ public:
GateRef ObjectTypeCheck(GateType type, GateRef gate, GateRef hclassOffset);
GateRef TryPrimitiveTypeCheck(GateType type, GateRef gate);
GateRef CallTargetCheck(GateRef function, GateRef id, GateRef param, const char* comment = nullptr);
GateRef JSCallTargetFromDefineFuncCheck(GateType type, GateRef func);
GateRef JSCallTargetTypeCheck(GateType type, GateRef func, GateRef methodIndex);
GateRef JSFastCallTargetTypeCheck(GateType type, GateRef func, GateRef methodIndex);
GateRef JSCallThisTargetTypeCheck(GateType type, GateRef func);
@ -511,9 +512,12 @@ public:
GateRef StoreConstOffset(VariableType type, GateRef receiver, size_t offset, GateRef value);
// Object Operations
inline GateRef LoadHClass(GateRef object);
inline GateRef HasAotCode(GateRef method);
inline GateRef HasAotCodeAndFastCall(GateRef method);
inline GateRef IsJSFunction(GateRef obj);
inline GateRef IsJSFunctionWithBit(GateRef obj);
inline GateRef IsOptimized(GateRef obj);
inline GateRef IsOptimizedWithBitField(GateRef bitfield);
inline GateRef CanFastCall(GateRef obj);
inline GateRef CanFastCallWithBitField(GateRef bitfield);
inline GateRef IsDictionaryMode(GateRef object);
inline void StoreHClass(GateRef glue, GateRef object, GateRef hClass);
inline GateRef IsJsType(GateRef object, JSType type);
@ -526,6 +530,7 @@ public:
inline GateRef DoubleIsINF(GateRef x);
inline GateRef IsDictionaryElement(GateRef hClass);
inline GateRef IsClassConstructor(GateRef object);
inline GateRef IsClassConstructorWithBitField(GateRef bitfield);
inline GateRef IsConstructor(GateRef object);
inline GateRef IsClassPrototype(GateRef object);
inline GateRef IsExtensible(GateRef object);

View File

@ -716,7 +716,9 @@ void JsBoundCallInternalStubBuilder::GenerateCircuit()
Int64((1LU << MethodLiteral::NumArgsBits::SIZE) - 1));
GateRef expectedArgc = Int64Add(expectedNum, Int64(NUM_MANDATORY_JSFUNC_ARGS));
GateRef actualArgc = Int64Sub(argc, IntPtr(NUM_MANDATORY_JSFUNC_ARGS));
Branch(HasAotCodeAndFastCall(method), &methodIsFastCall, &notFastCall);
GateRef hClass = LoadHClass(func);
GateRef bitfield = Load(VariableType::INT32(), hClass, Int32(JSHClass::BIT_FIELD_OFFSET));
Branch(CanFastCallWithBitField(bitfield), &methodIsFastCall, &notFastCall);
Bind(&methodIsFastCall);
{
Branch(Int64LessThanOrEqual(expectedArgc, argc), &fastCall, &fastCallBridge);
@ -801,31 +803,28 @@ void JsProxyCallInternalStubBuilder::GenerateCircuit()
Branch(TaggedIsHeapObject(target), &isHeapObject, &slowPath);
Bind(&isHeapObject);
{
Branch(IsJSFunction(target), &isJsFcuntion, &slowPath);
Bind(&isJsFcuntion);
GateRef hClass = LoadHClass(target);
GateRef bitfield = Load(VariableType::INT32(), hClass, Int32(JSHClass::BIT_FIELD_OFFSET));
Branch(IsClassConstructorFromBitField(bitfield), &slowPath, &notCallConstructor);
Bind(&notCallConstructor);
GateRef actualArgc = Int64Sub(argc, IntPtr(NUM_MANDATORY_JSFUNC_ARGS));
GateRef actualArgv = PtrAdd(argv, IntPtr(NUM_MANDATORY_JSFUNC_ARGS * sizeof(JSTaggedValue)));
Branch(CanFastCallWithBitField(bitfield), &fastCall, &notFastCall);
Bind(&fastCall);
{
Branch(IsClassConstructor(target), &slowPath, &notCallConstructor);
Bind(&notCallConstructor);
GateRef meth = GetMethodFromFunction(target);
GateRef actualArgc = Int64Sub(argc, IntPtr(NUM_MANDATORY_JSFUNC_ARGS));
GateRef actualArgv = PtrAdd(argv, IntPtr(NUM_MANDATORY_JSFUNC_ARGS * sizeof(JSTaggedValue)));
Branch(HasAotCodeAndFastCall(meth), &fastCall, &notFastCall);
Bind(&fastCall);
result = CallNGCRuntime(glue, RTSTUB_ID(JSFastCallWithArgV),
{ glue, target, thisTarget, actualArgc, actualArgv });
Jump(&exit);
}
Bind(&notFastCall);
{
Branch(IsOptimizedWithBitField(bitfield), &slowCall, &slowPath);
Bind(&slowCall);
{
result = CallNGCRuntime(glue, RTSTUB_ID(JSFastCallWithArgV),
{ glue, target, thisTarget, actualArgc, actualArgv });
result = CallNGCRuntime(glue, RTSTUB_ID(JSCallWithArgV),
{ glue, actualArgc, target, newTarget, thisTarget, actualArgv });
Jump(&exit);
}
Bind(&notFastCall);
{
Branch(HasAotCode(meth), &slowCall, &slowPath);
Bind(&slowCall);
{
result = CallNGCRuntime(glue, RTSTUB_ID(JSCallWithArgV),
{ glue, actualArgc, target, newTarget, thisTarget, actualArgv });
Jump(&exit);
}
}
}
}
Bind(&slowPath);
@ -851,30 +850,28 @@ void JsProxyCallInternalStubBuilder::GenerateCircuit()
Branch(TaggedIsHeapObject(method), &isHeapObject1, &slowPath1);
Bind(&isHeapObject1);
{
Branch(IsJSFunction(method), &isJsFcuntion1, &slowPath1);
Bind(&isJsFcuntion1);
GateRef hClass = LoadHClass(method);
GateRef bitfield = Load(VariableType::INT32(), hClass, Int32(JSHClass::BIT_FIELD_OFFSET));
Branch(IsClassConstructor(method), &slowPath1, &notCallConstructor1);
Bind(&notCallConstructor1);
GateRef meth = GetMethodFromFunction(method);
GateRef code = GetAotCodeAddr(meth);
Branch(CanFastCallWithBitField(bitfield), &fastCall1, &notFastCall1);
Bind(&fastCall1);
{
Branch(IsClassConstructor(method), &slowPath1, &notCallConstructor1);
Bind(&notCallConstructor1);
GateRef meth = GetMethodFromFunction(method);
GateRef code = GetAotCodeAddr(meth);
Branch(HasAotCodeAndFastCall(meth), &fastCall1, &notFastCall1);
Bind(&fastCall1);
result = FastCallOptimized(glue, code,
{ glue, method, handler, target, thisTarget, arrHandle });
Jump(&exit);
}
Bind(&notFastCall1);
{
Branch(IsOptimizedWithBitField(bitfield), &slowCall1, &slowPath1);
Bind(&slowCall1);
{
result = FastCallOptimized(glue, code,
{ glue, method, handler, target, thisTarget, arrHandle });
result = CallOptimized(glue, code,
{ glue, numArgs, method, Undefined(), handler, target, thisTarget, arrHandle });
Jump(&exit);
}
Bind(&notFastCall1);
{
Branch(HasAotCode(meth), &slowCall1, &slowPath1);
Bind(&slowCall1);
{
result = CallOptimized(glue, code,
{ glue, numArgs, method, Undefined(), handler, target, thisTarget, arrHandle });
Jump(&exit);
}
}
}
}
Bind(&slowPath1);

View File

@ -179,7 +179,8 @@ GateType GateAccessor::GetParamGateType(GateRef gate) const
GetOpCode(gate) == OpCode::JSCALLTARGET_TYPE_CHECK ||
GetOpCode(gate) == OpCode::JSCALLTHISTARGET_TYPE_CHECK ||
GetOpCode(gate) == OpCode::JSFASTCALLTARGET_TYPE_CHECK ||
GetOpCode(gate) == OpCode::JSFASTCALLTHISTARGET_TYPE_CHECK);
GetOpCode(gate) == OpCode::JSFASTCALLTHISTARGET_TYPE_CHECK ||
GetOpCode(gate) == OpCode::JSCALLTARGET_FROM_DEFINEFUNC_CHECK);
Gate *gatePtr = circuit_->LoadGatePtr(gate);
GateTypeAccessor accessor(gatePtr->GetOneParameterMetaData()->GetValue());
return accessor.GetGateType();

View File

@ -269,20 +269,21 @@ std::string MachineTypeToStr(MachineType machineType);
V(DependSelector, DEPEND_SELECTOR, GateFlags::FIXED, 1, value, 0) \
GATE_META_DATA_LIST_WITH_VALUE_IN(V)
#define GATE_META_DATA_LIST_WITH_GATE_TYPE(V) \
V(PrimitiveTypeCheck, PRIMITIVE_TYPE_CHECK, GateFlags::CHECKABLE, 1, 1, 1) \
V(ObjectTypeCheck, OBJECT_TYPE_CHECK, GateFlags::CHECKABLE, 1, 1, 2) \
V(JSCallTargetTypeCheck, JSCALLTARGET_TYPE_CHECK, GateFlags::CHECKABLE, 1, 1, 2) \
V(JSFastCallTargetTypeCheck, JSFASTCALLTARGET_TYPE_CHECK, GateFlags::CHECKABLE, 1, 1, 2) \
V(JSCallThisTargetTypeCheck, JSCALLTHISTARGET_TYPE_CHECK, GateFlags::CHECKABLE, 1, 1, 1) \
V(JSFastCallThisTargetTypeCheck, JSFASTCALLTHISTARGET_TYPE_CHECK, GateFlags::CHECKABLE, 1, 1, 1) \
V(TypedArrayCheck, TYPED_ARRAY_CHECK, GateFlags::CHECKABLE, 1, 1, 1) \
V(IndexCheck, INDEX_CHECK, GateFlags::CHECKABLE, 1, 1, 2) \
V(TypedUnaryOp, TYPED_UNARY_OP, GateFlags::NO_WRITE, 1, 1, 1) \
V(TypedConditionJump, TYPED_CONDITION_JUMP, GateFlags::NO_WRITE, 1, 1, 1) \
V(TypedConvert, TYPE_CONVERT, GateFlags::NO_WRITE, 1, 1, 1) \
V(CheckAndConvert, CHECK_AND_CONVERT, GateFlags::NO_WRITE, 1, 1, 1) \
V(Convert, CONVERT, GateFlags::NONE_FLAG, 0, 0, 1) \
#define GATE_META_DATA_LIST_WITH_GATE_TYPE(V) \
V(PrimitiveTypeCheck, PRIMITIVE_TYPE_CHECK, GateFlags::CHECKABLE, 1, 1, 1) \
V(ObjectTypeCheck, OBJECT_TYPE_CHECK, GateFlags::CHECKABLE, 1, 1, 2) \
V(JSCallTargetFromDefineFuncCheck, JSCALLTARGET_FROM_DEFINEFUNC_CHECK, GateFlags::CHECKABLE, 1, 1, 1) \
V(JSCallTargetTypeCheck, JSCALLTARGET_TYPE_CHECK, GateFlags::CHECKABLE, 1, 1, 2) \
V(JSFastCallTargetTypeCheck, JSFASTCALLTARGET_TYPE_CHECK, GateFlags::CHECKABLE, 1, 1, 2) \
V(JSCallThisTargetTypeCheck, JSCALLTHISTARGET_TYPE_CHECK, GateFlags::CHECKABLE, 1, 1, 1) \
V(JSFastCallThisTargetTypeCheck, JSFASTCALLTHISTARGET_TYPE_CHECK, GateFlags::CHECKABLE, 1, 1, 1) \
V(TypedArrayCheck, TYPED_ARRAY_CHECK, GateFlags::CHECKABLE, 1, 1, 1) \
V(IndexCheck, INDEX_CHECK, GateFlags::CHECKABLE, 1, 1, 2) \
V(TypedUnaryOp, TYPED_UNARY_OP, GateFlags::NO_WRITE, 1, 1, 1) \
V(TypedConditionJump, TYPED_CONDITION_JUMP, GateFlags::NO_WRITE, 1, 1, 1) \
V(TypedConvert, TYPE_CONVERT, GateFlags::NO_WRITE, 1, 1, 1) \
V(CheckAndConvert, CHECK_AND_CONVERT, GateFlags::NO_WRITE, 1, 1, 1) \
V(Convert, CONVERT, GateFlags::NONE_FLAG, 0, 0, 1) \
V(JSInlineTargetTypeCheck, JSINLINETARGET_TYPE_CHECK, GateFlags::CHECKABLE, 1, 1, 2)
#define GATE_META_DATA_LIST_WITH_VALUE(V) \

View File

@ -2292,6 +2292,11 @@ LLVMValueRef LLVMIRBuilder::ConvertBoolToTaggedBoolean(GateRef gate)
LLVMValueRef LLVMIRBuilder::ConvertInt32ToTaggedInt(GateRef gate)
{
LLVMValueRef value = gate2LValue_[gate];
return ConvertInt32ToTaggedInt(value);
}
LLVMValueRef LLVMIRBuilder::ConvertInt32ToTaggedInt(LLVMValueRef value)
{
LLVMValueRef e1Value = LLVMBuildSExt(builder_, value, LLVMInt64TypeInContext(context_), "");
auto tagMask = LLVMConstInt(LLVMInt64TypeInContext(context_), JSTaggedValue::TAG_INT, 0);
LLVMValueRef result = LLVMBuildOr(builder_, e1Value, tagMask, "");
@ -2335,6 +2340,16 @@ void LLVMIRBuilder::SaveDeoptVregInfo(std::vector<LLVMValueRef> &values, int32_t
values.emplace_back(ConvertToTagged(gate));
}
void LLVMIRBuilder::SaveDeoptVregInfoWithI64(std::vector<LLVMValueRef> &values, int32_t index, size_t curDepth,
size_t shift, GateRef gate)
{
LLVMValueRef value = LLVMBuildIntCast2(builder_, gate2LValue_.at(gate),
LLVMInt32TypeInContext(context_), 1, "");
int32_t encodeIndex = Deoptimizier::EncodeDeoptVregIndex(index, curDepth, shift);
values.emplace_back(LLVMConstInt(LLVMInt32TypeInContext(context_), encodeIndex, false));
values.emplace_back(ConvertInt32ToTaggedInt(value));
}
void LLVMIRBuilder::VisitDeoptCheck(GateRef gate)
{
LLVMValueRef glue = gate2LValue_.at(acc_.GetGlueFromArgList());
@ -2345,7 +2360,8 @@ void LLVMIRBuilder::VisitDeoptCheck(GateRef gate)
params.push_back(glue); // glue
GateRef deoptType = acc_.GetValueIn(gate, 2); // 2: deopt type
uint64_t v = acc_.GetConstantValue(deoptType);
params.push_back(LLVMConstInt(LLVMInt64TypeInContext(context_), v, false)); // deoptType
params.push_back(ConvertInt32ToTaggedInt(LLVMConstInt(LLVMInt32TypeInContext(context_),
static_cast<uint32_t>(v), false))); // deoptType
LLVMValueRef callee = GetExperimentalDeopt(module_);
LLVMTypeRef funcType = GetExperimentalDeoptTy();
@ -2354,7 +2370,8 @@ void LLVMIRBuilder::VisitDeoptCheck(GateRef gate)
if (acc_.GetOpCode(deoptFrameState) == OpCode::FRAME_STATE_CHAIN) {
depth = acc_.GetNumValueIn(deoptFrameState) - 1;
}
params.push_back(LLVMConstInt(LLVMInt64Type(), depth, false));
params.push_back(ConvertInt32ToTaggedInt(LLVMConstInt(LLVMInt32TypeInContext(context_),
static_cast<uint32_t>(depth), false)));
size_t shift = Deoptimizier::ComputeShift(depth);
ArgumentAccessor argAcc(const_cast<Circuit *>(circuit_));
for (size_t curDepth = 0; curDepth <= depth; curDepth++) {
@ -2408,7 +2425,7 @@ void LLVMIRBuilder::VisitDeoptCheck(GateRef gate)
int32_t specThisIndex = static_cast<int32_t>(SpecVregIndex::THIS_OBJECT_INDEX);
SaveDeoptVregInfo(values, specThisIndex, curDepth, shift, thisObj);
int32_t specArgcIndex = static_cast<int32_t>(SpecVregIndex::ACTUAL_ARGC_INDEX);
SaveDeoptVregInfo(values, specArgcIndex, curDepth, shift, actualArgc);
SaveDeoptVregInfoWithI64(values, specArgcIndex, curDepth, shift, actualArgc);
}
LLVMValueRef runtimeCall =
LLVMBuildCall3(builder_, funcType, callee, params.data(), params.size(), "", values.data(), values.size());
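With this change the deopt metadata (deopt type, inline depth, actual argc) is pushed as tagged ints instead of raw 64-bit constants: the 32-bit value is sign-extended to 64 bits and OR'ed with the int tag, the same sequence ConvertInt32ToTaggedInt emits via LLVMBuildSExt followed by LLVMBuildOr. A minimal sketch of that encoding; the tag constant here is a placeholder, not the engine's actual JSTaggedValue::TAG_INT:

#include <cstdint>
#include <iostream>

// Placeholder tag; the real value comes from JSTaggedValue::TAG_INT.
constexpr uint64_t TAG_INT = 0xFFFF000000000000ULL;

// Sign-extend the 32-bit value to 64 bits, then OR in the int tag.
uint64_t ConvertInt32ToTaggedInt(int32_t value) {
    uint64_t extended = static_cast<uint64_t>(static_cast<int64_t>(value));
    return extended | TAG_INT;
}

int main() {
    std::cout << std::hex << ConvertInt32ToTaggedInt(7) << "\n";   // low bits keep the value
    std::cout << std::hex << ConvertInt32ToTaggedInt(-1) << "\n";  // sign bits absorbed by the tag
}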

View File

@ -408,9 +408,12 @@ private:
LLVMValueRef ConvertToTagged(GateRef gate);
LLVMValueRef ConvertBoolToTaggedBoolean(GateRef gate);
LLVMValueRef ConvertInt32ToTaggedInt(GateRef gate);
LLVMValueRef ConvertInt32ToTaggedInt(LLVMValueRef value);
LLVMValueRef ConvertFloat64ToTaggedDouble(GateRef gate);
void SaveDeoptVregInfo(std::vector<LLVMValueRef> &values, int32_t index, size_t curDepth, size_t shift,
GateRef gate);
void SaveDeoptVregInfoWithI64(std::vector<LLVMValueRef> &values, int32_t index, size_t curDepth, size_t shift,
GateRef gate);
const CompilationConfig *compCfg_ {nullptr};
const std::vector<std::vector<GateRef>> *scheduledGates_ {nullptr};

View File

@ -3037,66 +3037,65 @@ void SlowPathLowering::LowerFastCall(GateRef gate, GateRef glue, GateRef func, G
builder_.Branch(builder_.TaggedIsHeapObject(func), &isHeapObject, &slowPath);
builder_.Bind(&isHeapObject);
{
builder_.Branch(builder_.IsJSFunction(func), &isJsFcuntion, &slowPath);
builder_.Bind(&isJsFcuntion);
{
if (!isNew) {
builder_.Branch(builder_.IsClassConstructor(func), &slowPath, &notCallConstructor);
builder_.Bind(&notCallConstructor);
} else {
builder_.Branch(builder_.IsClassConstructor(func), &isCallConstructor, &slowPath);
builder_.Bind(&isCallConstructor);
}
GateRef hClass = builder_.LoadHClass(func);
GateRef bitfield = builder_.Load(VariableType::INT32(), hClass, builder_.Int32(JSHClass::BIT_FIELD_OFFSET));
if (!isNew) {
builder_.Branch(builder_.IsClassConstructorWithBitField(bitfield), &slowPath, &notCallConstructor);
builder_.Bind(&notCallConstructor);
} else {
builder_.Branch(builder_.IsClassConstructorWithBitField(bitfield), &isCallConstructor, &slowPath);
builder_.Bind(&isCallConstructor);
}
if (!isNew) {
GateRef method = builder_.GetMethodFromFunction(func);
if (!isNew) {
builder_.Branch(builder_.HasAotCodeAndFastCall(method), &fastCall, &notFastCall);
builder_.Bind(&fastCall);
{
GateRef expectedArgc = builder_.Int64Add(builder_.GetExpectedNumOfArgs(method),
builder_.Int64(NUM_MANDATORY_JSFUNC_ARGS));
builder_.Branch(builder_.Int64LessThanOrEqual(expectedArgc, argc), &call, &callBridge);
builder_.Bind(&call);
{
GateRef code = builder_.GetCodeAddr(method);
auto depend = builder_.GetDepend();
const CallSignature *cs = RuntimeStubCSigns::GetOptimizedFastCallSign();
result->WriteVariable(builder_.Call(cs, glue, code, depend, argsFastCall, gate));
builder_.Jump(exit);
}
builder_.Bind(&callBridge);
{
const CallSignature *cs = RuntimeStubCSigns::Get(RTSTUB_ID(OptimizedFastCallAndPushUndefined));
GateRef target = builder_.IntPtr(RTSTUB_ID(OptimizedFastCallAndPushUndefined));
auto depend = builder_.GetDepend();
result->WriteVariable(builder_.Call(cs, glue, target, depend, args, gate));
builder_.Jump(exit);
}
}
builder_.Bind(&notFastCall);
}
builder_.Branch(builder_.HasAotCode(method), &slowCall, &slowPath);
builder_.Bind(&slowCall);
builder_.Branch(builder_.CanFastCallWithBitField(bitfield), &fastCall, &notFastCall);
builder_.Bind(&fastCall);
{
GateRef expectedArgc = builder_.Int64Add(builder_.GetExpectedNumOfArgs(method),
builder_.Int64(NUM_MANDATORY_JSFUNC_ARGS));
builder_.Branch(builder_.Int64LessThanOrEqual(expectedArgc, argc), &call1, &callBridge1);
builder_.Bind(&call1);
builder_.Branch(builder_.Int64LessThanOrEqual(expectedArgc, argc), &call, &callBridge);
builder_.Bind(&call);
{
GateRef code = builder_.GetCodeAddr(method);
auto depend = builder_.GetDepend();
const CallSignature *cs = RuntimeStubCSigns::GetOptimizedCallSign();
result->WriteVariable(builder_.Call(cs, glue, code, depend, args, gate));
const CallSignature *cs = RuntimeStubCSigns::GetOptimizedFastCallSign();
result->WriteVariable(builder_.Call(cs, glue, code, depend, argsFastCall, gate));
builder_.Jump(exit);
}
builder_.Bind(&callBridge1);
builder_.Bind(&callBridge);
{
const CallSignature *cs = RuntimeStubCSigns::Get(RTSTUB_ID(OptimizedCallAndPushUndefined));
GateRef target = builder_.IntPtr(RTSTUB_ID(OptimizedCallAndPushUndefined));
const CallSignature *cs = RuntimeStubCSigns::Get(RTSTUB_ID(OptimizedFastCallAndPushUndefined));
GateRef target = builder_.IntPtr(RTSTUB_ID(OptimizedFastCallAndPushUndefined));
auto depend = builder_.GetDepend();
result->WriteVariable(builder_.Call(cs, glue, target, depend, args, gate));
builder_.Jump(exit);
}
}
builder_.Bind(&notFastCall);
}
builder_.Branch(builder_.IsOptimizedWithBitField(bitfield), &slowCall, &slowPath);
builder_.Bind(&slowCall);
{
GateRef method = builder_.GetMethodFromFunction(func);
GateRef expectedArgc = builder_.Int64Add(builder_.GetExpectedNumOfArgs(method),
builder_.Int64(NUM_MANDATORY_JSFUNC_ARGS));
builder_.Branch(builder_.Int64LessThanOrEqual(expectedArgc, argc), &call1, &callBridge1);
builder_.Bind(&call1);
{
GateRef code = builder_.GetCodeAddr(method);
auto depend = builder_.GetDepend();
const CallSignature *cs = RuntimeStubCSigns::GetOptimizedCallSign();
result->WriteVariable(builder_.Call(cs, glue, code, depend, args, gate));
builder_.Jump(exit);
}
builder_.Bind(&callBridge1);
{
const CallSignature *cs = RuntimeStubCSigns::Get(RTSTUB_ID(OptimizedCallAndPushUndefined));
GateRef target = builder_.IntPtr(RTSTUB_ID(OptimizedCallAndPushUndefined));
auto depend = builder_.GetDepend();
result->WriteVariable(builder_.Call(cs, glue, target, depend, args, gate));
builder_.Jump(exit);
}
}
}
builder_.Bind(&slowPath);
@ -3116,6 +3115,7 @@ void SlowPathLowering::LowerFastCall(GateRef gate, GateRef glue, GateRef func, G
}
}
}
void SlowPathLowering::LowerTypedCall(GateRef gate)
{
Environment env(gate, circuit_, &builder_);

View File

@ -2151,20 +2151,14 @@ inline GateRef StubBuilder::IsNativeMethod(GateRef method)
Int64(0));
}
inline GateRef StubBuilder::HasAotCode(GateRef method)
inline GateRef StubBuilder::IsOptimizedWithBitField(GateRef bitfield)
{
GateRef callFieldOffset = IntPtr(Method::CALL_FIELD_OFFSET);
GateRef callfield = Load(VariableType::INT64(), method, callFieldOffset);
return Int64NotEqual(
Int64And(
Int64LSR(callfield, Int64(MethodLiteral::IsAotCodeBit::START_BIT)),
Int64((1LU << MethodLiteral::IsAotCodeBit::SIZE) - 1)),
Int64(0));
return env_->GetBuilder()->IsOptimizedWithBitField(bitfield);
}
inline GateRef StubBuilder::HasAotCodeAndFastCall(GateRef method)
inline GateRef StubBuilder::CanFastCallWithBitField(GateRef bitfield)
{
return env_->GetBuilder()->HasAotCodeAndFastCall(method);
return env_->GetBuilder()->CanFastCallWithBitField(bitfield);
}
inline GateRef StubBuilder::GetExpectedNumOfArgs(GateRef method)

View File

@ -4852,19 +4852,17 @@ GateRef StubBuilder::JSCallDispatch(GateRef glue, GateRef func, GateRef actualNu
// save pc
SavePcIfNeeded(glue);
GateRef bitfield = 0;
if (!AssemblerModule::IsCallNew(mode)) {
Branch(TaggedIsHeapObject(func), &funcIsHeapObject, &funcNotCallable);
Bind(&funcIsHeapObject);
GateRef hclass = LoadHClass(func);
bitfield = Load(VariableType::INT32(), hclass, IntPtr(JSHClass::BIT_FIELD_OFFSET));
Branch(IsCallableFromBitField(bitfield), &funcIsCallable, &funcNotCallable);
Bind(&funcNotCallable);
{
CallRuntime(glue, RTSTUB_ID(ThrowNotCallableException), {});
Jump(&exit);
}
Bind(&funcIsCallable);
Branch(TaggedIsHeapObject(func), &funcIsHeapObject, &funcNotCallable);
Bind(&funcIsHeapObject);
GateRef hclass = LoadHClass(func);
bitfield = Load(VariableType::INT32(), hclass, IntPtr(JSHClass::BIT_FIELD_OFFSET));
Branch(IsCallableFromBitField(bitfield), &funcIsCallable, &funcNotCallable);
Bind(&funcNotCallable);
{
CallRuntime(glue, RTSTUB_ID(ThrowNotCallableException), {});
Jump(&exit);
}
Bind(&funcIsCallable);
GateRef method = GetMethodFromJSFunction(func);
GateRef callField = GetCallFieldFromMethod(method);
GateRef isNativeMask = Int64(static_cast<uint64_t>(1) << MethodLiteral::IsNativeBit::START_BIT);
@ -5000,9 +4998,7 @@ GateRef StubBuilder::JSCallDispatch(GateRef glue, GateRef func, GateRef actualNu
}
Bind(&funcNotClassConstructor);
} else {
GateRef hclass = LoadHClass(func);
GateRef bitfield1 = Load(VariableType::INT32(), hclass, IntPtr(JSHClass::BIT_FIELD_OFFSET));
Branch(IsClassConstructorFromBitField(bitfield1), &funcIsClassConstructor, &methodNotAot);
Branch(IsClassConstructorFromBitField(bitfield), &funcIsClassConstructor, &methodNotAot);
Bind(&funcIsClassConstructor);
}
GateRef sp = 0;
@ -5020,8 +5016,7 @@ GateRef StubBuilder::JSCallDispatch(GateRef glue, GateRef func, GateRef actualNu
GateRef newTarget = Undefined();
GateRef thisValue = Undefined();
GateRef realNumArgs = Int64Add(ZExtInt32ToInt64(actualNumArgs), Int64(NUM_MANDATORY_JSFUNC_ARGS));
GateRef isFastMask = Int64(0x5LL << MethodLiteral::IsAotCodeBit::START_BIT);
Branch(Int64Equal(Int64And(callField, isFastMask), isFastMask), &methodIsFastCall, &methodNotFastCall);
Branch(CanFastCallWithBitField(bitfield), &methodIsFastCall, &methodNotFastCall);
Bind(&methodIsFastCall);
{
GateRef expectedNum = Int64And(Int64LSR(callField, Int64(MethodLiteral::NumArgsBits::START_BIT)),
@ -5180,8 +5175,7 @@ GateRef StubBuilder::JSCallDispatch(GateRef glue, GateRef func, GateRef actualNu
}
Bind(&methodNotFastCall);
GateRef isAotMask = Int64(static_cast<uint64_t>(1) << MethodLiteral::IsAotCodeBit::START_BIT);
Branch(Int64Equal(Int64And(callField, isAotMask), Int64(0)), &methodNotAot, &methodisAot);
Branch(IsOptimizedWithBitField(bitfield), &methodisAot, &methodNotAot);
Bind(&methodisAot);
{
GateRef expectedNum = Int64And(Int64LSR(callField, Int64(MethodLiteral::NumArgsBits::START_BIT)),

View File

@ -602,8 +602,8 @@ public:
GateRef IsBoundFunction(GateRef obj);
GateRef GetMethodFromJSFunction(GateRef jsfunc);
GateRef IsNativeMethod(GateRef method);
GateRef HasAotCode(GateRef method);
GateRef HasAotCodeAndFastCall(GateRef method);
GateRef IsOptimizedWithBitField(GateRef bitfield);
GateRef CanFastCallWithBitField(GateRef bitfield);
GateRef GetExpectedNumOfArgs(GateRef method);
GateRef GetMethod(GateRef glue, GateRef obj, GateRef key, GateRef profileTypeInfo, GateRef slotId);
// proxy operator

View File

@ -651,10 +651,7 @@ void OptimizedCall::JSBoundFunctionCallInternal(ExtendedAssembler *assembler, Re
__ Ldr(hclass, MemoryOperand(boundTarget, 0));
__ Ldr(hclass, MemoryOperand(hclass, JSHClass::BIT_FIELD_OFFSET));
__ Tbnz(hclass, JSHClass::ClassConstructorBit::START_BIT, &slowCall);
Register callField(X9);
__ Ldr(Register(X8), MemoryOperand(boundTarget, JSFunction::METHOD_OFFSET));
__ Ldr(callField, MemoryOperand(Register(X8), Method::CALL_FIELD_OFFSET));
__ Tbz(callField, MethodLiteral::IsAotCodeBit::START_BIT, &slowCall);
__ Tbz(hclass, JSHClass::IsOptimizedBit::START_BIT, &slowCall);
__ Bind(&aotCall);
{
// output: glue:x0 argc:x1 calltarget:x2 argv:x3 this:x4 newtarget:x5

View File

@ -638,15 +638,11 @@ void OptimizedCall::JSBoundFunctionCallInternal(ExtendedAssembler *assembler, Re
JSCallCheck(assembler, rax, &slowCall, &slowCall, &isJsFunc); // jsfunc -> rsi hclassfiled -> rax
__ Jmp(&slowCall);
Register jsfunc = rsi;
Register methodCallField = rcx;
Register method = rdx;
__ Bind(&isJsFunc);
{
__ Btq(JSHClass::ClassConstructorBit::START_BIT, rax); // is CallConstructor
__ Jb(&slowCall);
__ Mov(Operand(rsi, JSFunctionBase::METHOD_OFFSET), method); // get method
__ Mov(Operand(method, Method::CALL_FIELD_OFFSET), methodCallField); // get call field
__ Btq(MethodLiteral::IsAotCodeBit::START_BIT, methodCallField); // is aot
__ Btq(JSHClass::IsOptimizedBit::START_BIT, rax); // is aot
__ Jnb(&slowCall);
__ Bind(&aotCall);
{

View File

@ -859,14 +859,29 @@ void TSHCRLowering::CheckCallTargetAndLowerCall(GateRef gate, GateRef func, Glob
acc_.ReplaceGate(gate, builder_.GetState(), builder_.GetDepend(), result);
}
} else {
auto op = acc_.GetOpCode(func);
if (!tsManager_->FastCallFlagIsVaild(funcGt)) {
return;
}
if (op == OpCode::JS_BYTECODE && (acc_.GetByteCodeOpcode(func) == EcmaOpcode::DEFINEFUNC_IMM8_ID16_IMM8 ||
acc_.GetByteCodeOpcode(func) == EcmaOpcode::DEFINEFUNC_IMM16_ID16_IMM8)) {
if (tsManager_->CanFastCall(funcGt)) {
builder_.JSCallTargetFromDefineFuncCheck(funcType, func);
GateRef result = builder_.TypedFastCall(gate, argsFastCall);
acc_.ReplaceGate(gate, builder_.GetState(), builder_.GetDepend(), result);
} else {
builder_.JSCallTargetFromDefineFuncCheck(funcType, func);
GateRef result = builder_.TypedCall(gate, args);
acc_.ReplaceGate(gate, builder_.GetState(), builder_.GetDepend(), result);
}
return;
}
int methodIndex = tsManager_->GetMethodIndex(funcGt);
if (!tsManager_->MethodOffsetIsVaild(funcGt) || !tsManager_->FastCallFlagIsVaild(funcGt)
|| methodIndex == -1) {
if (!tsManager_->MethodOffsetIsVaild(funcGt) || methodIndex == -1) {
return;
}
if (tsManager_->CanFastCall(funcGt)) {
builder_.JSFastCallTargetTypeCheck(funcType, func, builder_.IntPtr(methodIndex));
GateRef result = builder_.TypedFastCall(gate, argsFastCall);
acc_.ReplaceGate(gate, builder_.GetState(), builder_.GetDepend(), result);
} else {
@ -884,18 +899,9 @@ void TSHCRLowering::LowerTypedCallArg0(GateRef gate)
if (!tsManager_->IsFunctionTypeKind(funcType)) {
return;
}
GlobalTSTypeRef funcGt = funcType.GetGTRef();
uint32_t len = tsManager_->GetFunctionTypeLength(funcGt);
if (len != 0) {
return;
}
GateRef actualArgc = builder_.Int64(BytecodeCallArgc::ComputeCallArgc(acc_.GetNumValueIn(gate),
EcmaOpcode::CALLARG0_IMM8));
GateRef newTarget = builder_.Undefined();
GateRef thisObj = builder_.Undefined();
std::vector<GateRef> argsFastCall { glue_, func, thisObj };
std::vector<GateRef> args { glue_, actualArgc, func, newTarget, thisObj };
CheckCallTargetAndLowerCall(gate, func, funcGt, funcType, args, argsFastCall);
LowerTypedCall(gate, func, actualArgc, funcType, 0);
}
void TSHCRLowering::LowerTypedCallArg1(GateRef gate)
@ -905,11 +911,6 @@ void TSHCRLowering::LowerTypedCallArg1(GateRef gate)
if (!tsManager_->IsFunctionTypeKind(funcType)) {
return;
}
GlobalTSTypeRef funcGt = funcType.GetGTRef();
uint32_t len = tsManager_->GetFunctionTypeLength(funcGt);
if (len != 1) { // 1: 1 params
return;
}
GateRef a0Value = acc_.GetValueIn(gate, 0);
GateType a0Type = acc_.GetGateType(a0Value);
BuiltinsStubCSigns::ID id = GetBuiltinId(func);
@ -919,11 +920,7 @@ void TSHCRLowering::LowerTypedCallArg1(GateRef gate)
} else {
GateRef actualArgc = builder_.Int64(BytecodeCallArgc::ComputeCallArgc(acc_.GetNumValueIn(gate),
EcmaOpcode::CALLARG1_IMM8_V8));
GateRef newTarget = builder_.Undefined();
GateRef thisObj = builder_.Undefined();
std::vector<GateRef> argsFastCall { glue_, func, thisObj, a0Value };
std::vector<GateRef> args { glue_, actualArgc, func, newTarget, thisObj, a0Value };
CheckCallTargetAndLowerCall(gate, func, funcGt, funcType, args, argsFastCall);
LowerTypedCall(gate, func, actualArgc, funcType, 1);
}
}
@ -934,20 +931,9 @@ void TSHCRLowering::LowerTypedCallArg2(GateRef gate)
if (!tsManager_->IsFunctionTypeKind(funcType)) {
return;
}
GlobalTSTypeRef funcGt = funcType.GetGTRef();
uint32_t len = tsManager_->GetFunctionTypeLength(funcGt);
if (len != 2) { // 2: 2 params
return;
}
GateRef actualArgc = builder_.Int64(BytecodeCallArgc::ComputeCallArgc(acc_.GetNumValueIn(gate),
EcmaOpcode::CALLARGS2_IMM8_V8_V8));
GateRef newTarget = builder_.Undefined();
GateRef thisObj = builder_.Undefined();
GateRef a0 = acc_.GetValueIn(gate, 0);
GateRef a1 = acc_.GetValueIn(gate, 1); // 1:first parameter
std::vector<GateRef> argsFastCall { glue_, func, thisObj, a0, a1 };
std::vector<GateRef> args { glue_, actualArgc, func, newTarget, thisObj, a0, a1 };
CheckCallTargetAndLowerCall(gate, func, funcGt, funcType, args, argsFastCall);
LowerTypedCall(gate, func, actualArgc, funcType, 2); // 2: 2 params
}
void TSHCRLowering::LowerTypedCallArg3(GateRef gate)
@ -957,21 +943,9 @@ void TSHCRLowering::LowerTypedCallArg3(GateRef gate)
if (!tsManager_->IsFunctionTypeKind(funcType)) {
return;
}
GlobalTSTypeRef funcGt = funcType.GetGTRef();
uint32_t len = tsManager_->GetFunctionTypeLength(funcGt);
if (len != 3) { // 3: 3 params
return;
}
GateRef actualArgc = builder_.Int64(BytecodeCallArgc::ComputeCallArgc(acc_.GetNumValueIn(gate),
EcmaOpcode::CALLARGS3_IMM8_V8_V8_V8));
GateRef newTarget = builder_.Undefined();
GateRef thisObj = builder_.Undefined();
GateRef a0 = acc_.GetValueIn(gate, 0);
GateRef a1 = acc_.GetValueIn(gate, 1);
GateRef a2 = acc_.GetValueIn(gate, 2);
std::vector<GateRef> argsFastCall { glue_, func, thisObj, a0, a1, a2 };
std::vector<GateRef> args { glue_, actualArgc, func, newTarget, thisObj, a0, a1, a2 };
CheckCallTargetAndLowerCall(gate, func, funcGt, funcType, args, argsFastCall);
LowerTypedCall(gate, func, actualArgc, funcType, 3); // 3: 3 params
}
void TSHCRLowering::LowerTypedCallrange(GateRef gate)
@ -988,28 +962,27 @@ void TSHCRLowering::LowerTypedCallrange(GateRef gate)
if (!tsManager_->IsFunctionTypeKind(funcType)) {
return;
}
LowerTypedCall(gate, func, actualArgc, funcType, argc);
}
void TSHCRLowering::LowerTypedCall(GateRef gate, GateRef func, GateRef actualArgc, GateType funcType, uint32_t argc)
{
GlobalTSTypeRef funcGt = funcType.GetGTRef();
uint32_t len = tsManager_->GetFunctionTypeLength(funcGt);
if (len != static_cast<uint32_t>(argc)) {
return;
}
GateRef newTarget = builder_.Undefined();
GateRef thisObj = builder_.Undefined();
vec.emplace_back(glue_);
vec.emplace_back(actualArgc);
vec.emplace_back(func);
vec.emplace_back(newTarget);
vec.emplace_back(thisObj);
for (size_t i = 0; i < argc; i++) {
vec.emplace_back(acc_.GetValueIn(gate, i));
std::vector<GateRef> argsFastCall { glue_, func, thisObj};
std::vector<GateRef> args { glue_, actualArgc, func, newTarget, thisObj };
for (uint32_t i = 0; i < argc; i++) {
GateRef value = acc_.GetValueIn(gate, i);
argsFastCall.emplace_back(value);
args.emplace_back(value);
}
vec1.emplace_back(glue_);
vec1.emplace_back(func);
vec1.emplace_back(thisObj);
for (size_t i = 0; i < argc; i++) {
vec1.emplace_back(acc_.GetValueIn(gate, i));
for (uint32_t i = argc; i < len; i++) {
argsFastCall.emplace_back(builder_.Undefined());
args.emplace_back(builder_.Undefined());
}
CheckCallTargetAndLowerCall(gate, func, funcGt, funcType, vec, vec1);
CheckCallTargetAndLowerCall(gate, func, funcGt, funcType, args, argsFastCall);
}
bool TSHCRLowering::IsLoadVtable(GateRef func)
@ -1021,17 +994,12 @@ bool TSHCRLowering::IsLoadVtable(GateRef func)
return true;
}
bool TSHCRLowering::CanOptimizeAsFastCall(GateRef func, uint32_t len)
bool TSHCRLowering::CanOptimizeAsFastCall(GateRef func)
{
GateType funcType = acc_.GetGateType(func);
if (!tsManager_->IsFunctionTypeKind(funcType)) {
return false;
}
GlobalTSTypeRef funcGt = funcType.GetGTRef();
uint32_t length = tsManager_->GetFunctionTypeLength(funcGt);
if (len != length) {
return false;
}
auto op = acc_.GetOpCode(func);
if (op != OpCode::LOAD_PROPERTY || !acc_.IsVtable(func)) {
return false;
@ -1061,27 +1029,18 @@ void TSHCRLowering::LowerTypedCallthis0(GateRef gate)
// 2: number of value inputs
ASSERT(acc_.GetNumValueIn(gate) == 2);
GateRef func = acc_.GetValueIn(gate, 1);
if (!CanOptimizeAsFastCall(func, 0)) {
if (!CanOptimizeAsFastCall(func)) {
return;
}
GateRef actualArgc = builder_.Int64(BytecodeCallArgc::ComputeCallArgc(acc_.GetNumValueIn(gate),
EcmaOpcode::CALLTHIS0_IMM8_V8));
GateRef newTarget = builder_.Undefined();
GateRef thisObj = acc_.GetValueIn(gate, 0);
GateType funcType = acc_.GetGateType(func);
GlobalTSTypeRef funcGt = funcType.GetGTRef();
std::vector<GateRef> argsFastCall { glue_, func, thisObj };
std::vector<GateRef> args { glue_, actualArgc, func, newTarget, thisObj };
CheckThisCallTargetAndLowerCall(gate, func, funcGt, funcType, args, argsFastCall);
LowerTypedThisCall(gate, func, actualArgc, 1);
}
void TSHCRLowering::LowerTypedCallthis1(GateRef gate)
{
// 3: number of value inputs
ASSERT(acc_.GetNumValueIn(gate) == 3);
GateRef thisObj = acc_.GetValueIn(gate, 0);
GateRef a0 = acc_.GetValueIn(gate, 1); // 1:parameter index
GateType a0Type = acc_.GetGateType(a0);
GateRef func = acc_.GetValueIn(gate, 2); // 2:function
@ -1090,17 +1049,12 @@ void TSHCRLowering::LowerTypedCallthis1(GateRef gate)
AddProfiling(gate);
SpeculateCallBuiltin(gate, func, a0, id);
} else {
if (!CanOptimizeAsFastCall(func, 1)) {
if (!CanOptimizeAsFastCall(func)) {
return;
}
GateType funcType = acc_.GetGateType(func);
GateRef actualArgc = builder_.Int64(BytecodeCallArgc::ComputeCallArgc(acc_.GetNumValueIn(gate),
EcmaOpcode::CALLTHIS1_IMM8_V8_V8));
GateRef newTarget = builder_.Undefined();
GlobalTSTypeRef funcGt = funcType.GetGTRef();
std::vector<GateRef> argsFastCall { glue_, func, thisObj, a0 };
std::vector<GateRef> args { glue_, actualArgc, func, newTarget, thisObj, a0 };
CheckThisCallTargetAndLowerCall(gate, func, funcGt, funcType, args, argsFastCall);
LowerTypedThisCall(gate, func, actualArgc, 1);
}
}
@ -1109,22 +1063,12 @@ void TSHCRLowering::LowerTypedCallthis2(GateRef gate)
// 4: number of value inputs
ASSERT(acc_.GetNumValueIn(gate) == 4);
GateRef func = acc_.GetValueIn(gate, 3); // 3: func
if (!CanOptimizeAsFastCall(func, 2)) { // 2: 2 params
if (!CanOptimizeAsFastCall(func)) {
return;
}
GateType funcType = acc_.GetGateType(func);
GateRef actualArgc = builder_.Int64(BytecodeCallArgc::ComputeCallArgc(acc_.GetNumValueIn(gate),
EcmaOpcode::CALLTHIS2_IMM8_V8_V8_V8));
GateRef newTarget = builder_.Undefined();
GateRef thisObj = acc_.GetValueIn(gate, 0);
GateRef a0Value = acc_.GetValueIn(gate, 1);
GateRef a1Value = acc_.GetValueIn(gate, 2);
GlobalTSTypeRef funcGt = funcType.GetGTRef();
std::vector<GateRef> argsFastCall { glue_, func, thisObj, a0Value, a1Value };
std::vector<GateRef> args { glue_, actualArgc, func, newTarget, thisObj, a0Value, a1Value };
CheckThisCallTargetAndLowerCall(gate, func, funcGt, funcType, args, argsFastCall);
LowerTypedThisCall(gate, func, actualArgc, 2); // 2: 2 params
}
void TSHCRLowering::LowerTypedCallthis3(GateRef gate)
@ -1132,28 +1076,38 @@ void TSHCRLowering::LowerTypedCallthis3(GateRef gate)
// 5: number of value inputs
ASSERT(acc_.GetNumValueIn(gate) == 5);
GateRef func = acc_.GetValueIn(gate, 4); // 4: func
if (!CanOptimizeAsFastCall(func, 3)) { // 3: 3 params
if (!CanOptimizeAsFastCall(func)) {
return;
}
GateType funcType = acc_.GetGateType(func);
GateRef actualArgc = builder_.Int64(BytecodeCallArgc::ComputeCallArgc(acc_.GetNumValueIn(gate),
EcmaOpcode::CALLTHIS3_IMM8_V8_V8_V8_V8));
LowerTypedThisCall(gate, func, actualArgc, 3); // 3: 3 params
}
void TSHCRLowering::LowerTypedThisCall(GateRef gate, GateRef func, GateRef actualArgc, uint32_t argc)
{
GateType funcType = acc_.GetGateType(func);
GlobalTSTypeRef funcGt = funcType.GetGTRef();
uint32_t len = tsManager_->GetFunctionTypeLength(funcGt);
GateRef newTarget = builder_.Undefined();
GateRef thisObj = acc_.GetValueIn(gate, 0);
GateRef a0Value = acc_.GetValueIn(gate, 1);
GateRef a1Value = acc_.GetValueIn(gate, 2);
GateRef a2Value = acc_.GetValueIn(gate, 3);
GlobalTSTypeRef funcGt = funcType.GetGTRef();
std::vector<GateRef> argsFastCall { glue_, func, thisObj, a0Value, a1Value, a2Value };
std::vector<GateRef> args { glue_, actualArgc, func, newTarget, thisObj, a0Value, a1Value, a2Value };
std::vector<GateRef> argsFastCall { glue_, func, thisObj};
std::vector<GateRef> args { glue_, actualArgc, func, newTarget, thisObj };
for (uint32_t i = 0; i < argc; i++) {
GateRef value = acc_.GetValueIn(gate, i + 1);
argsFastCall.emplace_back(value);
args.emplace_back(value);
}
for (uint32_t i = argc; i < len; i++) {
argsFastCall.emplace_back(builder_.Undefined());
args.emplace_back(builder_.Undefined());
}
CheckThisCallTargetAndLowerCall(gate, func, funcGt, funcType, args, argsFastCall);
}
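LowerTypedCall and LowerTypedThisCall now share one argument-assembly pattern: push the fixed prefix (glue, actualArgc, func, newTarget, thisObj for the normal path; glue, func, thisObj for the fast path), append the call site's actual arguments, then pad with Undefined up to the declared parameter count. A plain C++ sketch of that assembly, with strings standing in for GateRefs:

#include <iostream>
#include <string>
#include <vector>

// Strings stand in for GateRef values; "undefined" stands in for builder_.Undefined().
std::vector<std::string> BuildArgs(const std::vector<std::string> &prefix,
                                   const std::vector<std::string> &actual,
                                   size_t declaredLen) {
    std::vector<std::string> args = prefix;
    args.insert(args.end(), actual.begin(), actual.end());
    for (size_t i = actual.size(); i < declaredLen; i++) {
        args.emplace_back("undefined");  // pad missing declared parameters
    }
    return args;
}

int main() {
    // callthis1 on a function declared with 3 parameters: a0 is real, the rest are padded.
    auto fast = BuildArgs({"glue", "func", "thisObj"}, {"a0"}, 3);
    auto slow = BuildArgs({"glue", "argc", "func", "newTarget", "thisObj"}, {"a0"}, 3);
    for (const auto &a : fast) std::cout << a << " ";
    std::cout << "\n";
    for (const auto &a : slow) std::cout << a << " ";
    std::cout << "\n";
}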
void TSHCRLowering::LowerTypedCallthisrange(GateRef gate)
{
std::vector<GateRef> vec;
// this
size_t fixedInputsNum = 1;
ASSERT(acc_.GetNumValueIn(gate) - fixedInputsNum >= 0);
@ -1162,42 +1116,10 @@ void TSHCRLowering::LowerTypedCallthisrange(GateRef gate)
EcmaOpcode::CALLTHISRANGE_IMM8_IMM8_V8));
const size_t callTargetIndex = 1; // 1: acc
GateRef func = acc_.GetValueIn(gate, numIns - callTargetIndex); // acc
if (!CanOptimizeAsFastCall(func, numIns - 2)) { // 2 :func and thisobj
if (!CanOptimizeAsFastCall(func)) {
return;
}
GateType funcType = acc_.GetGateType(func);
GateRef thisObj = acc_.GetValueIn(gate, 0);
GateRef newTarget = builder_.Undefined();
GlobalTSTypeRef funcGt = funcType.GetGTRef();
if (!tsManager_->FastCallFlagIsVaild(funcGt)) {
return;
}
if (tsManager_->CanFastCall(funcGt)) {
vec.emplace_back(glue_);
vec.emplace_back(func);
vec.emplace_back(thisObj);
// add common args
for (size_t i = fixedInputsNum; i < numIns - callTargetIndex; i++) {
vec.emplace_back(acc_.GetValueIn(gate, i));
}
builder_.JSFastCallThisTargetTypeCheck(funcType, func);
GateRef result = builder_.TypedFastCall(gate, vec);
acc_.ReplaceGate(gate, builder_.GetState(), builder_.GetDepend(), result);
} else {
vec.emplace_back(glue_);
vec.emplace_back(actualArgc);
vec.emplace_back(func);
vec.emplace_back(newTarget);
vec.emplace_back(thisObj);
// add common args
for (size_t i = fixedInputsNum; i < numIns - callTargetIndex; i++) {
vec.emplace_back(acc_.GetValueIn(gate, i));
}
builder_.JSCallThisTargetTypeCheck(funcType, func);
GateRef result = builder_.TypedCall(gate, vec);
acc_.ReplaceGate(gate, builder_.GetState(), builder_.GetDepend(), result);
}
LowerTypedThisCall(gate, func, actualArgc, numIns - callTargetIndex - fixedInputsNum);
}
void TSHCRLowering::LowerTypedCreateEmptyArray(GateRef gate)

View File

@ -111,8 +111,10 @@ private:
void LowerTypedCallthis2(GateRef gate);
void LowerTypedCallthis3(GateRef gate);
void LowerTypedCallthisrange(GateRef gate);
void LowerTypedCall(GateRef gate, GateRef func, GateRef actualArgc, GateType funcType, uint32_t argc);
void LowerTypedThisCall(GateRef gate, GateRef func, GateRef actualArgc, uint32_t argc);
bool IsLoadVtable(GateRef func);
bool CanOptimizeAsFastCall(GateRef func, uint32_t len);
bool CanOptimizeAsFastCall(GateRef func);
void CheckCallTargetAndLowerCall(GateRef gate, GateRef func, GlobalTSTypeRef funcGt,
GateType funcType, const std::vector<GateRef> &args, const std::vector<GateRef> &argsFastCall);
void CheckThisCallTargetAndLowerCall(GateRef gate, GateRef func, GlobalTSTypeRef funcGt,

View File

@ -62,6 +62,9 @@ void TypeMCRLowering::LowerType(GateRef gate)
case OpCode::INDEX_CHECK:
LowerIndexCheck(gate);
break;
case OpCode::JSCALLTARGET_FROM_DEFINEFUNC_CHECK:
LowerJSCallTargetFromDefineFuncCheck(gate);
break;
case OpCode::JSCALLTARGET_TYPE_CHECK:
LowerJSCallTargetTypeCheck(gate);
break;
@ -907,6 +910,22 @@ void TypeMCRLowering::LowerTypedCallBuitin(GateRef gate)
lowering.LowerTypedCallBuitin(gate);
}
void TypeMCRLowering::LowerJSCallTargetFromDefineFuncCheck(GateRef gate)
{
Environment env(gate, circuit_, &builder_);
auto type = acc_.GetParamGateType(gate);
if (tsManager_->IsFunctionTypeKind(type)) {
GateRef frameState = GetFrameState(gate);
auto func = acc_.GetValueIn(gate, 0);
GateRef check = builder_.IsOptimized(func);
builder_.DeoptCheck(check, frameState, DeoptType::NOTJSCALLTGT);
acc_.ReplaceGate(gate, builder_.GetState(), builder_.GetDepend(), Circuit::NullGate());
} else {
LOG_COMPILER(FATAL) << "this branch is unreachable";
UNREACHABLE();
}
}
void TypeMCRLowering::LowerJSCallTargetTypeCheck(GateRef gate)
{
Environment env(gate, circuit_, &builder_);
@ -918,17 +937,15 @@ void TypeMCRLowering::LowerJSCallTargetTypeCheck(GateRef gate)
auto func = acc_.GetValueIn(gate, 0);
auto methodIndex = acc_.GetValueIn(gate, 1);
GateRef isObj = builder_.TaggedIsHeapObject(func);
GateRef isJsFunc = builder_.IsJSFunction(func);
GateRef isOptimized = builder_.IsOptimized(func);
GateRef funcMethodTarget = builder_.GetMethodFromFunction(func);
GateRef isAot = builder_.HasAotCode(funcMethodTarget);
GateRef checkFunc = builder_.BoolAnd(isObj, isJsFunc);
GateRef checkAot = builder_.BoolAnd(checkFunc, isAot);
GateRef checkFunc = builder_.BoolAnd(isObj, isOptimized);
GateRef methodTarget = GetObjectFromConstPool(jsFunc, methodIndex);
GateRef check = builder_.BoolAnd(checkAot, builder_.Equal(funcMethodTarget, methodTarget));
GateRef check = builder_.BoolAnd(checkFunc, builder_.Equal(funcMethodTarget, methodTarget));
builder_.DeoptCheck(check, frameState, DeoptType::NOTJSCALLTGT);
acc_.ReplaceGate(gate, builder_.GetState(), builder_.GetDepend(), Circuit::NullGate());
} else {
LOG_ECMA(FATAL) << "this branch is unreachable";
LOG_COMPILER(FATAL) << "this branch is unreachable";
UNREACHABLE();
}
}
@ -944,17 +961,15 @@ void TypeMCRLowering::LowerJSFastCallTargetTypeCheck(GateRef gate)
auto func = acc_.GetValueIn(gate, 0);
auto methodIndex = acc_.GetValueIn(gate, 1);
GateRef isObj = builder_.TaggedIsHeapObject(func);
GateRef isJsFunc = builder_.IsJSFunction(func);
GateRef canFastCall = builder_.CanFastCall(func);
GateRef funcMethodTarget = builder_.GetMethodFromFunction(func);
GateRef canFastCall = builder_.HasAotCodeAndFastCall(funcMethodTarget);
GateRef checkFunc = builder_.BoolAnd(isObj, isJsFunc);
GateRef checkAot = builder_.BoolAnd(checkFunc, canFastCall);
GateRef checkFunc = builder_.BoolAnd(isObj, canFastCall);
GateRef methodTarget = GetObjectFromConstPool(jsFunc, methodIndex);
GateRef check = builder_.BoolAnd(checkAot, builder_.Equal(funcMethodTarget, methodTarget));
GateRef check = builder_.BoolAnd(checkFunc, builder_.Equal(funcMethodTarget, methodTarget));
builder_.DeoptCheck(check, frameState, DeoptType::NOTJSFASTCALLTGT);
acc_.ReplaceGate(gate, builder_.GetState(), builder_.GetDepend(), Circuit::NullGate());
} else {
LOG_ECMA(FATAL) << "this branch is unreachable";
LOG_COMPILER(FATAL) << "this branch is unreachable";
UNREACHABLE();
}
}
@ -967,15 +982,12 @@ void TypeMCRLowering::LowerJSCallThisTargetTypeCheck(GateRef gate)
GateRef frameState = GetFrameState(gate);
auto func = acc_.GetValueIn(gate, 0);
GateRef isObj = builder_.TaggedIsHeapObject(func);
GateRef isJsFunc = builder_.IsJSFunction(func);
GateRef checkFunc = builder_.BoolAnd(isObj, isJsFunc);
GateRef funcMethodTarget = builder_.GetMethodFromFunction(func);
GateRef isAot = builder_.HasAotCode(funcMethodTarget);
GateRef check = builder_.BoolAnd(checkFunc, isAot);
GateRef isOptimized = builder_.IsOptimized(func);
GateRef check = builder_.BoolAnd(isObj, isOptimized);
builder_.DeoptCheck(check, frameState, DeoptType::NOTJSCALLTGT);
acc_.ReplaceGate(gate, builder_.GetState(), builder_.GetDepend(), Circuit::NullGate());
} else {
LOG_ECMA(FATAL) << "this branch is unreachable";
LOG_COMPILER(FATAL) << "this branch is unreachable";
UNREACHABLE();
}
}
@ -988,15 +1000,12 @@ void TypeMCRLowering::LowerJSFastCallThisTargetTypeCheck(GateRef gate)
GateRef frameState = GetFrameState(gate);
auto func = acc_.GetValueIn(gate, 0);
GateRef isObj = builder_.TaggedIsHeapObject(func);
GateRef isJsFunc = builder_.IsJSFunction(func);
GateRef checkFunc = builder_.BoolAnd(isObj, isJsFunc);
GateRef funcMethodTarget = builder_.GetMethodFromFunction(func);
GateRef canFastCall = builder_.HasAotCodeAndFastCall(funcMethodTarget);
GateRef check = builder_.BoolAnd(checkFunc, canFastCall);
GateRef canFastCall = builder_.CanFastCall(func);
GateRef check = builder_.BoolAnd(isObj, canFastCall);
builder_.DeoptCheck(check, frameState, DeoptType::NOTJSFASTCALLTGT);
acc_.ReplaceGate(gate, builder_.GetState(), builder_.GetDepend(), Circuit::NullGate());
} else {
LOG_ECMA(FATAL) << "this branch is unreachable";
LOG_COMPILER(FATAL) << "this branch is unreachable";
UNREACHABLE();
}
}

View File

@ -152,6 +152,7 @@ private:
void LowerFloat32ArrayStoreElement(GateRef gate, GateRef glue);
void LowerTypedCallBuitin(GateRef gate);
void LowerCallTargetCheck(GateRef gate);
void LowerJSCallTargetFromDefineFuncCheck(GateRef gate);
void LowerJSCallTargetTypeCheck(GateRef gate);
void LowerJSFastCallTargetTypeCheck(GateRef gate);
void LowerJSCallThisTargetTypeCheck(GateRef gate);

View File

@ -113,7 +113,7 @@ void Deoptimizier::CollectVregs(const std::vector<kungfu::ARKDeopt>& deoptBundle
size_t curDepth = DecodeDeoptDepth(id, shift);
OffsetType vregId = static_cast<OffsetType>(DecodeVregIndex(id, shift));
if (vregId != static_cast<OffsetType>(SpecVregIndex::PC_OFFSET_INDEX)) {
deoptVregs_.insert({{curDepth, vregId}, JSTaggedValue(v)});
deoptVregs_.insert({{curDepth, vregId}, JSHandle<JSTaggedValue>(thread_, JSTaggedValue(v))});
} else {
pc_.insert({curDepth, static_cast<size_t>(v)});
}
@ -320,7 +320,7 @@ bool Deoptimizier::CollectVirtualRegisters(Method* method, FrameWriter *frameWri
int32_t declaredNumArgs = 0;
if (curDepth == 0) {
actualNumArgs = static_cast<int32_t>(GetDeoptValue(curDepth,
static_cast<int32_t>(SpecVregIndex::ACTUAL_ARGC_INDEX)).GetRawData());
static_cast<int32_t>(SpecVregIndex::ACTUAL_ARGC_INDEX)).GetInt());
declaredNumArgs = static_cast<int32_t>(method->GetNumArgsWithCallField());
} else {
// inline method actualNumArgs equal to declaredNumArgs
@ -355,17 +355,20 @@ bool Deoptimizier::CollectVirtualRegisters(Method* method, FrameWriter *frameWri
// [reserved args]
if (method->HaveThisWithCallField()) {
JSTaggedValue value = deoptVregs_.at({curDepth, static_cast<OffsetType>(SpecVregIndex::THIS_OBJECT_INDEX)});
JSTaggedValue value = deoptVregs_.at(
{curDepth, static_cast<OffsetType>(SpecVregIndex::THIS_OBJECT_INDEX)}).GetTaggedValue();
frameWriter->PushValue(value.GetRawData());
virtualIndex--;
}
if (method->HaveNewTargetWithCallField()) {
JSTaggedValue value = deoptVregs_.at({curDepth, static_cast<OffsetType>(SpecVregIndex::NEWTARGET_INDEX)});
JSTaggedValue value = deoptVregs_.at(
{curDepth, static_cast<OffsetType>(SpecVregIndex::NEWTARGET_INDEX)}).GetTaggedValue();
frameWriter->PushValue(value.GetRawData());
virtualIndex--;
}
if (method->HaveFuncWithCallField()) {
JSTaggedValue value = deoptVregs_.at({curDepth, static_cast<OffsetType>(SpecVregIndex::FUNC_INDEX)});
JSTaggedValue value = deoptVregs_.at(
{curDepth, static_cast<OffsetType>(SpecVregIndex::FUNC_INDEX)}).GetTaggedValue();
frameWriter->PushValue(value.GetRawData());
virtualIndex--;
}
@ -525,7 +528,13 @@ void Deoptimizier::UpdateAndDumpDeoptInfo(kungfu::DeoptType type)
method->SetDeoptType(type);
method->SetDeoptThreshold(--deoptThreshold);
} else {
method->ClearAOTFlags();
FunctionKind kind = method->GetFunctionKind();
ObjectFactory *factory = thread_->GetEcmaVM()->GetFactory();
JSHandle<ECMAObject> jsFunc(thread_, CallTarget);
JSHandle<JSHClass> oldHclass(thread_, jsFunc->GetClass());
// replace the hclass with the non-optimized hclass instead of clearing the method's AOT flags
JSHandle<JSHClass> newHClass = factory->GetNonOptimizedHclass(oldHclass, kind);
jsFunc->SetClass(newHClass);
}
}
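Two related deoptimizer changes: vreg values are now kept as JSHandle<JSTaggedValue>, presumably so they remain valid if a GC moves objects while the interpreter frame is rebuilt, and when the deopt threshold is exhausted the function's hclass is swapped to the non-optimized variant (GetNonOptimizedHclass) rather than clearing the method's AOT flags, keeping the new hclass-bit checks consistent. A rough, hypothetical illustration of why a raw value is unsafe across a move while a handle survives (toy types, not the engine's GC):

#include <iostream>
#include <vector>

struct Object { int payload; };

// A "handle" is indirection through a root slot the collector updates,
// so it keeps working after the object is moved; a raw pointer does not.
struct Handle {
    Object **slot;
    Object *operator->() const { return *slot; }
};

int main() {
    std::vector<Object> fromSpace{{42}};
    std::vector<Object*> roots{&fromSpace[0]};   // root table scanned by the "GC"
    Object *raw = &fromSpace[0];                 // like storing a bare JSTaggedValue
    Handle handle{&roots[0]};                    // like JSHandle<JSTaggedValue>

    // Simulated moving collection: copy the object and fix up the root table only.
    std::vector<Object> toSpace{fromSpace[0]};
    roots[0] = &toSpace[0];
    fromSpace[0].payload = -1;                   // from-space contents are now stale

    std::cout << "handle sees " << handle->payload << "\n";  // 42, the live copy
    std::cout << "raw sees " << raw->payload << "\n";        // -1, the stale copy
}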

View File

@ -162,7 +162,7 @@ private:
if (!HasDeoptValue(curDepth, index)) {
return JSTaggedValue::Undefined();
}
return deoptVregs_.at({curDepth, static_cast<OffsetType>(index)});
return deoptVregs_.at({curDepth, static_cast<OffsetType>(index)}).GetTaggedValue();
}
Method* GetMethod(JSTaggedValue &target);
void RelocateCalleeSave();
@ -173,7 +173,7 @@ private:
size_t numCalleeRegs_ {0};
AsmStackContext stackContext_;
std::map<std::pair<size_t, OffsetType>, JSTaggedValue> deoptVregs_;
std::map<std::pair<size_t, OffsetType>, JSHandle<JSTaggedValue>> deoptVregs_;
struct Context context_ {0, 0, {}};
std::unordered_map<size_t, size_t> pc_;
std::unordered_map<size_t, size_t> jumpSize_;

View File

@ -83,7 +83,8 @@ class JSThread;
V(JSTaggedValue, BuiltinsWeakMapFunction, BUILTINS_WEAK_MAP_FUNCTION_INDEX) \
V(JSTaggedValue, BuiltinsWeakSetFunction, BUILTINS_WEAK_SET_FUNCTION_INDEX) \
V(JSTaggedValue, BuiltinsWeakRefFunction, BUILTINS_WEAK_REF_FUNCTION_INDEX) \
V(JSTaggedValue, BuiltinsFinalizationRegistryFunction, BUILTINS_FINALIZATION_REGISTRY_FUNCTION_INDEX) \
V(JSTaggedValue, BuiltinsFinalizationRegistryFunction, \
BUILTINS_FINALIZATION_REGISTRY_FUNCTION_INDEX) \
V(JSTaggedValue, MapPrototype, MAP_PROTOTYPE_INDEX) \
V(JSTaggedValue, MathFunction, MATH_FUNCTION_INDEX) \
V(JSTaggedValue, AtomicsFunction, ATOMICS_FUNCTION_INDEX) \
@ -139,25 +140,42 @@ class JSThread;
V(JSTaggedValue, DynamicImportJob, DYNAMIC_IMPORT_JOB_INDEX) \
V(JSTaggedValue, TemplateMap, TEMPLATE_MAP_INDEX) \
V(JSTaggedValue, FunctionClassWithProto, FUNCTION_CLASS_WITH_PROTO) \
V(JSTaggedValue, FunctionClassWithProtoOptimized, FUNCTION_CLASS_WITH_PROTO_OPTIMIZED) \
V(JSTaggedValue, FunctionClassWithProtoOptimizedWithFastCall, \
FUNCTION_CLASS_WITH_PROTO_OPTIMIZED_WITH_FAST_CALL) \
V(JSTaggedValue, FunctionClassWithoutProto, FUNCTION_CLASS_WITHOUT_PROTO) \
V(JSTaggedValue, FunctionClassWithoutProtoOptimized, FUNCTION_CLASS_WITHOUT_PROTO_OPTIMIZED) \
V(JSTaggedValue, FunctionClassWithoutProtoOptimizedWithFastCall, \
FUNCTION_CLASS_WITHOUT_PROTO_OPTIMIZED_WITH_FAST_CALL) \
V(JSTaggedValue, FunctionClassWithoutName, FUNCTION_CLASS_WITHOUT_NAME) \
V(JSTaggedValue, ArgumentsClass, ARGUMENTS_CLASS) \
V(JSTaggedValue, ArgumentsCallerAccessor, ARGUMENTS_CALLER_ACCESSOR) \
V(JSTaggedValue, ArgumentsCalleeAccessor, ARGUMENTS_CALLEE_ACCESSOR) \
V(JSTaggedValue, AsyncFunctionClass, ASYNC_FUNCTION_CLASS) \
V(JSTaggedValue, AsyncFunctionClassOptimized, ASYNC_FUNCTION_CLASS_OPTIMIZED) \
V(JSTaggedValue, AsyncFunctionClassOptimizedWithFastCall, \
ASYNC_FUNCTION_CLASS_OPTIMIZED_WITH_FAST_CALL) \
V(JSTaggedValue, AsyncAwaitStatusFunctionClass, ASYNC_AWAIT_STATUS_FUNCTION_CLASS) \
V(JSTaggedValue, PromiseReactionFunctionClass, PROMISE_REACTION_FUNCTION_CLASS) \
V(JSTaggedValue, PromiseExecutorFunctionClass, PROMISE_EXECUTOR_FUNCTION_CLASS) \
V(JSTaggedValue, GeneratorFunctionClass, GENERATOR_FUNCTION_CLASS) \
V(JSTaggedValue, GeneratorFunctionClassOptimized, GENERATOR_FUNCTION_CLASS_OPTIMIZED) \
V(JSTaggedValue, GeneratorFunctionClassOptimizedWithFastCall, \
GENERATOR_FUNCTION_CLASS_OPTIMIZED_WITH_FAST_CALL) \
V(JSTaggedValue, AsyncGeneratorFunctionClass, ASYNC_GENERATOR_FUNCTION_CLASS) \
V(JSTaggedValue, AsyncGeneratorFunctionClassOptimized, \
ASYNC_GENERATOR_FUNCTION_CLASS_OPTIMIZED) \
V(JSTaggedValue, AsyncGeneratorFunctionClassOptimizedWithFastCall, \
ASYNC_GENERATOR_FUNCTION_CLASS_OPTIMIZED_WITH_FAST_CALL) \
V(JSTaggedValue, PromiseAllResolveElementFunctionClass, PROMISE_ALL_RESOLVE_ELEMENT_FUNC_CLASS) \
V(JSTaggedValue, PromiseAnyRejectElementFunctionClass, PROMISE_ANY_REJECT_ELEMENT_FUNC_CLASS) \
V(JSTaggedValue, PromiseAllSettledElementFunctionClass, PROMISE_ALL_SETTLED_ELEMENT_FUNC_CLASS) \
V(JSTaggedValue, PromiseFinallyFunctionClass, PROMISE_FINALLY_FUNC_CLASS) \
V(JSTaggedValue, PromiseValueThunkOrThrowerFunctionClass, PROMISE_VALUE_THUNK_OR_THROWER_FUNCTION_CLASS) \
V(JSTaggedValue, PromiseValueThunkOrThrowerFunctionClass, \
PROMISE_VALUE_THUNK_OR_THROWER_FUNCTION_CLASS) \
V(JSTaggedValue, AsyncGeneratorResNextRetProRstFtnClass, \
ASYNC_GENERATOR_RESUME_NEXT_RETURN_PROCESSOR_RST_FTN_CLASS) \
V(JSTaggedValue, AsyncFromSyncIterUnwarpClass, ASYNC_FROM_SYNC_ITER_UNWARP_CLASS) \
ASYNC_GENERATOR_RESUME_NEXT_RETURN_PROCESSOR_RST_FTN_CLASS) \
V(JSTaggedValue, AsyncFromSyncIterUnwarpClass, ASYNC_FROM_SYNC_ITER_UNWARP_CLASS) \
V(JSTaggedValue, ProxyRevocFunctionClass, PROXY_REVOC_FUNCTION_CLASS) \
V(JSTaggedValue, NativeErrorFunctionClass, NATIVE_ERROR_FUNCTION_CLASS) \
V(JSTaggedValue, SpecificTypedArrayFunctionClass, SPERCIFIC_TYPED_ARRAY_FUNCTION_CLASS) \

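Each function kind now carries two extra cached hclasses next to the plain one: an Optimized variant and an OptimizedWithFastCall variant. Every V(...) entry above expands into a GlobalEnv slot plus typed accessors, roughly of the form sketched here (an illustration of the macro expansion, not new declarations):

// generated for V(JSTaggedValue, FunctionClassWithProtoOptimized, FUNCTION_CLASS_WITH_PROTO_OPTIMIZED)
JSHandle<JSTaggedValue> GetFunctionClassWithProtoOptimized() const;
void SetFunctionClassWithProtoOptimized(const JSThread *thread, const JSHandle<JSTaggedValue> &value);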
View File

@ -124,7 +124,8 @@ void TransitionsDictionary::Rehash(const JSThread *thread, TransitionsDictionary
}
// class JSHClass
void JSHClass::Initialize(const JSThread *thread, uint32_t size, JSType type, uint32_t inlinedProps)
void JSHClass::Initialize(const JSThread *thread, uint32_t size, JSType type, uint32_t inlinedProps,
bool isOptimized, bool canFastCall)
{
DISALLOW_GARBAGE_COLLECTION;
ClearBitField();
@ -136,6 +137,11 @@ void JSHClass::Initialize(const JSThread *thread, uint32_t size, JSType type, ui
SetObjectSize(size);
SetLayout(thread, JSTaggedValue::Null());
}
if (type >= JSType::JS_FUNCTION_FIRST && type <= JSType::JS_FUNCTION_LAST) {
SetIsJSFunction(true);
SetIsOptimized(isOptimized);
SetCanFastCall(canFastCall);
}
SetPrototype(thread, JSTaggedValue::Null());
SetObjectType(type);

View File

@ -306,6 +306,9 @@ public:
using GlobalConstOrBuiltinsObjectBit = ClassPrototypeBit::NextFlag; // 23
using IsTSBit = GlobalConstOrBuiltinsObjectBit::NextFlag; // 24
using LevelBit = IsTSBit::NextField<uint32_t, LEVEL_BTTFIELD_NUM>; // 29
using IsJSFunctionBit = LevelBit::NextFlag; // 30
using IsOptimizedBit = IsJSFunctionBit::NextFlag; // 31
using CanFastCallBit = IsOptimizedBit::NextFlag; // 32
static constexpr int DEFAULT_CAPACITY_OF_IN_OBJECTS = 4;
static constexpr int MAX_CAPACITY_OF_OUT_OBJECTS =
@ -327,7 +330,8 @@ public:
inline bool HasReferenceField();
// size need to add inlined property numbers
void Initialize(const JSThread *thread, uint32_t size, JSType type, uint32_t inlinedProps);
void Initialize(const JSThread *thread, uint32_t size, JSType type, uint32_t inlinedProps,
bool isOptimized = false, bool canFastCall = false);
static JSHandle<JSHClass> Clone(const JSThread *thread, const JSHandle<JSHClass> &jshclass,
bool withoutInlinedProperties = false);
@ -451,6 +455,27 @@ public:
IsTSBit::Set<uint32_t>(flag, GetBitFieldAddr());
}
inline void SetIsJSFunction(bool flag) const
{
IsJSFunctionBit::Set<uint32_t>(flag, GetBitFieldAddr());
}
inline void ClearOptimizedFlags() const
{
SetIsOptimized(false);
SetCanFastCall(false);
}
inline void SetIsOptimized(bool flag) const
{
IsOptimizedBit::Set<uint32_t>(flag, GetBitFieldAddr());
}
inline void SetCanFastCall(bool flag) const
{
CanFastCallBit::Set<uint32_t>(flag, GetBitFieldAddr());
}
inline bool IsJSObject() const
{
JSType jsType = GetObjectType();
@ -1170,6 +1195,24 @@ public:
return IsTSBit::Decode(bits);
}
inline bool IsJSFunctionFromBitField() const
{
uint32_t bits = GetBitField();
return IsJSFunctionBit::Decode(bits);
}
inline bool IsOptimized() const
{
uint32_t bits = GetBitField();
return IsOptimizedBit::Decode(bits);
}
inline bool CanFastCall() const
{
uint32_t bits = GetBitField();
return CanFastCallBit::Decode(bits);
}
inline bool IsGeneratorFunction() const
{
return GetObjectType() == JSType::JS_GENERATOR_FUNCTION;

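With the three new bits, a caller can decide the calling convention straight from the hclass. A hedged sketch of such a dispatcher; DispatchCall, CallOptimizedFastPath, CallOptimizedEntry and ExecuteInterpreted are illustrative names, not part of this change:

JSTaggedValue DispatchCall(JSThread *thread, JSFunction *func, EcmaRuntimeCallInfo *info)
{
    JSHClass *hclass = func->GetClass();
    if (hclass->IsJSFunctionFromBitField() && hclass->IsOptimized()) {
        // AOT-compiled: choose the fast-call convention when the hclass allows it,
        // without loading the Method to inspect its call field.
        return hclass->CanFastCall() ? CallOptimizedFastPath(thread, func, info)
                                     : CallOptimizedEntry(thread, func, info);
    }
    return ExecuteInterpreted(thread, info);  // interpreter / slow path
}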
View File

@ -302,8 +302,9 @@ JSHandle<JSFunction> ClassHelper::DefineClassFromExtractor(JSThread *thread, con
nonStaticProperties);
JSHandle<JSObject> prototype = factory->NewOldSpaceJSObject(prototypeHClass);
JSHandle<Method> method(thread, Method::Cast(extractor->GetConstructorMethod().GetTaggedObject()));
constructorHClass->SetIsOptimized(method->IsAotWithCallField());
constructorHClass->SetCanFastCall(method->IsFastCall());
// Allocate to non-movable space for PGO
JSHandle<JSFunction> constructor = factory->NewJSFunctionByHClass(method, constructorHClass,
MemSpaceType::NON_MOVABLE);
@ -390,6 +391,8 @@ JSHandle<JSFunction> ClassHelper::DefineClassWithIHClass(JSThread *thread, const
JSHandle<JSObject> prototype(thread, ihclass->GetProto());
JSHandle<Method> method(thread, Method::Cast(extractor->GetConstructorMethod().GetTaggedObject()));
constructorHClass->SetIsOptimized(method->IsAotWithCallField());
constructorHClass->SetCanFastCall(method->IsFastCall());
JSHandle<JSFunction> constructor = factory->NewJSFunctionByHClass(method, constructorHClass,
MemSpaceType::NON_MOVABLE);

View File

@ -16,6 +16,7 @@
#include "ecmascript/jspandafile/literal_data_extractor.h"
#include "ecmascript/base/string_helper.h"
#include "ecmascript/compiler/aot_file/aot_file_manager.h"
#include "ecmascript/ecma_string.h"
#include "ecmascript/global_env.h"
#include "ecmascript/js_thread.h"
@ -211,30 +212,19 @@ JSHandle<TaggedArray> LiteralDataExtractor::EnumerateLiteralVals(JSThread *threa
JSHandle<JSFunction> LiteralDataExtractor::DefineMethodInLiteral(JSThread *thread, const JSPandaFile *jsPandaFile,
uint32_t offset, JSHandle<ConstantPool> constpool,
FunctionKind kind, uint16_t length,
const CString &entryPoint)
const CString &entryPoint,
bool isLoadedAOT, uint32_t entryIndex)
{
EcmaVM *vm = thread->GetEcmaVM();
ObjectFactory *factory = vm->GetFactory();
JSHandle<GlobalEnv> env = vm->GetGlobalEnv();
auto methodLiteral = jsPandaFile->FindMethodLiteral(offset);
ASSERT(methodLiteral != nullptr);
methodLiteral->SetFunctionKind(kind);
JSHandle<Method> method;
if (jsPandaFile->IsNewVersion()) {
method = Method::Create(thread, jsPandaFile, methodLiteral);
} else {
method = factory->NewMethod(methodLiteral);
method->SetConstantPool(thread, constpool);
}
JSHandle<JSHClass> functionClass;
if (kind == FunctionKind::NORMAL_FUNCTION) {
functionClass = JSHandle<JSHClass>::Cast(env->GetFunctionClassWithoutProto());
} else {
functionClass = JSHandle<JSHClass>::Cast(env->GetGeneratorFunctionClass());
}
JSHandle<JSFunction> jsFunc = factory->NewJSFunctionByHClass(method, functionClass, MemSpaceType::OLD_SPACE);
bool canFastCall = false;
JSHandle<Method> method = factory->NewMethod(
jsPandaFile, methodLiteral, constpool, entryIndex, isLoadedAOT, &canFastCall);
JSHandle<JSFunction> jsFunc = factory->NewJSFunction(method, kind, isLoadedAOT, canFastCall);
jsFunc->SetPropertyInlinedProps(thread, JSFunction::LENGTH_INLINE_PROPERTY_INDEX, JSTaggedValue(length));
CString moduleName = jsPandaFile->GetJSPandaFileDesc();
@ -289,7 +279,8 @@ void LiteralDataExtractor::GetMethodOffsets(const JSPandaFile *jsPandaFile, Enti
void LiteralDataExtractor::ExtractObjectDatas(JSThread *thread, const JSPandaFile *jsPandaFile, EntityId id,
JSMutableHandle<TaggedArray> elements,
JSMutableHandle<TaggedArray> properties,
JSHandle<ConstantPool> constpool, const CString &entry)
JSHandle<ConstantPool> constpool, const CString &entry,
bool isLoadedAOT, JSHandle<AOTLiteralInfo> entryIndexes)
{
ObjectFactory *factory = thread->GetEcmaVM()->GetFactory();
LiteralDataAccessor lda = jsPandaFile->GetLiteralDataAccessor();
@ -300,10 +291,11 @@ void LiteralDataExtractor::ExtractObjectDatas(JSThread *thread, const JSPandaFil
uint32_t ppos = 0;
const uint8_t pairSize = 2;
uint32_t methodId = 0;
int pos = 0;
FunctionKind kind;
lda.EnumerateLiteralVals(
id, [elements, properties, &epos, &ppos, factory, thread, jsPandaFile,
&methodId, &kind, &constpool, &entry](const LiteralValue &value, const LiteralTag &tag) {
id, [elements, properties, &entryIndexes, &pos, &epos, &ppos, factory, thread, jsPandaFile,
&methodId, &kind, &constpool, &entry, &isLoadedAOT](const LiteralValue &value, const LiteralTag &tag) {
JSTaggedValue jt = JSTaggedValue::Null();
bool flag = false;
switch (tag) {
@ -341,8 +333,18 @@ void LiteralDataExtractor::ExtractObjectDatas(JSThread *thread, const JSPandaFil
}
case LiteralTag::METHODAFFILIATE: {
uint16_t length = std::get<uint16_t>(value);
int entryIndex = 0;
bool needSetAotFlag = (isLoadedAOT && (epos % pairSize == 0) && !flag);
if (needSetAotFlag) {
entryIndex = entryIndexes->Get(pos++).GetInt();
// -1 : this jsfunction is a large function
if (entryIndex == -1) {
needSetAotFlag = false;
}
}
JSHandle<JSFunction> jsFunc =
DefineMethodInLiteral(thread, jsPandaFile, methodId, constpool, kind, length, entry);
DefineMethodInLiteral(thread, jsPandaFile, methodId, constpool, kind,
length, entry, needSetAotFlag, entryIndex);
jt = jsFunc.GetTaggedValue();
break;
}
@ -361,7 +363,7 @@ void LiteralDataExtractor::ExtractObjectDatas(JSThread *thread, const JSPandaFil
}
}
if (tag != LiteralTag::METHOD && tag != LiteralTag::GENERATORMETHOD) {
if (epos % pairSize == 0 && !flag) {
if ((epos % pairSize == 0) && !flag) {
properties->Set(thread, ppos++, jt);
} else {
elements->Set(thread, epos++, jt);
@ -372,7 +374,8 @@ void LiteralDataExtractor::ExtractObjectDatas(JSThread *thread, const JSPandaFil
JSHandle<TaggedArray> LiteralDataExtractor::GetDatasIgnoreType(JSThread *thread, const JSPandaFile *jsPandaFile,
EntityId id, JSHandle<ConstantPool> constpool,
const CString &entryPoint)
const CString &entryPoint,
bool isLoadedAOT, JSHandle<AOTLiteralInfo> entryIndexes)
{
ObjectFactory *factory = thread->GetEcmaVM()->GetFactory();
LiteralDataAccessor lda = jsPandaFile->GetLiteralDataAccessor();
@ -381,8 +384,10 @@ JSHandle<TaggedArray> LiteralDataExtractor::GetDatasIgnoreType(JSThread *thread,
uint32_t pos = 0;
uint32_t methodId = 0;
FunctionKind kind;
int index = 0;
lda.EnumerateLiteralVals(
id, [literals, &pos, factory, thread, jsPandaFile, &methodId, &kind, &constpool, &entryPoint]
id, [literals, &pos, factory, thread, jsPandaFile,
&methodId, &kind, &constpool, &entryPoint, &entryIndexes, &index, isLoadedAOT]
(const LiteralValue &value, const LiteralTag &tag) {
JSTaggedValue jt = JSTaggedValue::Null();
switch (tag) {
@ -416,8 +421,17 @@ JSHandle<TaggedArray> LiteralDataExtractor::GetDatasIgnoreType(JSThread *thread,
}
case LiteralTag::METHODAFFILIATE: {
uint16_t length = std::get<uint16_t>(value);
int entryIndex = 0;
bool needSetAotFlag = isLoadedAOT;
if (isLoadedAOT) {
entryIndex = entryIndexes->Get(index++).GetInt();
if (entryIndex == -1) {
needSetAotFlag = false;
}
}
JSHandle<JSFunction> jsFunc =
DefineMethodInLiteral(thread, jsPandaFile, methodId, constpool, kind, length, entryPoint);
DefineMethodInLiteral(thread, jsPandaFile, methodId, constpool,
kind, length, entryPoint, needSetAotFlag, entryIndex);
jt = jsFunc.GetTaggedValue();
break;
}
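For AOT-loaded files, every method literal has a matching slot in the AOTLiteralInfo array; a stored index of -1 means the AOT compiler emitted no code for that function (for example, a function judged too large), so needSetAotFlag is dropped and the function keeps running in the interpreter.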

View File

@ -37,12 +37,15 @@ public:
JSHandle<ConstantPool> constpool, const CString &entryPoint = "");
static void ExtractObjectDatas(JSThread *thread, const JSPandaFile *jsPandaFile, EntityId id,
JSMutableHandle<TaggedArray> elements, JSMutableHandle<TaggedArray> properties,
JSHandle<ConstantPool> constpool, const CString &entryPoint = "");
JSHandle<ConstantPool> constpool, const CString &entryPoint = "",
bool isLoadedAOT = false,
JSHandle<AOTLiteralInfo> entryIndexes = JSHandle<AOTLiteralInfo>());
static JSHandle<TaggedArray> GetDatasIgnoreType(JSThread *thread, const JSPandaFile *jsPandaFile, size_t index,
JSHandle<ConstantPool> constpool, const CString &entryPoint = "");
static JSHandle<TaggedArray> GetDatasIgnoreType(JSThread *thread, const JSPandaFile *jsPandaFile, EntityId id,
JSHandle<ConstantPool> constpool, const CString &entryPoint = "");
JSHandle<ConstantPool> constpool, const CString &entryPoint = "",
bool isLoadedAOT = false, JSHandle<AOTLiteralInfo> entryIndexes = JSHandle<AOTLiteralInfo>());
static JSHandle<TaggedArray> GetDatasIgnoreTypeForClass(JSThread *thread, const JSPandaFile *jsPandaFile,
size_t index, JSHandle<ConstantPool> constpool,
const CString &entryPoint = "");
@ -50,7 +53,8 @@ public:
static JSHandle<JSFunction> DefineMethodInLiteral(JSThread *thread, const JSPandaFile *jsPandaFile,
uint32_t offset, JSHandle<ConstantPool> constpool,
FunctionKind kind, uint16_t length,
const CString &entryPoint = "");
const CString &entryPoint = "",
bool isLoadedAOT = false, uint32_t entryIndex = 0);
static void PUBLIC_API GetMethodOffsets(const JSPandaFile *jsPandaFile, size_t index,
std::vector<uint32_t> &methodOffsets);

View File

@ -205,11 +205,9 @@ public:
EntityId id = constpoolHandle->GetEntityId(index);
MethodLiteral *methodLiteral = jsPandaFile->FindMethodLiteral(id.GetOffset());
ASSERT(methodLiteral != nullptr);
JSHandle<Method> method = Method::Create(thread, jsPandaFile, methodLiteral);
if (isLoadedAOT && hasEntryIndex) {
vm->GetAOTFileManager()->SetAOTFuncEntry(jsPandaFile, *method, entryIndex);
}
ObjectFactory *factory = vm->GetFactory();
JSHandle<Method> method = factory->NewMethod(jsPandaFile, methodLiteral, constpoolHandle,
entryIndex, isLoadedAOT && hasEntryIndex);
constpoolHandle->SetObjectToCache(thread, index, method.GetTaggedValue());
return method.GetTaggedValue();
}
@ -234,14 +232,11 @@ public:
ObjectFactory *factory = vm->GetFactory();
ASSERT(jsPandaFile->IsNewVersion());
panda_file::File::EntityId literalId = constpool->GetEntityId(literal);
bool needSetAotFlag = isLoadedAOT && !entryIndexes.GetTaggedValue().IsUndefined();
JSHandle<TaggedArray> literalArray = LiteralDataExtractor::GetDatasIgnoreType(
thread, jsPandaFile, literalId, constpool, entry);
thread, jsPandaFile, literalId, constpool, entry, needSetAotFlag, entryIndexes);
JSHandle<ClassLiteral> classLiteral = factory->NewClassLiteral();
classLiteral->SetArray(thread, literalArray);
if (isLoadedAOT && !entryIndexes.GetTaggedValue().IsUndefined()) {
vm->GetAOTFileManager()->SetAOTFuncEntryForLiteral(jsPandaFile, *literalArray, *entryIndexes);
}
val = classLiteral.GetTaggedValue();
constpool->SetObjectToCache(thread, literal, val);
}
@ -267,19 +262,18 @@ public:
}
if (val.IsHole()) {
EcmaVM *vm = thread->GetEcmaVM();
JSHandle<ConstantPool> constpoolHandle(thread, constpool);
ASSERT(jsPandaFile->IsNewVersion());
panda_file::File::EntityId id = taggedPool->GetEntityId(index);
bool needSetAotFlag = isLoadedAOT && !entryIndexes.GetTaggedValue().IsUndefined();
// New inst
switch (type) {
case ConstPoolType::OBJECT_LITERAL: {
JSMutableHandle<TaggedArray> elements(thread, JSTaggedValue::Undefined());
JSMutableHandle<TaggedArray> properties(thread, JSTaggedValue::Undefined());
LiteralDataExtractor::ExtractObjectDatas(
thread, jsPandaFile, id, elements, properties, constpoolHandle, entry);
LiteralDataExtractor::ExtractObjectDatas(thread, jsPandaFile, id, elements,
properties, constpoolHandle, entry, needSetAotFlag, entryIndexes);
JSHandle<JSObject> obj = JSObject::CreateObjectFromProperties(thread, properties);
JSMutableHandle<JSTaggedValue> key(thread, JSTaggedValue::Undefined());
JSMutableHandle<JSTaggedValue> valueHandle(thread, JSTaggedValue::Undefined());
@ -292,21 +286,15 @@ public:
valueHandle.Update(elements->Get(i + 1));
JSObject::DefinePropertyByLiteral(thread, obj, key, valueHandle);
}
if (isLoadedAOT && !entryIndexes.GetTaggedValue().IsUndefined()) {
vm->GetAOTFileManager()->SetAOTFuncEntryForLiteral(jsPandaFile, *properties, *entryIndexes);
}
val = obj.GetTaggedValue();
break;
}
case ConstPoolType::ARRAY_LITERAL: {
JSHandle<TaggedArray> literal = LiteralDataExtractor::GetDatasIgnoreType(
thread, jsPandaFile, id, constpoolHandle, entry);
JSHandle<TaggedArray> literal = LiteralDataExtractor::GetDatasIgnoreType(thread, jsPandaFile, id,
constpoolHandle, entry, needSetAotFlag, entryIndexes);
uint32_t length = literal->GetLength();
JSHandle<JSArray> arr(JSArray::ArrayCreate(thread, JSTaggedNumber(length)));
arr->SetElements(thread, literal);
if (isLoadedAOT && !entryIndexes.GetTaggedValue().IsUndefined()) {
vm->GetAOTFileManager()->SetAOTFuncEntryForLiteral(jsPandaFile, *literal, *entryIndexes);
}
val = arr.GetTaggedValue();
break;
}
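With the entry-index lookup folded into ObjectFactory::NewMethod and the literal extractors, the constant pool no longer needs a separate SetAOTFuncEntryForLiteral pass: the AOT entry (and the resulting fast-call flag) is applied at the moment each Method or literal function is created.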

View File

@ -395,6 +395,7 @@ public:
void ClearAOTFlags()
{
SetAotCodeBit(false);
SetIsFastCall(false);
SetDeoptType(kungfu::DeoptType::NOTCHECK);
SetCodeEntryOrLiteral(reinterpret_cast<uintptr_t>(nullptr));
}
@ -460,8 +461,7 @@ public:
private:
static JSHandle<Method> Create(JSThread *thread, const JSPandaFile *jsPandaFile, MethodLiteral *methodLiteral);
friend class ConstantPool;
friend class LiteralDataExtractor;
friend class ObjectFactory;
};
} // namespace panda::ecmascript
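ClearAOTFlags now also resets the fast-call bit, so a method that has fallen back to the interpreter can no longer be entered through the fast-call convention; callers are expected to downgrade the function's hclass at the same time (see GetNonOptimizedHclass in the object factory), keeping the Method flags and the hclass bits consistent.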

View File

@ -380,11 +380,17 @@ void ObjectFactory::NewJSRegExpByteCodeData(const JSHandle<JSRegExp> &regexp, vo
regexp->SetLength(static_cast<uint32_t>(size));
}
JSHandle<JSHClass> ObjectFactory::NewEcmaHClass(uint32_t size, JSType type, const JSHandle<JSTaggedValue> &prototype)
JSHandle<JSHClass> ObjectFactory::NewEcmaHClass(uint32_t size, JSType type, const JSHandle<JSTaggedValue> &prototype,
bool isOptimized, bool canFastCall)
{
JSHandle<JSHClass> newClass = NewEcmaHClass(size, type);
newClass->SetPrototype(thread_, prototype.GetTaggedValue());
return newClass;
NewObjectHook();
uint32_t classSize = JSHClass::SIZE;
auto *newClass = static_cast<JSHClass *>(heap_->AllocateNonMovableOrHugeObject(
JSHClass::Cast(thread_->GlobalConstants()->GetHClassClass().GetTaggedObject()), classSize));
newClass->Initialize(thread_, size, type, JSHClass::DEFAULT_CAPACITY_OF_IN_OBJECTS, isOptimized, canFastCall);
JSHandle<JSHClass> hclass(thread_, newClass);
hclass->SetPrototype(thread_, prototype.GetTaggedValue());
return hclass;
}
JSHandle<JSObject> ObjectFactory::NewJSObject(const JSHandle<JSHClass> &jshclass)
@ -1427,10 +1433,11 @@ JSHandle<JSFunction> ObjectFactory::NewJSFunction(const JSHandle<GlobalEnv> &env
}
JSHandle<JSHClass> ObjectFactory::CreateFunctionClass(FunctionKind kind, uint32_t size, JSType type,
const JSHandle<JSTaggedValue> &prototype)
const JSHandle<JSTaggedValue> &prototype,
bool isOptimized, bool canFastCall)
{
const GlobalEnvConstants *globalConst = thread_->GlobalConstants();
JSHandle<JSHClass> functionClass = NewEcmaHClass(size, type, prototype);
JSHandle<JSHClass> functionClass = NewEcmaHClass(size, type, prototype, isOptimized, canFastCall);
{
functionClass->SetCallable(true);
// FunctionKind = BASE_CONSTRUCTOR
@ -1606,6 +1613,22 @@ JSHandle<Method> ObjectFactory::NewMethod(const MethodLiteral *methodLiteral, Me
return method;
}
JSHandle<Method> ObjectFactory::NewMethod(const JSPandaFile *jsPandaFile, MethodLiteral *methodLiteral,
JSHandle<ConstantPool> constpool, uint32_t entryIndex, bool needSetAotFlag, bool *canFastCall)
{
JSHandle<Method> method;
if (jsPandaFile->IsNewVersion()) {
method = Method::Create(thread_, jsPandaFile, methodLiteral);
} else {
method = NewMethod(methodLiteral);
method->SetConstantPool(thread_, constpool);
}
if (needSetAotFlag) {
vm_->GetAOTFileManager()->SetAOTFuncEntry(jsPandaFile, *method, entryIndex, canFastCall);
}
return method;
}
JSHandle<JSFunction> ObjectFactory::NewJSNativeErrorFunction(const JSHandle<GlobalEnv> &env, const void *nativeFunc)
{
JSHandle<Method> target = NewMethodForNativeFunction(nativeFunc, FunctionKind::BUILTIN_CONSTRUCTOR);
@ -4077,4 +4100,204 @@ JSHandle<ClassLiteral> ObjectFactory::NewClassLiteral()
return classLiteral;
}
JSHandle<JSFunction> ObjectFactory::NewJSFunction(const JSHandle<Method> &methodHandle)
{
JSHandle<GlobalEnv> env = vm_->GetGlobalEnv();
FunctionKind kind = methodHandle->GetFunctionKind();
JSHandle<JSHClass> hclass;
switch (kind) {
case FunctionKind::NORMAL_FUNCTION:
case FunctionKind::BASE_CONSTRUCTOR: {
if (methodHandle->IsAotWithCallField()) {
if (methodHandle->IsFastCall()) {
hclass = JSHandle<JSHClass>::Cast(env->GetFunctionClassWithProtoOptimizedWithFastCall());
} else {
hclass = JSHandle<JSHClass>::Cast(env->GetFunctionClassWithProtoOptimized());
}
} else {
hclass = JSHandle<JSHClass>::Cast(env->GetFunctionClassWithProto());
}
break;
}
case FunctionKind::ARROW_FUNCTION: {
if (methodHandle->IsAotWithCallField()) {
if (methodHandle->IsFastCall()) {
hclass = JSHandle<JSHClass>::Cast(env->GetFunctionClassWithoutProtoOptimizedWithFastCall());
} else {
hclass = JSHandle<JSHClass>::Cast(env->GetFunctionClassWithoutProtoOptimized());
}
} else {
hclass = JSHandle<JSHClass>::Cast(env->GetFunctionClassWithoutProto());
}
break;
}
case FunctionKind::GENERATOR_FUNCTION: {
if (methodHandle->IsAotWithCallField()) {
if (methodHandle->IsFastCall()) {
hclass = JSHandle<JSHClass>::Cast(env->GetGeneratorFunctionClassOptimizedWithFastCall());
} else {
hclass = JSHandle<JSHClass>::Cast(env->GetGeneratorFunctionClassOptimized());
}
} else {
hclass = JSHandle<JSHClass>::Cast(env->GetGeneratorFunctionClass());
}
break;
}
case FunctionKind::CONCURRENT_FUNCTION:
case FunctionKind::ASYNC_FUNCTION: {
if (methodHandle->IsAotWithCallField()) {
if (methodHandle->IsFastCall()) {
hclass = JSHandle<JSHClass>::Cast(env->GetAsyncFunctionClassOptimizedWithFastCall());
} else {
hclass = JSHandle<JSHClass>::Cast(env->GetAsyncFunctionClassOptimized());
}
} else {
hclass = JSHandle<JSHClass>::Cast(env->GetAsyncFunctionClass());
}
break;
}
case FunctionKind::ASYNC_GENERATOR_FUNCTION: {
if (methodHandle->IsAotWithCallField()) {
if (methodHandle->IsFastCall()) {
hclass = JSHandle<JSHClass>::Cast(env->GetAsyncGeneratorFunctionClassOptimizedWithFastCall());
} else {
hclass = JSHandle<JSHClass>::Cast(env->GetAsyncGeneratorFunctionClassOptimized());
}
} else {
hclass = JSHandle<JSHClass>::Cast(env->GetAsyncGeneratorFunctionClass());
}
break;
}
case FunctionKind::ASYNC_ARROW_FUNCTION: {
if (methodHandle->IsAotWithCallField()) {
if (methodHandle->IsFastCall()) {
hclass = JSHandle<JSHClass>::Cast(env->GetAsyncFunctionClassOptimizedWithFastCall());
} else {
hclass = JSHandle<JSHClass>::Cast(env->GetAsyncFunctionClassOptimized());
}
} else {
hclass = JSHandle<JSHClass>::Cast(env->GetAsyncFunctionClass());
}
break;
}
default:
LOG_ECMA(FATAL) << "this branch is unreachable";
UNREACHABLE();
}
JSHandle<JSFunction> jsfunc = NewJSFunctionByHClass(methodHandle, hclass);
ASSERT_NO_ABRUPT_COMPLETION(thread_);
return jsfunc;
}
JSHandle<JSFunction> ObjectFactory::NewJSFunction(const JSHandle<Method> &methodHandle,
const JSHandle<JSTaggedValue> &homeObject)
{
ASSERT(homeObject->IsECMAObject());
JSHandle<GlobalEnv> env = vm_->GetGlobalEnv();
JSHandle<JSHClass> hclass;
if (methodHandle->IsAotWithCallField()) {
if (methodHandle->IsFastCall()) {
hclass = JSHandle<JSHClass>::Cast(env->GetFunctionClassWithoutProtoOptimizedWithFastCall());
} else {
hclass = JSHandle<JSHClass>::Cast(env->GetFunctionClassWithoutProtoOptimized());
}
} else {
hclass = JSHandle<JSHClass>::Cast(env->GetFunctionClassWithoutProto());
}
JSHandle<JSFunction> jsFunc = NewJSFunctionByHClass(methodHandle, hclass);
jsFunc->SetHomeObject(thread_, homeObject);
ASSERT_NO_ABRUPT_COMPLETION(thread_);
return jsFunc;
}
JSHandle<JSFunction> ObjectFactory::NewJSFunction(const JSHandle<Method> &methodHandle, FunctionKind kind,
bool isOptimized, bool canFastCall)
{
JSHandle<JSHClass> functionClass;
JSHandle<GlobalEnv> env = vm_->GetGlobalEnv();
if (isOptimized) {
if (kind == FunctionKind::NORMAL_FUNCTION) {
if (canFastCall) {
functionClass = JSHandle<JSHClass>::Cast(env->GetFunctionClassWithoutProtoOptimizedWithFastCall());
} else {
functionClass = JSHandle<JSHClass>::Cast(env->GetFunctionClassWithoutProtoOptimized());
}
} else {
if (canFastCall) {
functionClass = JSHandle<JSHClass>::Cast(env->GetGeneratorFunctionClassOptimizedWithFastCall());
} else {
functionClass = JSHandle<JSHClass>::Cast(env->GetGeneratorFunctionClassOptimized());
}
}
} else {
if (kind == FunctionKind::NORMAL_FUNCTION) {
functionClass = JSHandle<JSHClass>::Cast(env->GetFunctionClassWithoutProto());
} else {
functionClass = JSHandle<JSHClass>::Cast(env->GetGeneratorFunctionClass());
}
}
return NewJSFunctionByHClass(methodHandle, functionClass, MemSpaceType::OLD_SPACE);
}
JSHandle<JSHClass> ObjectFactory::GetNonOptimizedHclass(JSHandle<JSHClass> oldHClassHandle, FunctionKind kind)
{
JSHandle<GlobalEnv> env = vm_->GetGlobalEnv();
JSHandle<JSTaggedValue> oldHclass(oldHClassHandle);
switch (kind) {
case FunctionKind::NORMAL_FUNCTION:
case FunctionKind::BASE_CONSTRUCTOR: {
if (oldHclass == env->GetFunctionClassWithProtoOptimizedWithFastCall() ||
oldHclass == env->GetFunctionClassWithProtoOptimized()) {
return JSHandle<JSHClass>::Cast(env->GetFunctionClassWithProto());
} else if (oldHclass == env->GetFunctionClassWithoutProtoOptimizedWithFastCall() ||
oldHclass == env->GetFunctionClassWithoutProtoOptimized()) {
return JSHandle<JSHClass>::Cast(env->GetFunctionClassWithoutProto());
}
break;
}
case FunctionKind::ARROW_FUNCTION: {
if (oldHclass == env->GetFunctionClassWithoutProtoOptimizedWithFastCall() ||
oldHclass == env->GetFunctionClassWithoutProtoOptimized()) {
return JSHandle<JSHClass>::Cast(env->GetFunctionClassWithoutProto());
}
break;
}
case FunctionKind::GENERATOR_FUNCTION: {
if (oldHclass == env->GetGeneratorFunctionClassOptimizedWithFastCall() ||
oldHclass == env->GetGeneratorFunctionClassOptimized()) {
return JSHandle<JSHClass>::Cast(env->GetGeneratorFunctionClass());
}
break;
}
case FunctionKind::CONCURRENT_FUNCTION:
case FunctionKind::ASYNC_FUNCTION: {
if (oldHclass == env->GetAsyncFunctionClassOptimizedWithFastCall() ||
oldHclass == env->GetAsyncFunctionClassOptimized()) {
return JSHandle<JSHClass>::Cast(env->GetAsyncFunctionClass());
}
break;
}
case FunctionKind::ASYNC_GENERATOR_FUNCTION: {
if (oldHclass == env->GetAsyncGeneratorFunctionClassOptimizedWithFastCall() ||
oldHclass == env->GetAsyncGeneratorFunctionClassOptimized()) {
return JSHandle<JSHClass>::Cast(env->GetAsyncGeneratorFunctionClass());
}
break;
}
case FunctionKind::ASYNC_ARROW_FUNCTION: {
if (oldHclass == env->GetAsyncFunctionClassOptimizedWithFastCall() ||
oldHclass == env->GetAsyncFunctionClassOptimized()) {
return JSHandle<JSHClass>::Cast(env->GetAsyncFunctionClass());
}
break;
}
default:
break;
}
// this hclass is not one of the pre-initialized function hclasses, so clone it and clear the optimized flags on the copy
JSHandle<JSHClass> newHClass = JSHClass::Clone(thread_, oldHClassHandle);
newHClass->ClearOptimizedFlags();
return newHClass;
}
} // namespace panda::ecmascript
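The per-kind selection above always picks between three flavours of the same hclass: plain, Optimized and OptimizedWithFastCall, driven only by the method's AOT and fast-call bits. A condensed sketch for one kind (the helper name SelectAsyncFunctionClass is illustrative):

JSHandle<JSHClass> SelectAsyncFunctionClass(const JSHandle<GlobalEnv> &env, const JSHandle<Method> &method)
{
    if (!method->IsAotWithCallField()) {
        return JSHandle<JSHClass>::Cast(env->GetAsyncFunctionClass());
    }
    return method->IsFastCall()
        ? JSHandle<JSHClass>::Cast(env->GetAsyncFunctionClassOptimizedWithFastCall())
        : JSHandle<JSHClass>::Cast(env->GetAsyncFunctionClassOptimized());
}

GetNonOptimizedHclass walks the same mapping in reverse; only when the incoming hclass is not one of the cached ones does it fall back to cloning the hclass and clearing the optimized flags on the copy.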

View File

@ -226,6 +226,14 @@ public:
// use for method
JSHandle<JSFunction> NewJSFunction(const JSHandle<GlobalEnv> &env, const JSHandle<Method> &method);
JSHandle<JSFunction> NewJSFunction(const JSHandle<Method> &methodHandle);
JSHandle<JSFunction> NewJSFunction(const JSHandle<Method> &methodHandle,
const JSHandle<JSTaggedValue> &homeObject);
JSHandle<JSFunction> NewJSFunction(const JSHandle<Method> &methodHandle, FunctionKind kind,
bool isOptimized, bool canFastCall);
JSHandle<JSFunction> NewJSNativeErrorFunction(const JSHandle<GlobalEnv> &env, const void *nativeFunc = nullptr);
JSHandle<JSFunction> NewSpecificTypedArrayFunction(const JSHandle<GlobalEnv> &env,
@ -476,6 +484,9 @@ public:
FunctionKind kind = FunctionKind::NORMAL_FUNCTION);
JSHandle<Method> NewMethod(const MethodLiteral *methodLiteral, MemSpaceType spaceType = OLD_SPACE);
JSHandle<Method> NewMethod(const JSPandaFile *jsPandaFile, MethodLiteral *methodLiteral,
JSHandle<ConstantPool> constpool, uint32_t entryIndex, bool needSetAotFlag, bool *canFastCall = nullptr);
// used for creating jsobject by constructor
JSHandle<JSObject> NewJSObjectByConstructor(const JSHandle<JSFunction> &constructor,
const JSHandle<JSTaggedValue> &newTarget);
@ -535,7 +546,8 @@ public:
JSHandle<JSObject> NewJSObject(const JSHandle<JSHClass> &jshclass);
// used for creating jshclass in Builtins, Function, Class_Linker
JSHandle<JSHClass> NewEcmaHClass(uint32_t size, JSType type, const JSHandle<JSTaggedValue> &prototype);
JSHandle<JSHClass> NewEcmaHClass(uint32_t size, JSType type, const JSHandle<JSTaggedValue> &prototype,
bool isOptimized = false, bool canFastCall = false);
// It is used to provide iterators for non ECMA standard jsapi containers.
JSHandle<JSAPIPlainArray> NewJSAPIPlainArray(uint32_t capacity);
@ -615,6 +627,7 @@ public:
// ---------------------------------------Used by AOT------------------------------------------------
JSHandle<AOTLiteralInfo> NewAOTLiteralInfo(uint32_t length, JSTaggedValue initVal = JSTaggedValue::Hole());
JSHandle<VTable> NewVTable(uint32_t length, JSTaggedValue initVal = JSTaggedValue::Hole());
JSHandle<JSHClass> GetNonOptimizedHclass(JSHandle<JSHClass> oldHClassHandle, FunctionKind kind);
private:
friend class GlobalEnv;
@ -660,7 +673,8 @@ private:
JSHandle<JSHClass> CreateObjectClass(const JSHandle<TaggedArray> &keys, const JSHandle<TaggedArray> &values);
JSHandle<JSHClass> CreateObjectClass(const JSHandle<TaggedArray> &properties, size_t length);
JSHandle<JSHClass> CreateFunctionClass(FunctionKind kind, uint32_t size, JSType type,
const JSHandle<JSTaggedValue> &prototype);
const JSHandle<JSTaggedValue> &prototype,
bool isOptimized = false, bool canFastCall = false);
JSHandle<JSHClass> CreateDefaultClassPrototypeHClass(JSHClass *hclass);
JSHandle<JSHClass> CreateDefaultClassConstructorHClass(JSHClass *hclass);

View File

@ -1858,51 +1858,8 @@ JSTaggedValue RuntimeStubs::RuntimeNewObjRange(JSThread *thread, const JSHandle<
JSTaggedValue RuntimeStubs::RuntimeDefinefunc(JSThread *thread, const JSHandle<Method> &methodHandle)
{
JSHandle<GlobalEnv> env = thread->GetEcmaVM()->GetGlobalEnv();
ObjectFactory *factory = thread->GetEcmaVM()->GetFactory();
JSHandle<JSFunction> jsFunc;
FunctionKind kind = methodHandle->GetFunctionKind();
switch (kind) {
case FunctionKind::NORMAL_FUNCTION:
case FunctionKind::BASE_CONSTRUCTOR: {
auto hclass = JSHandle<JSHClass>::Cast(env->GetFunctionClassWithProto());
jsFunc = factory->NewJSFunctionByHClass(methodHandle, hclass);
break;
}
case FunctionKind::ARROW_FUNCTION: {
auto normalClass = JSHandle<JSHClass>::Cast(env->GetFunctionClassWithoutProto());
jsFunc = factory->NewJSFunctionByHClass(methodHandle, normalClass);
break;
}
case FunctionKind::GENERATOR_FUNCTION: {
auto generatorClass = JSHandle<JSHClass>::Cast(env->GetGeneratorFunctionClass());
jsFunc = factory->NewJSFunctionByHClass(methodHandle, generatorClass);
break;
}
case FunctionKind::CONCURRENT_FUNCTION:
case FunctionKind::ASYNC_FUNCTION: {
auto asyncClass = JSHandle<JSHClass>::Cast(env->GetAsyncFunctionClass());
jsFunc = factory->NewJSFunctionByHClass(methodHandle, asyncClass);
break;
}
case FunctionKind::ASYNC_GENERATOR_FUNCTION: {
auto asyncGeneratorClass = JSHandle<JSHClass>::Cast(env->GetAsyncGeneratorFunctionClass());
jsFunc = factory->NewJSFunctionByHClass(methodHandle, asyncGeneratorClass);
break;
}
case FunctionKind::ASYNC_ARROW_FUNCTION: {
// Add hclass for async arrow function
auto asyncClass = JSHandle<JSHClass>::Cast(env->GetAsyncFunctionClass());
jsFunc = factory->NewJSFunctionByHClass(methodHandle, asyncClass);
break;
}
default:
LOG_ECMA(FATAL) << "this branch is unreachable";
UNREACHABLE();
}
ASSERT_NO_ABRUPT_COMPLETION(thread);
return jsFunc.GetTaggedValue();
return factory->NewJSFunction(methodHandle).GetTaggedValue();
}
JSTaggedValue RuntimeStubs::RuntimeCreateRegExpWithLiteral(JSThread *thread,
@ -2019,15 +1976,8 @@ JSTaggedValue RuntimeStubs::RuntimeCreateObjectWithExcludedKeys(JSThread *thread
JSTaggedValue RuntimeStubs::RuntimeDefineMethod(JSThread *thread, const JSHandle<Method> &methodHandle,
const JSHandle<JSTaggedValue> &homeObject)
{
ASSERT(homeObject->IsECMAObject());
JSHandle<GlobalEnv> env = thread->GetEcmaVM()->GetGlobalEnv();
ObjectFactory *factory = thread->GetEcmaVM()->GetFactory();
JSHandle<JSHClass> hclass = JSHandle<JSHClass>::Cast(env->GetFunctionClassWithoutProto());
JSHandle<JSFunction> jsFunc = factory->NewJSFunctionByHClass(methodHandle, hclass);
jsFunc->SetHomeObject(thread, homeObject);
ASSERT_NO_ABRUPT_COMPLETION(thread);
return jsFunc.GetTaggedValue();
return factory->NewJSFunction(methodHandle, homeObject).GetTaggedValue();
}
JSTaggedValue RuntimeStubs::RuntimeCallSpread(JSThread *thread,

View File

@ -360,9 +360,9 @@ void RuntimeStubs::DebugPrint(int fmtMessageId, ...)
va_start(args, fmtMessageId);
std::string result = base::StringHelper::Vformat(format.c_str(), args);
if (MessageString::IsBuiltinsStubMessageString(fmtMessageId)) {
LOG_BUILTINS(DEBUG) << result;
LOG_BUILTINS(ERROR) << result;
} else {
LOG_ECMA(DEBUG) << result;
LOG_ECMA(ERROR) << result;
}
va_end(args);
}
@ -2242,12 +2242,17 @@ void RuntimeStubs::SaveFrameToContext(JSThread *thread, JSHandle<GeneratorContex
regsArray->Set(thread, i, value);
}
context->SetRegsArray(thread, regsArray.GetTaggedValue());
JSTaggedValue function = frameHandler.GetFunction();
Method *method = JSFunction::Cast(function.GetTaggedObject())->GetCallTarget();
JSHandle<JSTaggedValue> function(thread, frameHandler.GetFunction());
Method *method = JSFunction::Cast(function->GetTaggedObject())->GetCallTarget();
if (method->IsAotWithCallField()) {
method->ClearAOTFlags();
FunctionKind kind = method->GetFunctionKind();
JSHandle<JSFunction> jsFunc(function);
JSHandle<JSHClass> oldHclass(thread, jsFunc->GetClass());
// replace the hclass with the non-optimized hclass when the method's AOT flags are cleared
JSHandle<JSHClass> newHClass = factory->GetNonOptimizedHclass(oldHclass, kind);
jsFunc->SetClass(newHClass);
}
context->SetMethod(thread, function);
context->SetMethod(thread, function.GetTaggedValue());
context->SetThis(thread, frameHandler.GetThis());
BytecodeInstruction ins(frameHandler.GetPc());
@ -2288,15 +2293,14 @@ JSTaggedValue RuntimeStubs::CallBoundFunction(EcmaRuntimeCallInfo *info)
DEF_RUNTIME_STUBS(DeoptHandler)
{
RUNTIME_STUBS_HEADER(DeoptHandler);
uintptr_t *args = reinterpret_cast<uintptr_t *>(argv);
size_t depth = GetTArg(argv, argc, 1);
size_t depth = static_cast<size_t>(GetArg(argv, argc, 1).GetInt());
Deoptimizier deopt(thread, depth);
std::vector<kungfu::ARKDeopt> deoptBundle;
deopt.CollectDeoptBundleVec(deoptBundle);
ASSERT(!deoptBundle.empty());
size_t shift = Deoptimizier::ComputeShift(depth);
deopt.CollectVregs(deoptBundle, shift);
kungfu::DeoptType type = static_cast<kungfu::DeoptType>(args[0]);
kungfu::DeoptType type = static_cast<kungfu::DeoptType>(GetArg(argv, argc, 0).GetInt());
deopt.UpdateAndDumpDeoptInfo(type);
return deopt.ConstructAsmInterpretFrame();
}
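Two related adjustments land in this file: SaveFrameToContext now performs the same hclass downgrade as the deoptimizer when it clears a suspended generator's AOT flags (holding the function in a handle across the allocation of the new hclass), and DeoptHandler decodes the deopt type and inline depth from tagged arguments via GetArg(...).GetInt() instead of reinterpreting the raw argv buffer.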

View File

@ -76,6 +76,7 @@ group("ark_aot_ts_test") {
"definencfunc",
"delobjprop",
"deopt",
"deopt_clear_aotflags",
"destructuring",
"div",
"duplicatefunctions",

View File

@ -0,0 +1,18 @@
# Copyright (c) 2022 Huawei Device Co., Ltd.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import("//arkcompiler/ets_runtime/test/test_helper.gni")
host_aot_test_action("deopt_clear_aotflags") {
deps = []
}

View File

@ -0,0 +1,38 @@
/*
* Copyright (c) 2023 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
declare function print(arg:any):void;
function foo(v: number): Object {
function tryHello(v: number): void {
let a: number = 1;
let ret: number = a + v;
print(ret);
}
if (v < 0) {
tryHello.x = 1;
tryHello.y = 1;
return tryHello;
} else {
tryHello.y = 1;
return tryHello;
}
}
const func1 = foo(-1);
const func2 = foo(2);
for (let i = 0; i < 11; i++) {
func1(undefined);
}
func2(3);
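The new test appears to exercise this path end to end: foo produces two tryHello closures with different property shapes, and calling func1(undefined) eleven times makes the AOT-compiled body compute 1 + undefined (printing NaN) and bail out each time, which should exhaust the deopt threshold, clear the AOT flags and downgrade the hclass; the final func2(3) then prints 4, matching the expected output that follows.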

View File

@ -0,0 +1,25 @@
# Copyright (c) 2022 Huawei Device Co., Ltd.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
NaN
NaN
NaN
NaN
NaN
NaN
NaN
NaN
NaN
NaN
NaN
4