mirror of https://gitee.com/openharmony/arkcompiler_ets_runtime (synced 2024-11-23 10:09:54 +00:00)
jit && dfx
Signed-off-by: wangyue <guowanlong@huawei.com>
Change-Id: I4010825e9e665a5c5d6ed45aedb05e0e0bef2098
This commit is contained in: parent 5ff446a42b, commit 9c9fd70e2c
@@ -207,12 +207,23 @@ public:
return static_cast<size_t>(gateCount_ - 1);
}

bool IsOptimizedOrFastJit() const
{
return IsOptimizedJSFunctionFrame() || IsFastJitFunctionFrame();
}

bool IsOptimizedJSFunctionFrame() const
{
return frameType_ == FrameType::OPTIMIZED_JS_FUNCTION_FRAME
|| frameType_ == FrameType::OPTIMIZED_JS_FAST_CALL_FUNCTION_FRAME;
}

bool IsFastJitFunctionFrame() const
{
return frameType_ == FrameType::FASTJIT_FUNCTION_FRAME
|| frameType_ == FrameType::FASTJIT_FAST_CALL_FUNCTION_FRAME;
}

bool GetDebugInfo(GateRef g, size_t &index) const;

bool IsOsr() const
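The new predicate above is what the call sites in the rest of this change switch to: paths that used to be guarded by IsOptimizedJSFunctionFrame() now also cover the new FASTJIT frames. A minimal sketch of the pattern, condensed from the hunks that follow (not the full Circuit class):

    bool Circuit::IsOptimizedOrFastJit() const
    {
        // true for the AOT frames as well as the new FASTJIT_FUNCTION_FRAME /
        // FASTJIT_FAST_CALL_FUNCTION_FRAME added by this commit
        return IsOptimizedJSFunctionFrame() || IsFastJitFunctionFrame();
    }

    // call sites change from
    //     if (GetCircuit()->IsOptimizedJSFunctionFrame() && hirGate != Circuit::NullGate()) { ... }
    // to
    //     if (GetCircuit()->IsOptimizedOrFastJit() && hirGate != Circuit::NullGate()) { ... }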
@@ -802,7 +802,7 @@ GateRef CircuitBuilder::GetObjectFromConstPool(GateRef glue, GateRef hirGate, Ga
Label unshareCpMiss(env_);

// HirGate Can not be a nullGate in Aot
if (GetCircuit()->IsOptimizedJSFunctionFrame() && hirGate == Circuit::NullGate()) {
if (GetCircuit()->IsOptimizedOrFastJit() && hirGate == Circuit::NullGate()) {
hirGate = index;
}
// Call runtime to create unshared constpool when current context's cache is hole in multi-thread.
@@ -151,7 +151,8 @@ public:

virtual void GenerateCode(Circuit *circuit, const ControlFlowGraph &graph, const CompilationConfig *cfg,
const MethodLiteral *methodLiteral, const JSPandaFile *jsPandaFile,
const std::string &methodName, bool enableOptInlining, bool enableBranchProfiling) = 0;
const std::string &methodName, const FrameType frameType,
bool enableOptInlining, bool enableBranchProfiling) = 0;
};

class CodeGenerator {
@@ -174,10 +175,10 @@ public:
}

void Run(Circuit *circuit, const ControlFlowGraph &graph, const CompilationConfig *cfg,
const MethodLiteral *methodLiteral, const JSPandaFile *jsPandaFile,
const MethodLiteral *methodLiteral, const JSPandaFile *jsPandaFile, const FrameType frameType,
bool enableOptInlining, bool enableOptBranchProfiling)
{
impl_->GenerateCode(circuit, graph, cfg, methodLiteral, jsPandaFile, methodName_,
impl_->GenerateCode(circuit, graph, cfg, methodLiteral, jsPandaFile, methodName_, frameType,
enableOptInlining, enableOptBranchProfiling);
}
@@ -284,10 +284,10 @@ void LLVMIRGeneratorImpl::GenerateCodeForStub(Circuit *circuit, const ControlFlo
void LLVMIRGeneratorImpl::GenerateCode(Circuit *circuit, const ControlFlowGraph &graph, const CompilationConfig *cfg,
const panda::ecmascript::MethodLiteral *methodLiteral,
const JSPandaFile *jsPandaFile, const std::string &methodName,
bool enableOptInlining, bool enableOptBranchProfiling)
const FrameType frameType, bool enableOptInlining, bool enableOptBranchProfiling)
{
auto function = module_->AddFunc(methodLiteral, jsPandaFile);
circuit->SetFrameType(FrameType::OPTIMIZED_JS_FUNCTION_FRAME);
circuit->SetFrameType(frameType);
CallSignature::CallConv conv;
if (methodLiteral->IsFastCall()) {
conv = CallSignature::CallConv::CCallConv;
@@ -134,7 +134,7 @@ public:
const CompilationConfig *cfg) override;
void GenerateCode(Circuit *circuit, const ControlFlowGraph &graph, const CompilationConfig *cfg,
const MethodLiteral *methodLiteral, const JSPandaFile *jsPandaFile, const std::string &methodName,
bool enableOptInlining, bool enableBranchProfiling) override;
const FrameType frameType, bool enableOptInlining, bool enableBranchProfiling) override;

bool IsLogEnabled() const
{
@@ -394,12 +394,57 @@ void LLVMIRBuilder::GenPrologue()
SaveFrameTypeOnFrame(frameType, builder_);
}
}
} else if (frameType == FrameType::FASTJIT_FUNCTION_FRAME) {
reservedSlotsSize = FASTJITFunctionFrame::ComputeReservedPcOffset(slotSize_);
LLVMAddTargetDependentFunctionAttr(function_, "frame-reserved-slots",
std::to_string(reservedSlotsSize).c_str());
auto ArgList = circuit_->GetArgRoot();
auto uses = acc_.Uses(ArgList);
for (auto useIt = uses.begin(); useIt != uses.end(); ++useIt) {
int argth = static_cast<int>(acc_.TryGetValue(*useIt));
LLVMValueRef value = LLVMGetParam(function_, argth);
int funcIndex = 0;
if (isFastCallAot_) {
frameType = FrameType::FASTJIT_FAST_CALL_FUNCTION_FRAME;
funcIndex = static_cast<int>(FastCallArgIdx::FUNC);
} else {
funcIndex = static_cast<int>(CommonArgIdx::FUNC);
}
if (argth == funcIndex) {
SaveByteCodePcOnOptJSFuncFrame(value);
SaveJSFuncOnOptJSFuncFrame(value);
SaveFrameTypeOnFrame(frameType, builder_);
}
}
} else {
LOG_COMPILER(FATAL) << "frameType interpret type error !";
ASSERT_PRINT(static_cast<uintptr_t>(frameType), "is not support !");
}
}

void LLVMIRBuilder::SaveByteCodePcOnOptJSFuncFrame(LLVMValueRef value)
{
ASSERT(circuit_->GetFrameType() == FrameType::FASTJIT_FUNCTION_FRAME);
// load method
LLVMValueRef func = LLVMBuildPtrToInt(builder_, value, slotType_, "cast_to_i64");
LLVMValueRef offsetMethod = LLVMConstInt(GetInt64T(), JSFunctionBase::METHOD_OFFSET, false);
LLVMValueRef addrMethod = LLVMBuildAdd(builder_, func, offsetMethod, "");
LLVMValueRef method = LLVMBuildLoad(builder_, addrMethod, "");
// load byteCodePc
LLVMValueRef offsetByteCodePc = LLVMConstInt(GetInt64T(), Method::NATIVE_POINTER_OR_BYTECODE_ARRAY_OFFSET, false);
LLVMValueRef addrByteCodePc = LLVMBuildAdd(builder_, method, offsetByteCodePc, "");
LLVMValueRef byteCodePc = LLVMBuildLoad(builder_, addrByteCodePc, "");
// push byteCodePc
LLVMValueRef llvmFpAddr = CallingFp(module_, builder_, false);
LLVMValueRef frameAddr = LLVMBuildPtrToInt(builder_, llvmFpAddr, slotType_, "cast_int_t");
size_t reservedOffset = FASTJITFunctionFrame::ComputeReservedPcOffset(slotSize_);
LLVMValueRef byteCodePcSlotAddr = LLVMBuildSub(builder_, frameAddr, LLVMConstInt(slotType_,
reservedOffset, false), "");
LLVMValueRef byteCodePcAddr = LLVMBuildIntToPtr(builder_, byteCodePcSlotAddr,
LLVMPointerType(slotType_, 0), "byteCodePc.Addr");
LLVMBuildStore(builder_, byteCodePc, byteCodePcAddr);
}

void LLVMIRBuilder::SaveFrameTypeOnFrame(FrameType frameType, LLVMBuilderRef builder)
{
LLVMValueRef llvmFpAddr = CallingFp(module_, builder, false);
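Read as a straight-line computation, the IR emitted by SaveByteCodePcOnOptJSFuncFrame stores the method's bytecode-array pointer into the reserved pc slot below the JIT frame's frame pointer. A rough C++ sketch of what the generated code does at run time, using only the offsets and helpers named in the hunk (the sketch itself is illustrative, not runtime API):

    // value: the JSFunction argument passed to the JIT-compiled method
    uintptr_t func = reinterpret_cast<uintptr_t>(value);
    uintptr_t method = *reinterpret_cast<uintptr_t *>(func + JSFunctionBase::METHOD_OFFSET);
    uintptr_t byteCodePc = *reinterpret_cast<uintptr_t *>(method + Method::NATIVE_POINTER_OR_BYTECODE_ARRAY_OFFSET);
    size_t reservedOffset = FASTJITFunctionFrame::ComputeReservedPcOffset(slotSize);
    // fp is the frame pointer of the current FASTJIT frame
    *reinterpret_cast<uintptr_t *>(fp - reservedOffset) = byteCodePc;  // pc slot of FASTJITFunctionFrame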
@@ -575,7 +620,8 @@ bool LLVMIRBuilder::IsOptimized() const

bool LLVMIRBuilder::IsOptimizedJSFunction() const
{
return circuit_->GetFrameType() == FrameType::OPTIMIZED_JS_FUNCTION_FRAME;
return circuit_->GetFrameType() == FrameType::OPTIMIZED_JS_FUNCTION_FRAME ||
circuit_->GetFrameType() == FrameType::FASTJIT_FUNCTION_FRAME;
}

void LLVMIRBuilder::VisitRuntimeCall(GateRef gate, const std::vector<GateRef> &inList)
@@ -1318,10 +1364,15 @@ void LLVMIRBuilder::VisitParameter(GateRef gate)

void LLVMIRBuilder::SaveJSFuncOnOptJSFuncFrame(LLVMValueRef value)
{
ASSERT(circuit_->GetFrameType() == FrameType::OPTIMIZED_JS_FUNCTION_FRAME);
ASSERT(IsOptimizedJSFunction());
size_t reservedOffset = 0;
LLVMValueRef llvmFpAddr = CallingFp(module_, builder_, false);
LLVMValueRef frameAddr = LLVMBuildPtrToInt(builder_, llvmFpAddr, slotType_, "cast_int_t");
size_t reservedOffset = OptimizedJSFunctionFrame::ComputeReservedJSFuncOffset(slotSize_);
if (circuit_->GetFrameType() == FrameType::OPTIMIZED_JS_FUNCTION_FRAME) {
reservedOffset = OptimizedJSFunctionFrame::ComputeReservedJSFuncOffset(slotSize_);
} else {
reservedOffset = FASTJITFunctionFrame::ComputeReservedJSFuncOffset(slotSize_);
}
LLVMValueRef frameJSFuncSlotAddr = LLVMBuildSub(builder_, frameAddr, LLVMConstInt(slotType_,
reservedOffset, false), "");
LLVMValueRef jsFuncAddr = LLVMBuildIntToPtr(builder_, frameJSFuncSlotAddr,
@@ -414,6 +414,7 @@ private:
void ComputeArgCountAndExtraInfo(size_t &actualNumArgs, LLVMValueRef &pcOffset, GateRef &frameArgs,
const std::vector<GateRef> &inList, CallExceptionKind kind);
void SaveLexicalEnvOnOptJSFuncFrame(LLVMValueRef value);
void SaveByteCodePcOnOptJSFuncFrame(LLVMValueRef value);
void SaveJSFuncOnOptJSFuncFrame(LLVMValueRef value);
void SaveFrameTypeOnFrame(FrameType frameType, LLVMBuilderRef builder);
void UpdateLeaveFrame(LLVMValueRef glue);
@@ -103,9 +103,10 @@ void LiteCGIRGeneratorImpl::GenerateCodeForStub(Circuit *circuit, const ControlF
void LiteCGIRGeneratorImpl::GenerateCode(Circuit *circuit, const ControlFlowGraph &graph, const CompilationConfig *cfg,
const panda::ecmascript::MethodLiteral *methodLiteral,
const JSPandaFile *jsPandaFile, const std::string &methodName,
bool enableOptInlining, [[maybe_unused]] bool enableBranchProfiling)
const FrameType frameType,
bool enableOptInlining, [[maybe_unused]]bool enableBranchProfiling)
{
circuit->SetFrameType(FrameType::OPTIMIZED_JS_FUNCTION_FRAME);
circuit->SetFrameType(frameType);
CallSignature::CallConv conv;
if (methodLiteral->IsFastCall()) {
conv = CallSignature::CallConv::CCallConv;
@@ -43,7 +43,7 @@ public:
const CompilationConfig *cfg) override;
void GenerateCode(Circuit *circuit, const ControlFlowGraph &graph, const CompilationConfig *cfg,
const MethodLiteral *methodLiteral, const JSPandaFile *jsPandaFile, const std::string &methodName,
bool enableOptInlining, bool enableBranchProfiling) override;
const FrameType frameType, bool enableOptInlining, bool enableBranchProfiling) override;

bool IsLogEnabled() const
{
@@ -310,8 +310,8 @@ void LiteCGIRBuilder::GenPrologue(maple::litecg::Function &function)
reservedSlotsSize = OptimizedJSFunctionFrame::ComputeReservedJSFuncOffset(slotSize_);
lmirBuilder_->SetFuncFrameResverdSlot(reservedSlotsSize);
if (circuit_->IsOsr()) {
SaveFrameTypeOnFrame(methodLiteral_->IsFastCall() ? FrameType::OPTIMIZED_JS_FAST_CALL_FUNCTION_FRAME
: frameType);
SaveFrameTypeOnFrame(methodLiteral_->IsFastCall() ? FrameType::OPTIMIZED_JS_FAST_CALL_FUNCTION_FRAME :
frameType);
return;
}
auto ArgList = circuit_->GetArgRoot();
@@ -331,18 +331,80 @@ void LiteCGIRBuilder::GenPrologue(maple::litecg::Function &function)
SaveFrameTypeOnFrame(frameType);
}
}
} else if (frameType == FrameType::FASTJIT_FUNCTION_FRAME) {
reservedSlotsSize = FASTJITFunctionFrame::ComputeReservedPcOffset(slotSize_);
lmirBuilder_->SetFuncFrameResverdSlot(reservedSlotsSize);
if (circuit_->IsOsr()) {
SaveFrameTypeOnFrame(methodLiteral_->IsFastCall() ? FrameType::FASTJIT_FAST_CALL_FUNCTION_FRAME :
frameType);
return;
}
auto ArgList = circuit_->GetArgRoot();
auto uses = acc_.Uses(ArgList);
for (auto useIt = uses.begin(); useIt != uses.end(); ++useIt) {
int argth = static_cast<int>(acc_.TryGetValue(*useIt));
Var &value = lmirBuilder_->GetParam(function, argth);
int funcIndex = 0;
if (methodLiteral_->IsFastCall()) {
frameType = FrameType::FASTJIT_FAST_CALL_FUNCTION_FRAME;
funcIndex = static_cast<int>(FastCallArgIdx::FUNC);
} else {
funcIndex = static_cast<int>(CommonArgIdx::FUNC);
}
if (argth == funcIndex) {
SaveByteCodePcOnOptJSFuncFrame(value);
SaveJSFuncOnOptJSFuncFrame(value);
SaveFrameTypeOnFrame(frameType);
}
}
} else {
LOG_COMPILER(FATAL) << "frameType interpret type error !";
ASSERT_PRINT(static_cast<uintptr_t>(frameType), "is not support !");
}
}

void LiteCGIRBuilder::SaveJSFuncOnOptJSFuncFrame(maple::litecg::Var &value)
void LiteCGIRBuilder::SaveByteCodePcOnOptJSFuncFrame(maple::litecg::Var &value)
{
ASSERT(circuit_->GetFrameType() == FrameType::OPTIMIZED_JS_FUNCTION_FRAME);
ASSERT(circuit_->GetFrameType() == FrameType::FASTJIT_FUNCTION_FRAME);
// load method
Expr func = lmirBuilder_->Cvt(lmirBuilder_->i64PtrType, slotType_, lmirBuilder_->GenExprFromVar(value));
Expr offsetMethod = lmirBuilder_->ConstVal(
lmirBuilder_->CreateIntConst(lmirBuilder_->i64PtrType, JSFunctionBase::METHOD_OFFSET));
Expr addrMethod = lmirBuilder_->Add(lmirBuilder_->i64PtrType, func, offsetMethod);
Expr method = lmirBuilder_->Iread(
lmirBuilder_->i64PtrType, addrMethod, lmirBuilder_->CreatePtrType(lmirBuilder_->i64PtrType));
// load byteCodePc
Expr offsetByteCodePc = lmirBuilder_->ConstVal(
lmirBuilder_->CreateIntConst(lmirBuilder_->i64PtrType, Method::NATIVE_POINTER_OR_BYTECODE_ARRAY_OFFSET));
Expr addrByteCodePc = lmirBuilder_->Add(lmirBuilder_->i64PtrType, method, offsetByteCodePc);
Expr byteCodePc = lmirBuilder_->Iread(
lmirBuilder_->i64PtrType, addrByteCodePc, lmirBuilder_->CreatePtrType(lmirBuilder_->i64PtrType));
// push byteCodePc
Expr fpAddr = CallingFp(false);
Expr frameAddr = lmirBuilder_->Cvt(fpAddr.GetType(), lmirBuilder_->i64Type, fpAddr);
size_t reservedOffset = OptimizedJSFunctionFrame::ComputeReservedJSFuncOffset(slotSize_);
size_t reservedOffset = FASTJITFunctionFrame::ComputeReservedPcOffset(slotSize_);
Expr frameByteCodePcSlotAddr =
lmirBuilder_->Sub(frameAddr.GetType(), frameAddr,
lmirBuilder_->ConstVal(lmirBuilder_->CreateIntConst(slotType_, reservedOffset)));
Expr byteCodePcAddr =
lmirBuilder_->Cvt(frameByteCodePcSlotAddr.GetType(),
lmirBuilder_->CreatePtrType(slotType_), frameByteCodePcSlotAddr);
auto &stmt = lmirBuilder_->Iassign(byteCodePc, byteCodePcAddr, byteCodePcAddr.GetType());
lmirBuilder_->AppendStmt(GetFirstBB(), stmt);
}

void LiteCGIRBuilder::SaveJSFuncOnOptJSFuncFrame(maple::litecg::Var &value)
{
ASSERT(IsOptimizedJSFunction());
Expr fpAddr = CallingFp(false);
Expr frameAddr = lmirBuilder_->Cvt(fpAddr.GetType(), lmirBuilder_->i64Type, fpAddr);
size_t reservedOffset = 0;
if (circuit_->GetFrameType() == FrameType::OPTIMIZED_JS_FUNCTION_FRAME) {
reservedOffset = OptimizedJSFunctionFrame::ComputeReservedJSFuncOffset(slotSize_);
} else {
reservedOffset = FASTJITFunctionFrame::ComputeReservedJSFuncOffset(slotSize_);
}

Expr frameJSFuncSlotAddr =
lmirBuilder_->Sub(frameAddr.GetType(), frameAddr,
lmirBuilder_->ConstVal(lmirBuilder_->CreateIntConst(slotType_, reservedOffset)));
@@ -961,7 +1023,8 @@ Expr LiteCGIRBuilder::GetFunction(BB &bb, Expr glue, const CallSignature *signat

bool LiteCGIRBuilder::IsOptimizedJSFunction() const
{
return circuit_->GetFrameType() == FrameType::OPTIMIZED_JS_FUNCTION_FRAME;
return circuit_->GetFrameType() == FrameType::OPTIMIZED_JS_FUNCTION_FRAME ||
circuit_->GetFrameType() == FrameType::FASTJIT_FUNCTION_FRAME;
}

bool LiteCGIRBuilder::IsOptimized() const
@@ -2042,7 +2105,7 @@ void LiteCGIRBuilder::VisitInitVreg(GateRef gate)
Expr addrFunc = lmirBuilder_->Add(i64Ptr, frame, offsetFunc);
Expr ldrFunc = lmirBuilder_->Iread(i64Ref, addrFunc, lmirBuilder_->CreatePtrType(i64Ptr));
lmirBuilder_->AppendStmt(bb, lmirBuilder_->Regassign(ldrFunc, vreg));
if (circuit_->GetFrameType() == FrameType::OPTIMIZED_JS_FUNCTION_FRAME) {
if (IsOptimizedJSFunction()) {
// reset jsfunc on OptJSFuncFrame
Expr fpAddr = CallingFp(false);
Expr frameAddr = lmirBuilder_->Cvt(fpAddr.GetType(), lmirBuilder_->i64Type, fpAddr);
@@ -205,6 +205,7 @@ private:
void CollectExraCallSiteInfo(std::unordered_map<int, maple::litecg::LiteCGValue> &deoptBundleInfo,
maple::litecg::Expr pcOffset, GateRef frameArgs);
void GenPrologue(maple::litecg::Function &function);
void SaveByteCodePcOnOptJSFuncFrame(maple::litecg::Var &value);
void SaveJSFuncOnOptJSFuncFrame(maple::litecg::Var &value);
void SaveFrameTypeOnFrame(FrameType frameType);
bool IsInterpreted() const;
@@ -62,7 +62,7 @@ GateRef CircuitBuilder::CallStub(GateRef glue, GateRef hirGate, int index, const
auto label = GetCurrentLabel();
auto depend = label->GetDepend();
GateRef result;
if (GetCircuit()->IsOptimizedJSFunctionFrame()) {
if (GetCircuit()->IsOptimizedOrFastJit()) {
ASSERT(hirGate != Circuit::NullGate());
result = Call(cs, glue, target, depend, args, hirGate, comment);
} else {
@@ -73,7 +73,7 @@ GateRef CircuitBuilder::CallStub(GateRef glue, GateRef hirGate, int index, const

GateRef CircuitBuilder::CallBuiltinRuntime(GateRef glue, GateRef depend, const std::vector<GateRef> &args, bool isNew)
{
ASSERT(!GetCircuit()->IsOptimizedJSFunctionFrame());
ASSERT(!GetCircuit()->IsOptimizedOrFastJit());
int index = 0;
if (!isNew) {
index = static_cast<int>(RTSTUB_ID(PushCallArgsAndDispatchNative));
@@ -94,7 +94,7 @@ GateRef CircuitBuilder::CallBuiltinRuntime(GateRef glue, GateRef depend, const s

GateRef CircuitBuilder::CallBuiltinRuntimeWithNewTarget(GateRef glue, GateRef depend, const std::vector<GateRef> &args)
{
ASSERT(!GetCircuit()->IsOptimizedJSFunctionFrame());
ASSERT(!GetCircuit()->IsOptimizedOrFastJit());
int index = 0;

index = static_cast<int>(RTSTUB_ID(PushNewTargetAndDispatchNative));
@@ -116,7 +116,7 @@ GateRef CircuitBuilder::Call(const CallSignature* cs, GateRef glue, GateRef targ
std::vector<GateRef> inputs { depend, target, glue };
inputs.insert(inputs.end(), args.begin(), args.end());
auto numValuesIn = args.size() + 2; // 2: target & glue
if (GetCircuit()->IsOptimizedJSFunctionFrame() && hirGate != Circuit::NullGate()) {
if (GetCircuit()->IsOptimizedOrFastJit() && hirGate != Circuit::NullGate()) {
AppendFrameArgs(inputs, hirGate);
numValuesIn += 1;
@@ -163,7 +163,7 @@ GateRef CircuitBuilder::Call(const CallSignature* cs, GateRef glue, GateRef targ
GateRef CircuitBuilder::CallBCHandler(GateRef glue, GateRef target, const std::vector<GateRef> &args,
const char* comment)
{
ASSERT(!GetCircuit()->IsOptimizedJSFunctionFrame());
ASSERT(!GetCircuit()->IsOptimizedOrFastJit());
const CallSignature *cs = BytecodeStubCSigns::BCHandler();
ASSERT(cs->IsBCStub());
auto label = GetCurrentLabel();
@@ -175,7 +175,7 @@ GateRef CircuitBuilder::CallBCHandler(GateRef glue, GateRef target, const std::v
GateRef CircuitBuilder::CallBuiltin(GateRef glue, GateRef target, const std::vector<GateRef> &args,
const char* comment)
{
ASSERT(!GetCircuit()->IsOptimizedJSFunctionFrame());
ASSERT(!GetCircuit()->IsOptimizedOrFastJit());
const CallSignature *cs = BuiltinsStubCSigns::BuiltinsCSign();
ASSERT(cs->IsBuiltinsStub());
auto label = GetCurrentLabel();
@@ -187,7 +187,7 @@ GateRef CircuitBuilder::CallBuiltin(GateRef glue, GateRef target, const std::vec
GateRef CircuitBuilder::CallBuiltinWithArgv(GateRef glue, GateRef target, const std::vector<GateRef> &args,
const char* comment)
{
ASSERT(!GetCircuit()->IsOptimizedJSFunctionFrame());
ASSERT(!GetCircuit()->IsOptimizedOrFastJit());
const CallSignature *cs = BuiltinsStubCSigns::BuiltinsWithArgvCSign();
ASSERT(cs->IsBuiltinsWithArgvStub());
auto label = GetCurrentLabel();
@@ -199,7 +199,7 @@ GateRef CircuitBuilder::CallBuiltinWithArgv(GateRef glue, GateRef target, const
GateRef CircuitBuilder::CallBCDebugger(GateRef glue, GateRef target, const std::vector<GateRef> &args,
const char* comment)
{
ASSERT(!GetCircuit()->IsOptimizedJSFunctionFrame());
ASSERT(!GetCircuit()->IsOptimizedOrFastJit());
const CallSignature *cs = BytecodeStubCSigns::BCDebuggerHandler();
ASSERT(cs->IsBCDebuggerStub());
auto label = GetCurrentLabel();
@@ -219,7 +219,7 @@ GateRef CircuitBuilder::CallRuntime(GateRef glue, int index, GateRef depend, con
depend = label->GetDepend();
}
GateRef filteredHirGate = Circuit::NullGate();
if (GetCircuit()->IsOptimizedJSFunctionFrame()) {
if (GetCircuit()->IsOptimizedOrFastJit()) {
ASSERT(hirGate != Circuit::NullGate());
filteredHirGate = hirGate;
}
@@ -229,7 +229,7 @@ GateRef CircuitBuilder::CallRuntime(GateRef glue, int index, GateRef depend, con

GateRef CircuitBuilder::CallRuntimeVarargs(GateRef glue, int index, GateRef argc, GateRef argv, const char* comment)
{
ASSERT(!GetCircuit()->IsOptimizedJSFunctionFrame());
ASSERT(!GetCircuit()->IsOptimizedOrFastJit());
const CallSignature *cs = RuntimeStubCSigns::Get(RTSTUB_ID(CallRuntimeWithArgv));
GateRef target = IntPtr(index);
auto label = GetCurrentLabel();
@@ -250,7 +250,7 @@ GateRef CircuitBuilder::CallNGCRuntime(GateRef glue, int index, GateRef depend,
depend = label->GetDepend();
}
GateRef filteredHirGate = Circuit::NullGate();
if (GetCircuit()->IsOptimizedJSFunctionFrame() && RuntimeStubCSigns::IsAsmStub(index)) {
if (GetCircuit()->IsOptimizedOrFastJit() && RuntimeStubCSigns::IsAsmStub(index)) {
ASSERT(hirGate != Circuit::NullGate());
filteredHirGate = hirGate;
}
@@ -305,7 +305,7 @@ GateRef CircuitBuilder::FastCallOptimized(GateRef glue, GateRef code, GateRef de
depend = label->GetDepend();
}
GateRef filteredHirGate = Circuit::NullGate();
if (GetCircuit()->IsOptimizedJSFunctionFrame()) {
if (GetCircuit()->IsOptimizedOrFastJit()) {
ASSERT(hirGate != Circuit::NullGate());
filteredHirGate = hirGate;
}
@@ -323,7 +323,7 @@ GateRef CircuitBuilder::CallOptimized(GateRef glue, GateRef code, GateRef depend
depend = label->GetDepend();
}
GateRef filteredHirGate = Circuit::NullGate();
if (GetCircuit()->IsOptimizedJSFunctionFrame()) {
if (GetCircuit()->IsOptimizedOrFastJit()) {
ASSERT(hirGate != Circuit::NullGate());
filteredHirGate = hirGate;
}
@@ -824,7 +824,8 @@ public:
CreateCodeGen(module, enableLog);
CodeGenerator codegen(cgImpl_, data->GetMethodName());
codegen.Run(data->GetCircuit(), data->GetConstScheduleResult(), data->GetCompilerConfig(),
data->GetMethodLiteral(), data->GetJSPandaFile(), enableOptInlining, enableOptBranchProfiling);
data->GetMethodLiteral(), data->GetJSPandaFile(), data->GetCircuit()->GetFrameType(),
enableOptInlining, enableOptBranchProfiling);
return true;
}
private:
@@ -91,7 +91,6 @@ bool JitPassManager::Compile(JSHandle<ProfileTypeInfo> &profileTypeInfo,
if (UNLIKELY(!hasTypes)) {
LOG_COMPILER(INFO) << "record: " << recordName << " has no types";
}

if (compilationEnv_->GetJSOptions().IsEnableJITPGO()) {
Jit::JitLockHolder lock(compilationEnv_, "PGO ProfileBytecode");
jitProfiler_ = compilationEnv_->GetPGOProfiler()->GetJITProfile();
@@ -103,8 +102,15 @@ bool JitPassManager::Compile(JSHandle<ProfileTypeInfo> &profileTypeInfo,
} else {
jitProfiler_ = nullptr;
}
circuit_ = new Circuit(compilationEnv_->GetNativeAreaAllocator(), ctx_->GetAOTModule()->GetDebugInfo(),
fullName.c_str(), cmpCfg->Is64Bit(), FrameType::OPTIMIZED_JS_FUNCTION_FRAME);

if (compilationEnv_->GetJSOptions().IsEnableJitFrame()) {
circuit_ = new Circuit(compilationEnv_->GetNativeAreaAllocator(), ctx_->GetAOTModule()->GetDebugInfo(),
fullName.c_str(), cmpCfg->Is64Bit(), FrameType::FASTJIT_FUNCTION_FRAME);
} else {
circuit_ = new Circuit(compilationEnv_->GetNativeAreaAllocator(), ctx_->GetAOTModule()->GetDebugInfo(),
fullName.c_str(), cmpCfg->Is64Bit(), FrameType::OPTIMIZED_JS_FUNCTION_FRAME);
}

PGOProfilerDecoder *decoder = passOptions_->EnableOptPGOType() ? &profilerDecoder_ : nullptr;

builder_ = new BytecodeCircuitBuilder(jsPandaFile, methodLiteral, methodPCInfo,
@@ -47,7 +47,7 @@ using namespace panda::ecmascript;

#ifndef NDEBUG
#define ASM_ASSERT(messageId, condition) \
if (!GetEnvironment()->GetCircuit()->IsOptimizedJSFunctionFrame()) { \
if (!GetEnvironment()->GetCircuit()->IsOptimizedOrFastJit()) { \
SUBENTRY(messageId, condition); \
EXITENTRY(); \
}
@@ -55,7 +55,7 @@ using namespace panda::ecmascript;
SUBENTRY_WITH_GLUE(messageId, condition, glue)
#elif defined(ENABLE_ASM_ASSERT)
#define ASM_ASSERT(messageId, condition) \
if (!GetEnvironment()->GetCircuit()->IsOptimizedJSFunctionFrame()) { \
if (!GetEnvironment()->GetCircuit()->IsOptimizedOrFastJit()) { \
SUBENTRY(messageId, condition); \
EXITENTRY(); \
}
@@ -256,6 +256,22 @@ void Deoptimizier::CollectVregs(const std::vector<kungfu::ARKDeopt>& deoptBundle
// | . . . . | v
// +----------------------------------+--------+

template<class T>
void Deoptimizier::AssistCollectDeoptBundleVec(FrameIterator &it, T &frame)
{
CalleeRegAndOffsetVec calleeRegInfo;
frame->GetFuncCalleeRegAndOffset(it, calleeRegInfo);
context_.calleeRegAndOffset = calleeRegInfo;
context_.callsiteSp = it.GetCallSiteSp();
context_.callsiteFp = reinterpret_cast<uintptr_t>(it.GetSp());
auto preFrameSp = frame->ComputePrevFrameSp(it);
frameArgc_ = frame->GetArgc(preFrameSp);
frameArgvs_ = frame->GetArgv(preFrameSp);
stackContext_.callFrameTop_ = it.GetPrevFrameCallSiteSp();
stackContext_.returnAddr_ = frame->GetReturnAddr();
stackContext_.callerFp_ = reinterpret_cast<uintptr_t>(frame->GetPrevFrameFp());
}

void Deoptimizier::CollectDeoptBundleVec(std::vector<ARKDeopt>& deoptBundle)
{
JSTaggedType *lastLeave = const_cast<JSTaggedType *>(thread_->GetLastLeaveFrame());
@@ -268,17 +284,14 @@ void Deoptimizier::CollectDeoptBundleVec(std::vector<ARKDeopt>& deoptBundle)
case FrameType::OPTIMIZED_JS_FUNCTION_FRAME: {
auto frame = it.GetFrame<OptimizedJSFunctionFrame>();
frame->GetDeoptBundleInfo(it, deoptBundle);
CalleeRegAndOffsetVec calleeRegInfo;
frame->GetFuncCalleeRegAndOffset(it, calleeRegInfo);
context_.calleeRegAndOffset = calleeRegInfo;
context_.callsiteSp = it.GetCallSiteSp();
context_.callsiteFp = reinterpret_cast<uintptr_t>(it.GetSp());
auto preFrameSp = frame->ComputePrevFrameSp(it);
frameArgc_ = frame->GetArgc(preFrameSp);
frameArgvs_ = frame->GetArgv(preFrameSp);
stackContext_.callFrameTop_ = it.GetPrevFrameCallSiteSp();
stackContext_.returnAddr_ = frame->GetReturnAddr();
stackContext_.callerFp_ = reinterpret_cast<uintptr_t>(frame->GetPrevFrameFp());
AssistCollectDeoptBundleVec(it, frame);
break;
}
case FrameType::FASTJIT_FUNCTION_FRAME:
case FrameType::FASTJIT_FAST_CALL_FUNCTION_FRAME: {
auto frame = it.GetFrame<FASTJITFunctionFrame>();
frame->GetDeoptBundleInfo(it, deoptBundle);
AssistCollectDeoptBundleVec(it, frame);
break;
}
case FrameType::ASM_BRIDGE_FRAME: {
@@ -121,6 +121,8 @@ public:
traceDeopt_ = options.GetTraceDeopt();
}
void CollectVregs(const std::vector<kungfu::ARKDeopt>& deoptBundle, size_t shift);
template<class T>
void AssistCollectDeoptBundleVec(FrameIterator &it, T &frame);
void CollectDeoptBundleVec(std::vector<kungfu::ARKDeopt>& deoptBundle);
JSTaggedType ConstructAsmInterpretFrame();
void UpdateAndDumpDeoptInfo(kungfu::DeoptType type);
@@ -437,6 +437,9 @@ std::string SamplesRecord::AddRunningState(char *functionName, RunningState stat
temp.append("(RUNTIME)");
}
break;
case RunningState::JIT:
temp.append("(JIT)");
break;
default:
break;
}
@@ -482,6 +485,10 @@ void SamplesRecord::StatisticStateTime(int timeDelta, RunningState state)
profileInfo_->runtimeTime += static_cast<uint64_t>(timeDelta);
return;
}
case RunningState::JIT: {
profileInfo_->jitTime += static_cast<uint64_t>(timeDelta);
return;
}
default: {
profileInfo_->otherTime += static_cast<uint64_t>(timeDelta);
return;
@@ -73,6 +73,7 @@ struct ProfileInfo {
uint64_t napiTime = 0;
uint64_t arkuiEngineTime = 0;
uint64_t runtimeTime = 0;
uint64_t jitTime = 0;
uint64_t otherTime = 0;
};
@@ -189,10 +189,11 @@ RunningState JsStackGetter::GetRunningState(const FrameIterator &it, const EcmaV
return RunningState::NAPI;
}
if (isNative) {
if (function->GetNativeFunctionExtraInfo().CheckIsJSNativePointer()) {
return RunningState::ARKUI_ENGINE;
}
return RunningState::BUILTIN;
return function->GetNativeFunctionExtraInfo().CheckIsJSNativePointer() ? RunningState::ARKUI_ENGINE :
RunningState::BUILTIN;
}
if (it.IsFastJitFunctionFrame()) {
return RunningState::JIT;
}
if (it.IsOptimizedJSFunctionFrame()) {
return RunningState::AOT;
@@ -216,10 +217,8 @@ RunningState JsStackGetter::GetRunningState(const FrameIterator &it, const EcmaV
return RunningState::NAPI;
}
if (isNative) {
if (function->GetNativeFunctionExtraInfo().CheckIsJSNativePointer()) {
return RunningState::ARKUI_ENGINE;
}
return RunningState::BUILTIN;
return function->GetNativeFunctionExtraInfo().CheckIsJSNativePointer() ? RunningState::ARKUI_ENGINE :
RunningState::BUILTIN;
}

return RunningState::OTHER;
@@ -32,7 +32,8 @@ enum class RunningState : size_t {
BUILTIN,
NAPI,
ARKUI_ENGINE,
RUNTIME
RUNTIME,
JIT
};

struct MethodKey {
@@ -36,8 +36,15 @@
namespace panda::ecmascript {
[[maybe_unused]] static bool g_needCheck = true;
std::unordered_map<EntityId, std::string> JsStackInfo::nameMap;
std::unordered_map<EntityId, std::vector<uint8>> JsStackInfo::machineCodeMap;

std::string JsStackInfo::BuildMethodTrace(Method *method, uint32_t pcOffset, bool enableStackSourceFile)
bool IsFastJitFunctionFrame(const FrameType frameType)
{
return frameType == FrameType::FASTJIT_FUNCTION_FRAME || frameType == FrameType::FASTJIT_FAST_CALL_FUNCTION_FRAME;
}

std::string JsStackInfo::BuildMethodTrace(Method *method, uint32_t pcOffset, const FrameType frameType,
bool enableStackSourceFile)
{
std::string data;
data.append(" at ");
@@ -45,6 +52,9 @@ std::string JsStackInfo::BuildMethodTrace(Method *method, uint32_t pcOffset, boo
if (name.empty()) {
name = "anonymous";
}
if (IsFastJitFunctionFrame(frameType)) {
LOG_ECMA(ERROR) << "jit : js crash at method : " << name;
}
data += name;
data.append(" (");
// source file
@@ -82,7 +92,8 @@ std::string JsStackInfo::BuildMethodTrace(Method *method, uint32_t pcOffset, boo
return data;
}

std::string JsStackInfo::BuildInlinedMethodTrace(const JSPandaFile *pf, std::map<uint32_t, uint32_t> &methodOffsets)
std::string JsStackInfo::BuildInlinedMethodTrace(const JSPandaFile *pf, std::map<uint32_t, uint32_t> &methodOffsets,
const FrameType frameType)
{
std::string data;
std::map<uint32_t, uint32_t>::reverse_iterator it;
@@ -98,6 +109,9 @@ std::string JsStackInfo::BuildInlinedMethodTrace(const JSPandaFile *pf, std::map
}
}
data.append(" at ");
if (IsFastJitFunctionFrame(frameType)) {
LOG_ECMA(ERROR) << "jit : js crash at method : " << name;
}
data.append(name);
data.append(" (maybe inlined).");
data.append(" depth: ");
@@ -108,6 +122,34 @@ std::string JsStackInfo::BuildInlinedMethodTrace(const JSPandaFile *pf, std::map
return data;
}

void PrintJSCrashOffset(uintptr_t pc, JSFunction *func)
{
if (func->GetMachineCode() == JSTaggedValue::Undefined()) {
return;
}
MachineCode *machineCode = MachineCode::Cast(func->GetMachineCode().GetTaggedObject());
uintptr_t funcAddr = machineCode->GetFuncAddr();
uintptr_t offsetAmount = pc - funcAddr;
LOG_ECMA(ERROR) << "jit : Current pc is : " << pc << ". Current funcAddr is : " << funcAddr <<
". Current crash offset is : " << offsetAmount;
}

void DumpJitCode([[maybe_unused]] JSThread *thread)
{
JsJitDumpElf jitDumpElf;
jitDumpElf.Init();
std::string fileName = "jitCode-" + std::to_string(getpid());
std::string realOutPath;
std::string sanboxPath = panda::os::file::File::GetExtendedFilePath(ohos::AotCrashInfo::GetSandBoxPath());
if (!ecmascript::RealPath(sanboxPath, realOutPath, false)) {
return;
}
std::string outFile = realOutPath + "/" + fileName;
int fd = open(outFile.c_str(), O_RDWR | O_CREAT | O_TRUNC, 0664);
jitDumpElf.WriteJitElfFile(fd);
close(fd);
}

std::string JsStackInfo::BuildJsStackTrace(JSThread *thread, bool needNative)
{
std::string data;
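PrintJSCrashOffset reports where inside the JIT-generated machine code the crash landed: the logged offset is simply the faulting pc minus the start address of the function's compiled code. As a worked example with made-up addresses, if funcAddr is 0x7f8a1000 and the crashing pc is 0x7f8a1123, the reported crash offset is 0x123, which can then be matched against the code image written by DumpJitCode.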
@@ -125,8 +167,16 @@ std::string JsStackInfo::BuildJsStackTrace(JSThread *thread, bool needNative)
auto pcOffset = it.GetBytecodeOffset();
const JSPandaFile *pf = method->GetJSPandaFile();
std::map<uint32_t, uint32_t> methodOffsets = it.GetInlinedMethodInfo();
data += BuildInlinedMethodTrace(pf, methodOffsets);
data += BuildMethodTrace(method, pcOffset, thread->GetEnableStackSourceFile());
FrameType frameType = it.GetFrameType();
if (IsFastJitFunctionFrame(frameType)) {
JSFunction *func = static_cast<JSFunction*>(it.GetFunction().GetTaggedObject());
auto frame = it.GetFrame<FASTJITFunctionFrame>();
uintptr_t pc = frame->GetReturnAddr();
PrintJSCrashOffset(pc, func);
DumpJitCode(thread);
}
data += BuildInlinedMethodTrace(pf, methodOffsets, frameType);
data += BuildMethodTrace(method, pcOffset, frameType, thread->GetEnableStackSourceFile());
} else if (needNative) {
auto addr = method->GetNativePointer();
std::stringstream strm;
@@ -403,6 +453,11 @@ bool GetTypeOffsetAndPrevOffsetFromFrameType(uintptr_t frameType, uintptr_t &typ
typeOffset = AsmInterpretedBridgeFrame::GetTypeOffset();
prevOffset = AsmInterpretedBridgeFrame::GetPrevOffset();
break;
case FrameType::FASTJIT_FUNCTION_FRAME:
case FrameType::FASTJIT_FAST_CALL_FUNCTION_FRAME:
typeOffset = FASTJITFunctionFrame::GetTypeOffset();
prevOffset = FASTJITFunctionFrame::GetPrevOffset();
break;
default:
return false;
}
@@ -422,7 +477,9 @@ bool IsFunctionFrame(uintptr_t frameType)
static_cast<FrameType>(frameType) == FrameType::INTERPRETER_FRAME ||
static_cast<FrameType>(frameType) == FrameType::INTERPRETER_FAST_NEW_FRAME ||
static_cast<FrameType>(frameType) == FrameType::OPTIMIZED_JS_FUNCTION_FRAME ||
static_cast<FrameType>(frameType) == FrameType::OPTIMIZED_JS_FAST_CALL_FUNCTION_FRAME;
static_cast<FrameType>(frameType) == FrameType::OPTIMIZED_JS_FAST_CALL_FUNCTION_FRAME ||
static_cast<FrameType>(frameType) == FrameType::FASTJIT_FUNCTION_FRAME ||
static_cast<FrameType>(frameType) == FrameType::FASTJIT_FAST_CALL_FUNCTION_FRAME;
}

std::optional<MethodInfo> JSStackTrace::ReadMethodInfo(panda_file::MethodDataAccessor &mda)
@@ -627,6 +684,12 @@ uintptr_t GetBytecodeOffset(void *ctx, ReadMemFunc readMem, uintptr_t frameType,
readMem(ctx, currentPtr, &bytecodePc);
return bytecodePc;
}
case FrameType::FASTJIT_FUNCTION_FRAME:
case FrameType::FASTJIT_FAST_CALL_FUNCTION_FRAME: {
currentPtr -= FASTJITFunctionFrame::GetTypeOffset();
readMem(ctx, currentPtr, &bytecodePc);
return bytecodePc;
}
default: {
break;
}
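For the FASTJIT frames the saved bytecode pc sits in the lowest slot of the frame (PcIndex is 0 in the FASTJITFunctionFrame layout added at the end of this change), so the unwinder only has to step back from the frame-type slot to the frame base and read one word. A reading of the arithmetic, under the assumption that currentPtr points at the frameType slot when this case is reached:

    uintptr_t frameBase = currentPtr - FASTJITFunctionFrame::GetTypeOffset();  // TypeIndex slot -> frame base
    uintptr_t pcSlot = frameBase + FASTJITFunctionFrame::GetPcOffset();        // PcIndex == 0, so pcSlot == frameBase
    readMem(ctx, pcSlot, &bytecodePc);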
@@ -657,6 +720,12 @@ uintptr_t ArkGetFunction(void *ctx, ReadMemFunc readMem, uintptr_t currentPtr, u
funcAddr += InterpretedFrame::GetFunctionOffset();
break;
}
case FrameType::FASTJIT_FUNCTION_FRAME:
case FrameType::FASTJIT_FAST_CALL_FUNCTION_FRAME: {
funcAddr -= FASTJITFunctionFrame::GetTypeOffset();
funcAddr += FASTJITFunctionFrame::GetFunctionOffset();
break;
}
default: {
return 0;
}
@@ -781,51 +850,101 @@ bool ArkGetNextFrame(void *ctx, ReadMemFunc readMem, uintptr_t &currentPtr,
return ArkGetNextFrame(ctx, readMem, currentPtr, frameType, pc, methodId);
}

bool ArkWriteJitCode(void *ctx, ReadMemFunc readMem, int fd, const uintptr_t *const jitCodeArray,
const size_t jitSize)
bool ArkGetMethodIdWithJit(ArkUnwindParam *arkUnwindParam, uintptr_t frameType, uintptr_t currentPtr)
{
uintptr_t function = ArkGetFunction(arkUnwindParam->ctx, arkUnwindParam->readMem, currentPtr, frameType);
if (!function) {
LOG_ECMA(DEBUG) << "Failed to get function";
return false;
}

uintptr_t method = ArkCheckAndGetMethod(arkUnwindParam->ctx, arkUnwindParam->readMem, function);
if (!method) {
LOG_ECMA(DEBUG) << std::hex << "Failed to get method: " << function;
return false;
}

if (!ArkGetMethodIdFromMethod(arkUnwindParam->ctx, arkUnwindParam->readMem, method, *arkUnwindParam->methodId)) {
LOG_ECMA(DEBUG) << std::hex << "ArkGetJsFrameDebugInfo failed, method: " << method;
return false;
}

if (IsFastJitFunctionFrame(static_cast<FrameType>(frameType))) {
uintptr_t machineCode = 0;
uintptr_t functionAddr = function + JSFunction::MACHINECODE_OFFSET;
arkUnwindParam->readMem(arkUnwindParam->ctx, functionAddr, &machineCode);
uintptr_t size = 0;
uintptr_t funcAddr = 0;
if (machineCode) {
arkUnwindParam->readMem(arkUnwindParam->ctx, machineCode + MachineCode::INSTRSIZ_OFFSET, &size);
arkUnwindParam->readMem(arkUnwindParam->ctx, machineCode + MachineCode::FUNCADDR_OFFSET, &funcAddr);
}
if (size && funcAddr) {
// take the lower four bytes
size &= 0xFFFFFFFF;
std::vector<uint8> codeVec;
for (size_t l = 0; l < size; l++) {
uintptr_t tmp = 0;
arkUnwindParam->readMem(arkUnwindParam->ctx, funcAddr + l, &tmp);
codeVec.push_back(tmp);
}
arkUnwindParam->jitCache.push_back(*arkUnwindParam->methodId);
JsStackInfo::machineCodeMap[EntityId(*arkUnwindParam->methodId)] = codeVec;
}
}
return true;
}

bool ArkGetNextFrameWithJit(ArkUnwindParam *arkUnwindParam, uintptr_t &currentPtr, uintptr_t &frameType)
{
currentPtr -= sizeof(FrameType);
if (!arkUnwindParam->readMem(arkUnwindParam->ctx, currentPtr, &frameType)) {
return false;
}
if (ArkFrameCheck(frameType)) {
return true;
}
bool ret = false;
if (IsFunctionFrame(frameType)) {
*arkUnwindParam->pc = GetBytecodeOffset(arkUnwindParam->ctx, arkUnwindParam->readMem, frameType, currentPtr);
ret = true;
if (arkUnwindParam->methodId != nullptr) {
ret = ArkGetMethodIdWithJit(arkUnwindParam, frameType, currentPtr);
}
}

uintptr_t typeOffset = 0;
uintptr_t prevOffset = 0;
if (!GetTypeOffsetAndPrevOffsetFromFrameType(frameType, typeOffset, prevOffset)) {
return false;
}
currentPtr -= typeOffset;
currentPtr += prevOffset;
if (!arkUnwindParam->readMem(arkUnwindParam->ctx, currentPtr, &currentPtr)) {
return false;
}

if (ret) {
return true;
}
return ArkGetNextFrameWithJit(arkUnwindParam, currentPtr, frameType);
}

bool ArkWriteJitCode([[maybe_unused]] void *ctx, [[maybe_unused]] ReadMemFunc readMem,
int fd, const uintptr_t *const jitCodeArray, const size_t jitSize)
{
JsJitDumpElf jitDumpElf;
jitDumpElf.Init();
int64 idx = 0;
if (jitSize == 0) {
for (size_t i = 0; i < jitSize; i++) {
uintptr_t functionAddr = jitCodeArray[i];
uintptr_t machineCode = 0;
readMem(ctx, functionAddr + JSFunction::MACHINECODE_OFFSET, &machineCode);
if (!machineCode) {
continue;
}
uintptr_t method = ArkCheckAndGetMethod(ctx, readMem, functionAddr);
if (!method) {
LOG_ECMA(DEBUG) << std::hex << "Failed to get method: " << functionAddr;
continue;
}
uintptr_t methodId = 0;
if (!ArkGetMethodIdFromMethod(ctx, readMem, method, methodId)) {
LOG_ECMA(DEBUG) << std::hex << "ArkGetJsFrameDebugInfo failed, method: " << method;
continue;
}
std::string name = JsStackInfo::nameMap[EntityId(methodId)];
uintptr_t size = 0;
readMem(ctx, machineCode + MachineCode::INSTRSIZ_OFFSET, &size);
if (!size) {
continue;
}
size_t len = *reinterpret_cast<size_t*>(size);
uintptr_t funcAddr = 0;
readMem(ctx, machineCode + MachineCode::FUNCADDR_OFFSET, &funcAddr);
if (!funcAddr) {
continue;
}
std::vector<uint8> codeVec;
for (size_t l = 0; l < size; l++) {
uintptr_t tmp = 0;
readMem(ctx, funcAddr + l, &tmp);
codeVec.push_back(*reinterpret_cast<uint8*>(tmp));
}
jitDumpElf.AppendData(codeVec);
jitDumpElf.AppendSymbolToSymTab(idx++, 0, len, name);
}
size_t offset = 0;
for (size_t i = 0; i < jitSize; i++) {
uintptr_t methodId = jitCodeArray[i];
std::vector<uint8> codeVec = JsStackInfo::machineCodeMap[EntityId(methodId)];
std::string name = JsStackInfo::nameMap[EntityId(methodId)];
size_t len = codeVec.size();
jitDumpElf.AppendData(codeVec);
jitDumpElf.AppendSymbolToSymTab(idx++, offset, len, name);
offset += len;
}
jitDumpElf.WriteJitElfFile(fd);
JsStackInfo::nameMap.clear();
@@ -842,24 +961,7 @@ bool StepArkWithRecordJit(ArkUnwindParam *arkUnwindParam)
}

uintptr_t frameType = 0;
uintptr_t frameTypeTmp = 0;
uintptr_t tmpPtr = currentPtr;
tmpPtr -= sizeof(FrameType);
if (arkUnwindParam->readMem(arkUnwindParam->ctx, tmpPtr, &frameTypeTmp)) {
if (IsFunctionFrame(frameTypeTmp)) {
if (static_cast<FrameType>(frameTypeTmp) == FrameType::OPTIMIZED_JS_FAST_CALL_FUNCTION_FRAME ||
static_cast<FrameType>(frameTypeTmp) == FrameType::OPTIMIZED_JS_FUNCTION_FRAME) {
uintptr_t function = 0;
uintptr_t funcAddr = tmpPtr;
funcAddr -= OptimizedJSFunctionFrame::GetTypeOffset();
funcAddr += OptimizedJSFunctionFrame::GetFunctionOffset();
arkUnwindParam->readMem(arkUnwindParam->ctx, funcAddr, &function);
arkUnwindParam->jitCache.push_back(function);
}
}
}
if (ArkGetNextFrame(arkUnwindParam->ctx, arkUnwindParam->readMem, currentPtr, frameType, *arkUnwindParam->pc,
arkUnwindParam->methodId)) {
if (ArkGetNextFrameWithJit(arkUnwindParam, currentPtr, frameType)) {
if (ArkFrameCheck(frameType)) {
currentPtr += sizeof(FrameType);
*arkUnwindParam->sp = currentPtr;
@@ -965,6 +1067,12 @@ uintptr_t ArkGetFunction(int pid, uintptr_t currentPtr, uintptr_t frameType)
funcAddr += OptimizedBuiltinLeaveFrame::GetFunctionOffset();
break;
}
case FrameType::FASTJIT_FUNCTION_FRAME:
case FrameType::FASTJIT_FAST_CALL_FUNCTION_FRAME: {
funcAddr -= FASTJITFunctionFrame::GetTypeOffset();
funcAddr += FASTJITFunctionFrame::GetFunctionOffset();
break;
}
case FrameType::BUILTIN_FRAME_WITH_ARGV_STACK_OVER_FLOW_FRAME :
case FrameType::OPTIMIZED_FRAME:
case FrameType::OPTIMIZED_ENTRY_FRAME:
@@ -1097,7 +1205,9 @@ uint32_t ArkGetBytecodeOffset(int pid, uintptr_t method, uintptr_t frameType, ui
}
// aot need stackmaps
case FrameType::OPTIMIZED_JS_FAST_CALL_FUNCTION_FRAME:
case FrameType::OPTIMIZED_JS_FUNCTION_FRAME: {
case FrameType::OPTIMIZED_JS_FUNCTION_FRAME:
case FrameType::FASTJIT_FUNCTION_FRAME:
case FrameType::FASTJIT_FAST_CALL_FUNCTION_FRAME: {
break;
}
default: {
@@ -1585,7 +1695,7 @@ bool GetArkJSHeapCrashInfo(int pid, uintptr_t *bytecodePc, uintptr_t *fp, bool o
Method *method = ECMAObject::Cast(functionValue.GetTaggedObject())->GetCallTarget();
auto bytecodeOffset = static_cast<uint32_t>(reinterpret_cast<uint8_t *>(*bytecodePc) -
method->GetBytecodeArray());
std::string info = JsStackInfo::BuildMethodTrace(method, bytecodeOffset);
std::string info = JsStackInfo::BuildMethodTrace(method, bytecodeOffset, static_cast<FrameType>(frameType));
const char *infoChar = info.c_str();
if (strIndex < strLen - 1) { // 1: last '\0'
outStr[strIndex++] = ' ';
@@ -155,14 +155,17 @@ private:

class JsStackInfo {
public:
static std::string BuildInlinedMethodTrace(const JSPandaFile *pf, std::map<uint32_t, uint32_t> &methodOffsets);
static std::string BuildInlinedMethodTrace(const JSPandaFile *pf, std::map<uint32_t, uint32_t> &methodOffsets,
const FrameType frameType);
static std::string BuildJsStackTrace(JSThread *thread, bool needNative);
static std::vector<JsFrameInfo> BuildJsStackInfo(JSThread *thread, bool currentStack = false);
static std::string BuildMethodTrace(Method *method, uint32_t pcOffset, bool enableStackSourceFile = true);
static std::string BuildMethodTrace(Method *method, uint32_t pcOffset, const FrameType frameType,
bool enableStackSourceFile = true);
static AOTFileManager *loader;
static JSRuntimeOptions *options;
static void BuildCrashInfo(bool isJsCrash, uintptr_t pc = 0);
static std::unordered_map<EntityId, std::string> nameMap;
static std::unordered_map<EntityId, std::vector<uint8>> machineCodeMap;
};
void CrashCallback(char *buf, size_t len, void *ucontext);
} // namespace panda::ecmascript
@@ -241,7 +241,10 @@ void EcmaVM::PostFork()
}
ResetPGOProfiler();

bool jitEscapeDisable = ohos::JitTools::GetJitEscapeEanble();
bool enableJitFrame = ohos::JitTools::GetJitFrameEnable();
options_.SetEnableJitFrame(enableJitFrame);

bool jitEscapeDisable = ohos::JitTools::GetJitEscapeDisable();
if (jitEscapeDisable || !JSNApi::IsJitEscape()) {
if (ohos::EnableAotListHelper::GetJitInstance()->IsEnableJit(bundleName)) {
bool isEnableFastJit = options_.IsEnableJIT() && options_.GetEnableAsmInterpreter();
@@ -89,6 +89,11 @@ JSTaggedValue FrameIterator::GetFunction() const
auto *frame = OptimizedBuiltinLeaveFrame::GetFrameFromSp(GetSp());
return JSTaggedValue(*(frame->GetArgv()));
}
case FrameType::FASTJIT_FUNCTION_FRAME:
case FrameType::FASTJIT_FAST_CALL_FUNCTION_FRAME: {
auto frame = GetFrame<FASTJITFunctionFrame>();
return frame->GetFunction();
}
case FrameType::BUILTIN_FRAME_WITH_ARGV_STACK_OVER_FLOW_FRAME :
case FrameType::OPTIMIZED_FRAME:
case FrameType::OPTIMIZED_ENTRY_FRAME:
@@ -130,6 +135,22 @@ AOTFileInfo::CallSiteInfo FrameIterator::TryCalCallSiteInfoFromMachineCode(uintp
MachineCode::Cast(machineCode.GetTaggedObject())->IsInText(retAddr)) {
return MachineCode::Cast(machineCode.GetTaggedObject())->CalCallSiteInfo(retAddr);
}
} else if (type == FrameType::FASTJIT_FUNCTION_FRAME ||
type == FrameType::FASTJIT_FAST_CALL_FUNCTION_FRAME) {
auto frame = GetFrame<FASTJITFunctionFrame>();
JSTaggedValue func = frame->GetFunction();
if (!func.IsHeapObject()) {
return {};
}
// cast to jsfunction directly. JSFunction::Cast may fail,
// as jsfunction class may set forwardingAddress in Evacuate, but forwarding obj not init.
JSFunction *jsfunc = reinterpret_cast<JSFunction*>(func.GetTaggedObject());
// machineCode non move
JSTaggedValue machineCode = jsfunc->GetMachineCode();
if (machineCode.IsMachineCodeObject() &&
MachineCode::Cast(machineCode.GetTaggedObject())->IsInText(retAddr)) {
return MachineCode::Cast(machineCode.GetTaggedObject())->CalCallSiteInfo(retAddr);
}
}
return {};
}
@@ -348,6 +369,17 @@ void FrameIterator::Advance()
current_ = frame->GetPrevFrameFp();
break;
}
case FrameType::FASTJIT_FUNCTION_FRAME:
case FrameType::FASTJIT_FAST_CALL_FUNCTION_FRAME: {
auto frame = GetFrame<FASTJITFunctionFrame>();
if constexpr (GCVisit == GCVisitedFlag::VISITED || GCVisit == GCVisitedFlag::HYBRID_STACK) {
optimizedCallSiteSp_ = GetPrevFrameCallSiteSp();
optimizedReturnAddr_ = frame->GetReturnAddr();
needCalCallSiteInfo = true;
}
current_ = frame->GetPrevFrameFp();
break;
}
default: {
if (GCVisit == GCVisitedFlag::HYBRID_STACK) {
current_ = nullptr;
@@ -408,7 +440,9 @@ uintptr_t FrameIterator::GetPrevFrameCallSiteSp() const
case FrameType::OPTIMIZED_FRAME:
case FrameType::BASELINE_BUILTIN_FRAME:
case FrameType::OPTIMIZED_JS_FAST_CALL_FUNCTION_FRAME:
case FrameType::OPTIMIZED_JS_FUNCTION_FRAME: {
case FrameType::OPTIMIZED_JS_FUNCTION_FRAME:
case FrameType::FASTJIT_FUNCTION_FRAME:
case FrameType::FASTJIT_FAST_CALL_FUNCTION_FRAME: {
ASSERT(thread_ != nullptr);
auto callSiteSp = reinterpret_cast<uintptr_t>(current_) + fpDeltaPrevFrameSp_;
return callSiteSp;
@@ -453,6 +487,11 @@ std::map<uint32_t, uint32_t> FrameIterator::GetInlinedMethodInfo()
CollectMethodOffsetInfo(inlineMethodInfos);
break;
}
case FrameType::FASTJIT_FUNCTION_FRAME:
case FrameType::FASTJIT_FAST_CALL_FUNCTION_FRAME: {
CollectMethodOffsetInfo(inlineMethodInfos);
break;
}
default: {
break;
}
@@ -488,6 +527,16 @@ uint32_t FrameIterator::GetBytecodeOffset() const
}
[[fallthrough]];
}
case FrameType::FASTJIT_FUNCTION_FRAME:
case FrameType::FASTJIT_FAST_CALL_FUNCTION_FRAME: {
auto frame = this->GetFrame<FASTJITFunctionFrame>();
ConstInfo constInfo;
frame->CollectPcOffsetInfo(*this, constInfo);
if (!constInfo.empty()) {
return constInfo[0];
}
[[fallthrough]];
}
default: {
break;
}
@@ -590,7 +639,6 @@ ARK_INLINE uintptr_t* OptimizedJSFunctionFrame::ComputePrevFrameSp(const FrameIt
return preFrameSp;
}

void OptimizedJSFunctionFrame::CollectPcOffsetInfo(const FrameIterator &it, ConstInfo &info) const
{
it.CollectPcOffsetInfo(info);
@@ -635,6 +683,65 @@ void OptimizedJSFunctionFrame::GetFuncCalleeRegAndOffset(
it.GetCalleeRegAndOffsetVec(ret);
}

ARK_INLINE JSTaggedType* FASTJITFunctionFrame::GetArgv(const FrameIterator &it) const
{
uintptr_t *preFrameSp = ComputePrevFrameSp(it);
return GetArgv(preFrameSp);
}

ARK_INLINE uintptr_t* FASTJITFunctionFrame::ComputePrevFrameSp(const FrameIterator &it) const
{
const JSTaggedType *sp = it.GetSp();
int delta = it.ComputeDelta();
ASSERT((delta > 0) && (delta % sizeof(uintptr_t) == 0));
uintptr_t *preFrameSp = reinterpret_cast<uintptr_t *>(const_cast<JSTaggedType *>(sp)) + delta / sizeof(uintptr_t);
return preFrameSp;
}

void FASTJITFunctionFrame::CollectPcOffsetInfo(const FrameIterator &it, ConstInfo &info) const
{
it.CollectPcOffsetInfo(info);
}

ARK_INLINE void FASTJITFunctionFrame::GCIterate(const FrameIterator &it,
const RootVisitor &visitor,
[[maybe_unused]] const RootRangeVisitor &rangeVisitor,
const RootBaseAndDerivedVisitor &derivedVisitor, FrameType frameType) const
{
FASTJITFunctionFrame *frame = FASTJITFunctionFrame::GetFrameFromSp(it.GetSp());
uintptr_t *jsFuncPtr = reinterpret_cast<uintptr_t *>(frame);
uintptr_t jsFuncSlot = ToUintPtr(jsFuncPtr);
visitor(Root::ROOT_FRAME, ObjectSlot(jsFuncSlot));
if (frameType == FrameType::FASTJIT_FUNCTION_FRAME) {
uintptr_t *preFrameSp = frame->ComputePrevFrameSp(it);
auto argc = frame->GetArgc(preFrameSp);
JSTaggedType *argv = frame->GetArgv(reinterpret_cast<uintptr_t *>(preFrameSp));
if (argc > 0) {
uintptr_t start = ToUintPtr(argv); // argv
uintptr_t end = ToUintPtr(argv + argc);
rangeVisitor(Root::ROOT_FRAME, ObjectSlot(start), ObjectSlot(end));
}
}

bool ret = it.IteratorStackMap(visitor, derivedVisitor);
if (!ret) {
#ifndef NDEBUG
LOG_ECMA(DEBUG) << " stackmap don't found returnAddr " << it.GetOptimizedReturnAddr();
#endif
}
}

void FASTJITFunctionFrame::GetDeoptBundleInfo(const FrameIterator &it, std::vector<kungfu::ARKDeopt>& deopts) const
{
it.CollectArkDeopt(deopts);
}

void FASTJITFunctionFrame::GetFuncCalleeRegAndOffset(
const FrameIterator &it, kungfu::CalleeRegAndOffsetVec &ret) const
{
it.GetCalleeRegAndOffsetVec(ret);
}

ARK_INLINE void AsmInterpretedFrame::GCIterate(const FrameIterator &it,
const RootVisitor &visitor,
const RootRangeVisitor &rangeVisitor,
@@ -134,6 +134,8 @@ enum class FrameType: uintptr_t {
OPTIMIZED_JS_FUNCTION_UNFOLD_ARGV_FRAME,
BUILTIN_FRAME_WITH_ARGV_STACK_OVER_FLOW_FRAME,
BASELINE_BUILTIN_FRAME,
FASTJIT_FUNCTION_FRAME,
FASTJIT_FAST_CALL_FUNCTION_FRAME,

FRAME_TYPE_FIRST = OPTIMIZED_FRAME,
FRAME_TYPE_LAST = OPTIMIZED_JS_FUNCTION_UNFOLD_ARGV_FRAME,
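The reserved-slot offsets used by the prologue code earlier in this change fall out of the Index enum of the FASTJITFunctionFrame struct in the hunk below (PcIndex = 0, JSFuncIndex = 1, TypeIndex = 2, PrevFpIndex = 3). A worked example, assuming the usual 64-bit slot size of 8 bytes (the slot size itself comes from the compiler config, not from this diff):

    ComputeReservedJSFuncOffset(8) = 8 * (PrevFpIndex - JSFuncIndex) = 8 * (3 - 1) = 16   // func slot: fp - 16
    ComputeReservedPcOffset(8)     = 8 * (PrevFpIndex - PcIndex)     = 8 * (3 - 0) = 24   // pc slot:   fp - 24

So SaveJSFuncOnOptJSFuncFrame stores the JSFunction two slots below the saved frame pointer and SaveByteCodePcOnOptJSFuncFrame stores the bytecode pc three slots below it, matching the layout diagram that precedes the struct.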
@ -1689,6 +1691,162 @@ struct BuiltinWithArgvFrame : public base::AlignedStruct<base::AlignedPointer::S
    alignas(EAS) uintptr_t returnAddr;
};

// * FASTJITFunctionFrame layout description as the following:
//            +--------------------------+
//            |         arg[N-1]         |
//            +--------------------------+
//            |           ...            |
//            +--------------------------+
//            |          arg[1]          |
//            +--------------------------+
//            |          arg[0]          |
//            +--------------------------+
//            |           this           |
//            +--------------------------+
//            |        new-target        |
//            +--------------------------+
//            |        call-target       |
//            |--------------------------|
//            |           argc           |
//   sp ----> |--------------------------| ---------------
//            |        returnAddr        |       ^
//            |--------------------------|       |
//            |        callsiteFp        |       |
//            |--------------------------|       |
//            |        frameType         | FASTJITFunctionFrame
//            |--------------------------|       |
//            |        call-target       |       |
//            |--------------------------|       |
//            |     pc(bytecode pc)      |       v
//            +--------------------------+ ---------------
//
// NOLINTNEXTLINE(cppcoreguidelines-pro-type-member-init)
struct FASTJITFunctionFrame : public base::AlignedStruct<JSTaggedValue::TaggedTypeSize(),
                                                          JSTaggedValue,
                                                          JSTaggedValue,
                                                          base::AlignedPointer,
                                                          base::AlignedPointer,
                                                          base::AlignedPointer> {
public:
    using ConstInfo = kungfu::LLVMStackMapType::ConstInfo;
    enum class Index : size_t {
        PcIndex = 0,
        JSFuncIndex,
        TypeIndex,
        PrevFpIndex,
        ReturnAddrIndex,
        NumOfMembers
    };
    static_assert(static_cast<size_t>(Index::NumOfMembers) == NumOfTypes);

    static constexpr size_t GetFunctionDeltaReturnAddr()
    {
        return static_cast<size_t>(Index::ReturnAddrIndex) - static_cast<size_t>(Index::JSFuncIndex);
    }

    inline JSTaggedType* GetPrevFrameFp()
    {
        return prevFp;
    }

    JSTaggedType* GetArgv(uintptr_t *preFrameSp) const
    {
        const size_t offset = 2;  // 2: skip argc and argv.
        return reinterpret_cast<JSTaggedType *>(preFrameSp + offset * sizeof(uint64_t) / sizeof(uintptr_t));
    }

    size_t GetArgc(uintptr_t *preFrameSp) const
    {
        return *preFrameSp;
    }

    JSTaggedType* GetArgv(const FrameIterator &it) const;

    uintptr_t GetReturnAddr() const
    {
        return returnAddr;
    }

    void GCIterate(const FrameIterator &it, const RootVisitor &visitor, const RootRangeVisitor &rangeVisitor,
        const RootBaseAndDerivedVisitor &derivedVisitor, FrameType frameType) const;
    void CollectPcOffsetInfo(const FrameIterator &it, ConstInfo &info) const;

    inline JSTaggedValue GetFunction() const
    {
        return jsFunc;
    }

    static uintptr_t ComputeArgsConfigFrameSp(JSTaggedType *fp)
    {
        const size_t offset = 2;  // 2: skip prevFp and return address.
        return reinterpret_cast<uintptr_t>(fp) + offset * sizeof(uintptr_t);
    }

    static size_t GetTypeOffset(bool isArch32 = false)
    {
        return GetOffset<static_cast<size_t>(Index::TypeIndex)>(isArch32);
    }

    static size_t GetPcOffset(bool isArch32 = false)
    {
        return GetOffset<static_cast<size_t>(Index::PcIndex)>(isArch32);
    }

    static size_t GetPrevOffset(bool isArch32 = false)
    {
        return GetOffset<static_cast<size_t>(Index::PrevFpIndex)>(isArch32);
    }

    static size_t GetFunctionOffset(bool isArch32 = false)
    {
        return GetOffset<static_cast<size_t>(Index::JSFuncIndex)>(isArch32);
    }

    static size_t ComputeReservedJSFuncOffset(size_t slotSize)
    {
        size_t slotOffset = static_cast<size_t>(Index::PrevFpIndex) - static_cast<size_t>(Index::JSFuncIndex);
        return slotSize * slotOffset;
    }

    static size_t ComputeReservedPcOffset(size_t slotSize)
    {
        size_t slotOffset = static_cast<size_t>(Index::PrevFpIndex) - static_cast<size_t>(Index::PcIndex);
        return slotSize * slotOffset;
    }

    FrameType GetType() const
    {
        return type;
    }

    inline const uint8_t *GetPc() const
    {
        return pc;
    }

    friend class FrameIterator;
    friend class FrameHandler;
    void GetDeoptBundleInfo(const FrameIterator &it, std::vector<kungfu::ARKDeopt>& deopts) const;
    void GetFuncCalleeRegAndOffset(
        const FrameIterator &it, kungfu::CalleeRegAndOffsetVec &ret) const;
    uintptr_t* ComputePrevFrameSp(const FrameIterator &it) const;

private:
    static FASTJITFunctionFrame* GetFrameFromSp(const JSTaggedType *sp)
    {
        return reinterpret_cast<FASTJITFunctionFrame *>(reinterpret_cast<uintptr_t>(sp) -
            MEMBER_OFFSET(FASTJITFunctionFrame, prevFp));
    }

    // dynamic callee save registers for x86-64
    alignas(EAS) const uint8_t *pc {nullptr};
    alignas(EAS) JSTaggedValue jsFunc {JSTaggedValue::Undefined()};
    alignas(EAS) FrameType type {0};
    alignas(EAS) JSTaggedType *prevFp {nullptr};
    alignas(EAS) uintptr_t returnAddr {0};
    // dynamic callee save registers for arm64
};
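To make the slot order above concrete, here is a hedged sketch of how a walker can recover the frame object from sp and why GetFrameFromSp subtracts the offset of the saved frame pointer; MiniFrame only mirrors the field order of the struct above and is not the runtime's definition.

#include <cassert>
#include <cstddef>
#include <cstdint>

// Illustration-only mirror of the FASTJITFunctionFrame slot order
// (ascending addresses): pc, jsFunc, type, prevFp, returnAddr.
struct MiniFrame {
    const uint8_t *pc;
    uint64_t jsFunc;
    uintptr_t type;
    uintptr_t *prevFp;
    uintptr_t returnAddr;
};

// sp points at the saved frame pointer slot, so the frame base is
// sp minus the offset of prevFp, as in GetFrameFromSp above.
static MiniFrame *FrameFromSp(uintptr_t *sp)
{
    return reinterpret_cast<MiniFrame *>(
        reinterpret_cast<uintptr_t>(sp) - offsetof(MiniFrame, prevFp));
}

int main()
{
    MiniFrame frame {};
    uintptr_t *sp = reinterpret_cast<uintptr_t *>(
        reinterpret_cast<uintptr_t>(&frame) + offsetof(MiniFrame, prevFp));
    assert(FrameFromSp(sp) == &frame);
    return 0;
}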

enum class GCVisitedFlag : bool {
    VISITED = true,
    IGNORED = false,

@ -1786,7 +1944,7 @@ public:
    bool IsJSFrame() const
    {
        FrameType type = GetFrameType();
        return IsInterpretedFrame(type) || IsOptimizedJSFunctionFrame(type);
        return IsInterpretedFrame(type) || IsOptimizedJSFunctionFrame(type) || IsFastJitFunctionFrame(type);
    }

    bool IsOptimizedJSFunctionFrame(FrameType type) const

@ -1801,6 +1959,23 @@ public:
        return IsOptimizedJSFunctionFrame(type);
    }

    bool IsFastJitFunctionFrame(FrameType type) const
    {
        return type == FrameType::FASTJIT_FUNCTION_FRAME ||
               type == FrameType::FASTJIT_FAST_CALL_FUNCTION_FRAME;
    }

    bool IsFastJitFunctionFrame() const
    {
        FrameType type = GetFrameType();
        return IsFastJitFunctionFrame(type);
    }

    bool IsAotOrJitFunctionFrame() const
    {
        return IsOptimizedJSFunctionFrame() || IsFastJitFunctionFrame();
    }

private:
    JSTaggedType *current_ {nullptr};
    const JSThread *thread_ {nullptr};
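A hedged usage sketch of the predicate pair introduced here: the stand-in enum below lists only the values needed for the example, and the simplified IsAotOrJit omits the AOT fast-call variant, so it is illustrative rather than the runtime's FrameType and FrameIterator.

#include <cassert>

// Stand-in enum with just the values used in this example (illustration only).
enum class FrameKind {
    OPTIMIZED_JS_FUNCTION_FRAME,
    FASTJIT_FUNCTION_FRAME,
    FASTJIT_FAST_CALL_FUNCTION_FRAME,
    INTERPRETER_FRAME
};

// Mirrors FrameIterator::IsFastJitFunctionFrame(FrameType): both FASTJIT
// variants count as JIT-compiled JS frames.
static bool IsFastJit(FrameKind kind)
{
    return kind == FrameKind::FASTJIT_FUNCTION_FRAME ||
           kind == FrameKind::FASTJIT_FAST_CALL_FUNCTION_FRAME;
}

// Simplified mirror of IsAotOrJitFunctionFrame(): an AOT-optimized frame or a JIT frame.
static bool IsAotOrJit(FrameKind kind)
{
    return kind == FrameKind::OPTIMIZED_JS_FUNCTION_FRAME || IsFastJit(kind);
}

int main()
{
    assert(IsFastJit(FrameKind::FASTJIT_FAST_CALL_FUNCTION_FRAME));
    assert(IsAotOrJit(FrameKind::OPTIMIZED_JS_FUNCTION_FRAME));
    assert(!IsAotOrJit(FrameKind::INTERPRETER_FRAME));
    return 0;
}
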
@ -216,6 +216,11 @@ JSTaggedValue FrameHandler::GetFunction() const
            auto *frame = OptimizedJSFunctionFrame::GetFrameFromSp(sp_);
            return frame->GetFunction();
        }
        case FrameType::FASTJIT_FUNCTION_FRAME:
        case FrameType::FASTJIT_FAST_CALL_FUNCTION_FRAME: {
            auto *frame = FASTJITFunctionFrame::GetFrameFromSp(sp_);
            return frame->GetFunction();
        }
        case FrameType::BUILTIN_FRAME_WITH_ARGV_STACK_OVER_FLOW_FRAME :
        case FrameType::INTERPRETER_FRAME:
        case FrameType::INTERPRETER_FAST_NEW_FRAME:

@ -422,6 +427,12 @@ void FrameHandler::IterateFrameChain(JSTaggedType *start, const RootVisitor &vis
            frame->GCIterate(it, visitor, rangeVisitor, derivedVisitor);
            break;
        }
        case FrameType::FASTJIT_FUNCTION_FRAME:
        case FrameType::FASTJIT_FAST_CALL_FUNCTION_FRAME: {
            auto frame = it.GetFrame<FASTJITFunctionFrame>();
            frame->GCIterate(it, visitor, rangeVisitor, derivedVisitor, type);
            break;
        }
        case FrameType::ASM_INTERPRETER_FRAME:
        case FrameType::INTERPRETER_CONSTRUCTOR_FRAME: {
            auto frame = it.GetFrame<AsmInterpretedFrame>();

@ -93,7 +93,7 @@ public:

    bool IsJSFrame(FrameType type) const
    {
        return IsInterpretedFrame(type) || IsOptimizedJSFunctionFrame(type);
        return IsInterpretedFrame(type) || IsOptimizedJSFunctionFrame(type) || IsFastJitFunctionFrame(type);
    }

    bool IsOptimizedJSFunctionFrame(FrameType type) const

@ -102,6 +102,12 @@ public:
               type == FrameType::OPTIMIZED_JS_FAST_CALL_FUNCTION_FRAME;
    }

    bool IsFastJitFunctionFrame(FrameType type) const
    {
        return type == FrameType::FASTJIT_FUNCTION_FRAME ||
               type == FrameType::FASTJIT_FAST_CALL_FUNCTION_FRAME;
    }

    bool IsAsmInterpretedFrame() const
    {
        FrameIterator it(sp_, thread_);

@ -1148,6 +1148,16 @@ public:
        return enableAPPJIT_;
    }

    void SetEnableJitFrame(bool value)
    {
        enableJitFrame_ = value;
    }

    bool IsEnableJitFrame() const
    {
        return enableJitFrame_;
    }

    bool IsEnableJitDfxDump() const
    {
        return isEnableJitDfxDump_;

@ -1934,6 +1944,7 @@ private:
    bool enableMemoryAnalysis_ {true};
    bool checkPgoVersion_ {false};
    bool enableJitFastCompile_ {false};
    bool enableJitFrame_ {false};
};
} // namespace panda::ecmascript

@ -23,7 +23,7 @@ namespace panda::ecmascript::ohos {

class JitTools {
public:
    static bool GetJitEscapeEanble()
    static bool GetJitEscapeDisable()
    {
#ifdef JIT_ESCAPE_ENABLE
        return OHOS::system::GetBoolParameter("ark.jit.escape.disable", false);

@ -63,6 +63,14 @@ public:
#endif
        return false;
    }

    static bool GetJitFrameEnable()
    {
#ifdef JIT_ESCAPE_ENABLE
        return OHOS::system::GetBoolParameter("ark.jit.enable.jitframe", false);
#endif
        return false;
    }
};
}
#endif // ECMASCRIPT_JIT_TOOLS_H
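For reference, a self-contained sketch of the pattern GetJitFrameEnable follows: read a boolean switch with a default, behind a compile-time guard. GetBoolParameterStub below stands in for OHOS::system::GetBoolParameter and simply returns the default, so the example builds without the OHOS system-parameter library; it is not the runtime's code.

#include <iostream>
#include <string>

// Stand-in for OHOS::system::GetBoolParameter (illustration only):
// here it always reports the caller-supplied default.
static bool GetBoolParameterStub(const std::string &key, bool defValue)
{
    (void)key;
    return defValue;
}

// Same shape as JitTools::GetJitFrameEnable above: the switch is compiled out
// to false unless the guard macro is defined at build time.
static bool GetJitFrameEnableSketch()
{
#ifdef JIT_ESCAPE_ENABLE
    return GetBoolParameterStub("ark.jit.enable.jitframe", false);
#else
    return false;
#endif
}

int main()
{
    std::cout << std::boolalpha << "jit frame enabled: " << GetJitFrameEnableSketch() << std::endl;
    return 0;
}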
@ -2954,9 +2954,14 @@ JSTaggedType *RuntimeStubs::GetActualArgv(JSThread *thread)
    FrameIterator it(current, thread);
    ASSERT(it.IsLeaveFrame());
    it.Advance<GCVisitedFlag::VISITED>();
    ASSERT(it.IsOptimizedJSFunctionFrame());
    auto optimizedJSFunctionFrame = it.GetFrame<OptimizedJSFunctionFrame>();
    return optimizedJSFunctionFrame->GetArgv(it);
    ASSERT(it.IsAotOrJitFunctionFrame());
    if (it.IsFastJitFunctionFrame()) {
        auto optimizedJSJITFunctionFrame = it.GetFrame<FASTJITFunctionFrame>();
        return optimizedJSJITFunctionFrame->GetArgv(it);
    } else {
        auto optimizedJSFunctionFrame = it.GetFrame<OptimizedJSFunctionFrame>();
        return optimizedJSFunctionFrame->GetArgv(it);
    }
}
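Reduced to a standalone sketch, the dispatch above amounts to picking the wrapper that matches the frame kind and reading argv through it; the Fake* types below are illustrative stand-ins, not the runtime's frame classes.

#include <cassert>
#include <cstdint>

using JSTaggedType = uint64_t;

// Illustration-only stand-ins for the two frame wrappers used above.
struct FakeJitFrame {
    JSTaggedType *argv;
    JSTaggedType *GetArgv() const { return argv; }
};

struct FakeAotFrame {
    JSTaggedType *argv;
    JSTaggedType *GetArgv() const { return argv; }
};

// Mirrors the shape of GetActualArgv: branch on the frame kind and read
// argv through the matching wrapper.
static JSTaggedType *GetArgvFor(bool isFastJitFrame, const FakeJitFrame &jit, const FakeAotFrame &aot)
{
    if (isFastJitFrame) {
        return jit.GetArgv();
    }
    return aot.GetArgv();
}

int main()
{
    JSTaggedType jitArgs[2] = {};
    JSTaggedType aotArgs[2] = {};
    FakeJitFrame jit {jitArgs};
    FakeAotFrame aot {aotArgs};
    assert(GetArgvFor(true, jit, aot) == jitArgs);
    assert(GetArgvFor(false, jit, aot) == aotArgs);
    return 0;
}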

JSTaggedType *RuntimeStubs::GetActualArgvFromStub(JSThread *thread)

@ -2967,7 +2972,11 @@ JSTaggedType *RuntimeStubs::GetActualArgvFromStub(JSThread *thread)
    it.Advance<GCVisitedFlag::VISITED>();
    ASSERT(it.IsOptimizedFrame());
    it.Advance<GCVisitedFlag::VISITED>();
    ASSERT(it.IsOptimizedJSFunctionFrame());
    ASSERT(it.IsAotOrJitFunctionFrame());
    if (it.IsFastJitFunctionFrame()) {
        auto optimizedJSJITFunctionFrame = it.GetFrame<FASTJITFunctionFrame>();
        return optimizedJSJITFunctionFrame->GetArgv(it);
    }
    auto optimizedJSFunctionFrame = it.GetFrame<OptimizedJSFunctionFrame>();
    return optimizedJSFunctionFrame->GetArgv(it);
}

@ -3113,9 +3113,14 @@ JSTaggedType RuntimeStubs::GetActualArgvNoGC(uintptr_t argGlue)
    FrameIterator it(current, thread);
    ASSERT(it.IsOptimizedFrame());
    it.Advance<GCVisitedFlag::VISITED>();
    ASSERT(it.IsOptimizedJSFunctionFrame());
    auto optimizedJSFunctionFrame = it.GetFrame<OptimizedJSFunctionFrame>();
    return reinterpret_cast<uintptr_t>(optimizedJSFunctionFrame->GetArgv(it));
    ASSERT(it.IsAotOrJitFunctionFrame());
    if (it.IsFastJitFunctionFrame()) {
        auto fastJitFunctionFrame = it.GetFrame<FASTJITFunctionFrame>();
        return reinterpret_cast<uintptr_t>(fastJitFunctionFrame->GetArgv(it));
    } else {
        auto optimizedJSFunctionFrame = it.GetFrame<OptimizedJSFunctionFrame>();
        return reinterpret_cast<uintptr_t>(optimizedJSFunctionFrame->GetArgv(it));
    }
}

double RuntimeStubs::FloatMod(double x, double y)

@ -20,3 +20,4 @@ ark.jit.enable.litecg = shell:shell:0775
ark.jit.enable.dumpobj = shell:shell:0775
ark.jit.hotness.threshold = shell:shell:0775
persist.ark.edengc.enable = shell:shell:0775
ark.jit.enable.jitframe = shell:shell:0775