!9145 del unused opcodes part2

Merge pull request !9145 from suyue/sy_opcode_oh
This commit is contained in:
openharmony_ci 2024-09-18 19:39:11 +00:00 committed by Gitee
commit 1d3a971cbc
No known key found for this signature in database
GPG Key ID: 173E9B9CA92EEF8F
24 changed files with 32 additions and 1518 deletions

View File

@ -68,10 +68,7 @@ if (TARGET == "ark") {
deps_libcg += [ ":libcgark" ]
}
src_libcglowerer = [
"src/be/bbt.cpp",
"src/be/lower.cpp",
]
src_libcglowerer = [ "src/be/lower.cpp" ]
src_libmplbe = [
"src/be/becommon.cpp",

View File

@ -1,155 +0,0 @@
/*
* Copyright (c) 2023 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MAPLEBE_INCLUDE_BE_BBT_H
#define MAPLEBE_INCLUDE_BE_BBT_H
/* MapleIR headers. */
#include "mir_nodes.h"
#include "mir_lower.h"
namespace maplebe {
using namespace maple;
// BBT: a lightweight basic-block abstraction over a run of MapleIR statements,
// used by the BE lowerer to reason about try/endtry/catch regions and branch
// structure before real CG basic blocks exist.
class BBT {
    /*
     * if stmt is a switch/rangegoto, succs gets defined, and condJumpBranch == fallthruBranch == nullptr.
     * otherwise, succs.size() ==0 &&
     * 1. for cond br stmt, both condJumpBranch and fallthruBranch are defined.
     * 2. if bb ends with 'throw', both fields get nullptr.
     * 3. for the others, condJumpBranch == nullptr && only fallthruBranch is defined
     */
public:
    // Block kind: plain code, start of a try region, end of a try region, or a catch handler.
    enum BBTType : uint8 { kBBPlain, kBBTry, kBBEndTry, kBBCatch };

    // Construct a block spanning [s, e]; when s is null the block starts at e.
    // NOTE: initializer order matters — `alloc` must be built before
    // `succs(alloc.Adapter())` uses it.
    BBT(StmtNode *s, StmtNode *e, MemPool *memPool)
        : alloc(memPool),
          type(kBBPlain),
          succs(alloc.Adapter()),
          labelIdx(MIRLabelTable::GetDummyLabel()),
          firstStmt(s != nullptr ? s : e),
          lastStmt(e)
    {
    }
    ~BBT() = default;

    // Grow the block so it ends at eNode. The new range must be contiguous with
    // the current lastStmt (checked via lastStmt->GetNext()).
    void Extend(const StmtNode *sNode, StmtNode *eNode)
    {
        CHECK_FATAL(lastStmt != nullptr, "nullptr check");
        CHECK_FATAL(sNode != nullptr ? lastStmt->GetNext() == sNode : lastStmt->GetNext() == eNode, "Extend fail");
        lastStmt = eNode;
    }
    void SetLabelIdx(LabelIdx li)
    {
        labelIdx = li;
    }
    // True once a real label (not the dummy sentinel) has been attached.
    bool IsLabeled() const
    {
        return labelIdx != MIRLabelTable::GetDummyLabel();
    }
    LabelIdx GetLabelIdx() const
    {
        return labelIdx;
    }
    // Record the block kind together with the statement that caused it
    // (e.g. the try/endtry/catch statement itself).
    void SetType(BBTType t, StmtNode &k)
    {
        type = t;
        keyStmt = &k;
    }
    bool IsTry() const
    {
        return type == kBBTry;
    }
    bool IsEndTry() const
    {
        return type == kBBEndTry;
    }
    bool IsCatch() const
    {
        return type == kBBCatch;
    }
    // Append a successor; only used for switch/rangegoto terminators (see class comment).
    void AddSuccs(BBT *bb)
    {
        succs.emplace_back(bb);
    }
    void SetCondJumpBranch(BBT *bb)
    {
        condJumpBranch = bb;
    }
    BBT *GetCondJumpBranch()
    {
        return condJumpBranch;
    }
    void SetFallthruBranch(BBT *bb)
    {
        fallthruBranch = bb;
    }
    BBT *GetFallthruBranch()
    {
        return fallthruBranch;
    }
    StmtNode *GetFirstStmt()
    {
        return firstStmt;
    }
    void SetFirstStmt(StmtNode &stmt)
    {
        firstStmt = &stmt;
    }
    StmtNode *GetLastStmt()
    {
        return lastStmt;
    }
    void SetLastStmt(StmtNode &stmt)
    {
        lastStmt = &stmt;
    }
    StmtNode *GetKeyStmt()
    {
        return keyStmt;
    }
#if DEBUG
    // Debug pretty-printer; defined out of line (bbt.cpp).
    void Dump(const MIRModule &mod) const;
#endif
private:
    MapleAllocator alloc;              // arena for succs; must be first (see ctor note)
    BBTType type;
    BBT *condJumpBranch = nullptr;     // taken target of a conditional branch
    BBT *fallthruBranch = nullptr;     // fall-through successor
    MapleVector<BBT *> succs;          // successors of switch/rangegoto blocks
    LabelIdx labelIdx;                 // dummy label until SetLabelIdx is called
    StmtNode *firstStmt;
    StmtNode *lastStmt;
    StmtNode *keyStmt = nullptr;       // statement that determined `type`
};
} /* namespace maplebe */
#endif /* MAPLEBE_INCLUDE_BE_BBT_H */

View File

@ -25,7 +25,6 @@
#include "intrinsics.h" /* For IntrinDesc. This includes 'intrinsic_op.h' as well */
#include "becommon.h"
#include "cg.h"
#include "bbt.h"
/* MapleIR headers. */
#include "mir_nodes.h"
#include "mir_module.h"
@ -113,8 +112,6 @@ public:
return &addrof;
}
BaseNode *LowerIaddrof(const IreadNode &iaddrof);
DassignNode *SaveReturnValueInLocal(StIdx, uint16);
BaseNode *NeedRetypeWhenLowerCallAssigned(PrimType pType);
void LowerCallStmt(StmtNode &, StmtNode *&, BlockNode &, MIRType *retty = nullptr, bool uselvar = false,
@ -145,8 +142,6 @@ public:
void LowerTypePtr(BaseNode &expr) const;
void LowerAsmStmt(AsmNode *asmNode, BlockNode *blk);
/* A pseudo register refers to a symbol when DreadNode is converted to RegreadNode. */
StIdx GetSymbolReferredToByPseudoRegister(PregIdx regNO) const
{

View File

@ -1,48 +0,0 @@
/*
* Copyright (c) 2023 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "bbt.h"
namespace maplebe {
#if DEBUG && defined(ARK_LITECG_DEBUG)
// Pretty-print this block for debugging: its kind tag, then first / key / last
// statements (or placeholders when absent).
void BBT::Dump(const MIRModule &mod) const
{
    // Resolve the block-kind tag once, then print it.
    const char *tag = "Plain";
    if (IsTry()) {
        tag = "Try";
    } else if (IsEndTry()) {
        tag = "EndTry";
    } else if (IsCatch()) {
        tag = "Catch";
    }
    LogInfo::MapleLogger() << tag << '\n';

    if (firstStmt == nullptr) {
        // Empty block: nothing else to show.
        LogInfo::MapleLogger() << "<<Empty>>" << '\n';
        return;
    }
    firstStmt->Dump(0);
    LogInfo::MapleLogger() << '\n';
    if (keyStmt != nullptr) {
        keyStmt->Dump(0);
        LogInfo::MapleLogger() << '\n';
    } else {
        LogInfo::MapleLogger() << "<<No-Key-Stmt>>" << '\n';
    }
    if (lastStmt != nullptr) {
        lastStmt->Dump(0);
    }
    LogInfo::MapleLogger() << '\n';
}
#endif
} /* namespace maplebe */

View File

@ -34,12 +34,6 @@ using namespace maple;
#define TARGARM32 0
BaseNode *CGLowerer::LowerIaddrof(const IreadNode &iaddrof)
{
CHECK_FATAL(iaddrof.GetFieldID() == 0, "fieldID must be 0");
return iaddrof.Opnd(0);
}
// input node must be cvt, retype, zext or sext
BaseNode *CGLowerer::LowerCastExpr(BaseNode &expr)
{
@ -115,48 +109,6 @@ void CGLowerer::LowerIassign(IassignNode &iassign, BlockNode &newBlk)
newBlk.AddStatement(newStmt);
}
// Produce a fresh, unique string index of the form "asm_tempvarN" for naming
// temporaries introduced while lowering inline-asm operands.
static GStrIdx NewAsmTempStrIdx()
{
    static uint32 strIdxCount = 0;  // monotonically increasing suffix for uniqueness
    const std::string tempName = "asm_tempvar" + std::to_string(++strIdxCount);
    return GlobalTables::GetStrTable().GetOrCreateStrIdxFromName(tempName);
}
// Lower an inline-asm statement: lower each operand, and hoist any non-leaf
// operand expression tree into a temporary (preg at >= -O2, named local
// otherwise) so the asm node only references simple reads. The rewritten asm
// node is appended to newBlk.
void CGLowerer::LowerAsmStmt(AsmNode *asmNode, BlockNode *newBlk)
{
    for (size_t opndIdx = 0; opndIdx < asmNode->NumOpnds(); opndIdx++) {
        BaseNode *loweredOpnd = LowerExpr(*asmNode, *asmNode->Opnd(opndIdx), *newBlk);
        if (loweredOpnd->NumOpnds() == 0) {
            // Already a leaf; the asm node can use it directly.
            asmNode->SetOpnd(loweredOpnd, opndIdx);
            continue;
        }
        // Pick the type of the temporary that will hold the expression tree.
        TyIdx tyIdxUsed = static_cast<TyIdx>(loweredOpnd->GetPrimType());
        if (loweredOpnd->op == OP_iread) {
            IreadNode *ireadNode = static_cast<IreadNode *>(loweredOpnd);
            tyIdxUsed = ireadNode->GetType()->GetTypeIndex();
        }
        PrimType type = GlobalTables::GetTypeTable().GetTypeFromTyIdx(tyIdxUsed)->GetPrimType();
        StmtNode *assignNode = nullptr;
        BaseNode *readOpnd = nullptr;
        if (CGOptions::GetInstance().GetOptimizeLevel() >= CGOptions::kLevel2) {
            // Optimized build: store the operand into a pseudo register.
            DEBUG_ASSERT(mirModule.CurFunction() != nullptr, "curFunction should not be nullptr");
            PregIdx pregIdx = mirModule.CurFunction()->GetPregTab()->CreatePreg(type);
            assignNode = mirBuilder->CreateStmtRegassign(type, pregIdx, loweredOpnd);
            readOpnd = mirBuilder->CreateExprRegread(type, pregIdx);
        } else {
            // Unoptimized build: store the operand into a fresh named local.
            MIRSymbol *st = mirModule.GetMIRBuilder()->CreateSymbol(tyIdxUsed, NewAsmTempStrIdx(), kStVar, kScAuto,
                                                                    mirModule.CurFunction(), kScopeLocal);
            assignNode = mirModule.GetMIRBuilder()->CreateStmtDassign(*st, 0, loweredOpnd);
            readOpnd = mirBuilder->CreateExprDread(*st);
        }
        newBlk->AddStatement(assignNode);
        asmNode->SetOpnd(readOpnd, opndIdx);
    }
    newBlk->AddStatement(asmNode);
}
BaseNode *CGLowerer::NeedRetypeWhenLowerCallAssigned(PrimType pType)
{
BaseNode *retNode = mirModule.GetMIRBuilder()->CreateExprRegread(pType, -kSregRetval0);
@ -218,20 +170,13 @@ StmtNode *CGLowerer::GenCallNode(const StmtNode &stmt, PUIdx &funcCalled, CallNo
CallNode *newCall = nullptr;
if (stmt.GetOpCode() == OP_callassigned) {
newCall = mirModule.GetMIRBuilder()->CreateStmtCall(origCall.GetPUIdx(), origCall.GetNopnd());
} else if (stmt.GetOpCode() == OP_virtualcallassigned) {
newCall = mirModule.GetMIRBuilder()->CreateStmtVirtualCall(origCall.GetPUIdx(), origCall.GetNopnd());
} else if (stmt.GetOpCode() == OP_superclasscallassigned) {
newCall = mirModule.GetMIRBuilder()->CreateStmtSuperclassCall(origCall.GetPUIdx(), origCall.GetNopnd());
}
CHECK_FATAL(newCall != nullptr, "nullptr is not expected");
newCall->SetDeoptBundleInfo(origCall.GetDeoptBundleInfo());
newCall->SetSrcPos(stmt.GetSrcPos());
funcCalled = origCall.GetPUIdx();
CHECK_FATAL((newCall->GetOpCode() == OP_call || newCall->GetOpCode() == OP_interfacecall),
CHECK_FATAL((newCall->GetOpCode() == OP_call),
"virtual call or super class call are not expected");
if (newCall->GetOpCode() == OP_interfacecall) {
std::cerr << "interfacecall found\n";
}
newCall->SetStmtAttrs(stmt.GetStmtAttrs());
return newCall;
}
@ -255,11 +200,6 @@ StmtNode *CGLowerer::GenIntrinsiccallNode(const StmtNode &stmt, PUIdx &funcCalle
mirModule.GetMIRBuilder()->CreateStmtIntrinsicCall(origCall.GetIntrinsic(), origCall.GetNopnd());
CHECK_FATAL(newCall->GetOpCode() == OP_intrinsiccall,
"intrinsicnode except intrinsiccall is not expected");
} else if (stmt.GetOpCode() == OP_xintrinsiccallassigned) {
newCall =
mirModule.GetMIRBuilder()->CreateStmtXintrinsicCall(origCall.GetIntrinsic(), origCall.GetNopnd());
CHECK_FATAL(newCall->GetOpCode() == OP_intrinsiccall,
"intrinsicnode except intrinsiccall is not expected");
} else {
newCall = mirModule.GetMIRBuilder()->CreateStmtIntrinsicCall(origCall.GetIntrinsic(),
origCall.GetNopnd(), origCall.GetTyIdx());
@ -423,17 +363,6 @@ BlockNode *CGLowerer::LowerCallAssignedStmt(StmtNode &stmt, bool uselvar)
static_cast<IntrinsiccallNode *>(newCall)->SetReturnVec(*p2nRets);
break;
}
case OP_intrinsiccallwithtypeassigned: {
BlockNode *blockNode = LowerIntrinsiccallToIntrinsicop(stmt);
if (blockNode) {
return blockNode;
}
auto &origCall = static_cast<IntrinsiccallNode &>(stmt);
newCall = GenIntrinsiccallNode(stmt, funcCalled, handledAtLowerLevel, origCall);
p2nRets = &origCall.GetReturnVec();
static_cast<IntrinsiccallNode *>(newCall)->SetReturnVec(*p2nRets);
break;
}
case OP_icallprotoassigned:
case OP_icallassigned: {
auto &origCall = static_cast<IcallNode &>(stmt);
@ -541,11 +470,7 @@ BlockNode *CGLowerer::LowerBlock(BlockNode &block)
newBlk->AppendStatementsFromBlock(*LowerCallAssignedStmt(*stmt, lvar));
break;
}
case OP_virtualcallassigned:
case OP_superclasscallassigned:
case OP_intrinsiccallassigned:
case OP_xintrinsiccallassigned:
case OP_intrinsiccallwithtypeassigned:
newBlk->AppendStatementsFromBlock(*LowerCallAssignedStmt(*stmt));
break;
case OP_intrinsiccall:
@ -583,10 +508,6 @@ BlockNode *CGLowerer::LowerBlock(BlockNode &block)
case OP_comment:
newBlk->AddStatement(stmt);
break;
case OP_asm: {
LowerAsmStmt(static_cast<AsmNode *>(stmt), newBlk);
break;
}
default:
LowerStmt(*stmt, *newBlk);
newBlk->AddStatement(stmt);
@ -607,41 +528,12 @@ StmtNode *CGLowerer::LowerCall(CallNode &callNode, StmtNode *&nextStmt, BlockNod
* call $foo(constval u32 128)
* dassign %jlt (dread agg %%retval)
*/
bool isArrayStore = false;
if (callNode.GetOpCode() == OP_call) {
MIRFunction *calleeFunc = GlobalTables::GetFunctionTable().GetFunctionFromPuidx(callNode.GetPUIdx());
if ((calleeFunc->GetName() == "MCC_WriteRefField") && (callNode.Opnd(1)->GetOpCode() == OP_iaddrof)) {
IreadNode *addrExpr = static_cast<IreadNode *>(callNode.Opnd(1));
if (addrExpr->Opnd(0)->GetOpCode() == OP_array) {
isArrayStore = true;
}
}
}
for (size_t i = 0; i < callNode.GetNopndSize(); ++i) {
BaseNode *newOpnd = LowerExpr(callNode, *callNode.GetNopndAt(i), newBlk);
callNode.SetOpnd(newOpnd, i);
}
if (isArrayStore && checkLoadStore) {
bool needCheckStore = true;
if (needCheckStore) {
MIRFunction *fn =
mirModule.GetMIRBuilder()->GetOrCreateFunction("MCC_Reflect_Check_Arraystore", TyIdx(PTY_void));
DEBUG_ASSERT(fn->GetFuncSymbol() != nullptr, "fn->GetFuncSymbol() should not be nullptr");
fn->GetFuncSymbol()->SetAppearsInCode(true);
beCommon.UpdateTypeTable(*fn->GetMIRFuncType());
fn->AllocSymTab();
MapleVector<BaseNode *> args(mirModule.GetMIRBuilder()->GetCurrentFuncCodeMpAllocator()->Adapter());
args.emplace_back(callNode.Opnd(0));
args.emplace_back(callNode.Opnd(kNodeThirdOpnd));
StmtNode *checkStoreStmt = mirModule.GetMIRBuilder()->CreateStmtCall(fn->GetPuidx(), args);
newBlk.AddStatement(checkStoreStmt);
}
}
DassignNode *dassignNode = nullptr;
if ((nextStmt != nullptr) && (nextStmt->GetOpCode() == OP_dassign)) {
dassignNode = static_cast<DassignNode *>(nextStmt);
@ -736,19 +628,6 @@ void CGLowerer::CleanupBranches(MIRFunction &func) const
*/
StmtNode *cmtB = nullptr;
StmtNode *cmtE = nullptr;
bool isCleanable = true;
while ((next != nullptr) && (next->GetOpCode() != OP_label)) {
if ((next->GetOpCode() == OP_endtry)) {
isCleanable = false;
break;
}
next = next->GetNext();
}
if ((next != nullptr) && (!isCleanable)) {
prev = next->GetPrev();
continue;
}
next = curr->GetNext();
while ((next != nullptr) && (next->GetOpCode() != OP_label)) {
@ -860,15 +739,7 @@ BaseNode *CGLowerer::LowerExpr(BaseNode &parent, BaseNode &expr, BlockNode &blkN
expr.SetPrimType(PTY_u8);
}
if (expr.GetOpCode() == OP_iread && expr.Opnd(0)->GetOpCode() == OP_array) {
BaseNode *node = LowerExpr(expr, *expr.Opnd(0), blkNode);
if (node->GetOpCode() == OP_intrinsicop) {
auto *binNode = static_cast<IntrinsicopNode *>(node);
return binNode;
} else {
expr.SetOpnd(node, 0);
}
} else {
{
for (size_t i = 0; i < expr.NumOpnds(); ++i) {
expr.SetOpnd(LowerExpr(expr, *expr.Opnd(i), blkNode), i);
}
@ -887,11 +758,6 @@ BaseNode *CGLowerer::LowerExpr(BaseNode &parent, BaseNode &expr, BlockNode &blkN
return converted;
}
switch (expr.GetOpCode()) {
case OP_array: {
DEBUG_ASSERT(false, "unsupported OP_array");
return &expr;
}
case OP_dread:
return LowerDread(static_cast<DreadNode &>(expr), blkNode);
@ -901,9 +767,6 @@ BaseNode *CGLowerer::LowerExpr(BaseNode &parent, BaseNode &expr, BlockNode &blkN
case OP_iread:
return LowerIread(static_cast<IreadNode &>(expr));
case OP_iaddrof:
return LowerIaddrof(static_cast<IreadNode &>(expr));
case OP_cvt:
case OP_retype:
case OP_zext:

View File

@ -1947,19 +1947,6 @@ void AArch64CGFunc::SelectCmpOp(Operand &resOpnd, Operand &lhsOpnd, Operand &rhs
* if OP_cmpg, CSINC RES, RES, WZR, VC (no overflow)
*/
RegOperand &xzr = GetZeroOpnd(dsize);
if ((opcode == OP_cmpl) || (opcode == OP_cmpg)) {
DEBUG_ASSERT(isFloat, "incorrect operand types");
SelectTargetFPCmpQuiet(opnd0, *opnd1, GetPrimTypeBitSize(primType));
SelectAArch64CSINV(resOpnd, xzr, xzr, GetCondOperand(CC_GE), (dsize == k64BitSize));
SelectAArch64CSINC(resOpnd, resOpnd, xzr, GetCondOperand(CC_LE), (dsize == k64BitSize));
if (opcode == OP_cmpl) {
SelectAArch64CSINV(resOpnd, resOpnd, xzr, GetCondOperand(CC_VC), (dsize == k64BitSize));
} else {
SelectAArch64CSINC(resOpnd, resOpnd, xzr, GetCondOperand(CC_VC), (dsize == k64BitSize));
}
return;
}
if (opcode == OP_cmp) {
SelectAArch64Cmp(opnd0, *opnd1, !isFloat, GetPrimTypeBitSize(primType));
if (unsignedIntegerComparison) {
@ -4116,22 +4103,7 @@ MemOperand &AArch64CGFunc::CreateNonExtendMemOpnd(PrimType ptype, const BaseNode
addrOpnd = HandleExpr(parent, addrExpr);
}
addrOpnd = static_cast<RegOperand *>(&LoadIntoRegister(*addrOpnd, PTY_a64));
Insn *lastInsn = GetCurBB() == nullptr ? nullptr : GetCurBB()->GetLastMachineInsn();
if ((addrExpr.GetOpCode() == OP_CG_array_elem_add) && (offset == 0) && lastInsn &&
(lastInsn->GetMachineOpcode() == MOP_xadrpl12) &&
(&lastInsn->GetOperand(kInsnFirstOpnd) == &lastInsn->GetOperand(kInsnSecondOpnd))) {
Operand &opnd = lastInsn->GetOperand(kInsnThirdOpnd);
StImmOperand &stOpnd = static_cast<StImmOperand &>(opnd);
OfstOperand &ofstOpnd = GetOrCreateOfstOpnd(static_cast<uint64>(stOpnd.GetOffset()), k32BitSize);
MemOperand &tmpMemOpnd =
GetOrCreateMemOpnd(MemOperand::kAddrModeLo12Li, GetPrimTypeBitSize(ptype),
static_cast<RegOperand *>(addrOpnd), nullptr, &ofstOpnd, stOpnd.GetSymbol());
if (GetCurBB() && GetCurBB()->GetLastMachineInsn()) {
GetCurBB()->RemoveInsn(*GetCurBB()->GetLastMachineInsn());
}
return tmpMemOpnd;
} else {
{
OfstOperand &ofstOpnd = GetOrCreateOfstOpnd(static_cast<uint64>(offset), k64BitSize);
return GetOrCreateMemOpnd(MemOperand::kAddrModeBOi, GetPrimTypeBitSize(ptype),
static_cast<RegOperand *>(addrOpnd), nullptr, &ofstOpnd, nullptr);

View File

@ -359,22 +359,7 @@ static void HandleCondbr(StmtNode &stmt, CGFunc &cgFunc)
return;
}
}
/*
* Special case:
* brfalse(ge (cmpg (op0, op1), 0) ==>
* fcmp op1, op2
* blo
*/
if ((condGotoNode.GetOpCode() == OP_brfalse) && (condNode->GetOpCode() == OP_ge) && (op0->GetOpCode() == OP_cmpg) &&
(op1->GetOpCode() == OP_constval)) {
auto *constValNode = static_cast<ConstvalNode *>(op1);
MIRConst *mirConst = constValNode->GetConstVal();
if (mirConst->IsZero()) {
cgFunc.SelectCondSpecialCase2(condGotoNode, *op0);
cgFunc.SetCurBB(*cgFunc.StartNewBB(condGotoNode));
return;
}
}
Operand *opnd0 = cgFunc.HandleExpr(*condNode, *condNode->Opnd(0));
Operand *opnd1 = cgFunc.HandleExpr(*condNode, *condNode->Opnd(1));
cgFunc.SelectCondGoto(condGotoNode, *opnd0, *opnd1);
@ -487,7 +472,6 @@ static void InitHandleStmtFactory()
RegisterFactoryFunction<HandleStmtFactory>(OP_intrinsiccall, HandleIntrinsicCall);
RegisterFactoryFunction<HandleStmtFactory>(OP_intrinsiccallassigned, HandleIntrinsicCall);
RegisterFactoryFunction<HandleStmtFactory>(OP_intrinsiccallwithtype, HandleIntrinsicCall);
RegisterFactoryFunction<HandleStmtFactory>(OP_intrinsiccallwithtypeassigned, HandleIntrinsicCall);
RegisterFactoryFunction<HandleStmtFactory>(OP_dassign, HandleDassign);
RegisterFactoryFunction<HandleStmtFactory>(OP_regassign, HandleRegassign);
RegisterFactoryFunction<HandleStmtFactory>(OP_iassign, HandleIassign);

View File

@ -40,10 +40,10 @@ uint32 MemLayout::FindLargestActualArea(int32 &aggCopySize)
uint32 maxCopyStackSize = 0; // Size of aggregate param stack copy requirement
for (; stmt != nullptr; stmt = stmt->GetNext()) {
Opcode opCode = stmt->GetOpCode();
if ((opCode < OP_call || opCode > OP_xintrinsiccallassigned) && opCode != OP_icallproto) {
if ((opCode < OP_call || opCode > OP_intrinsiccallassigned) && opCode != OP_icallproto) {
continue;
}
if (opCode == OP_intrinsiccallwithtypeassigned || opCode == OP_intrinsiccallwithtype ||
if (opCode == OP_intrinsiccallwithtype ||
opCode == OP_intrinsiccallassigned || opCode == OP_intrinsiccall) {
/*
* Some intrinsics, such as MPL_ATOMIC_EXCHANGE_PTR, are handled by CG,

View File

@ -480,22 +480,6 @@ Stmt &LMIRBuilder::ICall(Expr funcAddr, Args &args_, PregIdx pregIdx)
return *mirBuilder.CreateStmtIcallAssigned(args, pregIdx);
}
Stmt &LMIRBuilder::IntrinsicCall(IntrinsicId func_, Args &args_, Var *result)
{
MapleVector<BaseNode *> args(mirBuilder.GetCurrentFuncCodeMpAllocator()->Adapter());
for (const auto &arg : args_) {
args.emplace_back(arg.GetNode());
}
// need to fix the type for IntrinsicId
auto func = static_cast<MIRIntrinsicID>(func_);
if (result == nullptr) {
return *mirBuilder.CreateStmtIntrinsicCall(func, args);
} else {
return *mirBuilder.CreateStmtIntrinsicCallAssigned(func, args, result);
}
}
Stmt &LMIRBuilder::IntrinsicCall(IntrinsicId func_, Args &args_, PregIdx retPregIdx1, PregIdx retPregIdx2)
{
MapleVector<BaseNode *> args(mirBuilder.GetCurrentFuncCodeMpAllocator()->Adapter());

View File

@ -40,5 +40,4 @@ BINARYOP(rem)
BINARYOP(shl)
BINARYOP(ror)
BINARYOP(sub)
BINARYOP(CG_array_elem_add)

View File

@ -315,10 +315,6 @@ struct ExtractbitsNodeT : public BaseNodeT { // 8B
uint16 padding;
};
struct IreadoffNodeT : public BaseNodeT { // 8B
int32 offset;
};
using BinaryNodeT = BaseNodeT;
// Add expression types to compare node, to
// facilitate the evaluation of postorder stored kCmpl
@ -352,23 +348,16 @@ inline MirConstT *GetConstval(const ConstvalNodeT &node)
}
// SizeoftypeNode shouldn't be seen here
// ArrayNode shouldn't be seen here
struct AddrofNodeT : public BaseNodeT { // 12B
StIdx stIdx;
FieldID fieldID;
};
using DreadNodeT = AddrofNodeT; // same shape.
struct AddroffuncNodeT : public BaseNodeT { // 8B
PUIdx puIdx; // 32bit now
};
struct RegreadNodeT : public BaseNodeT { // 8B
PregIdx regIdx; // 32bit, negative if special register
};
struct AddroflabelNodeT : public BaseNodeT { // 8B
uint32 offset;
};
} // namespace maple
#endif // MAPLE_INCLUDE_VM_CMPL_V2

View File

@ -48,8 +48,7 @@ REGISTER_SAFE_CAST(UnaryNode, from.GetOpCode() == OP_abs ||
from.GetOpCode() == OP_sqrt ||
instance_of<TypeCvtNode>(from) ||
instance_of<ExtractbitsNode>(from) ||
instance_of<IreadNode>(from) ||
instance_of<IreadoffNode>(from));
instance_of<IreadNode>(from));
REGISTER_SAFE_CAST(TypeCvtNode, from.GetOpCode() == OP_ceil ||
from.GetOpCode() == OP_cvt ||
from.GetOpCode() == OP_floor ||
@ -59,9 +58,7 @@ REGISTER_SAFE_CAST(RetypeNode, from.GetOpCode() == OP_retype);
REGISTER_SAFE_CAST(ExtractbitsNode, from.GetOpCode() == OP_extractbits ||
from.GetOpCode() == OP_sext ||
from.GetOpCode() == OP_zext);
REGISTER_SAFE_CAST(IreadNode, from.GetOpCode() == OP_iread ||
from.GetOpCode() == OP_iaddrof);
REGISTER_SAFE_CAST(IreadoffNode, from.GetOpCode() == OP_ireadoff);
REGISTER_SAFE_CAST(IreadNode, from.GetOpCode() == OP_iread);
REGISTER_SAFE_CAST(BinaryNode, from.GetOpCode() == OP_add ||
from.GetOpCode() == OP_sub ||
from.GetOpCode() == OP_mul ||
@ -75,35 +72,21 @@ REGISTER_SAFE_CAST(BinaryNode, from.GetOpCode() == OP_add ||
from.GetOpCode() == OP_band ||
from.GetOpCode() == OP_bior ||
from.GetOpCode() == OP_bxor ||
from.GetOpCode() == OP_CG_array_elem_add ||
from.GetOpCode() == OP_cand ||
from.GetOpCode() == OP_cior ||
instance_of<CompareNode>(from) ||
instance_of<DepositbitsNode>(from));
instance_of<CompareNode>(from));
REGISTER_SAFE_CAST(CompareNode, from.GetOpCode() == OP_eq ||
from.GetOpCode() == OP_ge ||
from.GetOpCode() == OP_gt ||
from.GetOpCode() == OP_le ||
from.GetOpCode() == OP_lt ||
from.GetOpCode() == OP_ne ||
from.GetOpCode() == OP_cmp ||
from.GetOpCode() == OP_cmpl ||
from.GetOpCode() == OP_cmpg);
REGISTER_SAFE_CAST(DepositbitsNode, from.GetOpCode() == OP_depositbits);
REGISTER_SAFE_CAST(NaryNode, instance_of<IntrinsicopNode>(from) ||
instance_of<ArrayNode>(from));
from.GetOpCode() == OP_cmp);
REGISTER_SAFE_CAST(NaryNode, instance_of<IntrinsicopNode>(from));
REGISTER_SAFE_CAST(IntrinsicopNode, from.GetOpCode() == OP_intrinsicop);
REGISTER_SAFE_CAST(ConstvalNode, from.GetOpCode() == OP_constval);
REGISTER_SAFE_CAST(ConststrNode, from.GetOpCode() == OP_conststr);
REGISTER_SAFE_CAST(Conststr16Node, from.GetOpCode() == OP_conststr16);
REGISTER_SAFE_CAST(ArrayNode, from.GetOpCode() == OP_array);
REGISTER_SAFE_CAST(AddrofNode, from.GetOpCode() == OP_dread ||
from.GetOpCode() == OP_addrof);
REGISTER_SAFE_CAST(RegreadNode, from.GetOpCode() == OP_regread);
REGISTER_SAFE_CAST(AddroffuncNode, from.GetOpCode() == OP_addroffunc);
REGISTER_SAFE_CAST(AddroflabelNode, from.GetOpCode() == OP_addroflabel);
REGISTER_SAFE_CAST(StmtNode, from.GetOpCode() == OP_endtry ||
instance_of<IassignNode>(from) ||
REGISTER_SAFE_CAST(StmtNode, instance_of<IassignNode>(from) ||
instance_of<GotoNode>(from) ||
instance_of<SwitchNode>(from) ||
instance_of<UnaryStmtNode>(from) ||
@ -115,7 +98,6 @@ REGISTER_SAFE_CAST(IassignNode, from.GetOpCode() == OP_iassign);
REGISTER_SAFE_CAST(GotoNode, from.GetOpCode() == OP_goto);
REGISTER_SAFE_CAST(SwitchNode, from.GetOpCode() == OP_switch);
REGISTER_SAFE_CAST(UnaryStmtNode, from.GetOpCode() == OP_eval ||
(kOpcodeInfo.IsAssertNonnull(from.GetOpCode())) ||
instance_of<DassignNode>(from) ||
instance_of<RegassignNode>(from) ||
instance_of<CondGotoNode>(from) ||
@ -128,29 +110,19 @@ REGISTER_SAFE_CAST(CondGotoNode, from.GetOpCode() == OP_brtrue ||
REGISTER_SAFE_CAST(RangeGotoNode, from.GetOpCode() == OP_rangegoto);
REGISTER_SAFE_CAST(BlockNode, from.GetOpCode() == OP_block);
REGISTER_SAFE_CAST(IfStmtNode, from.GetOpCode() == OP_if);
REGISTER_SAFE_CAST(BinaryStmtNode, instance_of<IassignoffNode>(from));
REGISTER_SAFE_CAST(IassignoffNode, from.GetOpCode() == OP_iassignoff);
REGISTER_SAFE_CAST(NaryStmtNode, from.GetOpCode() == OP_return ||
instance_of<CallNode>(from) ||
instance_of<IcallNode>(from) ||
instance_of<IntrinsiccallNode>(from));
REGISTER_SAFE_CAST(CallNode, from.GetOpCode() == OP_call ||
from.GetOpCode() == OP_virtualcall ||
from.GetOpCode() == OP_superclasscall ||
from.GetOpCode() == OP_interfacecall ||
from.GetOpCode() == OP_callassigned ||
from.GetOpCode() == OP_virtualcallassigned ||
from.GetOpCode() == OP_superclasscallassigned);
from.GetOpCode() == OP_callassigned);
REGISTER_SAFE_CAST(IcallNode, from.GetOpCode() == OP_icall ||
from.GetOpCode() == OP_icallassigned ||
from.GetOpCode() == OP_icallproto ||
from.GetOpCode() == OP_icallprotoassigned);
REGISTER_SAFE_CAST(IntrinsiccallNode, from.GetOpCode() == OP_intrinsiccall ||
from.GetOpCode() == OP_intrinsiccallwithtype ||
from.GetOpCode() == OP_xintrinsiccall ||
from.GetOpCode() == OP_intrinsiccallassigned ||
from.GetOpCode() == OP_intrinsiccallwithtypeassigned ||
from.GetOpCode() == OP_xintrinsiccallassigned);
from.GetOpCode() == OP_intrinsiccallassigned);
REGISTER_SAFE_CAST(LabelNode, from.GetOpCode() == OP_label);
REGISTER_SAFE_CAST(CommentNode, from.GetOpCode() == OP_comment);
#endif

View File

@ -116,7 +116,6 @@ public:
MIRFunction *GetOrCreateFunction(const std::string &, TyIdx);
MIRFunction *GetFunctionFromSymbol(const MIRSymbol &funcst);
MIRFunction *GetFunctionFromName(const std::string &);
// for creating Function.
MIRSymbol *GetFunctionArgument(MIRFunction &fun, uint32 index) const
{
@ -182,8 +181,6 @@ public:
TypeCvtNode *CreateExprTypeCvt(Opcode o, PrimType toPrimType, PrimType fromPrimType, BaseNode &opnd);
TypeCvtNode *CreateExprTypeCvt(Opcode o, const MIRType &type, const MIRType &fromtype, BaseNode *opnd);
ExtractbitsNode *CreateExprExtractbits(Opcode o, PrimType type, uint32 bOffset, uint32 bSize, BaseNode *opnd);
DepositbitsNode *CreateExprDepositbits(Opcode o, PrimType type, uint32 bOffset, uint32 bSize, BaseNode *leftOpnd,
BaseNode *rightOpnd);
RetypeNode *CreateExprRetype(const MIRType &type, const MIRType &fromType, BaseNode *opnd);
RetypeNode *CreateExprRetype(const MIRType &type, PrimType fromType, BaseNode *opnd);
IntrinsicopNode *CreateExprIntrinsicop(MIRIntrinsicID id, Opcode op, PrimType primType, TyIdx tyIdx,
@ -198,34 +195,17 @@ public:
RegassignNode *CreateStmtRegassign(PrimType pty, PregIdx regIdx, BaseNode *src);
IassignNode *CreateStmtIassign(const MIRType &type, FieldID fieldID, BaseNode *addr, BaseNode *src);
CallNode *CreateStmtCall(PUIdx puIdx, const MapleVector<BaseNode *> &args, Opcode opcode = OP_call);
CallNode *CreateStmtVirtualCall(PUIdx puIdx, const MapleVector<BaseNode *> &args)
{
return CreateStmtCall(puIdx, args, OP_virtualcall);
}
CallNode *CreateStmtSuperclassCall(PUIdx puIdx, const MapleVector<BaseNode *> &args)
{
return CreateStmtCall(puIdx, args, OP_superclasscall);
}
CallNode *CreateStmtInterfaceCall(PUIdx puIdx, const MapleVector<BaseNode *> &args)
{
return CreateStmtCall(puIdx, args, OP_interfacecall);
}
IcallNode *CreateStmtIcall(const MapleVector<BaseNode *> &args);
IcallNode *CreateStmtIcallAssigned(const MapleVector<BaseNode *> &args, const MIRSymbol &ret);
IcallNode *CreateStmtIcallAssigned(const MapleVector<BaseNode *> &args, PregIdx pregIdx);
IcallNode *CreateStmtIcallproto(const MapleVector<BaseNode *> &args, const TyIdx &prototypeIdx);
// For Call, VirtualCall, SuperclassCall, InterfaceCall
// For Call
IntrinsiccallNode *CreateStmtIntrinsicCall(MIRIntrinsicID idx, const MapleVector<BaseNode *> &arguments,
TyIdx tyIdx = TyIdx());
IntrinsiccallNode *CreateStmtXintrinsicCall(MIRIntrinsicID idx, const MapleVector<BaseNode *> &arguments);
CallNode *CreateStmtCallRegassigned(PUIdx, const MapleVector<BaseNode *> &, PregIdx, Opcode);
IntrinsiccallNode *CreateStmtIntrinsicCallAssigned(MIRIntrinsicID idx, const MapleVector<BaseNode *> &arguments,
PregIdx retPregIdx1, PregIdx retPregIdx2);
IntrinsiccallNode *CreateStmtIntrinsicCallAssigned(MIRIntrinsicID idx, const MapleVector<BaseNode *> &arguments,
const MIRSymbol *ret, TyIdx tyIdx = TyIdx());
IfStmtNode *CreateStmtIf(BaseNode *cond);
SwitchNode *CreateStmtSwitch(BaseNode *opnd, LabelIdx defaultLabel, const CaseVector &switchTable);
GotoNode *CreateStmtGoto(Opcode o, LabelIdx labIdx);

View File

@ -30,7 +30,6 @@ constexpr uint32 kNodeThirdOpnd = 2;
enum MirLowerPhase : uint8 { kLowerUnder, kLowerMe, kLowerExpandArray, kLowerBe, kLowerCG, kLowerLNO };
constexpr uint32 kShiftLowerMe = 1U << kLowerMe;
constexpr uint32 kShiftLowerExpandArray = 1U << kLowerExpandArray;
constexpr uint32 kShiftLowerBe = 1U << kLowerBe;
constexpr uint32 kShiftLowerCG = 1U << kLowerCG;
constexpr uint32 kShiftLowerLNO = 1U << kLowerLNO;
@ -75,10 +74,6 @@ public:
void LowerCandCior(BlockNode &block);
void LowerBuiltinExpect(BlockNode &block);
void LowerFunc(MIRFunction &func);
void ExpandArrayMrt(MIRFunction &func);
IfStmtNode *ExpandArrayMrtIfBlock(IfStmtNode &node);
BlockNode *ExpandArrayMrtBlock(BlockNode &block);
void AddArrayMrtMpl(BaseNode &exp, BlockNode &newblk);
MIRFuncType *FuncTypeFromFuncPtrExpr(BaseNode *x);
void SetLowerME()
{
@ -90,11 +85,6 @@ public:
lowerPhase |= kShiftLowerLNO;
}
void SetLowerExpandArray()
{
lowerPhase |= kShiftLowerExpandArray;
}
void SetLowerBE()
{
lowerPhase |= kShiftLowerBe;
@ -125,11 +115,6 @@ public:
return lowerPhase & kShiftLowerLNO;
}
bool IsLowerExpandArray() const
{
return lowerPhase & kShiftLowerExpandArray;
}
bool IsLowerBE() const
{
return lowerPhase & kShiftLowerBe;

View File

@ -465,41 +465,6 @@ protected:
// IaddrofNode has the same member fields and member methods as IreadNode
using IaddrofNode = IreadNode;
class IreadoffNode : public UnaryNode {
public:
IreadoffNode() : UnaryNode(OP_ireadoff) {}
IreadoffNode(PrimType ptyp, int32 ofst) : UnaryNode(OP_ireadoff, ptyp), offset(ofst) {}
IreadoffNode(PrimType ptyp, BaseNode *opnd, int32 ofst) : UnaryNode(OP_ireadoff, ptyp, opnd), offset(ofst) {}
virtual ~IreadoffNode() = default;
#ifdef ARK_LITECG_DEBUG
void Dump(int32 indent) const override;
#endif
IreadoffNode *CloneTree(MapleAllocator &allocator) const override
{
auto *node = allocator.GetMemPool()->New<IreadoffNode>(*this);
node->SetOpnd(Opnd(0)->CloneTree(allocator), 0);
return node;
}
int32 GetOffset() const
{
return offset;
}
void SetOffset(int32 offsetValue)
{
offset = offsetValue;
}
private:
int32 offset = 0;
};
class BinaryOpnds {
public:
virtual ~BinaryOpnds() = default;
@ -633,56 +598,6 @@ private:
PrimType opndType = kPtyInvalid; // type of operands.
};
class DepositbitsNode : public BinaryNode {
public:
DepositbitsNode() : BinaryNode(OP_depositbits) {}
DepositbitsNode(Opcode o, PrimType typ) : BinaryNode(o, typ) {}
DepositbitsNode(Opcode o, PrimType typ, uint8 offset, uint8 size, BaseNode *l, BaseNode *r)
: BinaryNode(o, typ, l, r), bitsOffset(offset), bitsSize(size)
{
}
virtual ~DepositbitsNode() = default;
#ifdef ARK_LITECG_DEBUG
void Dump(int32 indent) const override;
#endif
DepositbitsNode *CloneTree(MapleAllocator &allocator) const override
{
auto *node = allocator.GetMemPool()->New<DepositbitsNode>(*this);
node->SetBOpnd(GetBOpnd(0)->CloneTree(allocator), 0);
node->SetBOpnd(GetBOpnd(1)->CloneTree(allocator), 1);
return node;
}
uint8 GetBitsOffset() const
{
return bitsOffset;
}
void SetBitsOffset(uint8 offset)
{
bitsOffset = offset;
}
uint8 GetBitsSize() const
{
return bitsSize;
}
void SetBitsSize(uint8 size)
{
bitsSize = size;
}
private:
uint8 bitsOffset = 0;
uint8 bitsSize = 0;
};
class NaryOpnds {
public:
explicit NaryOpnds(MapleAllocator &mpallocter) : nOpnd(mpallocter.Adapter()) {}
@ -990,161 +905,6 @@ private:
MIRConst *constVal = nullptr;
};
// Leaf expression node for OP_conststr: a constant string literal,
// stored as an index (UStrIdx) into the global UTF-8 string table.
class ConststrNode : public BaseNode {
public:
    ConststrNode() : BaseNode(OP_conststr) {}
    explicit ConststrNode(UStrIdx i) : BaseNode(OP_conststr), strIdx(i) {}
    ConststrNode(PrimType typ, UStrIdx i) : BaseNode(OP_conststr, typ, 0), strIdx(i) {}
    virtual ~ConststrNode() = default;
#ifdef ARK_LITECG_DEBUG
    void Dump(int32 indent) const override;
#endif
    // Leaf node with no operands: a member-wise copy is a full clone.
    ConststrNode *CloneTree(MapleAllocator &allocator) const override
    {
        return allocator.GetMemPool()->New<ConststrNode>(*this);
    }
    // Index of the literal in the global string table.
    UStrIdx GetStrIdx() const
    {
        return strIdx;
    }
    void SetStrIdx(UStrIdx idx)
    {
        strIdx = idx;
    }

private:
    UStrIdx strIdx = UStrIdx(0);
};
// Leaf expression node for OP_conststr16: a constant UTF-16 string literal,
// stored as an index (U16StrIdx) into the global UTF-16 string table.
class Conststr16Node : public BaseNode {
public:
    Conststr16Node() : BaseNode(OP_conststr16) {}
    explicit Conststr16Node(U16StrIdx i) : BaseNode(OP_conststr16), strIdx(i) {}
    Conststr16Node(PrimType typ, U16StrIdx i) : BaseNode(OP_conststr16, typ, 0), strIdx(i) {}
    virtual ~Conststr16Node() = default;
#ifdef ARK_LITECG_DEBUG
    // Dump re-encodes the payload to UTF-8 so the textual IR stays printable.
    void Dump(int32 indent) const override;
#endif
    // Leaf node with no operands: a member-wise copy is a full clone.
    Conststr16Node *CloneTree(MapleAllocator &allocator) const override
    {
        return allocator.GetMemPool()->New<Conststr16Node>(*this);
    }
    // Index of the literal in the global UTF-16 string table.
    U16StrIdx GetStrIdx() const
    {
        return strIdx;
    }
    void SetStrIdx(U16StrIdx idx)
    {
        strIdx = idx;
    }

private:
    U16StrIdx strIdx = U16StrIdx(0);
};
// N-ary expression node for OP_array (array addressing).
// Operand layout: operand 0 is the array base address (see GetBase()),
// operands 1..n are the indices (see GetIndex()). tyIdx names the array
// type; boundsCheck marks whether safety checks should be generated for
// this access (MIRLower::AddArrayMrtMpl inserts an assertnonnull on the
// base when it is set).
class ArrayNode : public NaryNode {
public:
    ArrayNode(MapleAllocator &allocator) : NaryNode(allocator, OP_array) {}
    explicit ArrayNode(const MIRModule &mod) : ArrayNode(mod.GetCurFuncCodeMPAllocator()) {}
    ArrayNode(MapleAllocator &allocator, PrimType typ, TyIdx idx) : NaryNode(allocator, OP_array, typ), tyIdx(idx) {}
    ArrayNode(const MIRModule &mod, PrimType typ, TyIdx idx) : ArrayNode(mod.GetCurFuncCodeMPAllocator(), typ, idx) {}
    ArrayNode(MapleAllocator &allocator, PrimType typ, TyIdx idx, bool bcheck)
        : NaryNode(allocator, OP_array, typ), tyIdx(idx), boundsCheck(bcheck)
    {
    }
    ArrayNode(const MIRModule &mod, PrimType typ, TyIdx idx, bool bcheck)
        : ArrayNode(mod.GetCurFuncCodeMPAllocator(), typ, idx, bcheck)
    {
    }
    // Copy constructor variant used by CloneTree: copies tyIdx/boundsCheck
    // but NOT the operand list (the caller clones operands explicitly).
    ArrayNode(MapleAllocator &allocator, const ArrayNode &node)
        : NaryNode(allocator, node), tyIdx(node.tyIdx), boundsCheck(node.boundsCheck)
    {
    }
    ArrayNode(const MIRModule &mod, const ArrayNode &node) : ArrayNode(mod.GetCurFuncCodeMPAllocator(), node) {}
    ArrayNode(ArrayNode &node) = delete;
    ArrayNode &operator=(const ArrayNode &node) = delete;
    virtual ~ArrayNode() = default;
#ifdef ARK_LITECG_DEBUG
    void Dump(int32 indent) const override;
#endif
    // True if both nodes address the same array base (same node, or both
    // bases are addrof of the same symbol).
    bool IsSameBase(ArrayNode *);
    size_t NumOpnds() const override
    {
        DEBUG_ASSERT(numOpnds == GetNopndSize(), "ArrayNode has wrong numOpnds field");
        return GetNopndSize();
    }
    // Deep copy: clones every operand subtree as well.
    ArrayNode *CloneTree(MapleAllocator &allocator) const override
    {
        auto *node = allocator.GetMemPool()->New<ArrayNode>(allocator, *this);
        for (size_t i = 0; i < GetNopndSize(); ++i) {
            node->GetNopnd().push_back(GetNopndAt(i)->CloneTree(allocator));
        }
        node->boundsCheck = boundsCheck;
        node->SetNumOpnds(GetNopndSize());
        return node;
    }
    // i-th index expression (operand i + 1; operand 0 is the base).
    BaseNode *GetIndex(size_t i)
    {
        return Opnd(i + 1);
    }
    // Base address expression of the array access.
    BaseNode *GetBase()
    {
        return Opnd(0);
    }
    TyIdx GetTyIdx() const
    {
        return tyIdx;
    }
    void SetTyIdx(TyIdx idx)
    {
        tyIdx = idx;
    }
    bool GetBoundsCheck() const
    {
        return boundsCheck;
    }
    void SetBoundsCheck(bool check)
    {
        boundsCheck = check;
    }

private:
    TyIdx tyIdx;             // array type index
    bool boundsCheck = true; // generate safety checks for this access
};
class AddrofNode : public BaseNode {
public:
explicit AddrofNode(Opcode o) : BaseNode(o), stIdx() {}
@ -1219,9 +979,6 @@ public:
int32 offset = 0;
};
// AddrofoffNode has the same member fields and member methods as DreadoffNode
using AddrofoffNode = DreadoffNode;
class RegreadNode : public BaseNode {
public:
RegreadNode() : BaseNode(OP_regread) {}
@ -1257,69 +1014,7 @@ private:
PregIdx regIdx = 0; // 32bit, negative if special register
};
// Leaf expression node for OP_addroffunc: the address of a function,
// identified by its PUIdx into the global function table.
class AddroffuncNode : public BaseNode {
public:
    AddroffuncNode() : BaseNode(OP_addroffunc) {}
    AddroffuncNode(PrimType typ, PUIdx pIdx) : BaseNode(OP_addroffunc, typ, 0), puIdx(pIdx) {}
    virtual ~AddroffuncNode() = default;
#ifdef ARK_LITECG_DEBUG
    void Dump(int32 indent) const override;
#endif
    // Leaf node with no operands: a member-wise copy is a full clone.
    AddroffuncNode *CloneTree(MapleAllocator &allocator) const override
    {
        return allocator.GetMemPool()->New<AddroffuncNode>(*this);
    }
    // Index of the target function in GlobalTables' function table.
    PUIdx GetPUIdx() const
    {
        return puIdx;
    }
    void SetPUIdx(PUIdx puIdxValue)
    {
        puIdx = puIdxValue;
    }

private:
    PUIdx puIdx = 0; // 32bit now
};
// Leaf expression node for OP_addroflabel: the address of a label in the
// current function, stored as a label index.
class AddroflabelNode : public BaseNode {
public:
    AddroflabelNode() : BaseNode(OP_addroflabel) {}
    explicit AddroflabelNode(uint32 ofst) : BaseNode(OP_addroflabel), offset(ofst) {}
    virtual ~AddroflabelNode() = default;
#ifdef ARK_LITECG_DEBUG
    void Dump(int32 indent) const override;
#endif
    // Leaf node with no operands: a member-wise copy is a full clone.
    AddroflabelNode *CloneTree(MapleAllocator &allocator) const override
    {
        return allocator.GetMemPool()->New<AddroflabelNode>(*this);
    }
    // NOTE(review): the field is declared LabelIdx but the accessors use
    // uint32 — presumably LabelIdx is a 32-bit integer typedef; confirm.
    uint32 GetOffset() const
    {
        return offset;
    }
    void SetOffset(uint32 offsetValue)
    {
        offset = offsetValue;
    }

private:
    LabelIdx offset = 0; // label index, despite the name "offset"
};
// for finally, endtry
// for finally
class StmtNode : public BaseNode, public PtrListNodeBase<StmtNode> {
public:
static std::atomic<uint32> stmtIDNext; // for assigning stmtID, initialized to 1; 0 is reserved
@ -1743,7 +1438,7 @@ private:
using MCasePair = std::pair<BaseNode *, LabelIdx>;
using MCaseVector = MapleVector<MCasePair>;
// eval, throw, free, assertnonnull
// eval, throw, free
class UnaryStmtNode : public StmtNode {
public:
explicit UnaryStmtNode(Opcode o) : StmtNode(o, 1) {}
@ -2298,48 +1993,6 @@ public:
}
};
// Statement node for OP_iassignoff: an indirect store at an offset from an
// address. BOpnd(0) is the address expression, BOpnd(1) the value stored;
// offset is presumably a byte displacement — confirm with the opcode spec.
class IassignoffNode : public BinaryStmtNode {
public:
    IassignoffNode() : BinaryStmtNode(OP_iassignoff) {}
    explicit IassignoffNode(int32 ofst) : BinaryStmtNode(OP_iassignoff), offset(ofst) {}
    IassignoffNode(PrimType primType, int32 offset, BaseNode *addrOpnd, BaseNode *srcOpnd) : IassignoffNode(offset)
    {
        BaseNodeT::SetPrimType(primType);
        SetBOpnd(addrOpnd, 0);
        SetBOpnd(srcOpnd, 1);
    }
    virtual ~IassignoffNode() = default;
#ifdef ARK_LITECG_DEBUG
    void Dump(int32 indent) const override;
#endif
    // Deep copy with a freshly assigned statement ID and cloned operands.
    IassignoffNode *CloneTree(MapleAllocator &allocator) const override
    {
        auto *node = allocator.GetMemPool()->New<IassignoffNode>(*this);
        node->SetStmtID(stmtIDNext++);
        node->SetBOpnd(GetBOpnd(0)->CloneTree(allocator), 0);
        node->SetBOpnd(GetBOpnd(1)->CloneTree(allocator), 1);
        return node;
    }
    int32 GetOffset() const
    {
        return offset;
    }
    void SetOffset(int32 newOffset)
    {
        offset = newOffset;
    }

private:
    int32 offset = 0;
};
// used by return
class NaryStmtNode : public StmtNode, public NaryOpnds {
public:
@ -2460,28 +2113,8 @@ private:
GStrIdx funcNameIdx;
};
// used by assertnonnull
// Statement node for assertnonnull-style checks: a unary statement whose
// single operand is the pointer being checked, combined with
// SafetyCheckStmtNode's function-name bookkeeping for diagnostics.
class AssertNonnullStmtNode : public UnaryStmtNode, public SafetyCheckStmtNode {
public:
    AssertNonnullStmtNode(Opcode o, GStrIdx funcNameIdx) : UnaryStmtNode(o), SafetyCheckStmtNode(funcNameIdx) {}
    virtual ~AssertNonnullStmtNode() {}
#ifdef ARK_LITECG_DEBUG
    void Dump(int32 indent) const override;
#endif
    // Deep copy with a freshly assigned statement ID and a cloned operand.
    AssertNonnullStmtNode *CloneTree(MapleAllocator &allocator) const override
    {
        auto *node = allocator.GetMemPool()->New<AssertNonnullStmtNode>(*this);
        node->SetStmtID(stmtIDNext++);
        node->SetOpnd(Opnd()->CloneTree(allocator), 0);
        return node;
    }
};
// used by call, virtualcall, superclasscall, interfacecall,
// callassigned, virtualcallassigned,
// superclasscallassigned,
// used by call,
// callassigned,
class CallNode : public NaryStmtNode, public DeoptBundleInfo {
public:
CallNode(MapleAllocator &allocator, Opcode o)
@ -2727,7 +2360,7 @@ private:
CallReturnVector returnValues;
};
// used by intrinsiccall and xintrinsiccall
// used by intrinsiccall
class IntrinsiccallNode : public NaryStmtNode, public DeoptBundleInfo {
public:
IntrinsiccallNode(MapleAllocator &allocator, Opcode o)
@ -2957,77 +2590,6 @@ enum AsmQualifierKind : unsigned { // they are alreadgy Maple IR keywords
kASMgoto,
};
// Statement node for OP_asm: an inline-assembly statement. The n-ary
// operands are the input expressions; outputs are modelled as a call-return
// vector (dassign/regassign targets). Constraint strings, clobbers and goto
// labels mirror extended-asm syntax.
class AsmNode : public NaryStmtNode {
public:
    explicit AsmNode(MapleAllocator *alloc)
        : NaryStmtNode(*alloc, OP_asm),
          asmString(alloc->GetMemPool()),
          inputConstraints(alloc->Adapter()),
          asmOutputs(alloc->Adapter()),
          outputConstraints(alloc->Adapter()),
          clobberList(alloc->Adapter()),
          gotoLabels(alloc->Adapter()),
          qualifiers(0)
    {
    }
    // Copy constructor variant: copies the asm template text and qualifiers,
    // but leaves the constraint/output/clobber/label vectors empty.
    AsmNode(MapleAllocator &allocator, const AsmNode &node)
        : NaryStmtNode(allocator, node),
          asmString(node.asmString, allocator.GetMemPool()),
          inputConstraints(allocator.Adapter()),
          asmOutputs(allocator.Adapter()),
          outputConstraints(allocator.Adapter()),
          clobberList(allocator.Adapter()),
          gotoLabels(allocator.Adapter()),
          qualifiers(node.qualifiers)
    {
    }
    virtual ~AsmNode() = default;
    // qualifiers is a bitmask indexed by AsmQualifierKind (volatile/inline/goto).
    void SetQualifier(AsmQualifierKind x)
    {
        qualifiers |= (1U << static_cast<uint32>(x));
    }
    bool GetQualifier(AsmQualifierKind x) const
    {
        return (qualifiers & (1U << static_cast<uint32>(x))) != 0;
    }
    // Outputs reuse the call-return mechanism: each entry assigns to a
    // symbol (dassign) or a pseudo-register (regassign).
    CallReturnVector *GetCallReturnVector() override
    {
        return &asmOutputs;
    }
    // Marks that some inputs are also written — TODO(review): confirm exact
    // semantics with the consumer of this flag.
    void SetHasWriteInputs()
    {
        hasWriteInputs = true;
    }
    bool HasWriteInputs() const
    {
        return hasWriteInputs;
    }
#ifdef ARK_LITECG_DEBUG
    void DumpOutputs(int32 indent, std::string &uStr) const;
    void DumpInputOperands(int32 indent, std::string &uStr) const;
    void Dump(int32 indent) const override;
#endif
    MapleString asmString;                  // the asm template text
    MapleVector<UStrIdx> inputConstraints;  // length is numOpnds
    CallReturnVector asmOutputs;
    MapleVector<UStrIdx> outputConstraints; // length is returnValues.size()
    MapleVector<UStrIdx> clobberList;
    MapleVector<LabelIdx> gotoLabels;
    uint32 qualifiers;

private:
    bool hasWriteInputs = false;
};
#ifdef ARK_LITECG_DEBUG
void DumpCallReturns(const MIRModule &mod, CallReturnVector nrets, int32 indent);
#endif

View File

@ -818,7 +818,7 @@ public:
strIdxToLabIdxMap.erase(idx);
}
MapleUnorderedSet<LabelIdx> addrTakenLabels; // those appeared in addroflabel or MIRLblConst
MapleUnorderedSet<LabelIdx> addrTakenLabels; // those appeared in MIRLblConst
MapleUnorderedSet<LabelIdx> caseLabelSet; // labels marking starts of switch cases
private:

View File

@ -34,7 +34,6 @@ enum OpcodeProp {
// returned values
kOpcodePropNotPure, // The operation does not return same result with idential operands
kOpcodePropMayThrowException,
kOpcodePropIsAssertNonnull, // The operation check nonnnull
kOpcodePropIsAssertUpperBoundary, // The operation check upper boundary
kOpcodePropIsAssertLowerBoundary, // The operation check lower boundary
};
@ -50,7 +49,6 @@ constexpr unsigned long OPCODEISCALL = 1ULL << kOpcodePropIsCall;
constexpr unsigned long OPCODEISCALLASSIGNED = 1ULL << kOpcodePropIsCallAssigned;
constexpr unsigned long OPCODENOTPURE = 1ULL << kOpcodePropNotPure;
constexpr unsigned long OPCODEMAYTHROWEXCEPTION = 1ULL << kOpcodePropMayThrowException;
constexpr unsigned long OPCODEASSERTNONNULL = 1ULL << kOpcodePropIsAssertNonnull;
constexpr unsigned long OPCODEASSERTUPPERBOUNDARY = 1ULL << kOpcodePropIsAssertUpperBoundary;
constexpr unsigned long OPCODEASSERTLOWERBOUNDARY = 1ULL << kOpcodePropIsAssertLowerBoundary;
@ -167,12 +165,6 @@ public:
return o == OP_dassign || o == OP_regassign;
}
bool IsAssertNonnull(Opcode o) const
{
DEBUG_ASSERT(o < OP_last, "invalid opcode");
return table[o].flag & OPCODEASSERTNONNULL;
}
bool IsAssertBoundary(Opcode o) const
{
DEBUG_ASSERT(o < OP_last, "invalid opcode");

View File

@ -24,19 +24,14 @@
// other opcodes
OPCODE(comment, CommentNode, (OPCODEISSTMT | OPCODENOTMMPL), 0)
OPCODE(eval, UnaryStmtNode, (OPCODEISSTMT | OPCODENOTMMPL), 8)
OPCODE(assertnonnull, UnaryStmtNode, (OPCODEISSTMT | OPCODENOTMMPL | OPCODEASSERTNONNULL), 8)
// Expr & Notmmpl
// storage access opcodes
OPCODE(dread, AddrofNode, (OPCODENOTMMPL | OPCODEHASSSAUSE), 12)
OPCODE(iread, IreadNode, (OPCODENOTMMPL | OPCODEHASSSAUSE), 12)
// leaf opcodes
OPCODE(addrof, AddrofNode, OPCODENOTMMPL, 12)
OPCODE(iaddrof, IreadNode, OPCODENOTMMPL, 12)
// N-ary expression opcodes
OPCODE(array, ArrayNode, (OPCODEISVARSIZE | OPCODENOTMMPL | OPCODEMAYTHROWEXCEPTION), 8)
// Stmt
// storage access opcodes
OPCODE(iassignoff, IassignoffNode, OPCODEISSTMT, 8)
OPCODE(regassign, RegassignNode, (OPCODEISSTMT | OPCODEHASSSADEF), 8)
// flat control flow opcodes
OPCODE(goto, GotoNode, OPCODEISSTMT, 8)
@ -46,34 +41,19 @@
OPCODE(rangegoto, RangeGotoNode, OPCODEISSTMT, 8)
// call opcodes
OPCODE(call, CallNode, (OPCODEISSTMT | OPCODEISVARSIZE | OPCODEHASSSAUSE | OPCODEHASSSADEF | OPCODEISCALL), 8)
OPCODE(virtualcall, CallNode, (OPCODEISSTMT | OPCODEISVARSIZE | OPCODEHASSSAUSE | OPCODEHASSSADEF | OPCODEISCALL), 8)
OPCODE(superclasscall, CallNode, (OPCODEISSTMT | OPCODEISVARSIZE | OPCODEHASSSAUSE | OPCODEHASSSADEF | OPCODEISCALL), 8)
OPCODE(interfacecall, CallNode, (OPCODEISSTMT | OPCODEISVARSIZE | OPCODEHASSSAUSE | OPCODEHASSSADEF | OPCODEISCALL), 8)
OPCODE(icall, IcallNode, (OPCODEISSTMT | OPCODEISVARSIZE | OPCODEHASSSAUSE | OPCODEHASSSADEF | OPCODEISCALL), 8)
OPCODE(intrinsiccall, IntrinsiccallNode, (OPCODEISSTMT | OPCODEISVARSIZE | OPCODEHASSSAUSE | OPCODEHASSSADEF | OPCODEISCALL), 8)
OPCODE(intrinsiccallwithtype, IntrinsiccallNode, (OPCODEISSTMT | OPCODEISVARSIZE | OPCODEHASSSAUSE | OPCODEHASSSADEF | OPCODEISCALL), 12)
OPCODE(xintrinsiccall, IntrinsiccallNode, (OPCODEISSTMT | OPCODEISVARSIZE | OPCODEHASSSAUSE | OPCODEHASSSADEF | OPCODEISCALL), 8)
OPCODE(callassigned, CallNode, (OPCODEISSTMT | OPCODEISVARSIZE | OPCODEHASSSAUSE | OPCODEHASSSADEF | OPCODEISCALL | OPCODEISCALLASSIGNED), 0)
OPCODE(virtualcallassigned, CallNode, (OPCODEISSTMT | OPCODEISVARSIZE | OPCODEHASSSAUSE | OPCODEHASSSADEF | OPCODEISCALL | OPCODEISCALLASSIGNED), 0)
OPCODE(superclasscallassigned, CallNode, (OPCODEISSTMT | OPCODEISVARSIZE | OPCODEHASSSAUSE | OPCODEHASSSADEF | OPCODEISCALL | OPCODEISCALLASSIGNED), 0)
OPCODE(icallassigned, IcallNode, (OPCODEISSTMT | OPCODEISVARSIZE | OPCODEHASSSAUSE | OPCODEHASSSADEF | OPCODEISCALL | OPCODEISCALLASSIGNED), 0)
OPCODE(intrinsiccallassigned, IntrinsiccallNode, (OPCODEISSTMT | OPCODEISVARSIZE | OPCODEHASSSAUSE | OPCODEHASSSADEF | OPCODEISCALL | OPCODEISCALLASSIGNED), 0)
OPCODE(intrinsiccallwithtypeassigned, IntrinsiccallNode, (OPCODEISSTMT | OPCODEISVARSIZE | OPCODEHASSSAUSE | OPCODEHASSSADEF | OPCODEISCALL | OPCODEISCALLASSIGNED), 0)
OPCODE(xintrinsiccallassigned, IntrinsiccallNode, (OPCODEISSTMT | OPCODEISVARSIZE | OPCODEHASSSAUSE | OPCODEHASSSADEF | OPCODEISCALL | OPCODEISCALLASSIGNED), 0)
// exception handling
OPCODE(endtry, StmtNode, OPCODEISSTMT, 6)
// other opcodes
OPCODE(label, LabelNode, OPCODEISSTMT, 8)
// Expr
// storage access opcodes
OPCODE(ireadoff, IreadoffNode, 0, 8)
OPCODE(regread, RegreadNode, OPCODEHASSSAUSE, 8)
// leaf opcodes
OPCODE(addroffunc, AddroffuncNode, 0, 8)
OPCODE(addroflabel, AddroflabelNode, 0, 8)
OPCODE(constval, ConstvalNode, 0, 8)
OPCODE(conststr, ConststrNode, OPCODENOTMMPL, 8)
OPCODE(conststr16, Conststr16Node, OPCODENOTMMPL, 8)
// type conversion expression opcodes
OPCODE(ceil, TypeCvtNode, OPCODEISTYPECVT, 8)
OPCODE(cvt, TypeCvtNode, OPCODEISTYPECVT, 8)
@ -103,7 +83,6 @@
OPCODE(band, BinaryNode, 0, 0)
OPCODE(bior, BinaryNode, 0, 0)
OPCODE(bxor, BinaryNode, 0, 0)
OPCODE(CG_array_elem_add, BinaryNode, 0, 0)
OPCODE(eq, CompareNode, OPCODEISCOMPARE, 8)
OPCODE(ge, CompareNode, OPCODEISCOMPARE, 8)
OPCODE(gt, CompareNode, OPCODEISCOMPARE, 8)
@ -111,17 +90,12 @@
OPCODE(lt, CompareNode, OPCODEISCOMPARE, 8)
OPCODE(ne, CompareNode, OPCODEISCOMPARE, 8)
OPCODE(cmp, CompareNode, OPCODEISCOMPARE, 8)
OPCODE(cmpl, CompareNode, OPCODEISCOMPARE, 8)
OPCODE(cmpg, CompareNode, OPCODEISCOMPARE, 8)
OPCODE(cand, BinaryNode, OPCODENOTMMPL, 0)
OPCODE(cior, BinaryNode, OPCODENOTMMPL, 0)
// N-ary expression opcodes
OPCODE(intrinsicop, IntrinsicopNode, OPCODEISVARSIZE, 8)
// Other expression opcodes
OPCODE(extractbits, ExtractbitsNode, 0, 8)
OPCODE(depositbits, DepositbitsNode, 0, 8)
// leaf node
OPCODE(asm, AsmNode, OPCODEISSTMT | OPCODEHASSSAUSE | OPCODEHASSSADEF | OPCODEISCALLASSIGNED, 0)
OPCODE(addrofoff, addrofoffNode, 0, 12)
OPCODE(icallproto, IcallNode, (OPCODEISSTMT | OPCODEISVARSIZE | OPCODEHASSSAUSE | OPCODEHASSSADEF | OPCODEISCALL), 8)
OPCODE(icallprotoassigned, IcallNode, (OPCODEISSTMT | OPCODEISVARSIZE | OPCODEHASSSAUSE | OPCODEHASSSADEF | OPCODEISCALL | OPCODEISCALLASSIGNED), 8)

View File

@ -27,9 +27,6 @@ enum Opcode : uint8 {
OP_last,
};
#define CASE_OP_ASSERT_NONNULL \
case OP_assertnonnull:
inline constexpr bool IsDAssign(Opcode code)
{
return (code == OP_dassign);
@ -37,10 +34,8 @@ inline constexpr bool IsDAssign(Opcode code)
inline constexpr bool IsCallAssigned(Opcode code)
{
return (code == OP_callassigned || code == OP_virtualcallassigned ||
code == OP_superclasscallassigned ||
code == OP_icallassigned || code == OP_icallprotoassigned || code == OP_intrinsiccallassigned ||
code == OP_xintrinsiccallassigned || code == OP_intrinsiccallwithtypeassigned);
return (code == OP_callassigned ||
code == OP_icallassigned || code == OP_icallprotoassigned || code == OP_intrinsiccallassigned);
}
inline constexpr bool IsBranch(Opcode opcode)
@ -74,27 +69,16 @@ constexpr bool IsCommutative(Opcode opcode)
constexpr bool IsStmtMustRequire(Opcode opcode)
{
switch (opcode) {
case OP_endtry:
case OP_return:
case OP_call:
case OP_virtualcall:
case OP_superclasscall:
case OP_interfacecall:
case OP_callassigned:
case OP_virtualcallassigned:
case OP_superclasscallassigned:
case OP_icall:
case OP_icallassigned:
case OP_icallproto:
case OP_icallprotoassigned:
case OP_intrinsiccall:
case OP_xintrinsiccall:
case OP_intrinsiccallassigned:
case OP_xintrinsiccallassigned:
case OP_intrinsiccallwithtype:
case OP_intrinsiccallwithtypeassigned:
case OP_asm:
CASE_OP_ASSERT_NONNULL {
case OP_intrinsiccallwithtype: {
return true;
}
default:
@ -165,10 +149,8 @@ constexpr bool IsSupportedOpForCopyInPhasesLoopUnrollAndVRP(Opcode op)
case OP_brfalse:
case OP_brtrue:
case OP_iassign:
CASE_OP_ASSERT_NONNULL
case OP_call:
case OP_callassigned:
case OP_virtualcallassigned:
case OP_intrinsiccall:
case OP_intrinsiccallassigned:
case OP_intrinsiccallwithtype: {

View File

@ -379,15 +379,6 @@ IntrinsiccallNode *MIRBuilder::CreateStmtIntrinsicCall(MIRIntrinsicID idx, const
return stmt;
}
// Builds an OP_xintrinsiccall statement for intrinsic `idx` with the given
// argument expressions, allocated from the current function's code allocator.
IntrinsiccallNode *MIRBuilder::CreateStmtXintrinsicCall(MIRIntrinsicID idx, const MapleVector<BaseNode *> &arguments)
{
    IntrinsiccallNode *callStmt =
        NewNode<IntrinsiccallNode>(*GetCurrentFuncCodeMpAllocator(), OP_xintrinsiccall, idx);
    DEBUG_ASSERT(callStmt != nullptr, "stmt is null");
    callStmt->SetOpnds(arguments);
    return callStmt;
}
CallNode *MIRBuilder::CreateStmtCallRegassigned(PUIdx puIdx, const MapleVector<BaseNode *> &args, PregIdx pRegIdx,
Opcode opcode)
{
@ -416,23 +407,6 @@ IntrinsiccallNode *MIRBuilder::CreateStmtIntrinsicCallAssigned(MIRIntrinsicID id
return stmt;
}
// Builds an assigned intrinsic-call statement whose (optional) result is
// dassigned to the local symbol `ret`. A nonzero tyIdx selects the
// "withtype" form of the opcode.
IntrinsiccallNode *MIRBuilder::CreateStmtIntrinsicCallAssigned(MIRIntrinsicID idx, const MapleVector<BaseNode *> &args,
                                                               const MIRSymbol *ret, TyIdx tyIdx)
{
    const Opcode callOp = (tyIdx == 0u) ? OP_intrinsiccallassigned : OP_intrinsiccallwithtypeassigned;
    auto *callStmt = NewNode<IntrinsiccallNode>(*GetCurrentFuncCodeMpAllocator(), callOp, idx);
    callStmt->SetTyIdx(tyIdx);
    callStmt->SetOpnds(args);
    // Return vector: empty, or a single symbol assignment (field 0, no preg).
    CallReturnVector returnVec(GetCurrentFuncCodeMpAllocator()->Adapter());
    if (ret != nullptr) {
        DEBUG_ASSERT(ret->IsLocal(), "Not Excepted ret");
        returnVec.push_back(CallReturnPair(ret->GetStIdx(), RegFieldPair(0, 0)));
    }
    callStmt->SetReturnVec(returnVec);
    return callStmt;
}
NaryStmtNode *MIRBuilder::CreateStmtReturn(BaseNode *rVal)
{
auto *stmt = NewNode<NaryStmtNode>(*GetCurrentFuncCodeMpAllocator(), OP_return);

View File

@ -381,30 +381,7 @@ BlockNode *MIRLower::LowerBlock(BlockNode &block)
BaseNode *MIRLower::LowerEmbeddedCandCior(BaseNode *x, StmtNode *curstmt, BlockNode *blk)
{
DEBUG_ASSERT(x != nullptr, "nullptr check");
if (x->GetOpCode() == OP_cand || x->GetOpCode() == OP_cior) {
MIRBuilder *builder = mirModule.GetMIRBuilder();
BinaryNode *bnode = static_cast<BinaryNode *>(x);
bnode->SetOpnd(LowerEmbeddedCandCior(bnode->Opnd(0), curstmt, blk), 0);
PregIdx pregIdx = mirFunc->GetPregTab()->CreatePreg(x->GetPrimType());
RegassignNode *regass = builder->CreateStmtRegassign(x->GetPrimType(), pregIdx, bnode->Opnd(0));
blk->InsertBefore(curstmt, regass);
LabelIdx labIdx = mirFunc->GetLabelTab()->CreateLabel();
mirFunc->GetLabelTab()->AddToStringLabelMap(labIdx);
BaseNode *cond = builder->CreateExprRegread(x->GetPrimType(), pregIdx);
CondGotoNode *cgoto =
mirFunc->GetCodeMempool()->New<CondGotoNode>(x->GetOpCode() == OP_cior ? OP_brtrue : OP_brfalse);
cgoto->SetOpnd(cond, 0);
cgoto->SetOffset(labIdx);
blk->InsertBefore(curstmt, cgoto);
bnode->SetOpnd(LowerEmbeddedCandCior(bnode->Opnd(1), curstmt, blk), 1);
regass = builder->CreateStmtRegassign(x->GetPrimType(), pregIdx, bnode->Opnd(1));
blk->InsertBefore(curstmt, regass);
LabelNode *lbl = mirFunc->GetCodeMempool()->New<LabelNode>();
lbl->SetLabelIdx(labIdx);
blk->InsertBefore(curstmt, lbl);
return builder->CreateExprRegread(x->GetPrimType(), pregIdx);
} else {
{
for (size_t i = 0; i < x->GetNumOpnds(); i++) {
x->SetOpnd(LowerEmbeddedCandCior(x->Opnd(i), curstmt, blk), i);
}
@ -423,42 +400,7 @@ void MIRLower::LowerCandCior(BlockNode &block)
do {
StmtNode *stmt = nextStmt;
nextStmt = stmt->GetNext();
if (stmt->IsCondBr() && (stmt->Opnd(0) != nullptr &&
(stmt->Opnd(0)->GetOpCode() == OP_cand || stmt->Opnd(0)->GetOpCode() == OP_cior))) {
CondGotoNode *condGoto = static_cast<CondGotoNode *>(stmt);
BinaryNode *cond = static_cast<BinaryNode *>(condGoto->Opnd(0));
if ((stmt->GetOpCode() == OP_brfalse && cond->GetOpCode() == OP_cand) ||
(stmt->GetOpCode() == OP_brtrue && cond->GetOpCode() == OP_cior)) {
// short-circuit target label is same as original condGoto stmt
condGoto->SetOpnd(cond->GetBOpnd(0), 0);
auto *newCondGoto = mirModule.CurFuncCodeMemPool()->New<CondGotoNode>(Opcode(stmt->GetOpCode()));
newCondGoto->SetOpnd(cond->GetBOpnd(1), 0);
newCondGoto->SetOffset(condGoto->GetOffset());
block.InsertAfter(condGoto, newCondGoto);
nextStmt = stmt; // so it will be re-processed if another cand/cior
} else { // short-circuit target is next statement
LabelIdx lIdx;
LabelNode *labelStmt = nullptr;
if (nextStmt->GetOpCode() == OP_label) {
labelStmt = static_cast<LabelNode *>(nextStmt);
lIdx = labelStmt->GetLabelIdx();
} else {
DEBUG_ASSERT(mirModule.CurFunction() != nullptr, "mirModule.CurFunction() should not be nullptr");
lIdx = mirModule.CurFunction()->GetLabelTab()->CreateLabel();
mirModule.CurFunction()->GetLabelTab()->AddToStringLabelMap(lIdx);
labelStmt = mirModule.CurFuncCodeMemPool()->New<LabelNode>();
labelStmt->SetLabelIdx(lIdx);
block.InsertAfter(condGoto, labelStmt);
}
auto *newCondGoto = mirModule.CurFuncCodeMemPool()->New<CondGotoNode>(
stmt->GetOpCode() == OP_brfalse ? OP_brtrue : OP_brfalse);
newCondGoto->SetOpnd(cond->GetBOpnd(0), 0);
newCondGoto->SetOffset(lIdx);
block.InsertBefore(condGoto, newCondGoto);
condGoto->SetOpnd(cond->GetBOpnd(1), 0);
nextStmt = newCondGoto; // so it will be re-processed if another cand/cior
}
} else { // call LowerEmbeddedCandCior() for all the expression operands
{ // call LowerEmbeddedCandCior() for all the expression operands
for (size_t i = 0; i < stmt->GetNumOpnds(); i++) {
stmt->SetOpnd(LowerEmbeddedCandCior(stmt->Opnd(i), stmt, &block), i);
}
@ -469,9 +411,6 @@ void MIRLower::LowerCandCior(BlockNode &block)
void MIRLower::LowerFunc(MIRFunction &func)
{
mirModule.SetCurFunction(&func);
if (IsLowerExpandArray()) {
ExpandArrayMrt(func);
}
BlockNode *origBody = func.GetBody();
DEBUG_ASSERT(origBody != nullptr, "nullptr check");
BlockNode *newBody = LowerBlock(*origBody);
@ -483,72 +422,6 @@ void MIRLower::LowerFunc(MIRFunction &func)
func.SetBody(newBody);
}
// Expands array accesses inside both arms of an if-statement, when present,
// and returns the (mutated) node.
IfStmtNode *MIRLower::ExpandArrayMrtIfBlock(IfStmtNode &node)
{
    auto *thenPart = node.GetThenPart();
    if (thenPart != nullptr) {
        node.SetThenPart(ExpandArrayMrtBlock(*thenPart));
    }
    auto *elsePart = node.GetElsePart();
    if (elsePart != nullptr) {
        node.SetElsePart(ExpandArrayMrtBlock(*elsePart));
    }
    return &node;
}
// Recursively walks `exp` (operands first) and, for every OP_array node with
// bounds-checking enabled, appends an assertnonnull check on the array base
// address to `newBlock`.
void MIRLower::AddArrayMrtMpl(BaseNode &exp, BlockNode &newBlock)
{
    for (size_t opndId = 0; opndId < exp.NumOpnds(); ++opndId) {
        DEBUG_ASSERT(exp.Opnd(opndId) != nullptr, "nullptr check");
        AddArrayMrtMpl(*exp.Opnd(opndId), newBlock);
    }
    if (exp.GetOpCode() != OP_array) {
        return;
    }
    auto &arrayNode = static_cast<ArrayNode &>(exp);
    if (arrayNode.GetBoundsCheck()) {
        MIRBuilder *builder = mirModule.GetMIRBuilder();
        UnaryStmtNode *nullCheck = builder->CreateStmtUnary(OP_assertnonnull, arrayNode.Opnd(0));
        newBlock.AddStatement(nullCheck);
    }
}
// Rebuilds `block` into a new BlockNode, recursing into nested if/blocks and
// prepending array safety checks (via AddArrayMrtMpl) before other statements.
BlockNode *MIRLower::ExpandArrayMrtBlock(BlockNode &block)
{
    auto *loweredBlock = mirModule.CurFuncCodeMemPool()->New<BlockNode>();
    StmtNode *cursor = block.GetFirst();
    while (cursor != nullptr) {
        StmtNode *stmt = cursor;
        DEBUG_ASSERT(stmt != nullptr, "nullptr check");
        cursor = stmt->GetNext();
        const Opcode op = stmt->GetOpCode();
        if (op == OP_if) {
            loweredBlock->AddStatement(ExpandArrayMrtIfBlock(static_cast<IfStmtNode &>(*stmt)));
        } else if (op == OP_block) {
            loweredBlock->AddStatement(ExpandArrayMrtBlock(static_cast<BlockNode &>(*stmt)));
        } else {
            AddArrayMrtMpl(*stmt, *loweredBlock);
            loweredBlock->AddStatement(stmt);
        }
    }
    return loweredBlock;
}
// Rewrites the function body with array safety checks, but only for
// functions selected by ShouldOptArrayMrt.
void MIRLower::ExpandArrayMrt(MIRFunction &func)
{
    if (!ShouldOptArrayMrt(func)) {
        return;
    }
    BlockNode *body = func.GetBody();
    DEBUG_ASSERT(body != nullptr, "nullptr check");
    func.SetBody(ExpandArrayMrtBlock(*body));
}
MIRFuncType *MIRLower::FuncTypeFromFuncPtrExpr(BaseNode *x)
{
DEBUG_ASSERT(x != nullptr, "nullptr check");
@ -605,13 +478,6 @@ MIRFuncType *MIRLower::FuncTypeFromFuncPtrExpr(BaseNode *x)
}
break;
}
case OP_addroffunc: {
AddroffuncNode *addrofFunc = static_cast<AddroffuncNode *>(x);
PUIdx puIdx = addrofFunc->GetPUIdx();
MIRFunction *f = GlobalTables::GetFunctionTable().GetFunctionFromPuidx(puIdx);
res = f->GetMIRFuncType();
break;
}
case OP_retype: {
MIRType *mirType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(static_cast<RetypeNode *>(x)->GetTyIdx());
if (mirType->GetKind() == kTypePointer) {

View File

@ -167,13 +167,6 @@ void IreadNode::Dump(int32 indent) const
DumpOpnd(*theMIRModule, indent);
}
// Prints "<opcode> <primtype> <offset>" followed by the address operand.
void IreadoffNode::Dump(int32 indent) const
{
    auto &log = LogInfo::MapleLogger();
    log << kOpcodeInfo.GetTableItemAt(GetOpCode()).name << " " << GetPrimTypeName(GetPrimType());
    log << " " << offset;
    DumpOpnd(*theMIRModule, indent);
}
void BinaryNode::Dump(int32 indent) const
{
BaseNode::DumpBase(0);
@ -205,25 +198,6 @@ void CompareNode::Dump(int32 indent) const
BinaryOpnds::Dump(indent);
}
// Prints "<base> <bitsOffset> <bitsSize> (<opnd0>, <opnd1>)". Leaf operands
// stay on one line; otherwise each operand goes on its own indented line.
void DepositbitsNode::Dump(int32 indent) const
{
    BaseNode::DumpBase(0);
    // Cast to int32 so uint8 fields print as numbers, not characters.
    LogInfo::MapleLogger() << " " << static_cast<int32>(bitsOffset) << " " << static_cast<int32>(bitsSize) << " (";
    if (GetBOpnd(0)->IsLeaf() && GetBOpnd(1)->IsLeaf()) {
        GetBOpnd(0)->Dump(0);
        LogInfo::MapleLogger() << ", ";
        GetBOpnd(1)->Dump(0);
    } else {
        LogInfo::MapleLogger() << '\n';
        PrintIndentation(indent + 1);
        GetBOpnd(0)->Dump(indent + 1);
        LogInfo::MapleLogger() << ",\n";
        PrintIndentation(indent + 1);
        GetBOpnd(1)->Dump(indent + 1);
    }
    LogInfo::MapleLogger() << ")";
}
void NaryOpnds::Dump(int32 indent) const
{
LogInfo::MapleLogger() << " (";
@ -293,37 +267,8 @@ void NaryNode::Dump(int32 indent) const
BaseNode::DumpBase(0);
NaryOpnds::Dump(indent);
}
// Prints "array <boundsCheckFlag> <primtype> <arrayType> (<opnds>)".
void ArrayNode::Dump(int32 indent) const
{
    PrintIndentation(0);
    auto &log = LogInfo::MapleLogger();
    log << kOpcodeInfo.GetTableItemAt(GetOpCode()).name << " ";
    log << (boundsCheck ? "1 " : "0 ");
    log << GetPrimTypeName(GetPrimType());
    log << " ";
    GlobalTables::GetTypeTable().GetTypeFromTyIdx(tyIdx)->Dump(0);
    NaryOpnds::Dump(indent);
}
#endif
// Two array accesses share a base when they are the same node, or when both
// bases are addrof expressions referring to the same symbol.
bool ArrayNode::IsSameBase(ArrayNode *arry)
{
    DEBUG_ASSERT(arry != nullptr, "null ptr check");
    if (arry == this) {
        return true;
    }
    BaseNode *lhsBase = GetBase();
    BaseNode *rhsBase = arry->GetBase();
    const bool bothAddrof = (lhsBase->GetOpCode() == OP_addrof) && (rhsBase->GetOpCode() == OP_addrof);
    if (!bothAddrof) {
        return false;
    }
    return static_cast<AddrofNode *>(lhsBase)->GetStIdx() == static_cast<AddrofNode *>(rhsBase)->GetStIdx();
}
#ifdef ARK_LITECG_DEBUG
void IntrinsicopNode::Dump(int32 indent) const
{
@ -341,23 +286,6 @@ void ConstvalNode::Dump(int32) const
GetConstVal()->Dump();
}
// Prints the node's base info followed by the quoted string literal looked
// up from the global UTF-8 string table.
void ConststrNode::Dump(int32) const
{
    BaseNode::DumpBase(0);
    PrintString(GlobalTables::GetUStrTable().GetStringFromStrIdx(UStrIdx(strIdx)));
}
// Prints the node's base info followed by the string literal. The UTF-16
// payload is re-encoded as UTF-8 so the dump stays printable ASCII.
void Conststr16Node::Dump(int32) const
{
    BaseNode::DumpBase(0);
    const std::u16string payload = GlobalTables::GetU16StrTable().GetStringFromStrIdx(U16StrIdx(strIdx));
    std::string utf8Str;
    (void)namemangler::UTF16ToUTF8(utf8Str, payload);
    PrintString(utf8Str);
}
void AddrofNode::Dump(int32) const
{
LogInfo::MapleLogger() << kOpcodeInfo.GetTableItemAt(GetOpCode()).name << " " << GetPrimTypeName(GetPrimType());
@ -404,24 +332,6 @@ void RegreadNode::Dump(int32) const
}
}
// Prints "<opcode> <primtype> &<funcName>"; the name part is omitted when
// the function's symbol cannot be found.
void AddroffuncNode::Dump(int32) const
{
    auto &log = LogInfo::MapleLogger();
    log << kOpcodeInfo.GetTableItemAt(GetOpCode()).name << " " << GetPrimTypeName(GetPrimType());
    MIRFunction *targetFunc = GlobalTables::GetFunctionTable().GetFunctionFromPuidx(puIdx);
    CHECK_FATAL(targetFunc != nullptr, "null ptr");
    MIRSymbol *funcSym = GlobalTables::GetGsymTable().GetSymbolFromStidx(targetFunc->GetStIdx().Idx());
    if (funcSym != nullptr) {
        log << " &" << funcSym->GetName();
    }
}
// Prints "<opcode> <primtype> @<labelName>" for the referenced label of the
// current function.
void AddroflabelNode::Dump(int32) const
{
    DEBUG_ASSERT(theMIRModule->CurFunction() != nullptr, "theMIRModule->CurFunction() should not be nullptr");
    auto &log = LogInfo::MapleLogger();
    log << kOpcodeInfo.GetTableItemAt(GetOpCode()).name << " " << GetPrimTypeName(GetPrimType());
    log << " @" << theMIRModule->CurFunction()->GetLabelName(static_cast<LabelIdx>(offset));
}
void StmtNode::DumpBase(int32 indent) const
{
srcPosition.DumpLoc(lastPrintedLineNum, lastPrintedColumnNum);
@ -504,14 +414,6 @@ void IassignNode::Dump(int32 indent) const
LogInfo::MapleLogger() << ")\n";
}
// Prints the statement header, primtype and offset, then the two operands
// (address, value) and a trailing newline.
void IassignoffNode::Dump(int32 indent) const
{
    StmtNode::DumpBase(indent);
    auto &log = LogInfo::MapleLogger();
    log << " " << GetPrimTypeName(GetPrimType()) << " " << offset;
    BinaryOpnds::Dump(indent);
    log << '\n';
}
void GotoNode::Dump(int32 indent) const
{
StmtNode::DumpBase(indent);
@ -618,15 +520,6 @@ void NaryStmtNode::Dump(int32 indent) const
LogInfo::MapleLogger() << '\n';
}
// Prints the statement header, the safety-check bookkeeping (C modules
// only), then the checked pointer operand.
void AssertNonnullStmtNode::Dump(int32 indent) const
{
    StmtNode::DumpBase(indent);
    const bool isCModule = theMIRModule->IsCModule();
    if (isCModule) {
        SafetyCheckStmtNode::Dump();
    }
    UnaryStmtNode::DumpOpnd(indent);
}
void DumpCallReturns(const MIRModule &mod, CallReturnVector nrets, int32 indent)
{
const MIRFunction *mirFunc = mod.CurFunction();
@ -737,7 +630,7 @@ void IntrinsiccallNode::Dump(int32 indent, bool newline) const
GlobalTables::GetTypeTable().GetTypeFromTyIdx(tyIdx)->Dump(indent + 1);
}
if (GetOpCode() == OP_intrinsiccall || GetOpCode() == OP_intrinsiccallassigned ||
GetOpCode() == OP_intrinsiccallwithtype || GetOpCode() == OP_intrinsiccallwithtypeassigned) {
GetOpCode() == OP_intrinsiccallwithtype) {
LogInfo::MapleLogger() << " " << GetIntrinsicName(intrinsic);
} else {
LogInfo::MapleLogger() << " " << intrinsic;
@ -862,120 +755,6 @@ void EmitStr(const MapleString &mplStr)
LogInfo::MapleLogger() << "\"\n";
}
// Prints the asm statement's output section (" :" prefix). Each output is
// its constraint string followed by either a "dassign" (symbol target) or a
// "regassign" (pseudo-register target), comma-separated across lines.
// `uStr` is a scratch string reused by the caller across dump helpers.
void AsmNode::DumpOutputs(int32 indent, std::string &uStr) const
{
    PrintIndentation(indent + 1);
    LogInfo::MapleLogger() << " :";
    size_t numOutputs = asmOutputs.size();
    const MIRFunction *mirFunc = theMIRModule->CurFunction();
    if (numOutputs == 0) {
        LogInfo::MapleLogger() << '\n';
    } else {
        for (size_t i = 0; i < numOutputs; i++) {
            if (i != 0) {
                PrintIndentation(indent + 2); // Increase the indent by 2 bytes.
            }
            uStr = GlobalTables::GetUStrTable().GetStringFromStrIdx(outputConstraints[i]);
            PrintString(uStr);
            LogInfo::MapleLogger() << " ";
            StIdx stIdx = asmOutputs[i].first;
            RegFieldPair regFieldPair = asmOutputs[i].second;
            if (!regFieldPair.IsReg()) {
                // Symbol target: "dassign %name fieldID" (local) or "$name" (global).
                FieldID fieldID = regFieldPair.GetFieldID();
                LogInfo::MapleLogger() << "dassign";
                const MIRSymbol *st = mirFunc->GetLocalOrGlobalSymbol(stIdx);
                DEBUG_ASSERT(st != nullptr, "st is null");
                LogInfo::MapleLogger() << (stIdx.Islocal() ? " %" : " $");
                LogInfo::MapleLogger() << st->GetName() << " " << fieldID;
            } else {
                // Pseudo-register target: "regassign <primtype> %pregNo".
                PregIdx regIdx = regFieldPair.GetPregIdx();
                const MIRPreg *mirPreg = mirFunc->GetPregItem(static_cast<PregIdx>(regIdx));
                DEBUG_ASSERT(mirPreg != nullptr, "mirPreg is null");
                LogInfo::MapleLogger() << "regassign"
                                       << " " << GetPrimTypeName(mirPreg->GetPrimType());
                LogInfo::MapleLogger() << " %" << mirPreg->GetPregNo();
            }
            if (i != numOutputs - 1) {
                LogInfo::MapleLogger() << ',';
            }
            LogInfo::MapleLogger() << '\n';
        }
    }
}
// Dumps the input-operand section of an inline-asm statement.  Each input is
// printed as its constraint string followed by the folded expression in
// parentheses; entries are comma-separated, one per line.  uStr is the same
// scratch string passed through from Dump().
void AsmNode::DumpInputOperands(int32 indent, std::string &uStr) const
{
    PrintIndentation(indent + 1);
    LogInfo::MapleLogger() << " :";
    if (numOpnds == 0) {
        LogInfo::MapleLogger() << '\n';
        return;
    }
    const size_t lastIdx = static_cast<size_t>(static_cast<int64>(numOpnds - 1));
    for (size_t idx = 0; idx < numOpnds; ++idx) {
        if (idx != 0) {
            PrintIndentation(indent + 2); // Increase the indent by 2 bytes.
        }
        uStr = GlobalTables::GetUStrTable().GetStringFromStrIdx(inputConstraints[idx]);
        PrintString(uStr);
        LogInfo::MapleLogger() << " (";
        GetNopndAt(idx)->Dump(indent + 4); // Increase the indent by 4 bytes.
        LogInfo::MapleLogger() << ")";
        if (idx != lastIdx) {
            LogInfo::MapleLogger() << ',';
        }
        LogInfo::MapleLogger() << "\n";
    }
}
// Dumps a complete inline-asm statement in MapleIR text form:
// the opcode name, any volatile/inline/goto qualifiers, then a brace-enclosed
// body containing the asm template string followed by four colon-introduced
// sections: outputs, inputs, clobbers, and goto labels.
void AsmNode::Dump(int32 indent) const
{
    srcPosition.DumpLoc(lastPrintedLineNum, lastPrintedColumnNum);
    PrintIndentation(indent);
    LogInfo::MapleLogger() << kOpcodeInfo.GetName(op);
    // Qualifiers are emitted in a fixed order: volatile, inline, goto.
    if (GetQualifier(kASMvolatile)) {
        LogInfo::MapleLogger() << " volatile";
    }
    if (GetQualifier(kASMinline)) {
        LogInfo::MapleLogger() << " inline";
    }
    if (GetQualifier(kASMgoto)) {
        LogInfo::MapleLogger() << " goto";
    }
    LogInfo::MapleLogger() << " { ";
    EmitStr(asmString);
    // print outputs
    std::string uStr;
    DumpOutputs(indent, uStr);
    // print input operands
    DumpInputOperands(indent, uStr);
    // print clobber list
    PrintIndentation(indent + 1);
    LogInfo::MapleLogger() << " :";
    for (size_t i = 0; i < clobberList.size(); i++) {
        uStr = GlobalTables::GetUStrTable().GetStringFromStrIdx(clobberList[i]);
        PrintString(uStr);
        // NOTE(review): this assert is vacuous — the loop body only executes
        // when clobberList.size() > 0, so it can never fire; consider removing
        // it or hoisting it above the loop if it was meant to guard something.
        DEBUG_ASSERT(clobberList.size() > 0, "must not be zero");
        if (i != clobberList.size() - 1) {
            LogInfo::MapleLogger() << ',';
        }
    }
    LogInfo::MapleLogger() << '\n';
    // print labels
    PrintIndentation(indent + 1);
    LogInfo::MapleLogger() << " :";
    size_t labelSize = gotoLabels.size();
    DEBUG_ASSERT(theMIRModule->CurFunction() != nullptr, "theMIRModule->CurFunction() should not be nullptr");
    for (size_t i = 0; i < labelSize; i++) {
        LabelIdx offset = gotoLabels[i];
        LogInfo::MapleLogger() << " @" << theMIRModule->CurFunction()->GetLabelName(offset);
        if (i != labelSize - 1) {
            LogInfo::MapleLogger() << ',';
        }
    }
    LogInfo::MapleLogger() << " }\n";
}
std::string SafetyCheckStmtNode::GetFuncName() const
{
return GlobalTables::GetStrTable().GetStringFromStrIdx(funcNameIdx);

View File

@ -55,11 +55,9 @@ public:
static bool IntegerOpIsOverflow(Opcode op, PrimType primType, int64 cstA, int64 cstB);
static MIRIntConst *FoldIntConstUnaryMIRConst(Opcode opcode, PrimType resultType, const MIRIntConst *constNode);
private:
std::pair<BaseNode*, std::optional<IntVal>> FoldArray(ArrayNode *node);
std::pair<BaseNode*, std::optional<IntVal>> FoldBase(BaseNode *node) const;
std::pair<BaseNode*, std::optional<IntVal>> FoldBinary(BinaryNode *node);
std::pair<BaseNode*, std::optional<IntVal>> FoldCompare(CompareNode *node);
std::pair<BaseNode*, std::optional<IntVal>> FoldDepositbits(DepositbitsNode *node);
std::pair<BaseNode*, std::optional<IntVal>> FoldExtractbits(ExtractbitsNode *node);
ConstvalNode *FoldSignExtend(Opcode opcode, PrimType resultType, uint8 size, const ConstvalNode &cst) const;
std::pair<BaseNode*, std::optional<IntVal>> FoldIread(IreadNode *node);

View File

@ -190,7 +190,6 @@ std::pair<BaseNode*, std::optional<IntVal>> ConstantFold::DispatchFold(BaseNode
case OP_zext:
case OP_extractbits:
return FoldExtractbits(static_cast<ExtractbitsNode*>(node));
case OP_iaddrof:
case OP_iread:
return FoldIread(static_cast<IreadNode*>(node));
case OP_add:
@ -198,8 +197,6 @@ std::pair<BaseNode*, std::optional<IntVal>> ConstantFold::DispatchFold(BaseNode
case OP_band:
case OP_bior:
case OP_bxor:
case OP_cand:
case OP_cior:
case OP_div:
case OP_lshr:
case OP_max:
@ -217,10 +214,6 @@ std::pair<BaseNode*, std::optional<IntVal>> ConstantFold::DispatchFold(BaseNode
case OP_lt:
case OP_cmp:
return FoldCompare(static_cast<CompareNode*>(node));
case OP_depositbits:
return FoldDepositbits(static_cast<DepositbitsNode*>(node));
case OP_array:
return FoldArray(static_cast<ArrayNode*>(node));
case OP_retype:
return FoldRetype(static_cast<RetypeNode*>(node));
default:
@ -399,19 +392,6 @@ MIRConst *ConstantFold::FoldIntConstBinaryMIRConst(Opcode opcode, PrimType resul
result = intVal0.Xor(intVal1, resultType);
break;
}
case OP_cand: {
result = IntVal(intVal0.GetExtValue() && intVal1.GetExtValue(), resultType);
break;
}
case OP_cior: {
result = IntVal(intVal0.GetExtValue() || intVal1.GetExtValue(), resultType);
break;
}
case OP_depositbits: {
// handled in FoldDepositbits
DEBUG_ASSERT(false, "Unexpected opcode in FoldIntConstBinary");
break;
}
default:
DEBUG_ASSERT(false, "Unknown opcode for FoldIntConstBinary");
break;
@ -523,10 +503,7 @@ ConstvalNode *ConstantFold::FoldFPConstBinary(Opcode opcode, PrimType resultType
case OP_shl:
case OP_band:
case OP_bior:
case OP_bxor:
case OP_cand:
case OP_cior:
case OP_depositbits: {
case OP_bxor: {
DEBUG_ASSERT(false, "Unexpected opcode in FoldFPConstBinary");
break;
}
@ -603,13 +580,6 @@ int64 ConstantFold::ComparisonResult(Opcode op, T *leftConst, T *rightConst) con
result = !FullyEqual(leftValue, rightValue);
break;
}
case OP_cmpl:
case OP_cmpg: {
if (std::isnan(leftValue) || std::isnan(rightValue)) {
result = (op == OP_cmpg) ? kGreater : kLess;
break;
}
}
[[clang::fallthrough]];
case OP_cmp: {
if (leftValue > rightValue) {
@ -1463,13 +1433,7 @@ std::pair<BaseNode*, std::optional<IntVal>> ConstantFold::FoldIread(IreadNode *n
}
Opcode op = node->GetOpCode();
FieldID fieldID = node->GetFieldID();
if (op == OP_iaddrof) {
AddrofNode *newAddrof = addrofNode->CloneTree(mirModule->GetCurFuncCodeMPAllocator());
CHECK_NULL_FATAL(newAddrof);
newAddrof->SetFieldID(newAddrof->GetFieldID() + fieldID);
result = newAddrof;
} else if (op == OP_iread) {
if (op == OP_iread) {
result = mirModule->CurFuncCodeMemPool()->New<AddrofNode>(OP_dread, node->GetPrimType(), addrofNode->GetStIdx(),
node->GetFieldID() + addrofNode->GetFieldID());
}
@ -1568,7 +1532,7 @@ std::pair<BaseNode*, std::optional<IntVal>> ConstantFold::FoldBinary(BinaryNode
}
result = NegateTree(r);
} else if ((op == OP_mul || op == OP_div || op == OP_rem || op == OP_ashr || op == OP_lshr || op == OP_shl ||
op == OP_band || op == OP_cand) &&
op == OP_band) &&
cst == 0) {
// 0 * X -> 0
// 0 / X -> 0
@ -1592,21 +1556,6 @@ std::pair<BaseNode*, std::optional<IntVal>> ConstantFold::FoldBinary(BinaryNode
rp.first = mirModule->CurFuncCodeMemPool()->New<TypeCvtNode>(OP_cvt, primType, PTY_i32, rp.first);
}
result = NewBinaryNode(node, OP_mul, primType, lConst, rp.first);
} else if (op == OP_cior) {
if (cst != 0) {
// 5 || X -> 1
result = mirModule->GetMIRBuilder()->CreateIntConst(1, cstTyp);
} else {
// when cst is zero
// 0 || X -> (X != 0);
result = mirModule->CurFuncCodeMemPool()->New<CompareNode>(
OP_ne, primType, r->GetPrimType(), r,
mirModule->GetMIRBuilder()->CreateIntConst(0, r->GetPrimType()));
}
} else if ((op == OP_cand) && cst != 0) {
// 5 && X -> (X != 0)
result = mirModule->CurFuncCodeMemPool()->New<CompareNode>(
OP_ne, primType, r->GetPrimType(), r, mirModule->GetMIRBuilder()->CreateIntConst(0, r->GetPrimType()));
} else if ((op == OP_bior || op == OP_bxor) && cst == 0) {
// 0 | X -> X
// 0 ^ X -> X
@ -1633,7 +1582,7 @@ std::pair<BaseNode*, std::optional<IntVal>> ConstantFold::FoldBinary(BinaryNode
} else if (op == OP_sub && (!cst.IsSigned() || !cst.IsMinValue())) {
result = l;
sum = lp.second - cst;
} else if ((op == OP_mul || op == OP_band || op == OP_cand) && cst == 0) {
} else if ((op == OP_mul || op == OP_band) && cst == 0) {
// X * 0 -> 0
// X & 0 -> 0
// X && 0 -> 0
@ -1725,17 +1674,6 @@ std::pair<BaseNode*, std::optional<IntVal>> ConstantFold::FoldBinary(BinaryNode
} else if (op == OP_bior && cst == -1) {
// X | (-1) -> -1
result = mirModule->GetMIRBuilder()->CreateIntConst(-1ULL, cstTyp);
} else if (op == OP_cior) {
if (cst == 0) {
// X || 0 -> X
sum = lp.second;
result = l;
} else if (!cst.GetSignBit()) {
// X || 5 -> 1
result = mirModule->GetMIRBuilder()->CreateIntConst(1, cstTyp);
} else {
result = NewBinaryNode(node, op, primType, PairToExpr(lPrimTypes, lp), r);
}
} else if ((op == OP_ashr || op == OP_lshr || op == OP_shl || op == OP_bior || op == OP_bxor) && cst == 0) {
// X >> 0 -> X
// X << 0 -> X
@ -1903,72 +1841,4 @@ BaseNode *ConstantFold::Fold(BaseNode *node)
return result;
}
// Constant-folds a depositbits node: when both operands are integer constants,
// computes the deposit directly (bits [bitsOffset, bitsOffset + bitsSize) of
// the result come from opnd1, all other bits from opnd0); otherwise rebuilds
// the node only if an operand was itself folded.
// Returns the (possibly new) node paired with no pending integer offset.
std::pair<BaseNode*, std::optional<IntVal>> ConstantFold::FoldDepositbits(DepositbitsNode *node)
{
    CHECK_NULL_FATAL(node);
    BaseNode *result = nullptr;
    uint8 bitsOffset = node->GetBitsOffset();
    uint8 bitsSize = node->GetBitsSize();
    std::pair<BaseNode*, std::optional<IntVal>> leftPair = DispatchFold(node->Opnd(0));
    std::pair<BaseNode*, std::optional<IntVal>> rightPair = DispatchFold(node->Opnd(1));
    ConstvalNode *leftConst = safe_cast<ConstvalNode>(leftPair.first);
    ConstvalNode *rightConst = safe_cast<ConstvalNode>(rightPair.first);
    if (leftConst != nullptr && rightConst != nullptr) {
        MIRIntConst *intConst0 = safe_cast<MIRIntConst>(leftConst->GetConstVal());
        MIRIntConst *intConst1 = safe_cast<MIRIntConst>(rightConst->GetConstVal());
        ASSERT_NOT_NULL(intConst0);
        ASSERT_NOT_NULL(intConst1);
        ConstvalNode *resultConst = mirModule->CurFuncCodeMemPool()->New<ConstvalNode>();
        resultConst->SetPrimType(node->GetPrimType());
        MIRType &type = *GlobalTables::GetTypeTable().GetPrimType(node->GetPrimType());
        // fieldEnd is the first bit above the deposited field.  Guard the
        // shift: "1 << 64" is undefined behaviour, so saturate the mask to
        // all-ones when the field extends through bit 63.
        uint32 fieldEnd = static_cast<uint32>(bitsSize) + bitsOffset;
        uint64 fieldEndMask = (fieldEnd >= 64) ? ~0ULL : ((1ULL << fieldEnd) - 1);
        // Bits below the field; assumes bitsOffset < 64 — TODO confirm the
        // IR guarantees this for depositbits.
        uint64 lowMask = (1ULL << bitsOffset) - 1;
        // Bits of opnd0 that survive: everything outside [bitsOffset, fieldEnd).
        uint64 op0Mask = ~(fieldEndMask ^ lowMask);
        uint64 op0ExtractVal = static_cast<uint64>(intConst0->GetExtValue()) & op0Mask;
        // opnd1 shifted into place, truncated to the field width.
        uint64 op1ExtractVal = (static_cast<uint64>(intConst1->GetExtValue()) << bitsOffset) & fieldEndMask;
        MIRIntConst *constValue =
            GlobalTables::GetIntConstTable().GetOrCreateIntConst((op0ExtractVal | op1ExtractVal), type);
        resultConst->SetConstVal(constValue);
        result = resultConst;
    } else {
        // At least one operand is non-constant: keep the node, but rebuild it
        // if folding changed either operand.
        BaseNode *l = PairToExpr(node->Opnd(0)->GetPrimType(), leftPair);
        BaseNode *r = PairToExpr(node->Opnd(1)->GetPrimType(), rightPair);
        if (l != node->Opnd(0) || r != node->Opnd(1)) {
            result = mirModule->CurFuncCodeMemPool()->New<DepositbitsNode>(
                Opcode(node->GetOpCode()), PrimType(node->GetPrimType()), bitsOffset, bitsSize, l, r);
        } else {
            result = node;
        }
    }
    return std::make_pair(result, std::nullopt);
}
// Constant-folds the index operands of an array node.  A cloned ArrayNode is
// built operand by operand, but it is only returned when at least one operand
// actually changed; otherwise the original node is kept.
std::pair<BaseNode*, std::optional<IntVal>> ConstantFold::FoldArray(ArrayNode *node)
{
    CHECK_NULL_FATAL(node);
    bool anyOpndChanged = false;
    ArrayNode *foldedArr = mirModule->CurFuncCodeMemPool()->New<ArrayNode>(
        *mirModule, PrimType(node->GetPrimType()), node->GetTyIdx(), node->GetBoundsCheck());
    for (size_t opndIdx = 0; opndIdx < node->GetNopndSize(); ++opndIdx) {
        BaseNode *origOpnd = node->GetNopndAt(opndIdx);
        std::pair<BaseNode*, std::optional<IntVal>> foldedPair = DispatchFold(origOpnd);
        BaseNode *newOpnd = PairToExpr(origOpnd->GetPrimType(), foldedPair);
        if (newOpnd != origOpnd) {
            anyOpndChanged = true;
        }
        foldedArr->GetNopnd().push_back(newOpnd);
        foldedArr->SetNumOpnds(foldedArr->GetNumOpnds() + 1);
    }
    BaseNode *result = anyOpndChanged ? static_cast<BaseNode*>(foldedArr) : node;
    return std::make_pair(result, std::nullopt);
}
} // namespace maple