Mirror of https://github.com/capstone-engine/llvm-capstone.git, synced 2024-11-30 00:51:02 +00:00
[llvm] Remove redundant return and continue statements (NFC)
Identified with readability-redundant-control-flow.
This commit is contained in:
parent 2efcbe24a7
commit 7dc3575ef2
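The readability-redundant-control-flow check flags the two patterns this commit cleans up: a bare return; as the last statement of a function returning void, and a continue; as the last statement of a loop body. Both are no-ops, so deleting them changes no behavior (hence the NFC tag). Below is a minimal sketch of what the check reports; the function and values are hypothetical, not taken from this patch:

// Illustrative only: names and values are made up, not from this commit.
#include <cstdio>
#include <vector>

void printNonNegative(const std::vector<int> &Values) {
  for (int V : Values) {
    if (V < 0)
      continue;   // meaningful: skips the print for this element
    std::printf("%d\n", V);
    continue;     // redundant: the loop body ends here anyway, so the check removes it
  }
  return;         // redundant: trailing return in a void function, so the check removes it
}

int main() {
  printNonNegative({3, -1, 7});
  return 0;       // not flagged: main returns a value
}

Running clang-tidy with only this check enabled, e.g. clang-tidy -checks='-*,readability-redundant-control-flow' file.cpp, should report both redundant statements; applying its fix-its yields exactly the kind of one-line deletions shown in the hunks below.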
@@ -67,7 +67,6 @@ public:
         UsedRegUnits.addReg(Reg);
       }
     }
-    return;
   }

   /// Initialize and clear the set.

@@ -1751,7 +1751,6 @@ void SwingSchedulerDAG::checkNodeSets(NodeSetType &NodeSets) {
   }
   NodeSets.clear();
   LLVM_DEBUG(dbgs() << "Clear recurrence node-sets\n");
-  return;
 }

 /// Add the nodes that do not belong to a recurrence set into groups

@@ -649,5 +649,4 @@ void SpecialTableSymbolNode::output(OutputStream &OS, OutputFlags Flags) const {
     TargetName->output(OS, Flags);
     OS << "'}";
   }
-  return;
 }

@@ -2318,7 +2318,6 @@ bool FileCheckString::CheckNot(const SourceMgr &SM, StringRef Buffer,
     PrintMatch(false, SM, Prefix, Pat->getLoc(), *Pat, 1, Buffer, Pos, MatchLen,
                Req, Diags);
     DirectiveFail = true;
-    continue;
   }

   return DirectiveFail;

@@ -1769,7 +1769,6 @@ void llvm::UpgradeInlineAsmString(std::string *AsmStr) {
       (Pos = AsmStr->find("# marker")) != std::string::npos) {
     AsmStr->replace(Pos, 1, ";");
   }
-  return;
 }

 /// Upgrade a call to an old intrinsic. All argument and return casting must be

@@ -501,7 +501,6 @@ void PrintIRInstrumentation::printBeforePass(StringRef PassID, Any IR) {

   SmallString<20> Banner = formatv("*** IR Dump Before {0} ***", PassID);
   unwrapAndPrint(dbgs(), IR, Banner, forcePrintModuleIR());
-  return;
 }

 void PrintIRInstrumentation::printAfterPass(StringRef PassID, Any IR) {

@@ -685,8 +685,6 @@ void expand_tilde(const Twine &path, SmallVectorImpl<char> &dest) {

   path.toVector(dest);
   expandTildeExpr(dest);
-
-  return;
 }

 static file_type typeForMode(mode_t Mode) {

@@ -3955,7 +3955,6 @@ void selectGatherScatterAddrMode(SDValue &BasePtr, SDValue &Index, EVT MemVT,
   Opcode = NewOp;
   BasePtr = Index->getOperand(0);
   Index = ConstOffset;
-  return;
 }

 SDValue AArch64TargetLowering::LowerMGATHER(SDValue Op,

@@ -1873,7 +1873,6 @@ void AArch64InstructionSelector::materializeLargeCMVal(
                               AArch64II::MO_G1 | AArch64II::MO_NC, 16, 0);
   DstReg = BuildMovK(DstReg, AArch64II::MO_G2 | AArch64II::MO_NC, 32, 0);
   BuildMovK(DstReg, AArch64II::MO_G3, 48, I.getOperand(0).getReg());
-  return;
 }

 bool AArch64InstructionSelector::preISelLower(MachineInstr &I) {

@@ -58,7 +58,6 @@ void AMDGPUAsmBackend::relaxInstruction(MCInst &Inst,
   Res.setOpcode(RelaxedOpcode);
   Res.addOperand(Inst.getOperand(0));
   Inst = std::move(Res);
-  return;
 }

 bool AMDGPUAsmBackend::fixupNeedsRelaxation(const MCFixup &Fixup,

@@ -3087,7 +3087,6 @@ public:
     // This is container for the immediate that we will create the constant
     // pool from
     addExpr(Inst, getConstantPoolImm());
-    return;
   }

   void addMemTBBOperands(MCInst &Inst, unsigned N) const {

@@ -853,7 +853,6 @@ void MVEGatherScatterLowering::pushOutMul(PHINode *&Phi,
   Phi->addIncoming(NewIncrement, Phi->getIncomingBlock(LoopIncrement));
   Phi->removeIncomingValue((unsigned)0);
   Phi->removeIncomingValue((unsigned)0);
-  return;
 }

 // Check whether all usages of this instruction are as offsets of

@@ -494,8 +494,6 @@ void BPFDAGToDAGISel::PreprocessTrunc(SDNode *Node,
   CurDAG->ReplaceAllUsesWith(SDValue(Node, 0), BaseV);
   I++;
   CurDAG->DeleteNode(Node);
-
-  return;
 }

 FunctionPass *llvm::createBPFISelDag(BPFTargetMachine &TM) {

@@ -13805,7 +13805,6 @@ static void fixupShuffleMaskForPermutedSToV(SmallVectorImpl<int> &ShuffV,
     if ((Idx >= 0 && Idx < LHSMaxIdx) || (Idx >= RHSMinIdx && Idx < RHSMaxIdx))
       ShuffV[i] += HalfVec;
   }
-  return;
 }

 // Replace a SCALAR_TO_VECTOR with a SCALAR_TO_VECTOR_PERMUTED except if

@@ -165,7 +165,6 @@ void RISCVInstPrinter::printAtomicMemOp(const MCInst *MI, unsigned OpNo,
   O << "(";
   printRegName(O, MO.getReg());
   O << ")";
-  return;
 }

 void RISCVInstPrinter::printVTypeI(const MCInst *MI, unsigned OpNo,

@@ -981,5 +981,4 @@ void X86FlagsCopyLoweringPass::rewriteSetCC(MachineBasicBlock &TestMBB,
   MIB.setMemRefs(SetCCI.memoperands());

   SetCCI.eraseFromParent();
-  return;
 }

@@ -95,7 +95,6 @@ struct LVIThunkInserter : ThunkInserter<LVIThunkInserter> {
     BuildMI(&MF.front(), DebugLoc(), TII->get(X86::LFENCE));
     BuildMI(&MF.front(), DebugLoc(), TII->get(X86::JMP64r)).addReg(X86::R11);
     MF.front().addLiveIn(X86::R11);
-    return;
   }
 };

@@ -69,7 +69,6 @@ void InterfaceFile::addParentUmbrella(const Target &Target_, StringRef Parent) {
   }

   ParentUmbrellas.emplace(Iter, Target_, std::string(Parent));
-  return;
 }

 void InterfaceFile::addUUID(const Target &Target_, StringRef UUID) {

@@ -83,7 +82,6 @@ void InterfaceFile::addUUID(const Target &Target_, StringRef UUID) {
   }

   UUIDs.emplace(Iter, Target_, std::string(UUID));
-  return;
 }

 void InterfaceFile::addUUID(const Target &Target, uint8_t UUID[16]) {

@@ -2011,8 +2011,6 @@ static void sinkSpillUsesAfterCoroBegin(Function &F,
   Instruction *InsertPt = CoroBegin->getNextNode();
   for (Instruction *Inst : InsertionList)
     Inst->moveBefore(InsertPt);
-
-  return;
 }

 /// For each local variable that all of its user are only used inside one of

@@ -3499,7 +3499,6 @@ struct AADereferenceableImpl : AADereferenceable {
         State.addAccessedBytes(Offset, Size);
       }
     }
-    return;
   }

   /// See followUsesInMBEC

@@ -1127,8 +1127,6 @@ void createSwitchStatement(Module &M, OutlinableGroup &OG, BasicBlock *EndBB,
     Term->moveBefore(*EndBB, EndBB->end());
     OutputBlock->eraseFromParent();
   }
-
-  return;
 }

 /// Fill the new function that will serve as the replacement function for all of

@@ -1134,7 +1134,6 @@ private:

       RFI.foreachUse(SCC, CheckGlobalization);
     }
-    return;
   }

   /// Maps the values stored in the offload arrays passed as arguments to

@@ -2214,6 +2214,4 @@ void DevirtIndex::run() {
   if (PrintSummaryDevirt)
     for (const auto &DT : DevirtTargets)
       errs() << "Devirtualized call to " << DT << "\n";
-
-  return;
 }

@@ -1809,7 +1809,6 @@ public:

     for (Value *Op : cast<Instruction>(V)->operand_values())
       collectSharedInfo(Leaf, Op, ExprsInSubprogram, Shared);
-    return;
   }

   /// Calculate the number of exclusive and shared op counts for expression

@@ -63,7 +63,6 @@ void CodeRegions::beginRegion(StringRef Description, SMLoc Loc) {

   ActiveRegions[Description] = Regions.size();
   Regions.emplace_back(std::make_unique<CodeRegion>(Description, Loc));
-  return;
 }

 void CodeRegions::endRegion(StringRef Description, SMLoc Loc) {

@@ -1907,7 +1907,6 @@ void DumpOutputStyle::dumpSectionHeaders(StringRef Label, DbgHeaderType Type) {
         P.getIndentLevel(), Header.Characteristics, 1, ""));
     ++I;
   }
-  return;
 }

 Error DumpOutputStyle::dumpSectionContribs() {

@@ -2254,7 +2254,6 @@ static void dumpHotFunctionList(const std::vector<std::string> &ColumnTitle,
     FOS.PadToColumn(ColumnOffset[3]);
     FOS << R.FuncName << "\n";
   }
-  return;
 }

 static int

@@ -108,8 +108,6 @@ void ProfiledBinary::load() {
   ProEpilogTracker.inferEpilogOffsets(RetAddrs);

   // TODO: decode other sections.
-
-  return;
 }

 bool ProfiledBinary::inlineContextEqual(uint64_t Address1,

@@ -571,8 +571,6 @@ void X86FoldTablesEmitter::updateTables(const CodeGenInstruction *RegInstr,
         getRegOperandSize(RegOpRec) == getMemOperandSize(MemOpRec))
       addEntryWithFlags(Table0, RegInstr, MemInstr, S, 0);
   }
-
-  return;
 }

 void X86FoldTablesEmitter::run(formatted_raw_ostream &OS) {