
This is a prep commit before fixing MachineBasicBlock::reverse_iterator invalidation semantics, ala r281167 for ilist::reverse_iterator. This changes MachineBasicBlock::Instructions to track which node is the sentinel regardless of LLVM_ENABLE_ABI_BREAKING_CHECKS.

There's almost no functionality change (aside from ABI). However, in the rare configuration:

    #if !defined(NDEBUG) && !defined(LLVM_ENABLE_ABI_BREAKING_CHECKS)

the isKnownSentinel() assertions in ilist_iterator<>::operator* suddenly have teeth for MachineInstr. If these assertions start firing for your out-of-tree backend, have a look at the suggestions in the commit message for r279314, and at some of the commits leading up to it that avoid dereferencing the end() iterator.

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@281168 91177308-0d34-0410-b5e6-96231b3b80d8
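For context, the assertion typically fires when out-of-tree code dereferences end() (or calls &*I on an iterator that may be past-the-end) to obtain a bare MachineInstr pointer. The following is only a minimal sketch of the kind of rewrite those earlier commits apply, not a quote of them; the basic block MBB and the visit() helper are hypothetical names used purely for illustration:

    // Before (hypothetical out-of-tree pattern): &*MBB.end() dereferences the
    // sentinel node, which now trips isKnownSentinel() in assertion-enabled
    // builds.
    MachineInstr *End = &*MBB.end();
    for (MachineInstr *MI = &MBB.front(); MI != End; MI = MI->getNextNode())
      visit(*MI);

    // After: stay with iterators and compare against end() directly, so the
    // sentinel is never dereferenced.
    for (MachineBasicBlock::iterator I = MBB.begin(), E = MBB.end(); I != E; ++I)
      visit(*I);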
//===- MachineInstrBundleIteratorTest.cpp ---------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//

#include "llvm/ADT/ilist_node.h"
#include "llvm/CodeGen/MachineInstrBundleIterator.h"
#include "gtest/gtest.h"

using namespace llvm;

namespace {

struct MyBundledInstr
    : public ilist_node<MyBundledInstr, ilist_sentinel_tracking<true>> {
  bool isBundledWithPred() const { return true; }
  bool isBundledWithSucc() const { return true; }
};
typedef MachineInstrBundleIterator<MyBundledInstr> bundled_iterator;
typedef MachineInstrBundleIterator<const MyBundledInstr> const_bundled_iterator;

#ifdef GTEST_HAS_DEATH_TEST
#ifndef NDEBUG
TEST(MachineInstrBundleIteratorTest, CheckForBundles) {
  MyBundledInstr MBI;

  // Confirm that MBI is always considered bundled.
  EXPECT_TRUE(MBI.isBundledWithPred());
  EXPECT_TRUE(MBI.isBundledWithSucc());

  // Confirm that iterators check in their constructor for bundled iterators.
  EXPECT_DEATH((void)static_cast<bundled_iterator>(MBI),
               "not legal to initialize");
  EXPECT_DEATH((void)static_cast<bundled_iterator>(&MBI),
               "not legal to initialize");
  EXPECT_DEATH((void)static_cast<const_bundled_iterator>(MBI),
               "not legal to initialize");
  EXPECT_DEATH((void)static_cast<const_bundled_iterator>(&MBI),
               "not legal to initialize");
}
#endif
#endif

TEST(MachineInstrBundleIteratorTest, CompareToBundledMI) {
  MyBundledInstr MBI;
  const MyBundledInstr &CMBI = MBI;
  bundled_iterator I;
  const_bundled_iterator CI;

  // Confirm that MBI is always considered bundled.
  EXPECT_TRUE(MBI.isBundledWithPred());
  EXPECT_TRUE(MBI.isBundledWithSucc());

  // These invocations will crash when !NDEBUG if a conversion is taking place.
  // These checks confirm that comparison operators don't use any conversion
  // operators.
  ASSERT_FALSE(MBI == I);
  ASSERT_FALSE(&MBI == I);
  ASSERT_FALSE(CMBI == I);
  ASSERT_FALSE(&CMBI == I);
  ASSERT_FALSE(I == MBI);
  ASSERT_FALSE(I == &MBI);
  ASSERT_FALSE(I == CMBI);
  ASSERT_FALSE(I == &CMBI);
  ASSERT_FALSE(MBI == CI);
  ASSERT_FALSE(&MBI == CI);
  ASSERT_FALSE(CMBI == CI);
  ASSERT_FALSE(&CMBI == CI);
  ASSERT_FALSE(CI == MBI);
  ASSERT_FALSE(CI == &MBI);
  ASSERT_FALSE(CI == CMBI);
  ASSERT_FALSE(CI == &CMBI);
  ASSERT_TRUE(MBI != I);
  ASSERT_TRUE(&MBI != I);
  ASSERT_TRUE(CMBI != I);
  ASSERT_TRUE(&CMBI != I);
  ASSERT_TRUE(I != MBI);
  ASSERT_TRUE(I != &MBI);
  ASSERT_TRUE(I != CMBI);
  ASSERT_TRUE(I != &CMBI);
  ASSERT_TRUE(MBI != CI);
  ASSERT_TRUE(&MBI != CI);
  ASSERT_TRUE(CMBI != CI);
  ASSERT_TRUE(&CMBI != CI);
  ASSERT_TRUE(CI != MBI);
  ASSERT_TRUE(CI != &MBI);
  ASSERT_TRUE(CI != CMBI);
  ASSERT_TRUE(CI != &CMBI);
}

} // end namespace