[Lex] Introduce Preprocessor::LexTokensUntilEOF()
This new method repeatedly calls Lex() until end of file is reached and optionally fills a std::vector of Tokens. Use it in Clang's unit tests to avoid a fair amount of code duplication.

Differential Revision: https://reviews.llvm.org/D158413
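For illustration, here is a minimal usage sketch of the new method (the free function lexMainFile is hypothetical; it assumes an already-configured clang::Preprocessor, as set up in the unit tests below):

    #include <vector>
    #include "clang/Lex/Preprocessor.h"

    // Enter the main file and drain the token stream, collecting every token
    // up to (and excluding) end of file. Passing nullptr (the default
    // argument) discards the tokens and keeps only the side effects, such as
    // PPCallbacks invocations.
    void lexMainFile(clang::Preprocessor &PP, std::vector<clang::Token> &Toks) {
      PP.EnterMainSourceFile();
      PP.LexTokensUntilEOF(&Toks);
    }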
commit 3116d60494
parent 7fa33773e3
@@ -1722,6 +1722,9 @@ public:
   /// Lex the next token for this preprocessor.
   void Lex(Token &Result);
 
+  /// Lex all tokens for this preprocessor until (and excluding) end of file.
+  void LexTokensUntilEOF(std::vector<Token> *Tokens = nullptr);
+
   /// Lex a token, forming a header-name token if possible.
   bool LexHeaderName(Token &Result, bool AllowMacroExpansion = true);
 
@@ -998,6 +998,17 @@ void Preprocessor::Lex(Token &Result) {
   }
 }
 
+void Preprocessor::LexTokensUntilEOF(std::vector<Token> *Tokens) {
+  while (1) {
+    Token Tok;
+    Lex(Tok);
+    if (Tok.isOneOf(tok::unknown, tok::eof, tok::eod))
+      break;
+    if (Tokens != nullptr)
+      Tokens->push_back(Tok);
+  }
+}
+
 /// Lex a header-name token (including one formed from header-name-tokens if
 /// \p AllowConcatenation is \c true).
 ///
@@ -73,12 +73,7 @@ protected:
     // Lex source text.
     PP.EnterMainSourceFile();
 
-    while (true) {
-      Token Tok;
-      PP.Lex(Tok);
-      if (Tok.is(tok::eof))
-        break;
-    }
+    PP.LexTokensUntilEOF();
 
     // Callbacks have been executed at this point.
     return Ctx;
@@ -138,13 +138,7 @@ TEST_F(SourceManagerTest, isBeforeInTranslationUnit) {
   PP.EnterMainSourceFile();
 
   std::vector<Token> toks;
-  while (1) {
-    Token tok;
-    PP.Lex(tok);
-    if (tok.is(tok::eof))
-      break;
-    toks.push_back(tok);
-  }
+  PP.LexTokensUntilEOF(&toks);
 
   // Make sure we got the tokens that we expected.
   ASSERT_EQ(3U, toks.size());
@@ -195,13 +189,7 @@ TEST_F(SourceManagerTest, isBeforeInTranslationUnitWithTokenSplit) {
   llvm::SmallString<8> Scratch;
 
   std::vector<Token> toks;
-  while (1) {
-    Token tok;
-    PP.Lex(tok);
-    if (tok.is(tok::eof))
-      break;
-    toks.push_back(tok);
-  }
+  PP.LexTokensUntilEOF(&toks);
 
   // Make sure we got the tokens that we expected.
   ASSERT_EQ(4U, toks.size()) << "a >> b c";
@@ -452,13 +440,7 @@ TEST_F(SourceManagerTest, getMacroArgExpandedLocation) {
   PP.EnterMainSourceFile();
 
   std::vector<Token> toks;
-  while (1) {
-    Token tok;
-    PP.Lex(tok);
-    if (tok.is(tok::eof))
-      break;
-    toks.push_back(tok);
-  }
+  PP.LexTokensUntilEOF(&toks);
 
   // Make sure we got the tokens that we expected.
   ASSERT_EQ(4U, toks.size());
@@ -574,13 +556,7 @@ TEST_F(SourceManagerTest, isBeforeInTranslationUnitWithMacroInInclude) {
   PP.EnterMainSourceFile();
 
   std::vector<Token> toks;
-  while (1) {
-    Token tok;
-    PP.Lex(tok);
-    if (tok.is(tok::eof))
-      break;
-    toks.push_back(tok);
-  }
+  PP.LexTokensUntilEOF(&toks);
 
   // Make sure we got the tokens that we expected.
   ASSERT_EQ(0U, toks.size());
@@ -74,13 +74,7 @@ protected:
     PP = CreatePP(Source, ModLoader);
 
    std::vector<Token> toks;
-    while (1) {
-      Token tok;
-      PP->Lex(tok);
-      if (tok.is(tok::eof))
-        break;
-      toks.push_back(tok);
-    }
+    PP->LexTokensUntilEOF(&toks);
 
     return toks;
   }
@@ -628,12 +622,7 @@ TEST_F(LexerTest, FindNextToken) {
 TEST_F(LexerTest, CreatedFIDCountForPredefinedBuffer) {
   TrivialModuleLoader ModLoader;
   auto PP = CreatePP("", ModLoader);
-  while (1) {
-    Token tok;
-    PP->Lex(tok);
-    if (tok.is(tok::eof))
-      break;
-  }
+  PP->LexTokensUntilEOF();
   EXPECT_EQ(SourceMgr.getNumCreatedFIDsForFileID(PP->getPredefinesFileID()),
             1U);
 }
@@ -90,12 +90,7 @@ protected:
     PP.addPPCallbacks(std::move(C));
     PP.EnterMainSourceFile();
 
-    while (1) {
-      Token tok;
-      PP.Lex(tok);
-      if (tok.is(tok::eof))
-        break;
-    }
+    PP.LexTokensUntilEOF();
   }
 
   FileSystemOptions FileMgrOpts;
@@ -229,13 +229,7 @@ protected:
 
     // Lex source text.
     PP.EnterMainSourceFile();
-
-    while (true) {
-      Token Tok;
-      PP.Lex(Tok);
-      if (Tok.is(tok::eof))
-        break;
-    }
+    PP.LexTokensUntilEOF();
 
     // Callbacks have been executed at this point -- return filename range.
     return Callbacks;
@@ -259,13 +253,7 @@ protected:
 
     // Lex source text.
     PP.EnterMainSourceFile();
-
-    while (true) {
-      Token Tok;
-      PP.Lex(Tok);
-      if (Tok.is(tok::eof))
-        break;
-    }
+    PP.LexTokensUntilEOF();
 
     return Callbacks->Results;
   }
@@ -290,12 +278,7 @@ protected:
 
     // Lex source text.
     PP.EnterMainSourceFile();
-    while (true) {
-      Token Tok;
-      PP.Lex(Tok);
-      if (Tok.is(tok::eof))
-        break;
-    }
+    PP.LexTokensUntilEOF();
 
     return Callbacks->Marks;
   }
@@ -334,12 +317,7 @@ protected:
 
     // Lex source text.
    PP.EnterMainSourceFile();
-    while (true) {
-      Token Tok;
-      PP.Lex(Tok);
-      if (Tok.is(tok::eof))
-        break;
-    }
+    PP.LexTokensUntilEOF();
 
     PragmaOpenCLExtensionCallbacks::CallbackParameters RetVal = {
         Callbacks->Name,
@@ -477,12 +455,7 @@ TEST_F(PPCallbacksTest, FileNotFoundSkipped) {
 
   // Lex source text.
   PP.EnterMainSourceFile();
-  while (true) {
-    Token Tok;
-    PP.Lex(Tok);
-    if (Tok.is(tok::eof))
-      break;
-  }
+  PP.LexTokensUntilEOF();
 
   ASSERT_EQ(1u, Callbacks->NumCalls);
   ASSERT_EQ(0u, DiagConsumer->getNumErrors());
@@ -87,13 +87,7 @@ TEST_F(PPConditionalDirectiveRecordTest, PPRecAPI) {
   PP.EnterMainSourceFile();
 
   std::vector<Token> toks;
-  while (1) {
-    Token tok;
-    PP.Lex(tok);
-    if (tok.is(tok::eof))
-      break;
-    toks.push_back(tok);
-  }
+  PP.LexTokensUntilEOF(&toks);
 
   // Make sure we got the tokens that we expected.
   ASSERT_EQ(10U, toks.size());
@@ -133,12 +133,7 @@ TEST_F(PPDependencyDirectivesTest, MacroGuard) {
   SmallVector<StringRef> IncludedFiles;
   PP.addPPCallbacks(std::make_unique<IncludeCollector>(PP, IncludedFiles));
   PP.EnterMainSourceFile();
-  while (true) {
-    Token tok;
-    PP.Lex(tok);
-    if (tok.is(tok::eof))
-      break;
-  }
+  PP.LexTokensUntilEOF();
 
   SmallVector<std::string> IncludedFilesSlash;
   for (StringRef IncludedFile : IncludedFiles)
@@ -75,12 +75,7 @@ TEST_F(PPMemoryAllocationsTest, PPMacroDefinesAllocations) {
   PP.Initialize(*Target);
   PP.EnterMainSourceFile();
 
-  while (1) {
-    Token tok;
-    PP.Lex(tok);
-    if (tok.is(tok::eof))
-      break;
-  }
+  PP.LexTokensUntilEOF();
 
   size_t NumAllocated = PP.getPreprocessorAllocator().getBytesAllocated();
   float BytesPerDefine = float(NumAllocated) / float(NumMacros);