//===--- PPLexerChange.cpp - Handle changing lexers in the preprocessor ---===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements pieces of the Preprocessor interface that manage the
// current lexer stack.
//
//===----------------------------------------------------------------------===//

#include "clang/Lex/Preprocessor.h"
#include "clang/Basic/FileManager.h"
#include "clang/Basic/SourceManager.h"
#include "clang/Lex/HeaderSearch.h"
#include "clang/Lex/LexDiagnostic.h"
#include "clang/Lex/MacroInfo.h"
#include "llvm/ADT/StringSwitch.h"
#include "llvm/Support/FileSystem.h"
#include "llvm/Support/MemoryBuffer.h"
#include "llvm/Support/Path.h"
using namespace clang;

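// Defined out-of-line so that this file anchors PPCallbacks' vtable.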
PPCallbacks::~PPCallbacks() {}

//===----------------------------------------------------------------------===//
// Miscellaneous Methods.
//===----------------------------------------------------------------------===//

/// isInPrimaryFile - Return true if we're in the top-level file, not in a
/// \#include. This looks through macro expansions and active _Pragma lexers.
bool Preprocessor::isInPrimaryFile() const {
  if (IsFileLexer())
    return IncludeMacroStack.empty();

  // If there are any stacked lexers, we're in a #include.
  assert(IsFileLexer(IncludeMacroStack[0]) &&
         "Top level include stack isn't our primary lexer?");
  for (unsigned i = 1, e = IncludeMacroStack.size(); i != e; ++i)
    if (IsFileLexer(IncludeMacroStack[i]))
      return false;
  return true;
}

/// getCurrentFileLexer - Return the current file lexer being lexed from.
/// Note that this ignores any potentially active macro expansions and
/// _Pragma expansions going on at the time.
PreprocessorLexer *Preprocessor::getCurrentFileLexer() const {
  if (IsFileLexer())
    return CurPPLexer;

  // Look for a stacked lexer.
  for (unsigned i = IncludeMacroStack.size(); i != 0; --i) {
    const IncludeStackInfo &ISI = IncludeMacroStack[i - 1];
    if (IsFileLexer(ISI))
      return ISI.ThePPLexer;
  }
  return nullptr;
}


//===----------------------------------------------------------------------===//
// Methods for Entering and Callbacks for leaving various contexts
//===----------------------------------------------------------------------===//

/// EnterSourceFile - Add a source file to the top of the include stack and
/// start lexing tokens from it instead of the current buffer.
bool Preprocessor::EnterSourceFile(FileID FID, const DirectoryLookup *CurDir,
                                   SourceLocation Loc) {
  assert(!CurTokenLexer && "Cannot #include a file inside a macro!");
  ++NumEnteredSourceFiles;

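  // Track the deepest #include nesting we have seen, for statistics reporting.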
  if (MaxIncludeStackDepth < IncludeMacroStack.size())
    MaxIncludeStackDepth = IncludeMacroStack.size();

  if (PTH) {
    if (PTHLexer *PL = PTH->CreateLexer(FID)) {
      EnterSourceFileWithPTH(PL, CurDir);
      return false;
    }
  }

  // Get the MemoryBuffer for this FID; if that fails, we fail.
  bool Invalid = false;
  const llvm::MemoryBuffer *InputFile =
      getSourceManager().getBuffer(FID, Loc, &Invalid);
  if (Invalid) {
    SourceLocation FileStart = SourceMgr.getLocForStartOfFile(FID);
    Diag(Loc, diag::err_pp_error_opening_file)
        << std::string(SourceMgr.getBufferName(FileStart)) << "";
    return true;
  }

  if (isCodeCompletionEnabled() &&
      SourceMgr.getFileEntryForID(FID) == CodeCompletionFile) {
    CodeCompletionFileLoc = SourceMgr.getLocForStartOfFile(FID);
    CodeCompletionLoc =
        CodeCompletionFileLoc.getLocWithOffset(CodeCompletionOffset);
  }

  EnterSourceFileWithLexer(new Lexer(FID, InputFile, *this), CurDir);
  return false;
}

/// EnterSourceFileWithLexer - Add a source file to the top of the include stack
/// and start lexing tokens from it instead of the current buffer.
void Preprocessor::EnterSourceFileWithLexer(Lexer *TheLexer,
                                            const DirectoryLookup *CurDir) {

  // Add the current lexer to the include stack.
  if (CurPPLexer || CurTokenLexer)
    PushIncludeMacroStack();

  CurLexer.reset(TheLexer);
  CurPPLexer = TheLexer;
  CurDirLookup = CurDir;
  CurSubmodule = nullptr;
  if (CurLexerKind != CLK_LexAfterModuleImport)
    CurLexerKind = CLK_Lexer;

  // Notify the client, if desired, that we are in a new source file.
  if (Callbacks && !CurLexer->Is_PragmaLexer) {
    SrcMgr::CharacteristicKind FileType =
        SourceMgr.getFileCharacteristic(CurLexer->getFileLoc());

    Callbacks->FileChanged(CurLexer->getFileLoc(),
                           PPCallbacks::EnterFile, FileType);
  }
}

/// EnterSourceFileWithPTH - Add a source file to the top of the include stack
/// and start getting tokens from it using the PTH cache.
void Preprocessor::EnterSourceFileWithPTH(PTHLexer *PL,
                                          const DirectoryLookup *CurDir) {

  if (CurPPLexer || CurTokenLexer)
    PushIncludeMacroStack();

  CurDirLookup = CurDir;
  CurPTHLexer.reset(PL);
  CurPPLexer = CurPTHLexer.get();
  CurSubmodule = nullptr;
  if (CurLexerKind != CLK_LexAfterModuleImport)
    CurLexerKind = CLK_PTHLexer;

  // Notify the client, if desired, that we are in a new source file.
  if (Callbacks) {
    FileID FID = CurPPLexer->getFileID();
    SourceLocation EnterLoc = SourceMgr.getLocForStartOfFile(FID);
    SrcMgr::CharacteristicKind FileType =
        SourceMgr.getFileCharacteristic(EnterLoc);
    Callbacks->FileChanged(EnterLoc, PPCallbacks::EnterFile, FileType);
  }
}

/// EnterMacro - Add a Macro to the top of the include stack and start lexing
/// tokens from it instead of the current buffer.
void Preprocessor::EnterMacro(Token &Tok, SourceLocation ILEnd,
                              MacroInfo *Macro, MacroArgs *Args) {
  std::unique_ptr<TokenLexer> TokLexer;
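  // Reuse a TokenLexer from the cache when possible, rather than allocating a
  // new one for every macro expansion.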
  if (NumCachedTokenLexers == 0) {
    TokLexer = llvm::make_unique<TokenLexer>(Tok, ILEnd, Macro, Args, *this);
  } else {
    TokLexer = std::move(TokenLexerCache[--NumCachedTokenLexers]);
    TokLexer->Init(Tok, ILEnd, Macro, Args);
  }

  PushIncludeMacroStack();
  CurDirLookup = nullptr;
  CurTokenLexer = std::move(TokLexer);
  if (CurLexerKind != CLK_LexAfterModuleImport)
    CurLexerKind = CLK_TokenLexer;
}

/// EnterTokenStream - Add a "macro" context to the top of the include stack,
/// which will cause the lexer to start returning the specified tokens.
///
/// If DisableMacroExpansion is true, tokens lexed from the token stream will
/// not be subject to further macro expansion. Otherwise, these tokens will
/// be re-macro-expanded when/if expansion is enabled.
///
/// If OwnsTokens is false, this method assumes that the specified stream of
/// tokens has a permanent owner somewhere, so they do not need to be copied.
/// If it is true, it assumes the array of tokens is allocated with new[] and
/// must be freed.
///
void Preprocessor::EnterTokenStream(const Token *Toks, unsigned NumToks,
                                    bool DisableMacroExpansion,
                                    bool OwnsTokens) {
  if (CurLexerKind == CLK_CachingLexer) {
    if (CachedLexPos < CachedTokens.size()) {
      // We're entering tokens into the middle of our cached token stream. We
      // can't represent that, so just insert the tokens into the buffer.
      CachedTokens.insert(CachedTokens.begin() + CachedLexPos,
                          Toks, Toks + NumToks);
      if (OwnsTokens)
        delete [] Toks;
      return;
    }

    // New tokens are at the end of the cached token sequence; insert the
    // token stream underneath the caching lexer.
    ExitCachingLexMode();
    EnterTokenStream(Toks, NumToks, DisableMacroExpansion, OwnsTokens);
    EnterCachingLexMode();
    return;
  }

  // Create a macro expander to expand from the specified token stream.
  std::unique_ptr<TokenLexer> TokLexer;
  if (NumCachedTokenLexers == 0) {
    TokLexer = llvm::make_unique<TokenLexer>(
        Toks, NumToks, DisableMacroExpansion, OwnsTokens, *this);
  } else {
    TokLexer = std::move(TokenLexerCache[--NumCachedTokenLexers]);
    TokLexer->Init(Toks, NumToks, DisableMacroExpansion, OwnsTokens);
  }

  // Save our current state.
  PushIncludeMacroStack();
  CurDirLookup = nullptr;
  CurTokenLexer = std::move(TokLexer);
  if (CurLexerKind != CLK_LexAfterModuleImport)
    CurLexerKind = CLK_TokenLexer;
}

/// \brief Compute the relative path that names the given file relative to
/// the given directory.
static void computeRelativePath(FileManager &FM, const DirectoryEntry *Dir,
                                const FileEntry *File,
                                SmallString<128> &Result) {
  Result.clear();

  StringRef FilePath = File->getDir()->getName();
  StringRef Path = FilePath;
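  // Walk up the file's directory hierarchy; once we reach Dir, the suffix of
  // FilePath beyond Dir (plus the filename) is the relative path we want.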
  while (!Path.empty()) {
    if (const DirectoryEntry *CurDir = FM.getDirectory(Path)) {
      if (CurDir == Dir) {
        Result = FilePath.substr(Path.size());
        llvm::sys::path::append(Result,
                                llvm::sys::path::filename(File->getName()));
        return;
      }
    }

    Path = llvm::sys::path::parent_path(Path);
  }

  Result = File->getName();
}

void Preprocessor::PropagateLineStartLeadingSpaceInfo(Token &Result) {
  if (CurTokenLexer) {
    CurTokenLexer->PropagateLineStartLeadingSpaceInfo(Result);
    return;
  }
  if (CurLexer) {
    CurLexer->PropagateLineStartLeadingSpaceInfo(Result);
    return;
  }
  // FIXME: Handle other kinds of lexers? It generally shouldn't matter,
  // but it might if they're empty?
}

/// \brief Determine the location to use as the end of the buffer for a lexer.
///
/// If the file ends with a newline, form the EOF token on the newline itself,
/// rather than "on the line following it", which doesn't exist. This makes
/// diagnostics relating to the end of file include the last line that the
/// user actually typed, which is goodness.
const char *Preprocessor::getCurLexerEndPos() {
  const char *EndPos = CurLexer->BufferEnd;
  if (EndPos != CurLexer->BufferStart &&
      (EndPos[-1] == '\n' || EndPos[-1] == '\r')) {
    --EndPos;

    // Handle \n\r and \r\n:
    if (EndPos != CurLexer->BufferStart &&
        (EndPos[-1] == '\n' || EndPos[-1] == '\r') &&
        EndPos[-1] != EndPos[0])
      --EndPos;
  }

  return EndPos;
}


/// HandleEndOfFile - This callback is invoked when the lexer hits the end of
/// the current file. This either returns the EOF token or pops a level off
/// the include stack and keeps going.
bool Preprocessor::HandleEndOfFile(Token &Result, bool isEndOfMacro) {
  assert(!CurTokenLexer &&
         "Ending a file when currently in a macro!");

  // See if this file had a controlling macro.
  if (CurPPLexer) {  // Not ending a macro, ignore it.
    if (const IdentifierInfo *ControllingMacro =
            CurPPLexer->MIOpt.GetControllingMacroAtEndOfFile()) {
      // Okay, this has a controlling macro, remember in HeaderFileInfo.
      if (const FileEntry *FE =
              SourceMgr.getFileEntryForID(CurPPLexer->getFileID())) {
        HeaderInfo.SetFileControllingMacro(FE, ControllingMacro);
        if (MacroInfo *MI =
                getMacroInfo(const_cast<IdentifierInfo*>(ControllingMacro))) {
          MI->UsedForHeaderGuard = true;
        }
        if (const IdentifierInfo *DefinedMacro =
                CurPPLexer->MIOpt.GetDefinedMacro()) {
          if (!ControllingMacro->hasMacroDefinition() &&
              DefinedMacro != ControllingMacro &&
              HeaderInfo.FirstTimeLexingFile(FE)) {

            // If the edit distance between the two macros is more than 50%,
            // DefinedMacro may not be a header guard, or it may be the header
            // guard of another header file, and therefore may be defining
            // something completely different. This can be observed in the
            // wild when handling feature macros or header guards in different
            // files.

            const StringRef ControllingMacroName = ControllingMacro->getName();
            const StringRef DefinedMacroName = DefinedMacro->getName();
            const size_t MaxHalfLength = std::max(ControllingMacroName.size(),
                                                  DefinedMacroName.size()) / 2;
            const unsigned ED = ControllingMacroName.edit_distance(
                DefinedMacroName, true, MaxHalfLength);
            if (ED <= MaxHalfLength) {
              // Emit a warning for a bad header guard.
              Diag(CurPPLexer->MIOpt.GetMacroLocation(),
                   diag::warn_header_guard)
                  << CurPPLexer->MIOpt.GetMacroLocation() << ControllingMacro;
              Diag(CurPPLexer->MIOpt.GetDefinedLocation(),
                   diag::note_header_guard)
                  << CurPPLexer->MIOpt.GetDefinedLocation() << DefinedMacro
                  << ControllingMacro
                  << FixItHint::CreateReplacement(
                         CurPPLexer->MIOpt.GetDefinedLocation(),
                         ControllingMacro->getName());
            }
          }
        }
      }
    }
  }

  // Complain about reaching a true EOF within arc_cf_code_audited.
  // We don't want to complain about reaching the end of a macro
  // instantiation or a _Pragma.
  if (PragmaARCCFCodeAuditedLoc.isValid() &&
      !isEndOfMacro && !(CurLexer && CurLexer->Is_PragmaLexer)) {
    Diag(PragmaARCCFCodeAuditedLoc, diag::err_pp_eof_in_arc_cf_code_audited);

    // Recover by leaving immediately.
    PragmaARCCFCodeAuditedLoc = SourceLocation();
  }

  // If this is a #include'd file, pop it off the include stack and continue
  // lexing the #includer file.
  if (!IncludeMacroStack.empty()) {

    // If we lexed the code-completion file, act as if we reached EOF.
    if (isCodeCompletionEnabled() && CurPPLexer &&
        SourceMgr.getLocForStartOfFile(CurPPLexer->getFileID()) ==
            CodeCompletionFileLoc) {
      if (CurLexer) {
        Result.startToken();
        CurLexer->FormTokenWithChars(Result, CurLexer->BufferEnd, tok::eof);
        CurLexer.reset();
      } else {
        assert(CurPTHLexer && "Got EOF but no current lexer set!");
        CurPTHLexer->getEOF(Result);
        CurPTHLexer.reset();
      }

      CurPPLexer = nullptr;
      return true;
    }

    if (!isEndOfMacro && CurPPLexer &&
        SourceMgr.getIncludeLoc(CurPPLexer->getFileID()).isValid()) {
      // Notify SourceManager to record the number of FileIDs that were created
      // during lexing of the #include'd file.
      unsigned NumFIDs =
          SourceMgr.local_sloc_entry_size() -
          CurPPLexer->getInitialNumSLocEntries() + 1 /*#include'd file*/;
      SourceMgr.setNumCreatedFIDsForFileID(CurPPLexer->getFileID(), NumFIDs);
    }

    FileID ExitedFID;
    if (Callbacks && !isEndOfMacro && CurPPLexer)
      ExitedFID = CurPPLexer->getFileID();

    bool LeavingSubmodule = CurSubmodule && CurLexer;
    if (LeavingSubmodule) {
      // Notify the parser that we've left the module.
      const char *EndPos = getCurLexerEndPos();
      Result.startToken();
      CurLexer->BufferPtr = EndPos;
      CurLexer->FormTokenWithChars(Result, EndPos, tok::annot_module_end);
      Result.setAnnotationEndLoc(Result.getLocation());
      Result.setAnnotationValue(CurSubmodule);
    }

    // We're done with the #included file.
    RemoveTopOfLexerStack();

    // Propagate info about start-of-line/leading white-space/etc.
    PropagateLineStartLeadingSpaceInfo(Result);

    // Notify the client, if desired, that we are in a new source file.
    if (Callbacks && !isEndOfMacro && CurPPLexer) {
      SrcMgr::CharacteristicKind FileType =
          SourceMgr.getFileCharacteristic(CurPPLexer->getSourceLocation());
      Callbacks->FileChanged(CurPPLexer->getSourceLocation(),
                             PPCallbacks::ExitFile, FileType, ExitedFID);
    }

    // The client should lex another token unless we generated an end-of-module
    // (EOM) annotation token.
    return LeavingSubmodule;
  }

  // If this is the end of the main file, form an EOF token.
  if (CurLexer) {
    const char *EndPos = getCurLexerEndPos();
    Result.startToken();
    CurLexer->BufferPtr = EndPos;
    CurLexer->FormTokenWithChars(Result, EndPos, tok::eof);

    if (isCodeCompletionEnabled()) {
      // Inserting the code-completion point increases the source buffer by 1,
      // but the main FileID was created before inserting the point.
      // Compensate by reducing the EOF location by 1, otherwise the location
      // will point to the next FileID.
      // FIXME: This is hacky, the code-completion point should probably be
      // inserted before the main FileID is created.
      if (CurLexer->getFileLoc() == CodeCompletionFileLoc)
        Result.setLocation(Result.getLocation().getLocWithOffset(-1));
    }

    if (!isIncrementalProcessingEnabled())
      // We're done with lexing.
      CurLexer.reset();
  } else {
    assert(CurPTHLexer && "Got EOF but no current lexer set!");
    CurPTHLexer->getEOF(Result);
    CurPTHLexer.reset();
  }

  if (!isIncrementalProcessingEnabled())
    CurPPLexer = nullptr;

  if (TUKind == TU_Complete) {
    // This is the end of the top-level file. 'WarnUnusedMacroLocs' has
    // collected all of the macro locations that we need to warn about because
    // the macros were never used.
    for (WarnUnusedMacroLocsTy::iterator
             I = WarnUnusedMacroLocs.begin(), E = WarnUnusedMacroLocs.end();
         I != E; ++I)
      Diag(*I, diag::pp_macro_not_used);
  }

  // If we are building a module that has an umbrella header, make sure that
  // each of the headers within the directory covered by the umbrella header
  // was actually included by the umbrella header.
  if (Module *Mod = getCurrentModule()) {
    if (Mod->getUmbrellaHeader()) {
      SourceLocation StartLoc
        = SourceMgr.getLocForStartOfFile(SourceMgr.getMainFileID());

      if (!getDiagnostics().isIgnored(diag::warn_uncovered_module_header,
                                      StartLoc)) {
        ModuleMap &ModMap = getHeaderSearchInfo().getModuleMap();
        const DirectoryEntry *Dir = Mod->getUmbrellaDir();
        vfs::FileSystem &FS = *FileMgr.getVirtualFileSystem();
        std::error_code EC;
        for (vfs::recursive_directory_iterator Entry(FS, Dir->getName(), EC),
                                               End;
             Entry != End && !EC; Entry.increment(EC)) {
          using llvm::StringSwitch;

          // Check whether this entry has an extension typically associated
          // with headers.
          if (!StringSwitch<bool>(llvm::sys::path::extension(Entry->getName()))
                  .Cases(".h", ".H", ".hh", ".hpp", true)
                  .Default(false))
            continue;

          if (const FileEntry *Header =
                  getFileManager().getFile(Entry->getName()))
            if (!getSourceManager().hasFileInfo(Header)) {
              if (!ModMap.isHeaderInUnavailableModule(Header)) {
                // Find the relative path that would access this header.
                SmallString<128> RelativePath;
                computeRelativePath(FileMgr, Dir, Header, RelativePath);
                Diag(StartLoc, diag::warn_uncovered_module_header)
                    << Mod->getFullModuleName() << RelativePath;
              }
            }
        }
      }
    }
  }

  return true;
}

/// HandleEndOfTokenLexer - This callback is invoked when the current TokenLexer
/// hits the end of its token stream.
bool Preprocessor::HandleEndOfTokenLexer(Token &Result) {
  assert(CurTokenLexer && !CurPPLexer &&
         "Ending a macro when currently in a #include file!");

  if (!MacroExpandingLexersStack.empty() &&
      MacroExpandingLexersStack.back().first == CurTokenLexer.get())
    removeCachedMacroExpandedTokensOfLastLexer();

  // Delete or cache the now-dead macro expander.
  if (NumCachedTokenLexers == TokenLexerCacheSize)
    CurTokenLexer.reset();
  else
    TokenLexerCache[NumCachedTokenLexers++] = std::move(CurTokenLexer);

  // Handle this like a #include file being popped off the stack.
  return HandleEndOfFile(Result, true);
}

/// RemoveTopOfLexerStack - Pop the current lexer/macro expansion off the top
/// of the lexer stack. This should only be used in situations where the
/// current state of the top-of-stack lexer is unknown.
void Preprocessor::RemoveTopOfLexerStack() {
  assert(!IncludeMacroStack.empty() && "Ran out of stack entries to load");

  if (CurTokenLexer) {
    // Delete or cache the now-dead macro expander.
    if (NumCachedTokenLexers == TokenLexerCacheSize)
      CurTokenLexer.reset();
    else
      TokenLexerCache[NumCachedTokenLexers++] = std::move(CurTokenLexer);
  }

  PopIncludeMacroStack();
}

/// HandleMicrosoftCommentPaste - When the macro expander pastes together a
/// comment (/##/) in Microsoft mode, this method handles updating the current
/// state, returning the token on the next source line.
void Preprocessor::HandleMicrosoftCommentPaste(Token &Tok) {
  assert(CurTokenLexer && !CurPPLexer &&
         "Pasted comment can only be formed from macro");

  // We handle this by scanning for the closest real lexer, switching it to
  // raw mode and preprocessor mode. This will cause it to return \n as an
  // explicit EOD token.
  PreprocessorLexer *FoundLexer = nullptr;
  bool LexerWasInPPMode = false;
  for (unsigned i = 0, e = IncludeMacroStack.size(); i != e; ++i) {
    IncludeStackInfo &ISI = *(IncludeMacroStack.end() - i - 1);
    if (ISI.ThePPLexer == nullptr) continue;  // Scan for a real lexer.

    // Once we find a real lexer, mark it as raw mode (disabling macro
    // expansions) and preprocessor mode (return EOD). We know that the lexer
    // was *not* in raw mode before, because the macro that the comment came
    // from was expanded. However, it could have already been in preprocessor
    // mode (#if COMMENT) in which case we have to return it to that mode and
    // return EOD.
    FoundLexer = ISI.ThePPLexer;
    FoundLexer->LexingRawMode = true;
    LexerWasInPPMode = FoundLexer->ParsingPreprocessorDirective;
    FoundLexer->ParsingPreprocessorDirective = true;
    break;
  }

  // Okay, we either found and switched over the lexer, or we didn't find a
  // lexer. In either case, finish off the macro the comment came from, getting
  // the next token.
  if (!HandleEndOfTokenLexer(Tok)) Lex(Tok);

  // Discard comments as long as we don't have EOF or EOD. This 'comments out'
  // the rest of the line, including any tokens that came from other macros
  // that were active, as in:
  //   #define submacro a COMMENT b
  //   submacro c
  // which should lex to 'a' only: 'b' and 'c' should be removed.
  while (Tok.isNot(tok::eod) && Tok.isNot(tok::eof))
    Lex(Tok);

  // If we got an eod token, then we successfully found the end of the line.
  if (Tok.is(tok::eod)) {
    assert(FoundLexer && "Can't get end of line without an active lexer");
    // Restore the lexer back to normal mode instead of raw mode.
    FoundLexer->LexingRawMode = false;

    // If the lexer was already in preprocessor mode, just return the EOD token
    // to finish the preprocessor line.
    if (LexerWasInPPMode) return;

    // Otherwise, switch out of PP mode and return the next lexed token.
    FoundLexer->ParsingPreprocessorDirective = false;
    return Lex(Tok);
  }

  // If we got an EOF token, then we reached the end of the token stream but
  // didn't find an explicit \n. This can only happen if there was no lexer
  // active (an active lexer would return EOD at EOF if there was no \n in
  // preprocessor directive mode), so just return EOF as our token.
  assert(!FoundLexer && "Lexer should return EOD before EOF in PP mode");
}