Preprocessor.h revision 88710f22be1acb04b7d812aebe87bea56394c07c
1//===--- Preprocessor.h - C Language Family Preprocessor --------*- C++ -*-===//
2//
3//                     The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10//  This file defines the Preprocessor interface.
11//
12//===----------------------------------------------------------------------===//
13
14#ifndef LLVM_CLANG_LEX_PREPROCESSOR_H
15#define LLVM_CLANG_LEX_PREPROCESSOR_H
16
17#include "clang/Lex/MacroInfo.h"
18#include "clang/Lex/Lexer.h"
19#include "clang/Lex/PTHLexer.h"
20#include "clang/Lex/PPCallbacks.h"
21#include "clang/Lex/TokenLexer.h"
22#include "clang/Lex/PTHManager.h"
23#include "clang/Basic/Builtins.h"
24#include "clang/Basic/Diagnostic.h"
25#include "clang/Basic/IdentifierTable.h"
26#include "clang/Basic/SourceLocation.h"
27#include "llvm/ADT/DenseMap.h"
28#include "llvm/ADT/IntrusiveRefCntPtr.h"
29#include "llvm/ADT/SmallPtrSet.h"
30#include "llvm/ADT/OwningPtr.h"
31#include "llvm/ADT/SmallVector.h"
32#include "llvm/ADT/ArrayRef.h"
33#include "llvm/Support/Allocator.h"
34#include <vector>
35
36namespace clang {
37
38class SourceManager;
39class ExternalPreprocessorSource;
40class FileManager;
41class FileEntry;
42class HeaderSearch;
43class PragmaNamespace;
44class PragmaHandler;
45class CommentHandler;
46class ScratchBuffer;
47class TargetInfo;
48class PPCallbacks;
49class CodeCompletionHandler;
50class DirectoryLookup;
51class PreprocessingRecord;
52
53/// Preprocessor - This object engages in a tight little dance with the lexer to
54/// efficiently preprocess tokens.  Lexers know only about tokens within a
55/// single source file, and don't know anything about preprocessor-level issues
56/// like the #include stack, token expansion, etc.
57///
58class Preprocessor : public llvm::RefCountedBase<Preprocessor> {
59  Diagnostic        *Diags;
60  LangOptions        Features;
61  const TargetInfo  &Target;
62  FileManager       &FileMgr;
63  SourceManager     &SourceMgr;
64  ScratchBuffer     *ScratchBuf;
65  HeaderSearch      &HeaderInfo;
66
67  /// \brief External source of macros.
68  ExternalPreprocessorSource *ExternalSource;
69
70  /// PTH - An optional PTHManager object used for getting tokens from
71  ///  a token cache rather than lexing the original source file.
72  llvm::OwningPtr<PTHManager> PTH;
73
74  /// BP - A BumpPtrAllocator object used to quickly allocate and release
75  ///  objects internal to the Preprocessor.
76  llvm::BumpPtrAllocator BP;
77
78  /// Identifiers for builtin macros and other builtins.
79  IdentifierInfo *Ident__LINE__, *Ident__FILE__;   // __LINE__, __FILE__
80  IdentifierInfo *Ident__DATE__, *Ident__TIME__;   // __DATE__, __TIME__
81  IdentifierInfo *Ident__INCLUDE_LEVEL__;          // __INCLUDE_LEVEL__
82  IdentifierInfo *Ident__BASE_FILE__;              // __BASE_FILE__
83  IdentifierInfo *Ident__TIMESTAMP__;              // __TIMESTAMP__
84  IdentifierInfo *Ident__COUNTER__;                // __COUNTER__
85  IdentifierInfo *Ident_Pragma, *Ident__pragma;    // _Pragma, __pragma
86  IdentifierInfo *Ident__VA_ARGS__;                // __VA_ARGS__
87  IdentifierInfo *Ident__has_feature;              // __has_feature
88  IdentifierInfo *Ident__has_extension;            // __has_extension
89  IdentifierInfo *Ident__has_builtin;              // __has_builtin
90  IdentifierInfo *Ident__has_attribute;            // __has_attribute
91  IdentifierInfo *Ident__has_include;              // __has_include
92  IdentifierInfo *Ident__has_include_next;         // __has_include_next
93
94  SourceLocation DATELoc, TIMELoc;
95  unsigned CounterValue;  // Next __COUNTER__ value.
96
97  enum {
    /// MaxAllowedIncludeStackDepth - Maximum depth of #includes.
99    MaxAllowedIncludeStackDepth = 200
100  };
101
102  // State that is set before the preprocessor begins.
103  bool KeepComments : 1;
104  bool KeepMacroComments : 1;
105
106  // State that changes while the preprocessor runs:
107  bool InMacroArgs : 1;            // True if parsing fn macro invocation args.
108
109  /// Whether the preprocessor owns the header search object.
110  bool OwnsHeaderSearch : 1;
111
112  /// DisableMacroExpansion - True if macro expansion is disabled.
113  bool DisableMacroExpansion : 1;
114
115  /// \brief Whether we have already loaded macros from the external source.
116  mutable bool ReadMacrosFromExternalSource : 1;
117
118  /// Identifiers - This is mapping/lookup information for all identifiers in
119  /// the program, including program keywords.
120  mutable IdentifierTable Identifiers;
121
122  /// Selectors - This table contains all the selectors in the program. Unlike
123  /// IdentifierTable above, this table *isn't* populated by the preprocessor.
  /// It is declared/expanded here because its role/lifetime is
  /// conceptually similar to the IdentifierTable. In addition, the current
  /// control flow (in clang::ParseAST()) makes it convenient to put here.
127  /// FIXME: Make sure the lifetime of Identifiers/Selectors *isn't* tied to
128  /// the lifetime of the preprocessor.
129  SelectorTable Selectors;
130
131  /// BuiltinInfo - Information about builtins.
132  Builtin::Context BuiltinInfo;
133
134  /// PragmaHandlers - This tracks all of the pragmas that the client registered
135  /// with this preprocessor.
136  PragmaNamespace *PragmaHandlers;
137
138  /// \brief Tracks all of the comment handlers that the client registered
139  /// with this preprocessor.
140  std::vector<CommentHandler *> CommentHandlers;
141
142  /// \brief The code-completion handler.
143  CodeCompletionHandler *CodeComplete;
144
145  /// \brief The file that we're performing code-completion for, if any.
146  const FileEntry *CodeCompletionFile;
147
148  /// \brief The number of bytes that we will initially skip when entering the
149  /// main file, which is used when loading a precompiled preamble, along
150  /// with a flag that indicates whether skipping this number of bytes will
151  /// place the lexer at the start of a line.
152  std::pair<unsigned, bool> SkipMainFilePreamble;
153
154  /// CurLexer - This is the current top of the stack that we're lexing from if
155  /// not expanding a macro and we are lexing directly from source code.
156  ///  Only one of CurLexer, CurPTHLexer, or CurTokenLexer will be non-null.
157  llvm::OwningPtr<Lexer> CurLexer;
158
159  /// CurPTHLexer - This is the current top of stack that we're lexing from if
160  ///  not expanding from a macro and we are lexing from a PTH cache.
161  ///  Only one of CurLexer, CurPTHLexer, or CurTokenLexer will be non-null.
162  llvm::OwningPtr<PTHLexer> CurPTHLexer;
163
  /// CurPPLexer - This is the current top of the stack that we're lexing from
165  ///  if not expanding a macro.  This is an alias for either CurLexer or
166  ///  CurPTHLexer.
167  PreprocessorLexer *CurPPLexer;
168
  /// CurDirLookup - The DirectoryLookup structure used to find the current
170  /// FileEntry, if CurLexer is non-null and if applicable.  This allows us to
171  /// implement #include_next and find directory-specific properties.
172  const DirectoryLookup *CurDirLookup;
173
174  /// CurTokenLexer - This is the current macro we are expanding, if we are
175  /// expanding a macro.  One of CurLexer and CurTokenLexer must be null.
176  llvm::OwningPtr<TokenLexer> CurTokenLexer;
177
178  /// IncludeMacroStack - This keeps track of the stack of files currently
179  /// #included, and macros currently being expanded from, not counting
180  /// CurLexer/CurTokenLexer.
  /// IncludeStackInfo - One saved entry of the include/macro-expansion stack:
  /// a snapshot of the per-file lexing state (mirroring the Cur* members
  /// above), pushed when a nested file or macro expansion is entered and
  /// used to restore the previous state when it is exited.
  struct IncludeStackInfo {
    Lexer                 *TheLexer;       // Saved CurLexer.
    PTHLexer              *ThePTHLexer;    // Saved CurPTHLexer.
    PreprocessorLexer     *ThePPLexer;     // Saved CurPPLexer.
    TokenLexer            *TheTokenLexer;  // Saved CurTokenLexer.
    const DirectoryLookup *TheDirLookup;   // Saved CurDirLookup.

    IncludeStackInfo(Lexer *L, PTHLexer* P, PreprocessorLexer* PPL,
                     TokenLexer* TL, const DirectoryLookup *D)
      : TheLexer(L), ThePTHLexer(P), ThePPLexer(PPL), TheTokenLexer(TL),
        TheDirLookup(D) {}
  };
193  std::vector<IncludeStackInfo> IncludeMacroStack;
194
195  /// Callbacks - These are actions invoked when some preprocessor activity is
196  /// encountered (e.g. a file is #included, etc).
197  PPCallbacks *Callbacks;
198
199  /// Macros - For each IdentifierInfo with 'HasMacro' set, we keep a mapping
200  /// to the actual definition of the macro.
201  llvm::DenseMap<IdentifierInfo*, MacroInfo*> Macros;
202
203  /// \brief Macros that we want to warn because they are not used at the end
204  /// of the translation unit; we store just their SourceLocations instead
205  /// something like MacroInfo*. The benefit of this is that when we are
206  /// deserializing from PCH, we don't need to deserialize identifier & macros
207  /// just so that we can report that they are unused, we just warn using
208  /// the SourceLocations of this set (that will be filled by the ASTReader).
209  /// We are using SmallPtrSet instead of a vector for faster removal.
210  typedef llvm::SmallPtrSet<SourceLocation, 32> WarnUnusedMacroLocsTy;
211  WarnUnusedMacroLocsTy WarnUnusedMacroLocs;
212
213  /// MacroArgCache - This is a "freelist" of MacroArg objects that can be
214  /// reused for quick allocation.
215  MacroArgs *MacroArgCache;
216  friend class MacroArgs;
217
218  /// PragmaPushMacroInfo - For each IdentifierInfo used in a #pragma
219  /// push_macro directive, we keep a MacroInfo stack used to restore
220  /// previous macro value.
221  llvm::DenseMap<IdentifierInfo*, std::vector<MacroInfo*> > PragmaPushMacroInfo;
222
223  /// \brief Expansion source location for the last macro that expanded
224  /// to no tokens.
225  SourceLocation LastEmptyMacroExpansionLoc;
226
227  // Various statistics we track for performance analysis.
228  unsigned NumDirectives, NumIncluded, NumDefined, NumUndefined, NumPragma;
229  unsigned NumIf, NumElse, NumEndif;
230  unsigned NumEnteredSourceFiles, MaxIncludeStackDepth;
231  unsigned NumMacroExpanded, NumFnMacroExpanded, NumBuiltinMacroExpanded;
232  unsigned NumFastMacroExpanded, NumTokenPaste, NumFastTokenPaste;
233  unsigned NumSkipped;
234
235  /// Predefines - This string is the predefined macros that preprocessor
236  /// should use from the command line etc.
237  std::string Predefines;
238
239  /// TokenLexerCache - Cache macro expanders to reduce malloc traffic.
240  enum { TokenLexerCacheSize = 8 };
241  unsigned NumCachedTokenLexers;
242  TokenLexer *TokenLexerCache[TokenLexerCacheSize];
243
244  /// \brief Keeps macro expanded tokens for TokenLexers.
  ///
246  /// Works like a stack; a TokenLexer adds the macro expanded tokens that is
247  /// going to lex in the cache and when it finishes the tokens are removed
248  /// from the end of the cache.
249  SmallVector<Token, 16> MacroExpandedTokens;
250  std::vector<std::pair<TokenLexer *, size_t> > MacroExpandingLexersStack;
251
252  /// \brief A record of the macro definitions and expansions that
253  /// occurred during preprocessing.
254  ///
255  /// This is an optional side structure that can be enabled with
256  /// \c createPreprocessingRecord() prior to preprocessing.
257  PreprocessingRecord *Record;
258
259private:  // Cached tokens state.
260  typedef SmallVector<Token, 1> CachedTokensTy;
261
262  /// CachedTokens - Cached tokens are stored here when we do backtracking or
263  /// lookahead. They are "lexed" by the CachingLex() method.
264  CachedTokensTy CachedTokens;
265
266  /// CachedLexPos - The position of the cached token that CachingLex() should
267  /// "lex" next. If it points beyond the CachedTokens vector, it means that
268  /// a normal Lex() should be invoked.
269  CachedTokensTy::size_type CachedLexPos;
270
271  /// BacktrackPositions - Stack of backtrack positions, allowing nested
272  /// backtracks. The EnableBacktrackAtThisPos() method pushes a position to
273  /// indicate where CachedLexPos should be set when the BackTrack() method is
274  /// invoked (at which point the last position is popped).
275  std::vector<CachedTokensTy::size_type> BacktrackPositions;
276
  /// MacroInfoChain - A MacroInfo wrapped in a doubly-linked list node so
  /// that all allocated MacroInfos can be walked and disposed of as a group
  /// (see MIChainHead and MICache below).
  struct MacroInfoChain {
    MacroInfo MI;
    MacroInfoChain *Next;
    MacroInfoChain *Prev;
  };
282
283  /// MacroInfos are managed as a chain for easy disposal.  This is the head
284  /// of that list.
285  MacroInfoChain *MIChainHead;
286
287  /// MICache - A "freelist" of MacroInfo objects that can be reused for quick
288  /// allocation.
289  MacroInfoChain *MICache;
290
291  MacroInfo *getInfoForMacro(IdentifierInfo *II) const;
292
293public:
294  Preprocessor(Diagnostic &diags, const LangOptions &opts,
295               const TargetInfo &target,
296               SourceManager &SM, HeaderSearch &Headers,
297               IdentifierInfoLookup *IILookup = 0,
298               bool OwnsHeaderSearch = false);
299
300  ~Preprocessor();
301
  /// Simple accessors for the pieces of state this preprocessor was
  /// constructed with.
  Diagnostic &getDiagnostics() const { return *Diags; }
  void setDiagnostics(Diagnostic &D) { Diags = &D; }

  const LangOptions &getLangOptions() const { return Features; }
  const TargetInfo &getTargetInfo() const { return Target; }
  FileManager &getFileManager() const { return FileMgr; }
  SourceManager &getSourceManager() const { return SourceMgr; }
  HeaderSearch &getHeaderSearchInfo() const { return HeaderInfo; }

  IdentifierTable &getIdentifierTable() { return Identifiers; }
  SelectorTable &getSelectorTable() { return Selectors; }
  Builtin::Context &getBuiltinInfo() { return BuiltinInfo; }
  llvm::BumpPtrAllocator &getPreprocessorAllocator() { return BP; }

  /// setPTHManager - Install a PTH manager for token-cache lexing.  Defined
  /// out-of-line; the PTH member is an OwningPtr, so ownership is presumably
  /// transferred here — confirm against the implementation.
  void setPTHManager(PTHManager* pm);

  PTHManager *getPTHManager() { return PTH.get(); }

  /// setExternalSource - Register the external source of macro definitions
  /// consulted when a macro is not found locally.
  void setExternalSource(ExternalPreprocessorSource *Source) {
    ExternalSource = Source;
  }

  ExternalPreprocessorSource *getExternalSource() const {
    return ExternalSource;
  }
327
328  /// SetCommentRetentionState - Control whether or not the preprocessor retains
329  /// comments in output.
330  void SetCommentRetentionState(bool KeepComments, bool KeepMacroComments) {
331    this->KeepComments = KeepComments | KeepMacroComments;
332    this->KeepMacroComments = KeepMacroComments;
333  }
334
  /// getCommentRetentionState - Return true if comments are currently being
  /// retained in the token stream.
  bool getCommentRetentionState() const { return KeepComments; }

  /// isCurrentLexer - Return true if we are lexing directly from the specified
  /// lexer.
  bool isCurrentLexer(const PreprocessorLexer *L) const {
    return CurPPLexer == L;
  }

  /// getCurrentLexer - Return the current lexer being lexed from.  Note
  /// that this ignores any potentially active macro expansions and _Pragma
  /// expansions going on at the time.
  PreprocessorLexer *getCurrentLexer() const { return CurPPLexer; }
347
348  /// getCurrentFileLexer - Return the current file lexer being lexed from.
349  /// Note that this ignores any potentially active macro expansions and _Pragma
350  /// expansions going on at the time.
351  PreprocessorLexer *getCurrentFileLexer() const;
352
353  /// getPPCallbacks/addPPCallbacks - Accessors for preprocessor callbacks.
354  /// Note that this class takes ownership of any PPCallbacks object given to
355  /// it.
356  PPCallbacks *getPPCallbacks() const { return Callbacks; }
357  void addPPCallbacks(PPCallbacks *C) {
358    if (Callbacks)
359      C = new PPChainedCallbacks(C, Callbacks);
360    Callbacks = C;
361  }
362
363  /// getMacroInfo - Given an identifier, return the MacroInfo it is #defined to
364  /// or null if it isn't #define'd.
365  MacroInfo *getMacroInfo(IdentifierInfo *II) const {
366    if (!II->hasMacroDefinition())
367      return 0;
368
369    return getInfoForMacro(II);
370  }
371
372  /// setMacroInfo - Specify a macro for this identifier.
373  ///
374  void setMacroInfo(IdentifierInfo *II, MacroInfo *MI);
375
376  /// macro_iterator/macro_begin/macro_end - This allows you to walk the current
377  /// state of the macro table.  This visits every currently-defined macro.
378  typedef llvm::DenseMap<IdentifierInfo*,
379                         MacroInfo*>::const_iterator macro_iterator;
380  macro_iterator macro_begin(bool IncludeExternalMacros = true) const;
381  macro_iterator macro_end(bool IncludeExternalMacros = true) const;
382
383  /// \brief Expansion source location for the last macro that expanded
384  /// to no tokens.
385  SourceLocation getLastEmptyMacroExpansionLoc() const {
386    return LastEmptyMacroExpansionLoc;
387  }
388
389  const std::string &getPredefines() const { return Predefines; }
390  /// setPredefines - Set the predefines for this Preprocessor.  These
391  /// predefines are automatically injected when parsing the main file.
392  void setPredefines(const char *P) { Predefines = P; }
393  void setPredefines(const std::string &P) { Predefines = P; }
394
395  /// getIdentifierInfo - Return information about the specified preprocessor
396  /// identifier token.  The version of this method that takes two character
397  /// pointers is preferred unless the identifier is already available as a
398  /// string (this avoids allocation and copying of memory to construct an
399  /// std::string).
400  IdentifierInfo *getIdentifierInfo(StringRef Name) const {
401    return &Identifiers.get(Name);
402  }
403
  /// AddPragmaHandler - Add the specified pragma handler to the preprocessor.
  /// If 'Namespace' is non-null, then it is a token required to exist on the
  /// pragma line before the pragma string starts, e.g. "STDC" or "GCC".
  void AddPragmaHandler(StringRef Namespace, PragmaHandler *Handler);

  /// AddPragmaHandler - Convenience overload that registers the handler at
  /// the top level (no namespace token).
  void AddPragmaHandler(PragmaHandler *Handler) {
    AddPragmaHandler(StringRef(), Handler);
  }

  /// RemovePragmaHandler - Remove the specific pragma handler from
  /// the preprocessor. If \arg Namespace is non-null, then it should
  /// be the namespace that \arg Handler was added to. It is an error
  /// to remove a handler that has not been registered.
  void RemovePragmaHandler(StringRef Namespace, PragmaHandler *Handler);

  /// RemovePragmaHandler - Convenience overload for handlers that were
  /// registered without a namespace.
  void RemovePragmaHandler(PragmaHandler *Handler) {
    RemovePragmaHandler(StringRef(), Handler);
  }
420
421  /// \brief Add the specified comment handler to the preprocessor.
422  void AddCommentHandler(CommentHandler *Handler);
423
424  /// \brief Remove the specified comment handler.
425  ///
426  /// It is an error to remove a handler that has not been registered.
427  void RemoveCommentHandler(CommentHandler *Handler);
428
  /// \brief Set the code completion handler to the given object.
  ///
  /// Only the address is stored; this setter takes no ownership, so the
  /// caller must keep the handler alive while it remains registered.
  void setCodeCompletionHandler(CodeCompletionHandler &Handler) {
    CodeComplete = &Handler;
  }

  /// \brief Retrieve the current code-completion handler.
  ///
  /// Returns null if no handler has been registered.
  CodeCompletionHandler *getCodeCompletionHandler() const {
    return CodeComplete;
  }

  /// \brief Clear out the code completion handler.
  void clearCodeCompletionHandler() {
    CodeComplete = 0;
  }
443
444  /// \brief Hook used by the lexer to invoke the "natural language" code
445  /// completion point.
446  void CodeCompleteNaturalLanguage();
447
448  /// \brief Retrieve the preprocessing record, or NULL if there is no
449  /// preprocessing record.
450  PreprocessingRecord *getPreprocessingRecord() const { return Record; }
451
452  /// \brief Create a new preprocessing record, which will keep track of
453  /// all macro expansions, macro definitions, etc.
454  void createPreprocessingRecord(bool IncludeNestedMacroExpansions);
455
456  /// EnterMainSourceFile - Enter the specified FileID as the main source file,
457  /// which implicitly adds the builtin defines etc.
458  void EnterMainSourceFile();
459
460  /// EndSourceFile - Inform the preprocessor callbacks that processing is
461  /// complete.
462  void EndSourceFile();
463
464  /// EnterSourceFile - Add a source file to the top of the include stack and
465  /// start lexing tokens from it instead of the current buffer.  Emit an error
466  /// and don't enter the file on error.
467  void EnterSourceFile(FileID CurFileID, const DirectoryLookup *Dir,
468                       SourceLocation Loc);
469
470  /// EnterMacro - Add a Macro to the top of the include stack and start lexing
471  /// tokens from it instead of the current buffer.  Args specifies the
472  /// tokens input to a function-like macro.
473  ///
474  /// ILEnd specifies the location of the ')' for a function-like macro or the
475  /// identifier for an object-like macro.
476  void EnterMacro(Token &Identifier, SourceLocation ILEnd, MacroArgs *Args);
477
478  /// EnterTokenStream - Add a "macro" context to the top of the include stack,
479  /// which will cause the lexer to start returning the specified tokens.
480  ///
481  /// If DisableMacroExpansion is true, tokens lexed from the token stream will
482  /// not be subject to further macro expansion.  Otherwise, these tokens will
483  /// be re-macro-expanded when/if expansion is enabled.
484  ///
485  /// If OwnsTokens is false, this method assumes that the specified stream of
486  /// tokens has a permanent owner somewhere, so they do not need to be copied.
487  /// If it is true, it assumes the array of tokens is allocated with new[] and
488  /// must be freed.
489  ///
490  void EnterTokenStream(const Token *Toks, unsigned NumToks,
491                        bool DisableMacroExpansion, bool OwnsTokens);
492
493  /// RemoveTopOfLexerStack - Pop the current lexer/macro exp off the top of the
494  /// lexer stack.  This should only be used in situations where the current
495  /// state of the top-of-stack lexer is known.
496  void RemoveTopOfLexerStack();
497
498  /// EnableBacktrackAtThisPos - From the point that this method is called, and
499  /// until CommitBacktrackedTokens() or Backtrack() is called, the Preprocessor
500  /// keeps track of the lexed tokens so that a subsequent Backtrack() call will
501  /// make the Preprocessor re-lex the same tokens.
502  ///
503  /// Nested backtracks are allowed, meaning that EnableBacktrackAtThisPos can
504  /// be called multiple times and CommitBacktrackedTokens/Backtrack calls will
505  /// be combined with the EnableBacktrackAtThisPos calls in reverse order.
506  ///
507  /// NOTE: *DO NOT* forget to call either CommitBacktrackedTokens or Backtrack
508  /// at some point after EnableBacktrackAtThisPos. If you don't, caching of
509  /// tokens will continue indefinitely.
510  ///
511  void EnableBacktrackAtThisPos();
512
513  /// CommitBacktrackedTokens - Disable the last EnableBacktrackAtThisPos call.
514  void CommitBacktrackedTokens();
515
516  /// Backtrack - Make Preprocessor re-lex the tokens that were lexed since
517  /// EnableBacktrackAtThisPos() was previously called.
518  void Backtrack();
519
520  /// isBacktrackEnabled - True if EnableBacktrackAtThisPos() was called and
521  /// caching of tokens is on.
522  bool isBacktrackEnabled() const { return !BacktrackPositions.empty(); }
523
524  /// Lex - To lex a token from the preprocessor, just pull a token from the
525  /// current lexer or macro object.
526  void Lex(Token &Result) {
527    if (CurLexer)
528      CurLexer->Lex(Result);
529    else if (CurPTHLexer)
530      CurPTHLexer->Lex(Result);
531    else if (CurTokenLexer)
532      CurTokenLexer->Lex(Result);
533    else
534      CachingLex(Result);
535  }
536
537  /// LexNonComment - Lex a token.  If it's a comment, keep lexing until we get
538  /// something not a comment.  This is useful in -E -C mode where comments
539  /// would foul up preprocessor directive handling.
540  void LexNonComment(Token &Result) {
541    do
542      Lex(Result);
543    while (Result.getKind() == tok::comment);
544  }
545
546  /// LexUnexpandedToken - This is just like Lex, but this disables macro
547  /// expansion of identifier tokens.
548  void LexUnexpandedToken(Token &Result) {
549    // Disable macro expansion.
550    bool OldVal = DisableMacroExpansion;
551    DisableMacroExpansion = true;
552    // Lex the token.
553    Lex(Result);
554
555    // Reenable it.
556    DisableMacroExpansion = OldVal;
557  }
558
559  /// LexUnexpandedNonComment - Like LexNonComment, but this disables macro
560  /// expansion of identifier tokens.
561  void LexUnexpandedNonComment(Token &Result) {
562    do
563      LexUnexpandedToken(Result);
564    while (Result.getKind() == tok::comment);
565  }
566
567  /// LookAhead - This peeks ahead N tokens and returns that token without
568  /// consuming any tokens.  LookAhead(0) returns the next token that would be
569  /// returned by Lex(), LookAhead(1) returns the token after it, etc.  This
570  /// returns normal tokens after phase 5.  As such, it is equivalent to using
571  /// 'Lex', not 'LexUnexpandedToken'.
572  const Token &LookAhead(unsigned N) {
573    if (CachedLexPos + N < CachedTokens.size())
574      return CachedTokens[CachedLexPos+N];
575    else
576      return PeekAhead(N+1);
577  }
578
579  /// RevertCachedTokens - When backtracking is enabled and tokens are cached,
580  /// this allows to revert a specific number of tokens.
581  /// Note that the number of tokens being reverted should be up to the last
582  /// backtrack position, not more.
583  void RevertCachedTokens(unsigned N) {
584    assert(isBacktrackEnabled() &&
585           "Should only be called when tokens are cached for backtracking");
586    assert(signed(CachedLexPos) - signed(N) >= signed(BacktrackPositions.back())
587         && "Should revert tokens up to the last backtrack position, not more");
588    assert(signed(CachedLexPos) - signed(N) >= 0 &&
589           "Corrupted backtrack positions ?");
590    CachedLexPos -= N;
591  }
592
593  /// EnterToken - Enters a token in the token stream to be lexed next. If
594  /// BackTrack() is called afterwards, the token will remain at the insertion
595  /// point.
596  void EnterToken(const Token &Tok) {
597    EnterCachingLexMode();
598    CachedTokens.insert(CachedTokens.begin()+CachedLexPos, Tok);
599  }
600
601  /// AnnotateCachedTokens - We notify the Preprocessor that if it is caching
602  /// tokens (because backtrack is enabled) it should replace the most recent
603  /// cached tokens with the given annotation token. This function has no effect
604  /// if backtracking is not enabled.
605  ///
606  /// Note that the use of this function is just for optimization; so that the
607  /// cached tokens doesn't get re-parsed and re-resolved after a backtrack is
608  /// invoked.
609  void AnnotateCachedTokens(const Token &Tok) {
610    assert(Tok.isAnnotation() && "Expected annotation token");
611    if (CachedLexPos != 0 && isBacktrackEnabled())
612      AnnotatePreviousCachedTokens(Tok);
613  }
614
615  /// \brief Replace the last token with an annotation token.
616  ///
617  /// Like AnnotateCachedTokens(), this routine replaces an
618  /// already-parsed (and resolved) token with an annotation
619  /// token. However, this routine only replaces the last token with
620  /// the annotation token; it does not affect any other cached
621  /// tokens. This function has no effect if backtracking is not
622  /// enabled.
623  void ReplaceLastTokenWithAnnotation(const Token &Tok) {
624    assert(Tok.isAnnotation() && "Expected annotation token");
625    if (CachedLexPos != 0 && isBacktrackEnabled())
626      CachedTokens[CachedLexPos-1] = Tok;
627  }
628
629  /// \brief Specify the point at which code-completion will be performed.
630  ///
631  /// \param File the file in which code completion should occur. If
632  /// this file is included multiple times, code-completion will
633  /// perform completion the first time it is included. If NULL, this
634  /// function clears out the code-completion point.
635  ///
636  /// \param Line the line at which code completion should occur
637  /// (1-based).
638  ///
639  /// \param Column the column at which code completion should occur
640  /// (1-based).
641  ///
642  /// \returns true if an error occurred, false otherwise.
643  bool SetCodeCompletionPoint(const FileEntry *File,
644                              unsigned Line, unsigned Column);
645
646  /// \brief Determine if this source location refers into the file
647  /// for which we are performing code completion.
648  bool isCodeCompletionFile(SourceLocation FileLoc) const;
649
650  /// \brief Determine if we are performing code completion.
651  bool isCodeCompletionEnabled() const { return CodeCompletionFile != 0; }
652
  /// \brief Instruct the preprocessor to skip part of the main source file.
  ///
  /// \param Bytes The number of bytes in the preamble to skip.
  ///
  /// \param StartOfLine Whether skipping these bytes puts the lexer at the
  /// start of a line.
  void setSkipMainFilePreamble(unsigned Bytes, bool StartOfLine) {
    SkipMainFilePreamble.first = Bytes;
    SkipMainFilePreamble.second = StartOfLine;
  }
664
  /// Diag - Forwarding function for diagnostics.  This emits a diagnostic at
  /// the specified Token's location, translating the token's start
  /// position in the current buffer into a SourcePosition object for rendering.
  DiagnosticBuilder Diag(SourceLocation Loc, unsigned DiagID) {
    return Diags->Report(Loc, DiagID);
  }

  /// Diag - Emit a diagnostic with the given ID at the location of the
  /// specified token.
  DiagnosticBuilder Diag(const Token &Tok, unsigned DiagID) {
    return Diags->Report(Tok.getLocation(), DiagID);
  }
675
676  /// getSpelling() - Return the 'spelling' of the token at the given
677  /// location; does not go up to the spelling location or down to the
678  /// expansion location.
679  ///
680  /// \param buffer A buffer which will be used only if the token requires
681  ///   "cleaning", e.g. if it contains trigraphs or escaped newlines
682  /// \param invalid If non-null, will be set \c true if an error occurs.
683  StringRef getSpelling(SourceLocation loc,
684                              SmallVectorImpl<char> &buffer,
685                              bool *invalid = 0) const {
686    return Lexer::getSpelling(loc, buffer, SourceMgr, Features, invalid);
687  }
688
689  /// getSpelling() - Return the 'spelling' of the Tok token.  The spelling of a
690  /// token is the characters used to represent the token in the source file
691  /// after trigraph expansion and escaped-newline folding.  In particular, this
692  /// wants to get the true, uncanonicalized, spelling of things like digraphs
693  /// UCNs, etc.
694  ///
695  /// \param Invalid If non-null, will be set \c true if an error occurs.
696  std::string getSpelling(const Token &Tok, bool *Invalid = 0) const {
697    return Lexer::getSpelling(Tok, SourceMgr, Features, Invalid);
698  }
699
700  /// getSpelling - This method is used to get the spelling of a token into a
701  /// preallocated buffer, instead of as an std::string.  The caller is required
702  /// to allocate enough space for the token, which is guaranteed to be at least
703  /// Tok.getLength() bytes long.  The length of the actual result is returned.
704  ///
705  /// Note that this method may do two possible things: it may either fill in
706  /// the buffer specified with characters, or it may *change the input pointer*
707  /// to point to a constant buffer with the data already in it (avoiding a
708  /// copy).  The caller is not allowed to modify the returned buffer pointer
709  /// if an internal buffer is returned.
710  unsigned getSpelling(const Token &Tok, const char *&Buffer,
711                       bool *Invalid = 0) const {
712    return Lexer::getSpelling(Tok, Buffer, SourceMgr, Features, Invalid);
713  }
714
  /// getSpelling - This method is used to get the spelling of a token into a
  /// SmallVector. Note that the returned StringRef may not point to the
  /// supplied buffer if a copy can be avoided.
  ///
  /// \param Invalid If non-null, will be set \c true if an error occurs.
  StringRef getSpelling(const Token &Tok,
                              SmallVectorImpl<char> &Buffer,
                              bool *Invalid = 0) const;
721
722  /// getSpellingOfSingleCharacterNumericConstant - Tok is a numeric constant
723  /// with length 1, return the character.
724  char getSpellingOfSingleCharacterNumericConstant(const Token &Tok,
725                                                   bool *Invalid = 0) const {
726    assert(Tok.is(tok::numeric_constant) &&
727           Tok.getLength() == 1 && "Called on unsupported token");
728    assert(!Tok.needsCleaning() && "Token can't need cleaning with length 1");
729
730    // If the token is carrying a literal data pointer, just use it.
731    if (const char *D = Tok.getLiteralData())
732      return *D;
733
734    // Otherwise, fall back on getCharacterData, which is slower, but always
735    // works.
736    return *SourceMgr.getCharacterData(Tok.getLocation(), Invalid);
737  }
738
  /// CreateString - Plop the specified string into a scratch buffer and set the
  /// specified token's location and length to it.  If specified, the source
  /// location provides a location of the expansion point of the token.
  ///
  /// \param Buf pointer to the characters to copy into the scratch buffer.
  /// \param Len number of characters in \p Buf.
  void CreateString(const char *Buf, unsigned Len,
                    Token &Tok, SourceLocation SourceLoc = SourceLocation());
744
  /// \brief Computes the source location just past the end of the
  /// token at this source location.
  ///
  /// This routine can be used to produce a source location that
  /// points just past the end of the token referenced by \p Loc, and
  /// is generally used when a diagnostic needs to point just after a
  /// token where it expected something different than it received. If
  /// the returned source location would not be meaningful (e.g., if
  /// it points into a macro), this routine returns an invalid
  /// source location.
  ///
  /// \param Offset an offset from the end of the token, where the source
  /// location should refer to. The default offset (0) produces a source
  /// location pointing just past the end of the token; an offset of 1 produces
  /// a source location pointing to the last character in the token, etc.
  SourceLocation getLocForEndOfToken(SourceLocation Loc, unsigned Offset = 0) {
    return Lexer::getLocForEndOfToken(Loc, Offset, SourceMgr, Features);
  }
763
  /// \brief Returns true if the given MacroID location points at the first
  /// token of the macro expansion.
  ///
  /// \param loc a macro-expansion SourceLocation to test.
  bool isAtStartOfMacroExpansion(SourceLocation loc) const {
    return Lexer::isAtStartOfMacroExpansion(loc, SourceMgr, Features);
  }

  /// \brief Returns true if the given MacroID location points at the last
  /// token of the macro expansion.
  ///
  /// \param loc a macro-expansion SourceLocation to test.
  bool isAtEndOfMacroExpansion(SourceLocation loc) const {
    return Lexer::isAtEndOfMacroExpansion(loc, SourceMgr, Features);
  }
775
  /// DumpToken - Print the token to stderr, used for debugging.
  ///
  void DumpToken(const Token &Tok, bool DumpFlags = false) const;
  /// DumpLocation - Print \p Loc, used for debugging.
  void DumpLocation(SourceLocation Loc) const;
  /// DumpMacro - Print the macro definition \p MI, used for debugging.
  void DumpMacro(const MacroInfo &MI) const;
781
  /// AdvanceToTokenCharacter - Given a location that specifies the start of a
  /// token, return a new location that specifies a character within the token.
  ///
  /// \param TokStart the start location of the token.
  /// \param Char the index of the desired character within the token.
  SourceLocation AdvanceToTokenCharacter(SourceLocation TokStart,
                                         unsigned Char) const {
    return Lexer::AdvanceToTokenCharacter(TokStart, Char, SourceMgr, Features);
  }
788
789  /// IncrementPasteCounter - Increment the counters for the number of token
790  /// paste operations performed.  If fast was specified, this is a 'fast paste'
791  /// case we handled.
792  ///
793  void IncrementPasteCounter(bool isFast) {
794    if (isFast)
795      ++NumFastTokenPaste;
796    else
797      ++NumTokenPaste;
798  }
799
  /// PrintStats - Print accumulated preprocessing statistics (e.g. the token
  /// paste counters maintained above); used for debugging.
  void PrintStats();

  /// getTotalMemory - Return the amount of memory used by this preprocessor;
  /// the exact accounting is defined in the implementation file.
  size_t getTotalMemory() const;

  /// HandleMicrosoftCommentPaste - When the macro expander pastes together a
  /// comment (/##/) in microsoft mode, this method handles updating the current
  /// state, returning the token on the next source line.
  void HandleMicrosoftCommentPaste(Token &Tok);
808
809  //===--------------------------------------------------------------------===//
810  // Preprocessor callback methods.  These are invoked by a lexer as various
811  // directives and events are found.
812
813  /// LookUpIdentifierInfo - Given a tok::raw_identifier token, look up the
814  /// identifier information for the token and install it into the token,
815  /// updating the token kind accordingly.
816  IdentifierInfo *LookUpIdentifierInfo(Token &Identifier) const;
817
private:
  /// PoisonReasons - Maps a poisoned identifier to the diagnostic ID to emit
  /// when that identifier is used while poisoned.
  llvm::DenseMap<IdentifierInfo*,unsigned> PoisonReasons;

public:

  /// SetPoisonReason - Call this function to indicate the reason for
  /// poisoning an identifier. If that identifier is accessed while
  /// poisoned, then this reason will be used instead of the default
  /// "poisoned" diagnostic.
  void SetPoisonReason(IdentifierInfo *II, unsigned DiagID);

  /// HandlePoisonedIdentifier - Display reason for poisoned
  /// identifier.
  void HandlePoisonedIdentifier(Token & Tok);
832
833  void MaybeHandlePoisonedIdentifier(Token & Identifier) {
834    if(IdentifierInfo * II = Identifier.getIdentifierInfo()) {
835      if(II->isPoisoned()) {
836        HandlePoisonedIdentifier(Identifier);
837      }
838    }
839  }
840
private:
  /// Identifiers used for SEH handling in Borland. These are only
  /// allowed in particular circumstances.
  IdentifierInfo *Ident__exception_code, *Ident___exception_code, *Ident_GetExceptionCode; // __except block
  IdentifierInfo *Ident__exception_info, *Ident___exception_info, *Ident_GetExceptionInfo; // __except filter expression
  IdentifierInfo *Ident__abnormal_termination, *Ident___abnormal_termination, *Ident_AbnormalTermination; // __finally
public:
  /// PoisonSEHIdentifiers - Poison (or, with \p Poison false, unpoison) the
  /// Borland SEH identifiers declared above.
  void PoisonSEHIdentifiers(bool Poison = true); // Borland
849
  /// HandleIdentifier - This callback is invoked when the lexer reads an
  /// identifier and has filled in the token's IdentifierInfo member.  This
  /// callback potentially macro expands it or turns it into a named token (like
  /// 'for').
  void HandleIdentifier(Token &Identifier);


  /// HandleEndOfFile - This callback is invoked when the lexer hits the end of
  /// the current file.  This either returns the EOF token and returns true, or
  /// pops a level off the include stack and returns false, at which point the
  /// client should call lex again.
  bool HandleEndOfFile(Token &Result, bool isEndOfMacro = false);

  /// HandleEndOfTokenLexer - This callback is invoked when the current
  /// TokenLexer hits the end of its token stream.
  bool HandleEndOfTokenLexer(Token &Result);

  /// HandleDirective - This callback is invoked when the lexer sees a # token
  /// at the start of a line.  This consumes the directive, modifies the
  /// lexer/preprocessor state, and advances the lexer(s) so that the next token
  /// read is the correct one.
  void HandleDirective(Token &Result);

  /// CheckEndOfDirective - Ensure that the next token is a tok::eod token.  If
  /// not, emit a diagnostic and consume up until the eod.  If EnableMacros is
  /// true, then we consider macros that expand to zero tokens as being ok.
  void CheckEndOfDirective(const char *Directive, bool EnableMacros = false);

  /// DiscardUntilEndOfDirective - Read and discard all tokens remaining on the
  /// current line until the tok::eod token is found.
  void DiscardUntilEndOfDirective();
881
882  /// SawDateOrTime - This returns true if the preprocessor has seen a use of
883  /// __DATE__ or __TIME__ in the file so far.
884  bool SawDateOrTime() const {
885    return DATELoc != SourceLocation() || TIMELoc != SourceLocation();
886  }
  /// getCounterValue - Return the preprocessor's current counter value.
  /// NOTE(review): appears to back the __COUNTER__ builtin -- confirm in the
  /// implementation.
  unsigned getCounterValue() const { return CounterValue; }
  /// setCounterValue - Set the preprocessor's counter to \p V.
  void setCounterValue(unsigned V) { CounterValue = V; }
889
  /// AllocateMacroInfo - Allocate a new MacroInfo object with the provided
  ///  SourceLocation.
  MacroInfo *AllocateMacroInfo(SourceLocation L);

  /// CloneMacroInfo - Allocate a new MacroInfo object which is a clone of MI.
  MacroInfo *CloneMacroInfo(const MacroInfo &MI);

  /// GetIncludeFilenameSpelling - Turn the specified lexer token into a fully
  /// checked and spelled filename, e.g. as an operand of #include. This returns
  /// true if the input filename was in <>'s or false if it was in ""'s.  The
  /// caller is expected to provide a buffer that is large enough to hold the
  /// spelling of the filename, but is also expected to handle the case when
  /// this method decides to use a different buffer.
  bool GetIncludeFilenameSpelling(SourceLocation Loc,StringRef &Filename);
904
  /// LookupFile - Given a "foo" or <foo> reference, look up the indicated file,
  /// return null on failure.  isAngled indicates whether the file reference is
  /// for system #include's or not (i.e. using <> instead of "").
  ///
  /// \param SearchPath if non-null, receives the search path the file was
  /// found relative to; \p RelativePath similarly receives the file's path
  /// relative to that directory. NOTE(review): inferred from the parameter
  /// names -- confirm against the implementation.
  const FileEntry *LookupFile(StringRef Filename,
                              bool isAngled, const DirectoryLookup *FromDir,
                              const DirectoryLookup *&CurDir,
                              SmallVectorImpl<char> *SearchPath,
                              SmallVectorImpl<char> *RelativePath);

  /// GetCurDirLookup - The DirectoryLookup structure used to find the current
  /// FileEntry, if CurLexer is non-null and if applicable.  This allows us to
  /// implement #include_next and find directory-specific properties.
  const DirectoryLookup *GetCurDirLookup() { return CurDirLookup; }
918
  /// isInPrimaryFile - Return true if we're in the top-level file, not in a
  /// #include.
  bool isInPrimaryFile() const;

  /// ConcatenateIncludeName - Handle cases where the #include name is expanded
  /// from a macro as multiple tokens, which need to be glued together.  This
  /// occurs for code like:
  ///    #define FOO <a/b.h>
  ///    #include FOO
  /// because in this case, "<a/b.h>" is returned as 7 tokens, not one.
  ///
  /// This code concatenates and consumes tokens up to the '>' token.  It
  /// returns false if the > was found, otherwise it returns true if it finds
  /// and consumes the EOD marker.
  ///
  /// \param FilenameBuffer receives the glued filename spelling.
  bool ConcatenateIncludeName(llvm::SmallString<128> &FilenameBuffer,
                              SourceLocation &End);

  /// LexOnOffSwitch - Lex an on-off-switch (C99 6.10.6p2) and verify that it is
  /// followed by EOD.  Return true if the token is not a valid on-off-switch.
  bool LexOnOffSwitch(tok::OnOffSwitch &OOS);
939
940private:
941
942  void PushIncludeMacroStack() {
943    IncludeMacroStack.push_back(IncludeStackInfo(CurLexer.take(),
944                                                 CurPTHLexer.take(),
945                                                 CurPPLexer,
946                                                 CurTokenLexer.take(),
947                                                 CurDirLookup));
948    CurPPLexer = 0;
949  }
950
951  void PopIncludeMacroStack() {
952    CurLexer.reset(IncludeMacroStack.back().TheLexer);
953    CurPTHLexer.reset(IncludeMacroStack.back().ThePTHLexer);
954    CurPPLexer = IncludeMacroStack.back().ThePPLexer;
955    CurTokenLexer.reset(IncludeMacroStack.back().TheTokenLexer);
956    CurDirLookup  = IncludeMacroStack.back().TheDirLookup;
957    IncludeMacroStack.pop_back();
958  }
959
  /// AllocateMacroInfo - Allocate a new MacroInfo object.
  MacroInfo *AllocateMacroInfo();

  /// ReleaseMacroInfo - Release the specified MacroInfo.  This memory will
  ///  be reused for allocating new MacroInfo objects.
  void ReleaseMacroInfo(MacroInfo* MI);

  /// ReadMacroName - Lex and validate a macro name, which occurs after a
  /// #define or #undef.  This emits a diagnostic, sets the token kind to eod,
  /// and discards the rest of the macro line if the macro name is invalid.
  /// NOTE(review): isDefineUndef appears to encode whether we are in a
  /// #define or #undef context -- confirm the exact values at the call sites.
  void ReadMacroName(Token &MacroNameTok, char isDefineUndef = 0);

  /// ReadMacroDefinitionArgList - The ( starting an argument list of a macro
  /// definition has just been read.  Lex the rest of the arguments and the
  /// closing ), updating MI with what we learn.  Return true if an error occurs
  /// parsing the arg list.
  bool ReadMacroDefinitionArgList(MacroInfo *MI);

  /// SkipExcludedConditionalBlock - We just read a #if or related directive and
  /// decided that the subsequent tokens are in the #if'd out portion of the
  /// file.  Lex the rest of the file, until we see an #endif.  If
  /// FoundNonSkipPortion is true, then we have already emitted code for part of
  /// this #if directive, so #else/#elif blocks should never be entered. If
  /// FoundElse is false, then #else directives are ok, if not, then we have
  /// already seen one so a #else directive is a duplicate.  When this returns,
  /// the caller can lex the first valid token.
  void SkipExcludedConditionalBlock(SourceLocation IfTokenLoc,
                                    bool FoundNonSkipPortion, bool FoundElse);

  /// PTHSkipExcludedConditionalBlock - A fast PTH version of
  ///  SkipExcludedConditionalBlock.
  void PTHSkipExcludedConditionalBlock();

  /// EvaluateDirectiveExpression - Evaluate an integer constant expression that
  /// may occur after a #if or #elif directive and return it as a bool.  If the
  /// expression is equivalent to "!defined(X)" return X in IfNDefMacro.
  bool EvaluateDirectiveExpression(IdentifierInfo *&IfNDefMacro);
997
  /// RegisterBuiltinPragmas - Install the standard preprocessor pragmas:
  /// #pragma GCC poison/system_header/dependency and #pragma once.
  void RegisterBuiltinPragmas();

  /// RegisterBuiltinMacros - Register builtin macros, such as __LINE__ with the
  /// identifier table.
  void RegisterBuiltinMacros();

  /// HandleMacroExpandedIdentifier - If an identifier token is read that is to
  /// be expanded as a macro, handle it and return the next token as 'Tok'.  If
  /// the macro should not be expanded return true, otherwise return false.
  bool HandleMacroExpandedIdentifier(Token &Tok, MacroInfo *MI);

  /// \brief Cache macro expanded tokens for TokenLexers.
  ///
  /// Works like a stack; a TokenLexer adds the macro expanded tokens that it is
  /// going to lex in the cache and when it finishes the tokens are removed
  /// from the end of the cache.
  Token *cacheMacroExpandedTokens(TokenLexer *tokLexer,
                                  ArrayRef<Token> tokens);
  /// \brief Drop the cache entries added by the most recent TokenLexer.
  void removeCachedMacroExpandedTokensOfLastLexer();
  // Grant TokenLexer::ExpandFunctionArguments access to the cache above.
  friend void TokenLexer::ExpandFunctionArguments();

  /// isNextPPTokenLParen - Determine whether the next preprocessor token to be
  /// lexed is a '('.  If so, consume the token and return true, if not, this
  /// method should have no observable side-effect on the lexed tokens.
  bool isNextPPTokenLParen();

  /// ReadFunctionLikeMacroArgs - After reading "MACRO(", this method is
  /// invoked to read all of the formal arguments specified for the macro
  /// invocation.  This returns null on error.
  MacroArgs *ReadFunctionLikeMacroArgs(Token &MacroName, MacroInfo *MI,
                                       SourceLocation &ExpansionEnd);
1031
  /// ExpandBuiltinMacro - If an identifier token is read that is to be expanded
  /// as a builtin macro, handle it and return the next token as 'Tok'.
  void ExpandBuiltinMacro(Token &Tok);

  /// Handle_Pragma - Read a _Pragma directive, slice it up, process it, then
  /// return the first token after the directive.  The _Pragma token has just
  /// been read into 'Tok'.
  void Handle_Pragma(Token &Tok);

  /// HandleMicrosoft__pragma - Like Handle_Pragma except the pragma text
  /// is not enclosed within a string literal.
  void HandleMicrosoft__pragma(Token &Tok);

  /// EnterSourceFileWithLexer - Add a lexer to the top of the include stack and
  /// start lexing tokens from it instead of the current buffer.  \p Dir becomes
  /// the current directory lookup (see GetCurDirLookup).
  void EnterSourceFileWithLexer(Lexer *TheLexer, const DirectoryLookup *Dir);

  /// EnterSourceFileWithPTH - Add a lexer to the top of the include stack and
  /// start getting tokens from it using the PTH cache.
  void EnterSourceFileWithPTH(PTHLexer *PL, const DirectoryLookup *Dir);
1052
1053  /// IsFileLexer - Returns true if we are lexing from a file and not a
1054  ///  pragma or a macro.
1055  static bool IsFileLexer(const Lexer* L, const PreprocessorLexer* P) {
1056    return L ? !L->isPragmaLexer() : P != 0;
1057  }
1058
  /// IsFileLexer - Overload operating on a saved include-stack entry.
  static bool IsFileLexer(const IncludeStackInfo& I) {
    return IsFileLexer(I.TheLexer, I.ThePPLexer);
  }

  /// IsFileLexer - Overload operating on the preprocessor's current lexers.
  bool IsFileLexer() const {
    return IsFileLexer(CurLexer.get(), CurPPLexer);
  }
1066
1067  //===--------------------------------------------------------------------===//
1068  // Caching stuff.
1069  void CachingLex(Token &Result);
1070  bool InCachingLexMode() const {
1071    // If the Lexer pointers are 0 and IncludeMacroStack is empty, it means
1072    // that we are past EOF, not that we are in CachingLex mode.
1073    return CurPPLexer == 0 && CurTokenLexer == 0 && CurPTHLexer == 0 &&
1074           !IncludeMacroStack.empty();
1075  }
1076  void EnterCachingLexMode();
1077  void ExitCachingLexMode() {
1078    if (InCachingLexMode())
1079      RemoveTopOfLexerStack();
1080  }
1081  const Token &PeekAhead(unsigned N);
1082  void AnnotatePreviousCachedTokens(const Token &Tok);
1083
1084  //===--------------------------------------------------------------------===//
1085  /// Handle*Directive - implement the various preprocessor directives.  These
1086  /// should side-effect the current preprocessor object so that the next call
1087  /// to Lex() will return the appropriate token next.
1088  void HandleLineDirective(Token &Tok);
1089  void HandleDigitDirective(Token &Tok);
1090  void HandleUserDiagnosticDirective(Token &Tok, bool isWarning);
1091  void HandleIdentSCCSDirective(Token &Tok);
1092
1093  // File inclusion.
1094  void HandleIncludeDirective(SourceLocation HashLoc,
1095                              Token &Tok,
1096                              const DirectoryLookup *LookupFrom = 0,
1097                              bool isImport = false);
1098  void HandleIncludeNextDirective(SourceLocation HashLoc, Token &Tok);
1099  void HandleIncludeMacrosDirective(SourceLocation HashLoc, Token &Tok);
1100  void HandleImportDirective(SourceLocation HashLoc, Token &Tok);
1101
1102  // Macro handling.
1103  void HandleDefineDirective(Token &Tok);
1104  void HandleUndefDirective(Token &Tok);
1105
1106  // Conditional Inclusion.
1107  void HandleIfdefDirective(Token &Tok, bool isIfndef,
1108                            bool ReadAnyTokensBeforeDirective);
1109  void HandleIfDirective(Token &Tok, bool ReadAnyTokensBeforeDirective);
1110  void HandleEndifDirective(Token &Tok);
1111  void HandleElseDirective(Token &Tok);
1112  void HandleElifDirective(Token &Tok);
1113
1114  // Pragmas.
1115  void HandlePragmaDirective(unsigned Introducer);
1116public:
1117  void HandlePragmaOnce(Token &OnceTok);
1118  void HandlePragmaMark();
1119  void HandlePragmaPoison(Token &PoisonTok);
1120  void HandlePragmaSystemHeader(Token &SysHeaderTok);
1121  void HandlePragmaDependency(Token &DependencyTok);
1122  void HandlePragmaComment(Token &CommentTok);
1123  void HandlePragmaMessage(Token &MessageTok);
1124  void HandlePragmaPushMacro(Token &Tok);
1125  void HandlePragmaPopMacro(Token &Tok);
1126  IdentifierInfo *ParsePragmaPushOrPopMacro(Token &Tok);
1127
1128  // Return true and store the first token only if any CommentHandler
1129  // has inserted some tokens and getCommentRetentionState() is false.
1130  bool HandleComment(Token &Token, SourceRange Comment);
1131
1132  /// \brief A macro is used, update information about macros that need unused
1133  /// warnings.
1134  void markMacroAsUsed(MacroInfo *MI);
1135};
1136
/// \brief Abstract base class that describes a handler that will receive
/// source ranges for each of the comments encountered in the source file.
class CommentHandler {
public:
  virtual ~CommentHandler();

  /// HandleComment - Invoked for each comment seen; \p Comment is its source
  /// range.  The handler shall return true if it has pushed any tokens
  /// to be read using e.g. EnterToken or EnterTokenStream.
  virtual bool HandleComment(Preprocessor &PP, SourceRange Comment) = 0;
};
1147
1148}  // end namespace clang
1149
1150#endif
1151