// Preprocessor.h revision e2ca828119b8bff4a5c25c6db8ee4fec558451e7
//===--- Preprocessor.h - C Language Family Preprocessor --------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
//  This file defines the Preprocessor interface.
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_CLANG_LEX_PREPROCESSOR_H
#define LLVM_CLANG_LEX_PREPROCESSOR_H

#include "clang/Lex/MacroInfo.h"
#include "clang/Lex/Lexer.h"
#include "clang/Lex/PTHLexer.h"
#include "clang/Lex/PPCallbacks.h"
#include "clang/Lex/TokenLexer.h"
#include "clang/Lex/PTHManager.h"
#include "clang/Basic/Builtins.h"
#include "clang/Basic/Diagnostic.h"
#include "clang/Basic/IdentifierTable.h"
#include "clang/Basic/SourceLocation.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/IntrusiveRefCntPtr.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/OwningPtr.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/Support/Allocator.h"
#include <vector>

namespace clang {

class SourceManager;
class ExternalPreprocessorSource;
class FileManager;
class FileEntry;
class HeaderSearch;
class PragmaNamespace;
class PragmaHandler;
class CommentHandler;
class ScratchBuffer;
class TargetInfo;
class PPCallbacks;
class CodeCompletionHandler;
class DirectoryLookup;
class PreprocessingRecord;
class ModuleLoader;

/// Preprocessor - This object engages in a tight little dance with the lexer to
/// efficiently preprocess tokens.  Lexers know only about tokens within a
/// single source file, and don't know anything about preprocessor-level issues
/// like the #include stack, token expansion, etc.
///
class Preprocessor : public llvm::RefCountedBase<Preprocessor> {
  Diagnostic        *Diags;
  LangOptions       &Features;
  const TargetInfo  &Target;
  FileManager       &FileMgr;
  SourceManager     &SourceMgr;
  ScratchBuffer     *ScratchBuf;
  HeaderSearch      &HeaderInfo;
  ModuleLoader      &TheModuleLoader;

  /// \brief External source of macros.
  ExternalPreprocessorSource *ExternalSource;


  /// PTH - An optional PTHManager object used for getting tokens from
  ///  a token cache rather than lexing the original source file.
  llvm::OwningPtr<PTHManager> PTH;

  /// BP - A BumpPtrAllocator object used to quickly allocate and release
  ///  objects internal to the Preprocessor.
  llvm::BumpPtrAllocator BP;

  /// Identifiers for builtin macros and other builtins.
  IdentifierInfo *Ident__LINE__, *Ident__FILE__;   // __LINE__, __FILE__
  IdentifierInfo *Ident__DATE__, *Ident__TIME__;   // __DATE__, __TIME__
  IdentifierInfo *Ident__INCLUDE_LEVEL__;          // __INCLUDE_LEVEL__
  IdentifierInfo *Ident__BASE_FILE__;              // __BASE_FILE__
  IdentifierInfo *Ident__TIMESTAMP__;              // __TIMESTAMP__
  IdentifierInfo *Ident__COUNTER__;                // __COUNTER__
  IdentifierInfo *Ident_Pragma, *Ident__pragma;    // _Pragma, __pragma
  IdentifierInfo *Ident__VA_ARGS__;                // __VA_ARGS__
  IdentifierInfo *Ident__has_feature;              // __has_feature
  IdentifierInfo *Ident__has_extension;            // __has_extension
  IdentifierInfo *Ident__has_builtin;              // __has_builtin
  IdentifierInfo *Ident__has_attribute;            // __has_attribute
  IdentifierInfo *Ident__has_include;              // __has_include
  IdentifierInfo *Ident__has_include_next;         // __has_include_next

  SourceLocation DATELoc, TIMELoc;
  unsigned CounterValue;  // Next __COUNTER__ value.

  enum {
    /// MaxAllowedIncludeStackDepth - Maximum depth of #includes.
    MaxAllowedIncludeStackDepth = 200
  };

  // State that is set before the preprocessor begins.
  bool KeepComments : 1;
  bool KeepMacroComments : 1;
  bool SuppressIncludeNotFoundError : 1;

  // State that changes while the preprocessor runs:
  bool InMacroArgs : 1;            // True if parsing fn macro invocation args.

  /// Whether the preprocessor owns the header search object.
  bool OwnsHeaderSearch : 1;

  /// DisableMacroExpansion - True if macro expansion is disabled.
  bool DisableMacroExpansion : 1;

  /// \brief Whether we have already loaded macros from the external source.
  mutable bool ReadMacrosFromExternalSource : 1;

  /// \brief Tracks the depth of Lex() calls.
  unsigned LexDepth;

  /// Identifiers - This is mapping/lookup information for all identifiers in
  /// the program, including program keywords.
  mutable IdentifierTable Identifiers;

  /// Selectors - This table contains all the selectors in the program. Unlike
  /// the IdentifierTable above, this table *isn't* populated by the
  /// preprocessor. It is declared here because its role/lifetime is
  /// conceptually similar to that of the IdentifierTable, and because the
  /// current control flow (in clang::ParseAST()) makes it convenient to put
  /// it here.
  /// FIXME: Make sure the lifetime of Identifiers/Selectors *isn't* tied to
  /// the lifetime of the preprocessor.
  SelectorTable Selectors;

  /// BuiltinInfo - Information about builtins.
  Builtin::Context BuiltinInfo;

  /// PragmaHandlers - This tracks all of the pragma handlers that the client
  /// registered with this preprocessor.
  PragmaNamespace *PragmaHandlers;

  /// \brief Tracks all of the comment handlers that the client registered
  /// with this preprocessor.
  std::vector<CommentHandler *> CommentHandlers;

  /// \brief The code-completion handler.
  CodeCompletionHandler *CodeComplete;

  /// \brief The file that we're performing code-completion for, if any.
  const FileEntry *CodeCompletionFile;

  /// \brief The number of bytes that we will initially skip when entering the
  /// main file, which is used when loading a precompiled preamble, along
  /// with a flag that indicates whether skipping this number of bytes will
  /// place the lexer at the start of a line.
  std::pair<unsigned, bool> SkipMainFilePreamble;

  /// CurLexer - This is the current top of the stack that we're lexing from if
  /// not expanding a macro and we are lexing directly from source code.
  ///  Only one of CurLexer, CurPTHLexer, or CurTokenLexer will be non-null.
  llvm::OwningPtr<Lexer> CurLexer;

  /// CurPTHLexer - This is the current top of stack that we're lexing from if
  ///  not expanding from a macro and we are lexing from a PTH cache.
  ///  Only one of CurLexer, CurPTHLexer, or CurTokenLexer will be non-null.
  llvm::OwningPtr<PTHLexer> CurPTHLexer;

  /// CurPPLexer - This is the current top of the stack that we're lexing from
  ///  if not expanding a macro.  This is an alias for either CurLexer or
  ///  CurPTHLexer.
  PreprocessorLexer *CurPPLexer;

  /// CurDirLookup - The DirectoryLookup structure used to find the current
  /// FileEntry, if CurLexer is non-null and if applicable.  This allows us to
  /// implement #include_next and find directory-specific properties.
  const DirectoryLookup *CurDirLookup;

  /// CurTokenLexer - This is the current macro we are expanding, if we are
  /// expanding a macro.  One of CurLexer and CurTokenLexer must be null.
  llvm::OwningPtr<TokenLexer> CurTokenLexer;

  /// IncludeMacroStack - This keeps track of the stack of files currently
  /// #included, and macros currently being expanded from, not counting
  /// CurLexer/CurTokenLexer.
  struct IncludeStackInfo {
    Lexer                 *TheLexer;
    PTHLexer              *ThePTHLexer;
    PreprocessorLexer     *ThePPLexer;
    TokenLexer            *TheTokenLexer;
    const DirectoryLookup *TheDirLookup;

    IncludeStackInfo(Lexer *L, PTHLexer* P, PreprocessorLexer* PPL,
                     TokenLexer* TL, const DirectoryLookup *D)
      : TheLexer(L), ThePTHLexer(P), ThePPLexer(PPL), TheTokenLexer(TL),
        TheDirLookup(D) {}
  };
  std::vector<IncludeStackInfo> IncludeMacroStack;

  /// Callbacks - These are actions invoked when some preprocessor activity is
  /// encountered (e.g. a file is #included, etc).
  PPCallbacks *Callbacks;

  /// Macros - For each IdentifierInfo with 'HasMacro' set, we keep a mapping
  /// to the actual definition of the macro.
  llvm::DenseMap<IdentifierInfo*, MacroInfo*> Macros;

  /// \brief Macros that we want to warn about because they are not used by the
  /// end of the translation unit; we store just their SourceLocations instead
  /// of something like MacroInfo*. The benefit of this is that when we are
  /// deserializing from PCH, we don't need to deserialize identifiers & macros
  /// just so that we can report that they are unused; we just warn using
  /// the SourceLocations of this set (which will be filled in by the
  /// ASTReader). We use a SmallPtrSet instead of a vector for faster removal.
  typedef llvm::SmallPtrSet<SourceLocation, 32> WarnUnusedMacroLocsTy;
  WarnUnusedMacroLocsTy WarnUnusedMacroLocs;

  /// MacroArgCache - This is a "freelist" of MacroArg objects that can be
  /// reused for quick allocation.
  MacroArgs *MacroArgCache;
  friend class MacroArgs;

  /// PragmaPushMacroInfo - For each IdentifierInfo used in a #pragma
  /// push_macro directive, we keep a MacroInfo stack used to restore the
  /// previous macro value.
  llvm::DenseMap<IdentifierInfo*, std::vector<MacroInfo*> > PragmaPushMacroInfo;

  // Various statistics we track for performance analysis.
  unsigned NumDirectives, NumIncluded, NumDefined, NumUndefined, NumPragma;
  unsigned NumIf, NumElse, NumEndif;
  unsigned NumEnteredSourceFiles, MaxIncludeStackDepth;
  unsigned NumMacroExpanded, NumFnMacroExpanded, NumBuiltinMacroExpanded;
  unsigned NumFastMacroExpanded, NumTokenPaste, NumFastTokenPaste;
  unsigned NumSkipped;

  /// Predefines - This string contains the predefined macros that the
  /// preprocessor should use from the command line, etc.
  std::string Predefines;

  /// TokenLexerCache - Cache macro expanders to reduce malloc traffic.
  enum { TokenLexerCacheSize = 8 };
  unsigned NumCachedTokenLexers;
  TokenLexer *TokenLexerCache[TokenLexerCacheSize];

  /// \brief Keeps macro expanded tokens for TokenLexers.
  ///
  /// Works like a stack; a TokenLexer adds the macro expanded tokens that it
  /// is going to lex into the cache, and when it finishes the tokens are
  /// removed from the end of the cache.
  SmallVector<Token, 16> MacroExpandedTokens;
  std::vector<std::pair<TokenLexer *, size_t> > MacroExpandingLexersStack;

  /// \brief A record of the macro definitions and expansions that
  /// occurred during preprocessing.
  ///
  /// This is an optional side structure that can be enabled with
  /// \c createPreprocessingRecord() prior to preprocessing.
  PreprocessingRecord *Record;

private:  // Cached tokens state.
  typedef SmallVector<Token, 1> CachedTokensTy;

  /// CachedTokens - Cached tokens are stored here when we do backtracking or
  /// lookahead. They are "lexed" by the CachingLex() method.
  CachedTokensTy CachedTokens;

  /// CachedLexPos - The position of the cached token that CachingLex() should
  /// "lex" next. If it points beyond the CachedTokens vector, it means that
  /// a normal Lex() should be invoked.
  CachedTokensTy::size_type CachedLexPos;

  /// BacktrackPositions - Stack of backtrack positions, allowing nested
  /// backtracks. The EnableBacktrackAtThisPos() method pushes a position to
  /// indicate where CachedLexPos should be set when the BackTrack() method is
  /// invoked (at which point the last position is popped).
  std::vector<CachedTokensTy::size_type> BacktrackPositions;

  struct MacroInfoChain {
    MacroInfo MI;
    MacroInfoChain *Next;
    MacroInfoChain *Prev;
  };

  /// MacroInfos are managed as a chain for easy disposal.  This is the head
  /// of that list.
  MacroInfoChain *MIChainHead;

  /// MICache - A "freelist" of MacroInfo objects that can be reused for quick
  /// allocation.
  MacroInfoChain *MICache;

  MacroInfo *getInfoForMacro(IdentifierInfo *II) const;

public:
  Preprocessor(Diagnostic &diags, LangOptions &opts,
               const TargetInfo &target,
               SourceManager &SM, HeaderSearch &Headers,
               ModuleLoader &TheModuleLoader,
               IdentifierInfoLookup *IILookup = 0,
               bool OwnsHeaderSearch = false);

  ~Preprocessor();

  Diagnostic &getDiagnostics() const { return *Diags; }
  void setDiagnostics(Diagnostic &D) { Diags = &D; }

  const LangOptions &getLangOptions() const { return Features; }
  const TargetInfo &getTargetInfo() const { return Target; }
  FileManager &getFileManager() const { return FileMgr; }
  SourceManager &getSourceManager() const { return SourceMgr; }
  HeaderSearch &getHeaderSearchInfo() const { return HeaderInfo; }

  IdentifierTable &getIdentifierTable() { return Identifiers; }
  SelectorTable &getSelectorTable() { return Selectors; }
  Builtin::Context &getBuiltinInfo() { return BuiltinInfo; }
  llvm::BumpPtrAllocator &getPreprocessorAllocator() { return BP; }

  void setPTHManager(PTHManager* pm);

  PTHManager *getPTHManager() { return PTH.get(); }

  void setExternalSource(ExternalPreprocessorSource *Source) {
    ExternalSource = Source;
  }

  ExternalPreprocessorSource *getExternalSource() const {
    return ExternalSource;
  }

  /// \brief Retrieve the module loader associated with this preprocessor.
  ModuleLoader &getModuleLoader() const { return TheModuleLoader; }

  /// SetCommentRetentionState - Control whether or not the preprocessor retains
  /// comments in output.
  void SetCommentRetentionState(bool KeepComments, bool KeepMacroComments) {
    this->KeepComments = KeepComments | KeepMacroComments;
    this->KeepMacroComments = KeepMacroComments;
  }

  bool getCommentRetentionState() const { return KeepComments; }

  void SetSuppressIncludeNotFoundError(bool Suppress) {
    SuppressIncludeNotFoundError = Suppress;
  }

  bool GetSuppressIncludeNotFoundError() {
    return SuppressIncludeNotFoundError;
  }

  /// isCurrentLexer - Return true if we are lexing directly from the specified
  /// lexer.
  bool isCurrentLexer(const PreprocessorLexer *L) const {
    return CurPPLexer == L;
  }

  /// getCurrentLexer - Return the current lexer being lexed from.  Note
  /// that this ignores any potentially active macro expansions and _Pragma
  /// expansions going on at the time.
  PreprocessorLexer *getCurrentLexer() const { return CurPPLexer; }

  /// getCurrentFileLexer - Return the current file lexer being lexed from.
  /// Note that this ignores any potentially active macro expansions and _Pragma
  /// expansions going on at the time.
  PreprocessorLexer *getCurrentFileLexer() const;

  /// getPPCallbacks/addPPCallbacks - Accessors for preprocessor callbacks.
  /// Note that this class takes ownership of any PPCallbacks object given to
  /// it.
  PPCallbacks *getPPCallbacks() const { return Callbacks; }
  void addPPCallbacks(PPCallbacks *C) {
    if (Callbacks)
      C = new PPChainedCallbacks(C, Callbacks);
    Callbacks = C;
  }
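
  /// A minimal usage sketch (not part of this interface): a client may observe
  /// preprocessor activity by subclassing PPCallbacks and registering the
  /// object here; addPPCallbacks takes ownership of it.  The overridden
  /// callback and its exact signature should be checked against PPCallbacks.h
  /// for the revision in use.
  /// \code
  ///   class MacroLogger : public PPCallbacks {
  ///   public:
  ///     virtual void MacroDefined(const Token &MacroNameTok,
  ///                               const MacroInfo *MI) {
  ///       llvm::errs() << "defined "
  ///                    << MacroNameTok.getIdentifierInfo()->getName() << "\n";
  ///     }
  ///   };
  ///   PP.addPPCallbacks(new MacroLogger());  // The Preprocessor now owns it.
  /// \endcode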

  /// getMacroInfo - Given an identifier, return the MacroInfo it is #defined to
  /// or null if it isn't #define'd.
  MacroInfo *getMacroInfo(IdentifierInfo *II) const {
    if (!II->hasMacroDefinition())
      return 0;

    return getInfoForMacro(II);
  }

  /// setMacroInfo - Specify a macro for this identifier.
  ///
  void setMacroInfo(IdentifierInfo *II, MacroInfo *MI);

  /// macro_iterator/macro_begin/macro_end - This allows you to walk the current
  /// state of the macro table.  This visits every currently-defined macro.
  typedef llvm::DenseMap<IdentifierInfo*,
                         MacroInfo*>::const_iterator macro_iterator;
  macro_iterator macro_begin(bool IncludeExternalMacros = true) const;
  macro_iterator macro_end(bool IncludeExternalMacros = true) const;
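
  /// Illustrative sketch (not part of this interface): walking every
  /// currently-defined macro via the iterators above.  The loop body shown
  /// here is hypothetical.
  /// \code
  ///   for (Preprocessor::macro_iterator I = PP.macro_begin(),
  ///                                     E = PP.macro_end();
  ///        I != E; ++I) {
  ///     const IdentifierInfo *Name = I->first;
  ///     const MacroInfo *MI = I->second;
  ///     if (MI->isFunctionLike())
  ///       llvm::errs() << Name->getName() << " is function-like\n";
  ///   }
  /// \endcode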

  const std::string &getPredefines() const { return Predefines; }
  /// setPredefines - Set the predefines for this Preprocessor.  These
  /// predefines are automatically injected when parsing the main file.
  void setPredefines(const char *P) { Predefines = P; }
  void setPredefines(const std::string &P) { Predefines = P; }

  /// getIdentifierInfo - Return information about the specified preprocessor
  /// identifier token.  Because this takes a StringRef, it avoids allocating
  /// and copying memory just to construct a std::string when the caller
  /// already has the characters available.
  IdentifierInfo *getIdentifierInfo(StringRef Name) const {
    return &Identifiers.get(Name);
  }

  /// AddPragmaHandler - Add the specified pragma handler to the preprocessor.
  /// If 'Namespace' is non-null, then it is a token required to exist on the
  /// pragma line before the pragma string starts, e.g. "STDC" or "GCC".
  void AddPragmaHandler(StringRef Namespace, PragmaHandler *Handler);
  void AddPragmaHandler(PragmaHandler *Handler) {
    AddPragmaHandler(StringRef(), Handler);
  }

  /// RemovePragmaHandler - Remove the specified pragma handler from
  /// the preprocessor. If \arg Namespace is non-null, then it should
  /// be the namespace that \arg Handler was added to. It is an error
  /// to remove a handler that has not been registered.
  void RemovePragmaHandler(StringRef Namespace, PragmaHandler *Handler);
  void RemovePragmaHandler(PragmaHandler *Handler) {
    RemovePragmaHandler(StringRef(), Handler);
  }
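
  /// Illustrative sketch (not part of this interface): registering a custom
  /// handler for "#pragma my_tool ...".  The handler name and body are
  /// hypothetical; see PragmaHandler in Pragma.h for the exact interface.
  /// \code
  ///   class MyToolPragmaHandler : public PragmaHandler {
  ///   public:
  ///     MyToolPragmaHandler() : PragmaHandler("my_tool") {}
  ///     virtual void HandlePragma(Preprocessor &PP,
  ///                               PragmaIntroducerKind Introducer,
  ///                               Token &FirstToken) {
  ///       // Consume the rest of the pragma line.
  ///       PP.DiscardUntilEndOfDirective();
  ///     }
  ///   };
  ///   PP.AddPragmaHandler(new MyToolPragmaHandler());
  /// \endcode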

  /// \brief Add the specified comment handler to the preprocessor.
  void AddCommentHandler(CommentHandler *Handler);

  /// \brief Remove the specified comment handler.
  ///
  /// It is an error to remove a handler that has not been registered.
  void RemoveCommentHandler(CommentHandler *Handler);

  /// \brief Set the code completion handler to the given object.
  void setCodeCompletionHandler(CodeCompletionHandler &Handler) {
    CodeComplete = &Handler;
  }

  /// \brief Retrieve the current code-completion handler.
  CodeCompletionHandler *getCodeCompletionHandler() const {
    return CodeComplete;
  }

  /// \brief Clear out the code completion handler.
  void clearCodeCompletionHandler() {
    CodeComplete = 0;
  }

  /// \brief Hook used by the lexer to invoke the "natural language" code
  /// completion point.
  void CodeCompleteNaturalLanguage();

  /// \brief Retrieve the preprocessing record, or NULL if there is no
  /// preprocessing record.
  PreprocessingRecord *getPreprocessingRecord() const { return Record; }

  /// \brief Create a new preprocessing record, which will keep track of
  /// all macro expansions, macro definitions, etc.
  void createPreprocessingRecord(bool IncludeNestedMacroExpansions);

  /// EnterMainSourceFile - Enter the main source file, which implicitly adds
  /// the builtin defines etc.
  void EnterMainSourceFile();

  /// EndSourceFile - Inform the preprocessor callbacks that processing is
  /// complete.
  void EndSourceFile();

  /// EnterSourceFile - Add a source file to the top of the include stack and
  /// start lexing tokens from it instead of the current buffer.  On error,
  /// this emits a diagnostic and does not enter the file.
  void EnterSourceFile(FileID CurFileID, const DirectoryLookup *Dir,
                       SourceLocation Loc);
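
  /// A minimal sketch of how a client typically drives the preprocessor once
  /// it has been set up (illustrative only; it assumes the main file has
  /// already been set on the SourceManager):
  /// \code
  ///   PP.EnterMainSourceFile();
  ///   Token Tok;
  ///   do {
  ///     PP.Lex(Tok);            // Expands macros and follows #includes.
  ///   } while (Tok.isNot(tok::eof));
  ///   PP.EndSourceFile();
  /// \endcode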

  /// EnterMacro - Add a Macro to the top of the include stack and start lexing
  /// tokens from it instead of the current buffer.  Args specifies the
  /// tokens input to a function-like macro.
  ///
  /// ILEnd specifies the location of the ')' for a function-like macro or the
  /// identifier for an object-like macro.
  void EnterMacro(Token &Identifier, SourceLocation ILEnd, MacroArgs *Args);

  /// EnterTokenStream - Add a "macro" context to the top of the include stack,
  /// which will cause the lexer to start returning the specified tokens.
  ///
  /// If DisableMacroExpansion is true, tokens lexed from the token stream will
  /// not be subject to further macro expansion.  Otherwise, these tokens will
  /// be re-macro-expanded when/if expansion is enabled.
  ///
  /// If OwnsTokens is false, this method assumes that the specified stream of
  /// tokens has a permanent owner somewhere, so they do not need to be copied.
  /// If it is true, it assumes the array of tokens is allocated with new[] and
  /// must be freed.
  ///
  void EnterTokenStream(const Token *Toks, unsigned NumToks,
                        bool DisableMacroExpansion, bool OwnsTokens);
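
  /// Illustrative sketch (not part of this interface): pushing a small token
  /// stream so it is returned by subsequent Lex() calls before the current
  /// buffer resumes.  SomeLoc is a hypothetical valid SourceLocation, and the
  /// caller must keep Toks alive while it is being lexed (OwnsTokens=false).
  /// \code
  ///   Token Toks[1];
  ///   Toks[0].startToken();
  ///   Toks[0].setKind(tok::semi);        // Inject a single ';' token.
  ///   Toks[0].setLocation(SomeLoc);
  ///   PP.EnterTokenStream(Toks, 1, /*DisableMacroExpansion=*/true,
  ///                       /*OwnsTokens=*/false);
  /// \endcode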

  /// RemoveTopOfLexerStack - Pop the current lexer/macro exp off the top of the
  /// lexer stack.  This should only be used in situations where the current
  /// state of the top-of-stack lexer is known.
  void RemoveTopOfLexerStack();

  /// EnableBacktrackAtThisPos - From the point that this method is called, and
  /// until CommitBacktrackedTokens() or Backtrack() is called, the Preprocessor
  /// keeps track of the lexed tokens so that a subsequent Backtrack() call will
  /// make the Preprocessor re-lex the same tokens.
  ///
  /// Nested backtracks are allowed, meaning that EnableBacktrackAtThisPos can
  /// be called multiple times and CommitBacktrackedTokens/Backtrack calls will
  /// be combined with the EnableBacktrackAtThisPos calls in reverse order.
  ///
  /// NOTE: *DO NOT* forget to call either CommitBacktrackedTokens or Backtrack
  /// at some point after EnableBacktrackAtThisPos. If you don't, caching of
  /// tokens will continue indefinitely.
  ///
  void EnableBacktrackAtThisPos();

  /// CommitBacktrackedTokens - Disable the last EnableBacktrackAtThisPos call.
  void CommitBacktrackedTokens();

  /// Backtrack - Make Preprocessor re-lex the tokens that were lexed since
  /// EnableBacktrackAtThisPos() was previously called.
  void Backtrack();

  /// isBacktrackEnabled - True if EnableBacktrackAtThisPos() was called and
  /// caching of tokens is on.
  bool isBacktrackEnabled() const { return !BacktrackPositions.empty(); }
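
  /// A minimal sketch of the backtracking protocol (illustrative only): a
  /// parser speculatively lexes a few tokens and then decides whether to keep
  /// or re-lex them.
  /// \code
  ///   PP.EnableBacktrackAtThisPos();
  ///   Token Tok;
  ///   PP.Lex(Tok);
  ///   if (Tok.is(tok::l_paren)) {
  ///     PP.CommitBacktrackedTokens();   // Keep what was lexed.
  ///   } else {
  ///     PP.Backtrack();                 // Re-lex the same tokens later.
  ///   }
  /// \endcode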

  /// Lex - To lex a token from the preprocessor, just pull a token from the
  /// current lexer or macro object.
  void Lex(Token &Result) {
    ++LexDepth;
    if (CurLexer)
      CurLexer->Lex(Result);
    else if (CurPTHLexer)
      CurPTHLexer->Lex(Result);
    else if (CurTokenLexer)
      CurTokenLexer->Lex(Result);
    else
      CachingLex(Result);
    --LexDepth;

    // If we have the __import_module__ keyword, handle the module import now.
    if (Result.getKind() == tok::kw___import_module__ && LexDepth == 0)
      HandleModuleImport(Result);
  }

  /// LexNonComment - Lex a token.  If it's a comment, keep lexing until we get
  /// something not a comment.  This is useful in -E -C mode where comments
  /// would foul up preprocessor directive handling.
  void LexNonComment(Token &Result) {
    do
      Lex(Result);
    while (Result.getKind() == tok::comment);
  }

  /// LexUnexpandedToken - This is just like Lex, but this disables macro
  /// expansion of identifier tokens.
  void LexUnexpandedToken(Token &Result) {
    // Disable macro expansion.
    bool OldVal = DisableMacroExpansion;
    DisableMacroExpansion = true;
    // Lex the token.
    Lex(Result);

    // Reenable it.
    DisableMacroExpansion = OldVal;
  }

  /// LexUnexpandedNonComment - Like LexNonComment, but this disables macro
  /// expansion of identifier tokens.
  void LexUnexpandedNonComment(Token &Result) {
    do
      LexUnexpandedToken(Result);
    while (Result.getKind() == tok::comment);
  }

  /// LookAhead - This peeks ahead N tokens and returns that token without
  /// consuming any tokens.  LookAhead(0) returns the next token that would be
  /// returned by Lex(), LookAhead(1) returns the token after it, etc.  This
  /// returns normal tokens after phase 5.  As such, it is equivalent to using
  /// 'Lex', not 'LexUnexpandedToken'.
  const Token &LookAhead(unsigned N) {
    if (CachedLexPos + N < CachedTokens.size())
      return CachedTokens[CachedLexPos+N];
    else
      return PeekAhead(N+1);
  }
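
  /// Illustrative sketch (not part of this interface): peeking at upcoming
  /// tokens without consuming them, e.g. to disambiguate before committing
  /// to Lex().
  /// \code
  ///   Token Next = PP.LookAhead(0);       // The token Lex() would return.
  ///   Token AfterNext = PP.LookAhead(1);  // The one after that.
  ///   if (Next.is(tok::identifier) && AfterNext.is(tok::l_paren)) {
  ///     // Looks like a call; now actually consume the identifier.
  ///     Token Tok;
  ///     PP.Lex(Tok);
  ///   }
  /// \endcode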

  /// RevertCachedTokens - When backtracking is enabled and tokens are cached,
  /// this allows clients to revert a specific number of tokens.
  /// Note that the number of tokens being reverted should be up to the last
  /// backtrack position, not more.
  void RevertCachedTokens(unsigned N) {
    assert(isBacktrackEnabled() &&
           "Should only be called when tokens are cached for backtracking");
    assert(signed(CachedLexPos) - signed(N) >= signed(BacktrackPositions.back())
         && "Should revert tokens up to the last backtrack position, not more");
    assert(signed(CachedLexPos) - signed(N) >= 0 &&
           "Corrupted backtrack positions ?");
    CachedLexPos -= N;
  }

  /// EnterToken - Enters a token in the token stream to be lexed next. If
  /// BackTrack() is called afterwards, the token will remain at the insertion
  /// point.
  void EnterToken(const Token &Tok) {
    EnterCachingLexMode();
    CachedTokens.insert(CachedTokens.begin()+CachedLexPos, Tok);
  }

  /// AnnotateCachedTokens - We notify the Preprocessor that if it is caching
  /// tokens (because backtracking is enabled) it should replace the most recent
  /// cached tokens with the given annotation token. This function has no effect
  /// if backtracking is not enabled.
  ///
  /// Note that the use of this function is just an optimization, so that the
  /// cached tokens don't get re-parsed and re-resolved after a backtrack is
  /// invoked.
  void AnnotateCachedTokens(const Token &Tok) {
    assert(Tok.isAnnotation() && "Expected annotation token");
    if (CachedLexPos != 0 && isBacktrackEnabled())
      AnnotatePreviousCachedTokens(Tok);
  }

  /// \brief Replace the last token with an annotation token.
  ///
  /// Like AnnotateCachedTokens(), this routine replaces an
  /// already-parsed (and resolved) token with an annotation
  /// token. However, this routine only replaces the last token with
  /// the annotation token; it does not affect any other cached
  /// tokens. This function has no effect if backtracking is not
  /// enabled.
  void ReplaceLastTokenWithAnnotation(const Token &Tok) {
    assert(Tok.isAnnotation() && "Expected annotation token");
    if (CachedLexPos != 0 && isBacktrackEnabled())
      CachedTokens[CachedLexPos-1] = Tok;
  }

  /// \brief Specify the point at which code-completion will be performed.
  ///
  /// \param File the file in which code completion should occur. If
  /// this file is included multiple times, code-completion will
  /// perform completion the first time it is included. If NULL, this
  /// function clears out the code-completion point.
  ///
  /// \param Line the line at which code completion should occur
  /// (1-based).
  ///
  /// \param Column the column at which code completion should occur
  /// (1-based).
  ///
  /// \returns true if an error occurred, false otherwise.
  bool SetCodeCompletionPoint(const FileEntry *File,
                              unsigned Line, unsigned Column);

  /// \brief Determine if this source location refers into the file
  /// for which we are performing code completion.
  bool isCodeCompletionFile(SourceLocation FileLoc) const;

  /// \brief Determine if we are performing code completion.
  bool isCodeCompletionEnabled() const { return CodeCompletionFile != 0; }
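
  /// Illustrative sketch (not part of this interface): asking for code
  /// completion at a specific position in a file before preprocessing starts.
  /// The FileEntry lookup shown here is an assumption about typical usage.
  /// \code
  ///   const FileEntry *FE = PP.getFileManager().getFile("input.c");
  ///   if (FE && !PP.SetCodeCompletionPoint(FE, /*Line=*/10, /*Column=*/4)) {
  ///     // Code completion will trigger when the lexer reaches line 10,
  ///     // column 4 of input.c (SetCodeCompletionPoint returns true on error).
  ///   }
  /// \endcode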

  /// \brief Instruct the preprocessor to skip part of the main source file.
  ///
  /// \param Bytes The number of bytes in the preamble to skip.
  ///
  /// \param StartOfLine Whether skipping these bytes puts the lexer at the
  /// start of a line.
  void setSkipMainFilePreamble(unsigned Bytes, bool StartOfLine) {
    SkipMainFilePreamble.first = Bytes;
    SkipMainFilePreamble.second = StartOfLine;
  }

  /// Diag - Forwarding function for diagnostics.  This emits a diagnostic at
  /// the specified Token's location, translating the token's start
  /// position in the current buffer into a SourceLocation object for rendering.
  DiagnosticBuilder Diag(SourceLocation Loc, unsigned DiagID) {
    return Diags->Report(Loc, DiagID);
  }

  DiagnosticBuilder Diag(const Token &Tok, unsigned DiagID) {
    return Diags->Report(Tok.getLocation(), DiagID);
  }
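
  /// Illustrative sketch (not part of this interface): reporting a custom
  /// warning at a token's location.  The getCustomDiagID call is an assumption
  /// about typical usage; built-in diagnostics would instead use an ID from
  /// the DiagnosticLexKinds table.
  /// \code
  ///   unsigned DiagID = PP.getDiagnostics().getCustomDiagID(
  ///       Diagnostic::Warning, "unexpected use of %0");
  ///   PP.Diag(Tok, DiagID) << Tok.getName();
  /// \endcode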

  /// getSpelling() - Return the 'spelling' of the token at the given
  /// location; does not go up to the spelling location or down to the
  /// expansion location.
  ///
  /// \param buffer A buffer which will be used only if the token requires
  ///   "cleaning", e.g. if it contains trigraphs or escaped newlines
  /// \param invalid If non-null, will be set \c true if an error occurs.
  StringRef getSpelling(SourceLocation loc,
                              SmallVectorImpl<char> &buffer,
                              bool *invalid = 0) const {
    return Lexer::getSpelling(loc, buffer, SourceMgr, Features, invalid);
  }

  /// getSpelling() - Return the 'spelling' of the Tok token.  The spelling of a
  /// token is the characters used to represent the token in the source file
  /// after trigraph expansion and escaped-newline folding.  In particular, this
  /// wants to get the true, uncanonicalized, spelling of things like digraphs,
  /// UCNs, etc.
  ///
  /// \param Invalid If non-null, will be set \c true if an error occurs.
  std::string getSpelling(const Token &Tok, bool *Invalid = 0) const {
    return Lexer::getSpelling(Tok, SourceMgr, Features, Invalid);
  }

  /// getSpelling - This method is used to get the spelling of a token into a
  /// preallocated buffer, instead of as an std::string.  The caller is required
  /// to allocate enough space for the token, which is guaranteed to be at least
  /// Tok.getLength() bytes long.  The length of the actual result is returned.
  ///
  /// Note that this method may do two possible things: it may either fill in
  /// the buffer specified with characters, or it may *change the input pointer*
  /// to point to a constant buffer with the data already in it (avoiding a
  /// copy).  The caller is not allowed to modify the returned buffer pointer
  /// if an internal buffer is returned.
  unsigned getSpelling(const Token &Tok, const char *&Buffer,
                       bool *Invalid = 0) const {
    return Lexer::getSpelling(Tok, Buffer, SourceMgr, Features, Invalid);
  }

  /// getSpelling - This method is used to get the spelling of a token into a
  /// SmallVector. Note that the returned StringRef may not point to the
  /// supplied buffer if a copy can be avoided.
  StringRef getSpelling(const Token &Tok,
                              SmallVectorImpl<char> &Buffer,
                              bool *Invalid = 0) const;
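
  /// Illustrative sketch (not part of this interface): getting a token's
  /// spelling without heap allocation by using a stack buffer; the returned
  /// StringRef may point into the original source rather than the buffer.
  /// \code
  ///   llvm::SmallString<64> Buffer;
  ///   bool Invalid = false;
  ///   StringRef Spelling = PP.getSpelling(Tok, Buffer, &Invalid);
  ///   if (!Invalid && Spelling == "unlikely")
  ///     /* ... */;
  /// \endcode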

  /// getSpellingOfSingleCharacterNumericConstant - Tok is a numeric constant
  /// with length 1, return the character.
  char getSpellingOfSingleCharacterNumericConstant(const Token &Tok,
                                                   bool *Invalid = 0) const {
    assert(Tok.is(tok::numeric_constant) &&
           Tok.getLength() == 1 && "Called on unsupported token");
    assert(!Tok.needsCleaning() && "Token can't need cleaning with length 1");

    // If the token is carrying a literal data pointer, just use it.
    if (const char *D = Tok.getLiteralData())
      return *D;

    // Otherwise, fall back on getCharacterData, which is slower, but always
    // works.
    return *SourceMgr.getCharacterData(Tok.getLocation(), Invalid);
  }

  /// CreateString - Plop the specified string into a scratch buffer and set the
  /// specified token's location and length to it.  If specified, the source
  /// location provides a location of the expansion point of the token.
  void CreateString(const char *Buf, unsigned Len,
                    Token &Tok, SourceLocation SourceLoc = SourceLocation());

  /// \brief Computes the source location just past the end of the
  /// token at this source location.
  ///
  /// This routine can be used to produce a source location that
  /// points just past the end of the token referenced by \p Loc, and
  /// is generally used when a diagnostic needs to point just after a
  /// token where it expected something different from what it received. If
  /// the returned source location would not be meaningful (e.g., if
  /// it points into a macro), this routine returns an invalid
  /// source location.
  ///
  /// \param Offset an offset from the end of the token to which the source
  /// location should refer. The default offset (0) produces a source
  /// location pointing just past the end of the token; an offset of 1 produces
  /// a source location pointing to the last character in the token, etc.
  SourceLocation getLocForEndOfToken(SourceLocation Loc, unsigned Offset = 0) {
    return Lexer::getLocForEndOfToken(Loc, Offset, SourceMgr, Features);
  }

  /// \brief Returns true if the given MacroID location points at the first
  /// token of the macro expansion.
  bool isAtStartOfMacroExpansion(SourceLocation loc) const {
    return Lexer::isAtStartOfMacroExpansion(loc, SourceMgr, Features);
  }

  /// \brief Returns true if the given MacroID location points at the last
  /// token of the macro expansion.
  bool isAtEndOfMacroExpansion(SourceLocation loc) const {
    return Lexer::isAtEndOfMacroExpansion(loc, SourceMgr, Features);
  }

  /// DumpToken - Print the token to stderr, used for debugging.
  ///
  void DumpToken(const Token &Tok, bool DumpFlags = false) const;
  void DumpLocation(SourceLocation Loc) const;
  void DumpMacro(const MacroInfo &MI) const;

  /// AdvanceToTokenCharacter - Given a location that specifies the start of a
  /// token, return a new location that specifies a character within the token.
  SourceLocation AdvanceToTokenCharacter(SourceLocation TokStart,
                                         unsigned Char) const {
    return Lexer::AdvanceToTokenCharacter(TokStart, Char, SourceMgr, Features);
  }

  /// IncrementPasteCounter - Increment the counters for the number of token
  /// paste operations performed.  If fast was specified, this is a 'fast paste'
  /// case we handled.
  ///
  void IncrementPasteCounter(bool isFast) {
    if (isFast)
      ++NumFastTokenPaste;
    else
      ++NumTokenPaste;
  }

  void PrintStats();

  size_t getTotalMemory() const;

  /// HandleMicrosoftCommentPaste - When the macro expander pastes together a
  /// comment (/##/) in Microsoft mode, this method handles updating the current
  /// state, returning the token on the next source line.
  void HandleMicrosoftCommentPaste(Token &Tok);

  //===--------------------------------------------------------------------===//
  // Preprocessor callback methods.  These are invoked by a lexer as various
  // directives and events are found.

  /// LookUpIdentifierInfo - Given a tok::raw_identifier token, look up the
  /// identifier information for the token and install it into the token,
  /// updating the token kind accordingly.
  IdentifierInfo *LookUpIdentifierInfo(Token &Identifier) const;

private:
  llvm::DenseMap<IdentifierInfo*,unsigned> PoisonReasons;

public:

  // SetPoisonReason - Call this function to indicate the reason for
  // poisoning an identifier. If that identifier is accessed while
  // poisoned, then this reason will be used instead of the default
  // "poisoned" diagnostic.
  void SetPoisonReason(IdentifierInfo *II, unsigned DiagID);

  // HandlePoisonedIdentifier - Display reason for poisoned
  // identifier.
  void HandlePoisonedIdentifier(Token & Tok);

  void MaybeHandlePoisonedIdentifier(Token & Identifier) {
    if (IdentifierInfo *II = Identifier.getIdentifierInfo()) {
      if (II->isPoisoned()) {
        HandlePoisonedIdentifier(Identifier);
      }
    }
  }
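
  /// Illustrative sketch (not part of this interface): poisoning an identifier
  /// so that any later use is diagnosed, with a custom diagnostic.  The
  /// identifier name and getCustomDiagID usage are assumptions for the example.
  /// \code
  ///   IdentifierInfo *II = PP.getIdentifierInfo("really_old_api");
  ///   II->setIsPoisoned();
  ///   unsigned DiagID = PP.getDiagnostics().getCustomDiagID(
  ///       Diagnostic::Error, "really_old_api has been removed");
  ///   PP.SetPoisonReason(II, DiagID);
  /// \endcode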

private:
  /// Identifiers used for SEH handling in Borland. These are only
  /// allowed in particular circumstances.
  IdentifierInfo *Ident__exception_code, *Ident___exception_code, *Ident_GetExceptionCode; // __except block
  IdentifierInfo *Ident__exception_info, *Ident___exception_info, *Ident_GetExceptionInfo; // __except filter expression
  IdentifierInfo *Ident__abnormal_termination, *Ident___abnormal_termination, *Ident_AbnormalTermination; // __finally
public:
  void PoisonSEHIdentifiers(bool Poison = true); // Borland

  /// HandleIdentifier - This callback is invoked when the lexer reads an
  /// identifier and has filled in the token's IdentifierInfo member.  This
  /// callback potentially macro expands it or turns it into a named token (like
  /// 'for').
  void HandleIdentifier(Token &Identifier);


  /// HandleEndOfFile - This callback is invoked when the lexer hits the end of
  /// the current file.  This either returns the EOF token and returns true, or
  /// pops a level off the include stack and returns false, at which point the
  /// client should call Lex again.
  bool HandleEndOfFile(Token &Result, bool isEndOfMacro = false);

  /// HandleEndOfTokenLexer - This callback is invoked when the current
  /// TokenLexer hits the end of its token stream.
  bool HandleEndOfTokenLexer(Token &Result);

  /// HandleDirective - This callback is invoked when the lexer sees a # token
  /// at the start of a line.  This consumes the directive, modifies the
  /// lexer/preprocessor state, and advances the lexer(s) so that the next token
  /// read is the correct one.
  void HandleDirective(Token &Result);

  /// CheckEndOfDirective - Ensure that the next token is a tok::eod token.  If
  /// not, emit a diagnostic and consume up until the eod.  If EnableMacros is
  /// true, then we consider macros that expand to zero tokens as being ok.
  void CheckEndOfDirective(const char *Directive, bool EnableMacros = false);

  /// DiscardUntilEndOfDirective - Read and discard all tokens remaining on the
  /// current line until the tok::eod token is found.
  void DiscardUntilEndOfDirective();

  /// SawDateOrTime - This returns true if the preprocessor has seen a use of
  /// __DATE__ or __TIME__ in the file so far.
  bool SawDateOrTime() const {
    return DATELoc != SourceLocation() || TIMELoc != SourceLocation();
  }
  unsigned getCounterValue() const { return CounterValue; }
  void setCounterValue(unsigned V) { CounterValue = V; }

  /// AllocateMacroInfo - Allocate a new MacroInfo object with the provided
  ///  SourceLocation.
  MacroInfo *AllocateMacroInfo(SourceLocation L);

  /// CloneMacroInfo - Allocate a new MacroInfo object which is a clone of MI.
  MacroInfo *CloneMacroInfo(const MacroInfo &MI);

  /// GetIncludeFilenameSpelling - Turn the specified lexer token into a fully
  /// checked and spelled filename, e.g. as an operand of #include. This returns
  /// true if the input filename was in <>'s or false if it was in ""'s.  The
  /// caller is expected to provide a buffer that is large enough to hold the
  /// spelling of the filename, but is also expected to handle the case when
  /// this method decides to use a different buffer.
  bool GetIncludeFilenameSpelling(SourceLocation Loc, StringRef &Filename);

  /// LookupFile - Given a "foo" or <foo> reference, look up the indicated file,
  /// return null on failure.  isAngled indicates whether the file reference is
  /// for system #include's or not (i.e. using <> instead of "").
  const FileEntry *LookupFile(StringRef Filename,
                              bool isAngled, const DirectoryLookup *FromDir,
                              const DirectoryLookup *&CurDir,
                              SmallVectorImpl<char> *SearchPath,
                              SmallVectorImpl<char> *RelativePath);

  /// GetCurDirLookup - The DirectoryLookup structure used to find the current
  /// FileEntry, if CurLexer is non-null and if applicable.  This allows us to
  /// implement #include_next and find directory-specific properties.
  const DirectoryLookup *GetCurDirLookup() { return CurDirLookup; }

  /// isInPrimaryFile - Return true if we're in the top-level file, not in a
  /// #include.
  bool isInPrimaryFile() const;

  /// ConcatenateIncludeName - Handle cases where the #include name is expanded
  /// from a macro as multiple tokens, which need to be glued together.  This
  /// occurs for code like:
  ///    #define FOO <a/b.h>
  ///    #include FOO
  /// because in this case, "<a/b.h>" is returned as 7 tokens, not one.
  ///
  /// This code concatenates and consumes tokens up to the '>' token.  It
  /// returns false if the '>' was found; otherwise it returns true after
  /// finding and consuming the EOD marker.
  bool ConcatenateIncludeName(llvm::SmallString<128> &FilenameBuffer,
                              SourceLocation &End);

  /// LexOnOffSwitch - Lex an on-off-switch (C99 6.10.6p2) and verify that it is
  /// followed by EOD.  Return true if the token is not a valid on-off-switch.
  bool LexOnOffSwitch(tok::OnOffSwitch &OOS);

private:

  void PushIncludeMacroStack() {
    IncludeMacroStack.push_back(IncludeStackInfo(CurLexer.take(),
                                                 CurPTHLexer.take(),
                                                 CurPPLexer,
                                                 CurTokenLexer.take(),
                                                 CurDirLookup));
    CurPPLexer = 0;
  }

  void PopIncludeMacroStack() {
    CurLexer.reset(IncludeMacroStack.back().TheLexer);
    CurPTHLexer.reset(IncludeMacroStack.back().ThePTHLexer);
    CurPPLexer = IncludeMacroStack.back().ThePPLexer;
    CurTokenLexer.reset(IncludeMacroStack.back().TheTokenLexer);
    CurDirLookup  = IncludeMacroStack.back().TheDirLookup;
    IncludeMacroStack.pop_back();
  }

  /// AllocateMacroInfo - Allocate a new MacroInfo object.
  MacroInfo *AllocateMacroInfo();

  /// ReleaseMacroInfo - Release the specified MacroInfo.  This memory will
  ///  be reused for allocating new MacroInfo objects.
  void ReleaseMacroInfo(MacroInfo* MI);

  /// ReadMacroName - Lex and validate a macro name, which occurs after a
  /// #define or #undef.  This emits a diagnostic, sets the token kind to eod,
  /// and discards the rest of the macro line if the macro name is invalid.
  void ReadMacroName(Token &MacroNameTok, char isDefineUndef = 0);

  /// ReadMacroDefinitionArgList - The ( starting an argument list of a macro
  /// definition has just been read.  Lex the rest of the arguments and the
  /// closing ), updating MI with what we learn.  Return true if an error occurs
  /// parsing the arg list.
  bool ReadMacroDefinitionArgList(MacroInfo *MI);

  /// SkipExcludedConditionalBlock - We just read a #if or related directive and
  /// decided that the subsequent tokens are in the #if'd out portion of the
  /// file.  Lex the rest of the file, until we see an #endif.  If
  /// FoundNonSkipPortion is true, then we have already emitted code for part of
  /// this #if directive, so #else/#elif blocks should never be entered. If
  /// FoundElse is false, then #else directives are OK; if not, then we have
  /// already seen one, so an #else directive is a duplicate.  When this returns,
  /// the caller can lex the first valid token.
  void SkipExcludedConditionalBlock(SourceLocation IfTokenLoc,
                                    bool FoundNonSkipPortion, bool FoundElse);

  /// PTHSkipExcludedConditionalBlock - A fast PTH version of
  ///  SkipExcludedConditionalBlock.
  void PTHSkipExcludedConditionalBlock();

  /// EvaluateDirectiveExpression - Evaluate an integer constant expression that
  /// may occur after a #if or #elif directive and return it as a bool.  If the
  /// expression is equivalent to "!defined(X)" return X in IfNDefMacro.
  bool EvaluateDirectiveExpression(IdentifierInfo *&IfNDefMacro);

  /// RegisterBuiltinPragmas - Install the standard preprocessor pragmas:
  /// #pragma GCC poison/system_header/dependency and #pragma once.
  void RegisterBuiltinPragmas();

  /// RegisterBuiltinMacros - Register builtin macros, such as __LINE__ with the
  /// identifier table.
  void RegisterBuiltinMacros();

  /// HandleMacroExpandedIdentifier - If an identifier token is read that is to
  /// be expanded as a macro, handle it and return the next token as 'Tok'.  If
  /// the macro should not be expanded return true, otherwise return false.
  bool HandleMacroExpandedIdentifier(Token &Tok, MacroInfo *MI);

  /// \brief Handle a module import directive.
  void HandleModuleImport(Token &Import);

  /// \brief Cache macro expanded tokens for TokenLexers.
  ///
  /// Works like a stack; a TokenLexer adds the macro expanded tokens that it
  /// is going to lex into the cache, and when it finishes the tokens are
  /// removed from the end of the cache.
  Token *cacheMacroExpandedTokens(TokenLexer *tokLexer,
                                  ArrayRef<Token> tokens);
  void removeCachedMacroExpandedTokensOfLastLexer();
  friend void TokenLexer::ExpandFunctionArguments();

  /// isNextPPTokenLParen - Determine whether the next preprocessor token to be
  /// lexed is a '('.  If so, consume the token and return true, if not, this
  /// method should have no observable side-effect on the lexed tokens.
  bool isNextPPTokenLParen();

  /// ReadFunctionLikeMacroArgs - After reading "MACRO(", this method is
  /// invoked to read all of the formal arguments specified for the macro
  /// invocation.  This returns null on error.
  MacroArgs *ReadFunctionLikeMacroArgs(Token &MacroName, MacroInfo *MI,
                                       SourceLocation &ExpansionEnd);

  /// ExpandBuiltinMacro - If an identifier token is read that is to be expanded
  /// as a builtin macro, handle it and return the next token as 'Tok'.
  void ExpandBuiltinMacro(Token &Tok);

  /// Handle_Pragma - Read a _Pragma directive, slice it up, process it, then
  /// return the first token after the directive.  The _Pragma token has just
  /// been read into 'Tok'.
  void Handle_Pragma(Token &Tok);

  /// HandleMicrosoft__pragma - Like Handle_Pragma except the pragma text
  /// is not enclosed within a string literal.
  void HandleMicrosoft__pragma(Token &Tok);

  /// EnterSourceFileWithLexer - Add a lexer to the top of the include stack and
  /// start lexing tokens from it instead of the current buffer.
  void EnterSourceFileWithLexer(Lexer *TheLexer, const DirectoryLookup *Dir);

  /// EnterSourceFileWithPTH - Add a lexer to the top of the include stack and
  /// start getting tokens from it using the PTH cache.
  void EnterSourceFileWithPTH(PTHLexer *PL, const DirectoryLookup *Dir);

  /// IsFileLexer - Returns true if we are lexing from a file and not a
  ///  pragma or a macro.
  static bool IsFileLexer(const Lexer* L, const PreprocessorLexer* P) {
    return L ? !L->isPragmaLexer() : P != 0;
  }

  static bool IsFileLexer(const IncludeStackInfo& I) {
    return IsFileLexer(I.TheLexer, I.ThePPLexer);
  }

  bool IsFileLexer() const {
    return IsFileLexer(CurLexer.get(), CurPPLexer);
  }

  //===--------------------------------------------------------------------===//
  // Caching stuff.
  void CachingLex(Token &Result);
  bool InCachingLexMode() const {
    // If the Lexer pointers are 0 and IncludeMacroStack is empty, it means
    // that we are past EOF, not that we are in CachingLex mode.
    return CurPPLexer == 0 && CurTokenLexer == 0 && CurPTHLexer == 0 &&
           !IncludeMacroStack.empty();
  }
  void EnterCachingLexMode();
  void ExitCachingLexMode() {
    if (InCachingLexMode())
      RemoveTopOfLexerStack();
  }
  const Token &PeekAhead(unsigned N);
  void AnnotatePreviousCachedTokens(const Token &Tok);

  //===--------------------------------------------------------------------===//
  /// Handle*Directive - implement the various preprocessor directives.  These
  /// should side-effect the current preprocessor object so that the next call
  /// to Lex() will return the appropriate token next.
  void HandleLineDirective(Token &Tok);
  void HandleDigitDirective(Token &Tok);
  void HandleUserDiagnosticDirective(Token &Tok, bool isWarning);
  void HandleIdentSCCSDirective(Token &Tok);
  void HandleMacroExportDirective(Token &Tok);

  // File inclusion.
  void HandleIncludeDirective(SourceLocation HashLoc,
                              Token &Tok,
                              const DirectoryLookup *LookupFrom = 0,
                              bool isImport = false);
  void HandleIncludeNextDirective(SourceLocation HashLoc, Token &Tok);
  void HandleIncludeMacrosDirective(SourceLocation HashLoc, Token &Tok);
  void HandleImportDirective(SourceLocation HashLoc, Token &Tok);

  // Macro handling.
  void HandleDefineDirective(Token &Tok);
  void HandleUndefDirective(Token &Tok);

  // Conditional Inclusion.
  void HandleIfdefDirective(Token &Tok, bool isIfndef,
                            bool ReadAnyTokensBeforeDirective);
  void HandleIfDirective(Token &Tok, bool ReadAnyTokensBeforeDirective);
  void HandleEndifDirective(Token &Tok);
  void HandleElseDirective(Token &Tok);
  void HandleElifDirective(Token &Tok);

  // Pragmas.
  void HandlePragmaDirective(unsigned Introducer);
public:
  void HandlePragmaOnce(Token &OnceTok);
  void HandlePragmaMark();
  void HandlePragmaPoison(Token &PoisonTok);
  void HandlePragmaSystemHeader(Token &SysHeaderTok);
  void HandlePragmaDependency(Token &DependencyTok);
  void HandlePragmaComment(Token &CommentTok);
  void HandlePragmaMessage(Token &MessageTok);
  void HandlePragmaPushMacro(Token &Tok);
  void HandlePragmaPopMacro(Token &Tok);
  IdentifierInfo *ParsePragmaPushOrPopMacro(Token &Tok);

  // Return true and store the first token only if any CommentHandler
  // has inserted some tokens and getCommentRetentionState() is false.
  bool HandleComment(Token &Token, SourceRange Comment);

  /// \brief A macro has been used; update information about macros that need
  /// 'unused' warnings.
  void markMacroAsUsed(MacroInfo *MI);
};

/// \brief Abstract base class that describes a handler that will receive
/// source ranges for each of the comments encountered in the source file.
class CommentHandler {
public:
  virtual ~CommentHandler();

  // The handler shall return true if it has pushed any tokens
  // to be read using e.g. EnterToken or EnterTokenStream.
  virtual bool HandleComment(Preprocessor &PP, SourceRange Comment) = 0;
};
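
/// Illustrative sketch (not part of this interface): a CommentHandler that
/// records the source ranges of all comments.  Registration goes through
/// Preprocessor::AddCommentHandler, and the handler returns false because it
/// pushes no tokens back into the preprocessor.
/// \code
///   class CommentCollector : public CommentHandler {
///     std::vector<SourceRange> Comments;
///   public:
///     virtual bool HandleComment(Preprocessor &PP, SourceRange Comment) {
///       Comments.push_back(Comment);
///       return false;  // We did not push any tokens.
///     }
///   };
///   CommentCollector Collector;
///   PP.AddCommentHandler(&Collector);
/// \endcode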

}  // end namespace clang

#endif