Preprocessor.h revision a08529cc3f00e0b47a3c028823634129ac46847b
1//===--- Preprocessor.h - C Language Family Preprocessor --------*- C++ -*-===//
2//
3//                     The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10//  This file defines the Preprocessor interface.
11//
12//===----------------------------------------------------------------------===//
13
14#ifndef LLVM_CLANG_LEX_PREPROCESSOR_H
15#define LLVM_CLANG_LEX_PREPROCESSOR_H
16
17#include "clang/Lex/MacroInfo.h"
18#include "clang/Lex/Lexer.h"
19#include "clang/Lex/PTHLexer.h"
20#include "clang/Lex/PPCallbacks.h"
21#include "clang/Lex/TokenLexer.h"
22#include "clang/Lex/PTHManager.h"
23#include "clang/Basic/Builtins.h"
24#include "clang/Basic/Diagnostic.h"
25#include "clang/Basic/IdentifierTable.h"
26#include "clang/Basic/SourceLocation.h"
27#include "llvm/ADT/DenseMap.h"
28#include "llvm/ADT/IntrusiveRefCntPtr.h"
29#include "llvm/ADT/SmallPtrSet.h"
30#include "llvm/ADT/OwningPtr.h"
31#include "llvm/ADT/SmallVector.h"
32#include "llvm/ADT/ArrayRef.h"
33#include "llvm/Support/Allocator.h"
34#include <vector>
35
36namespace clang {
37
38class SourceManager;
39class ExternalPreprocessorSource;
40class FileManager;
41class FileEntry;
42class HeaderSearch;
43class PragmaNamespace;
44class PragmaHandler;
45class CommentHandler;
46class ScratchBuffer;
47class TargetInfo;
48class PPCallbacks;
49class CodeCompletionHandler;
50class DirectoryLookup;
51class PreprocessingRecord;
52class ModuleLoader;
53
54/// Preprocessor - This object engages in a tight little dance with the lexer to
55/// efficiently preprocess tokens.  Lexers know only about tokens within a
56/// single source file, and don't know anything about preprocessor-level issues
57/// like the #include stack, token expansion, etc.
58///
59class Preprocessor : public llvm::RefCountedBase<Preprocessor> {
60  DiagnosticsEngine        *Diags;
61  LangOptions       &Features;
62  const TargetInfo  *Target;
63  FileManager       &FileMgr;
64  SourceManager     &SourceMgr;
65  ScratchBuffer     *ScratchBuf;
66  HeaderSearch      &HeaderInfo;
67  ModuleLoader      &TheModuleLoader;
68
69  /// \brief External source of macros.
70  ExternalPreprocessorSource *ExternalSource;
71
72
73  /// PTH - An optional PTHManager object used for getting tokens from
74  ///  a token cache rather than lexing the original source file.
75  llvm::OwningPtr<PTHManager> PTH;
76
77  /// BP - A BumpPtrAllocator object used to quickly allocate and release
78  ///  objects internal to the Preprocessor.
79  llvm::BumpPtrAllocator BP;
80
81  /// Identifiers for builtin macros and other builtins.
82  IdentifierInfo *Ident__LINE__, *Ident__FILE__;   // __LINE__, __FILE__
83  IdentifierInfo *Ident__DATE__, *Ident__TIME__;   // __DATE__, __TIME__
84  IdentifierInfo *Ident__INCLUDE_LEVEL__;          // __INCLUDE_LEVEL__
85  IdentifierInfo *Ident__BASE_FILE__;              // __BASE_FILE__
86  IdentifierInfo *Ident__TIMESTAMP__;              // __TIMESTAMP__
87  IdentifierInfo *Ident__COUNTER__;                // __COUNTER__
88  IdentifierInfo *Ident_Pragma, *Ident__pragma;    // _Pragma, __pragma
89  IdentifierInfo *Ident__VA_ARGS__;                // __VA_ARGS__
90  IdentifierInfo *Ident__has_feature;              // __has_feature
91  IdentifierInfo *Ident__has_extension;            // __has_extension
92  IdentifierInfo *Ident__has_builtin;              // __has_builtin
93  IdentifierInfo *Ident__has_attribute;            // __has_attribute
94  IdentifierInfo *Ident__has_include;              // __has_include
95  IdentifierInfo *Ident__has_include_next;         // __has_include_next
96
97  SourceLocation DATELoc, TIMELoc;
98  unsigned CounterValue;  // Next __COUNTER__ value.
99
100  enum {
101    /// MaxAllowedIncludeStackDepth - Maximum depth of #includes.
102    MaxAllowedIncludeStackDepth = 200
103  };
104
105  // State that is set before the preprocessor begins.
106  bool KeepComments : 1;
107  bool KeepMacroComments : 1;
108  bool SuppressIncludeNotFoundError : 1;
109  bool AutoModuleImport : 1;
110
111  // State that changes while the preprocessor runs:
112  bool InMacroArgs : 1;            // True if parsing fn macro invocation args.
113
114  /// Whether the preprocessor owns the header search object.
115  bool OwnsHeaderSearch : 1;
116
117  /// DisableMacroExpansion - True if macro expansion is disabled.
118  bool DisableMacroExpansion : 1;
119
120  /// \brief Whether we have already loaded macros from the external source.
121  mutable bool ReadMacrosFromExternalSource : 1;
122
123  /// Identifiers - This is mapping/lookup information for all identifiers in
124  /// the program, including program keywords.
125  mutable IdentifierTable Identifiers;
126
127  /// Selectors - This table contains all the selectors in the program. Unlike
128  /// IdentifierTable above, this table *isn't* populated by the preprocessor.
129  /// It is declared/expanded here because its role/lifetime is
130  /// conceptually similar to the IdentifierTable's. In addition, the current
131  /// control flow (in clang::ParseAST()) makes it convenient to put it here.
132  /// FIXME: Make sure the lifetime of Identifiers/Selectors *isn't* tied to
133  /// the lifetime of the preprocessor.
134  SelectorTable Selectors;
135
136  /// BuiltinInfo - Information about builtins.
137  Builtin::Context BuiltinInfo;
138
139  /// PragmaHandlers - This tracks all of the pragmas that the client registered
140  /// with this preprocessor.
141  PragmaNamespace *PragmaHandlers;
142
143  /// \brief Tracks all of the comment handlers that the client registered
144  /// with this preprocessor.
145  std::vector<CommentHandler *> CommentHandlers;
146
147  /// \brief The code-completion handler.
148  CodeCompletionHandler *CodeComplete;
149
150  /// \brief The file that we're performing code-completion for, if any.
151  const FileEntry *CodeCompletionFile;
152
153  /// \brief The offset in file for the code-completion point.
154  unsigned CodeCompletionOffset;
155
156  /// \brief The location for the code-completion point. This gets instantiated
157  /// when the CodeCompletionFile gets #include'ed for preprocessing.
158  SourceLocation CodeCompletionLoc;
159
160  /// \brief The start location for the file of the code-completion point.
161  /// This gets instantiated when the CodeCompletionFile gets #include'ed
162  /// for preprocessing.
163  SourceLocation CodeCompletionFileLoc;
164
165  /// \brief The source location of the __import_module__ keyword we just
166  /// lexed, if any.
167  SourceLocation ModuleImportLoc;
168
169  /// \brief The source location of the currently-active
170  /// #pragma clang arc_cf_code_audited begin.
171  SourceLocation PragmaARCCFCodeAuditedLoc;
172
173  /// \brief True if we hit the code-completion point.
174  bool CodeCompletionReached;
175
176  /// \brief The number of bytes that we will initially skip when entering the
177  /// main file, which is used when loading a precompiled preamble, along
178  /// with a flag that indicates whether skipping this number of bytes will
179  /// place the lexer at the start of a line.
180  std::pair<unsigned, bool> SkipMainFilePreamble;
181
182  /// CurLexer - This is the current top of the stack that we're lexing from if
183  /// not expanding a macro and we are lexing directly from source code.
184  ///  Only one of CurLexer, CurPTHLexer, or CurTokenLexer will be non-null.
185  llvm::OwningPtr<Lexer> CurLexer;
186
187  /// CurPTHLexer - This is the current top of stack that we're lexing from if
188  ///  not expanding from a macro and we are lexing from a PTH cache.
189  ///  Only one of CurLexer, CurPTHLexer, or CurTokenLexer will be non-null.
190  llvm::OwningPtr<PTHLexer> CurPTHLexer;
191
192  /// CurPPLexer - This is the current top of the stack that we're lexing from
193  ///  if not expanding a macro.  This is an alias for either CurLexer or
194  ///  CurPTHLexer.
195  PreprocessorLexer *CurPPLexer;
196
197  /// CurDirLookup - The DirectoryLookup structure used to find the current
198  /// FileEntry, if CurLexer is non-null and if applicable.  This allows us to
199  /// implement #include_next and find directory-specific properties.
200  const DirectoryLookup *CurDirLookup;
201
202  /// CurTokenLexer - This is the current macro we are expanding, if we are
203  /// expanding a macro.  One of CurLexer and CurTokenLexer must be null.
204  llvm::OwningPtr<TokenLexer> CurTokenLexer;
205
206  /// \brief The kind of lexer we're currently working with.
207  enum CurLexerKind {
208    CLK_Lexer,
209    CLK_PTHLexer,
210    CLK_TokenLexer,
211    CLK_CachingLexer,
212    CLK_LexAfterModuleImport
213  } CurLexerKind;
214
215  /// IncludeMacroStack - This keeps track of the stack of files currently
216  /// #included, and macros currently being expanded from, not counting
217  /// CurLexer/CurTokenLexer.
218  struct IncludeStackInfo {
219    enum CurLexerKind     CurLexerKind;
220    Lexer                 *TheLexer;
221    PTHLexer              *ThePTHLexer;
222    PreprocessorLexer     *ThePPLexer;
223    TokenLexer            *TheTokenLexer;
224    const DirectoryLookup *TheDirLookup;
225
226    IncludeStackInfo(enum CurLexerKind K, Lexer *L, PTHLexer* P,
227                     PreprocessorLexer* PPL,
228                     TokenLexer* TL, const DirectoryLookup *D)
229      : CurLexerKind(K), TheLexer(L), ThePTHLexer(P), ThePPLexer(PPL),
230        TheTokenLexer(TL), TheDirLookup(D) {}
231  };
232  std::vector<IncludeStackInfo> IncludeMacroStack;
233
234  /// Callbacks - These are actions invoked when some preprocessor activity is
235  /// encountered (e.g. a file is #included, etc).
236  PPCallbacks *Callbacks;
237
238  /// Macros - For each IdentifierInfo with 'HasMacro' set, we keep a mapping
239  /// to the actual definition of the macro.
240  llvm::DenseMap<IdentifierInfo*, MacroInfo*> Macros;
241
242  /// \brief Macros that we want to warn about because they are not used by the
243  /// end of the translation unit; we store just their SourceLocations instead
244  /// of something like MacroInfo*. The benefit of this is that when we are
245  /// deserializing from PCH, we don't need to deserialize identifiers & macros
246  /// just so that we can report that they are unused; we just warn using
247  /// the SourceLocations in this set (which will be filled by the ASTReader).
248  /// We use a SmallPtrSet instead of a vector for faster removal.
249  typedef llvm::SmallPtrSet<SourceLocation, 32> WarnUnusedMacroLocsTy;
250  WarnUnusedMacroLocsTy WarnUnusedMacroLocs;
251
252  /// MacroArgCache - This is a "freelist" of MacroArg objects that can be
253  /// reused for quick allocation.
254  MacroArgs *MacroArgCache;
255  friend class MacroArgs;
256
257  /// PragmaPushMacroInfo - For each IdentifierInfo used in a #pragma
258  /// push_macro directive, we keep a MacroInfo stack used to restore
259  /// the previous macro value.
260  llvm::DenseMap<IdentifierInfo*, std::vector<MacroInfo*> > PragmaPushMacroInfo;
261
262  // Various statistics we track for performance analysis.
263  unsigned NumDirectives, NumIncluded, NumDefined, NumUndefined, NumPragma;
264  unsigned NumIf, NumElse, NumEndif;
265  unsigned NumEnteredSourceFiles, MaxIncludeStackDepth;
266  unsigned NumMacroExpanded, NumFnMacroExpanded, NumBuiltinMacroExpanded;
267  unsigned NumFastMacroExpanded, NumTokenPaste, NumFastTokenPaste;
268  unsigned NumSkipped;
269
270  /// Predefines - This string is the predefined macros that the preprocessor
271  /// should use from the command line etc.
272  std::string Predefines;
273
274  /// TokenLexerCache - Cache macro expanders to reduce malloc traffic.
275  enum { TokenLexerCacheSize = 8 };
276  unsigned NumCachedTokenLexers;
277  TokenLexer *TokenLexerCache[TokenLexerCacheSize];
278
279  /// \brief Keeps macro expanded tokens for TokenLexers.
280  ///
281  /// Works like a stack; a TokenLexer adds the macro-expanded tokens that it is
282  /// going to lex into the cache, and when it finishes, the tokens are removed
283  /// from the end of the cache.
284  SmallVector<Token, 16> MacroExpandedTokens;
285  std::vector<std::pair<TokenLexer *, size_t> > MacroExpandingLexersStack;
286
287  /// \brief A record of the macro definitions and expansions that
288  /// occurred during preprocessing.
289  ///
290  /// This is an optional side structure that can be enabled with
291  /// \c createPreprocessingRecord() prior to preprocessing.
292  PreprocessingRecord *Record;
293
294private:  // Cached tokens state.
295  typedef SmallVector<Token, 1> CachedTokensTy;
296
297  /// CachedTokens - Cached tokens are stored here when we do backtracking or
298  /// lookahead. They are "lexed" by the CachingLex() method.
299  CachedTokensTy CachedTokens;
300
301  /// CachedLexPos - The position of the cached token that CachingLex() should
302  /// "lex" next. If it points beyond the CachedTokens vector, it means that
303  /// a normal Lex() should be invoked.
304  CachedTokensTy::size_type CachedLexPos;
305
306  /// BacktrackPositions - Stack of backtrack positions, allowing nested
307  /// backtracks. The EnableBacktrackAtThisPos() method pushes a position to
308  /// indicate where CachedLexPos should be set when the BackTrack() method is
309  /// invoked (at which point the last position is popped).
310  std::vector<CachedTokensTy::size_type> BacktrackPositions;
311
312  struct MacroInfoChain {
313    MacroInfo MI;
314    MacroInfoChain *Next;
315    MacroInfoChain *Prev;
316  };
317
318  /// MacroInfos are managed as a chain for easy disposal.  This is the head
319  /// of that list.
320  MacroInfoChain *MIChainHead;
321
322  /// MICache - A "freelist" of MacroInfo objects that can be reused for quick
323  /// allocation.
324  MacroInfoChain *MICache;
325
326  MacroInfo *getInfoForMacro(IdentifierInfo *II) const;
327
328public:
329  Preprocessor(DiagnosticsEngine &diags, LangOptions &opts,
330               const TargetInfo *target,
331               SourceManager &SM, HeaderSearch &Headers,
332               ModuleLoader &TheModuleLoader,
333               IdentifierInfoLookup *IILookup = 0,
334               bool OwnsHeaderSearch = false,
335               bool DelayInitialization = false);
336
337  ~Preprocessor();
338
339  /// \brief Initialize the preprocessor, if the constructor did not already
340  /// perform the initialization.
341  ///
342  /// \param Target Information about the target.
343  void Initialize(const TargetInfo &Target);
344
345  DiagnosticsEngine &getDiagnostics() const { return *Diags; }
346  void setDiagnostics(DiagnosticsEngine &D) { Diags = &D; }
347
348  const LangOptions &getLangOptions() const { return Features; }
349  const TargetInfo &getTargetInfo() const { return *Target; }
350  FileManager &getFileManager() const { return FileMgr; }
351  SourceManager &getSourceManager() const { return SourceMgr; }
352  HeaderSearch &getHeaderSearchInfo() const { return HeaderInfo; }
353
354  IdentifierTable &getIdentifierTable() { return Identifiers; }
355  SelectorTable &getSelectorTable() { return Selectors; }
356  Builtin::Context &getBuiltinInfo() { return BuiltinInfo; }
357  llvm::BumpPtrAllocator &getPreprocessorAllocator() { return BP; }
358
359  void setPTHManager(PTHManager* pm);
360
361  PTHManager *getPTHManager() { return PTH.get(); }
362
363  void setExternalSource(ExternalPreprocessorSource *Source) {
364    ExternalSource = Source;
365  }
366
367  ExternalPreprocessorSource *getExternalSource() const {
368    return ExternalSource;
369  }
370
371  /// \brief Retrieve the module loader associated with this preprocessor.
372  ModuleLoader &getModuleLoader() const { return TheModuleLoader; }
373
374  /// SetCommentRetentionState - Control whether or not the preprocessor retains
375  /// comments in output.
376  void SetCommentRetentionState(bool KeepComments, bool KeepMacroComments) {
377    this->KeepComments = KeepComments | KeepMacroComments;
378    this->KeepMacroComments = KeepMacroComments;
379  }
380
381  bool getCommentRetentionState() const { return KeepComments; }
382
383  void SetSuppressIncludeNotFoundError(bool Suppress) {
384    SuppressIncludeNotFoundError = Suppress;
385  }
386
387  bool GetSuppressIncludeNotFoundError() {
388    return SuppressIncludeNotFoundError;
389  }
390
391  /// \brief Specify whether automatic module imports are enabled.
392  void setAutoModuleImport(bool AutoModuleImport = true) {
393    this->AutoModuleImport = AutoModuleImport;
394  }
395
396  /// isCurrentLexer - Return true if we are lexing directly from the specified
397  /// lexer.
398  bool isCurrentLexer(const PreprocessorLexer *L) const {
399    return CurPPLexer == L;
400  }
401
402  /// getCurrentLexer - Return the current lexer being lexed from.  Note
403  /// that this ignores any potentially active macro expansions and _Pragma
404  /// expansions going on at the time.
405  PreprocessorLexer *getCurrentLexer() const { return CurPPLexer; }
406
407  /// getCurrentFileLexer - Return the current file lexer being lexed from.
408  /// Note that this ignores any potentially active macro expansions and _Pragma
409  /// expansions going on at the time.
410  PreprocessorLexer *getCurrentFileLexer() const;
411
412  /// getPPCallbacks/addPPCallbacks - Accessors for preprocessor callbacks.
413  /// Note that this class takes ownership of any PPCallbacks object given to
414  /// it.
415  PPCallbacks *getPPCallbacks() const { return Callbacks; }
416  void addPPCallbacks(PPCallbacks *C) {
417    if (Callbacks)
418      C = new PPChainedCallbacks(C, Callbacks);
419    Callbacks = C;
420  }
421
422  /// getMacroInfo - Given an identifier, return the MacroInfo it is #defined to
423  /// or null if it isn't #define'd.
424  MacroInfo *getMacroInfo(IdentifierInfo *II) const {
425    if (!II->hasMacroDefinition())
426      return 0;
427
428    return getInfoForMacro(II);
429  }
430
431  /// setMacroInfo - Specify a macro for this identifier.
432  ///
433  void setMacroInfo(IdentifierInfo *II, MacroInfo *MI);
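
  // Illustrative sketch (not part of the interface): querying whether an
  // identifier is currently #define'd.  'PP' is assumed to be a live
  // Preprocessor.
  //
  //   IdentifierInfo *II = PP.getIdentifierInfo("assert");
  //   if (MacroInfo *MI = PP.getMacroInfo(II)) {
  //     // Defined; e.g. MI->isFunctionLike() distinguishes FOO(x) from FOO.
  //   }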
434
435  /// macro_iterator/macro_begin/macro_end - This allows you to walk the current
436  /// state of the macro table.  This visits every currently-defined macro.
437  typedef llvm::DenseMap<IdentifierInfo*,
438                         MacroInfo*>::const_iterator macro_iterator;
439  macro_iterator macro_begin(bool IncludeExternalMacros = true) const;
440  macro_iterator macro_end(bool IncludeExternalMacros = true) const;
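
  // Sketch of walking the macro table with the iterators above ('PP' is an
  // assumed Preprocessor instance):
  //
  //   for (Preprocessor::macro_iterator I = PP.macro_begin(),
  //                                     E = PP.macro_end(); I != E; ++I) {
  //     IdentifierInfo *Name = I->first;
  //     MacroInfo *MI = I->second;
  //     // ... e.g. skip builtins by checking MI->isBuiltinMacro() ...
  //   }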
441
442  const std::string &getPredefines() const { return Predefines; }
443  /// setPredefines - Set the predefines for this Preprocessor.  These
444  /// predefines are automatically injected when parsing the main file.
445  void setPredefines(const char *P) { Predefines = P; }
446  void setPredefines(const std::string &P) { Predefines = P; }
447
448  /// getIdentifierInfo - Return information about the specified preprocessor
449  /// identifier token.  Prefer passing a StringRef that points at memory you
450  /// already own rather than first constructing a std::string; this avoids
451  /// allocating and copying memory just to look the identifier up in the
452  /// table.
453  IdentifierInfo *getIdentifierInfo(StringRef Name) const {
454    return &Identifiers.get(Name);
455  }
456
457  /// AddPragmaHandler - Add the specified pragma handler to the preprocessor.
458  /// If 'Namespace' is non-null, then it is a token required to exist on the
459  /// pragma line before the pragma string starts, e.g. "STDC" or "GCC".
460  void AddPragmaHandler(StringRef Namespace, PragmaHandler *Handler);
461  void AddPragmaHandler(PragmaHandler *Handler) {
462    AddPragmaHandler(StringRef(), Handler);
463  }
464
465  /// RemovePragmaHandler - Remove the specified pragma handler from
466  /// the preprocessor. If \arg Namespace is non-null, then it should
467  /// be the namespace that \arg Handler was added to. It is an error
468  /// to remove a handler that has not been registered.
469  void RemovePragmaHandler(StringRef Namespace, PragmaHandler *Handler);
470  void RemovePragmaHandler(PragmaHandler *Handler) {
471    RemovePragmaHandler(StringRef(), Handler);
472  }
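
  // Sketch of registering a custom pragma handler.  'MyPragmaHandler' is a
  // hypothetical PragmaHandler subclass and 'PP' an assumed Preprocessor; a
  // handler must be removed under the same namespace it was added to.
  //
  //   PragmaHandler *H = new MyPragmaHandler();
  //   PP.AddPragmaHandler("clang", H);    // handles '#pragma clang ...'
  //   // ... preprocess ...
  //   PP.RemovePragmaHandler("clang", H);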
473
474  /// \brief Add the specified comment handler to the preprocessor.
475  void AddCommentHandler(CommentHandler *Handler);
476
477  /// \brief Remove the specified comment handler.
478  ///
479  /// It is an error to remove a handler that has not been registered.
480  void RemoveCommentHandler(CommentHandler *Handler);
481
482  /// \brief Set the code completion handler to the given object.
483  void setCodeCompletionHandler(CodeCompletionHandler &Handler) {
484    CodeComplete = &Handler;
485  }
486
487  /// \brief Retrieve the current code-completion handler.
488  CodeCompletionHandler *getCodeCompletionHandler() const {
489    return CodeComplete;
490  }
491
492  /// \brief Clear out the code completion handler.
493  void clearCodeCompletionHandler() {
494    CodeComplete = 0;
495  }
496
497  /// \brief Hook used by the lexer to invoke the "natural language" code
498  /// completion point.
499  void CodeCompleteNaturalLanguage();
500
501  /// \brief Retrieve the preprocessing record, or NULL if there is no
502  /// preprocessing record.
503  PreprocessingRecord *getPreprocessingRecord() const { return Record; }
504
505  /// \brief Create a new preprocessing record, which will keep track of
506  /// all macro expansions, macro definitions, etc.
507  void createPreprocessingRecord(bool IncludeNestedMacroExpansions);
508
509  /// EnterMainSourceFile - Enter the specified FileID as the main source file,
510  /// which implicitly adds the builtin defines etc.
511  void EnterMainSourceFile();
512
513  /// EndSourceFile - Inform the preprocessor callbacks that processing is
514  /// complete.
515  void EndSourceFile();
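
  // Minimal sketch of driving the preprocessor from a client, assuming 'PP'
  // has been initialized and the SourceManager already has a main file:
  //
  //   PP.EnterMainSourceFile();
  //   Token Tok;
  //   do {
  //     PP.Lex(Tok);
  //     // ... consume Tok, e.g. print it for -E style output ...
  //   } while (Tok.isNot(tok::eof));
  //   PP.EndSourceFile();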
516
517  /// EnterSourceFile - Add a source file to the top of the include stack and
518  /// start lexing tokens from it instead of the current buffer.  On error, a
519  /// diagnostic is emitted and the file is not entered.
520  void EnterSourceFile(FileID CurFileID, const DirectoryLookup *Dir,
521                       SourceLocation Loc);
522
523  /// EnterMacro - Add a Macro to the top of the include stack and start lexing
524  /// tokens from it instead of the current buffer.  Args specifies the
525  /// tokens input to a function-like macro.
526  ///
527  /// ILEnd specifies the location of the ')' for a function-like macro or the
528  /// identifier for an object-like macro.
529  void EnterMacro(Token &Identifier, SourceLocation ILEnd, MacroArgs *Args);
530
531  /// EnterTokenStream - Add a "macro" context to the top of the include stack,
532  /// which will cause the lexer to start returning the specified tokens.
533  ///
534  /// If DisableMacroExpansion is true, tokens lexed from the token stream will
535  /// not be subject to further macro expansion.  Otherwise, these tokens will
536  /// be re-macro-expanded when/if expansion is enabled.
537  ///
538  /// If OwnsTokens is false, this method assumes that the specified stream of
539  /// tokens has a permanent owner somewhere, so they do not need to be copied.
540  /// If it is true, it assumes the array of tokens is allocated with new[] and
541  /// must be freed.
542  ///
543  void EnterTokenStream(const Token *Toks, unsigned NumToks,
544                        bool DisableMacroExpansion, bool OwnsTokens);
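
  // Sketch of pushing a caller-owned token buffer ('Toks' is hypothetical).
  // With OwnsTokens=false the array must outlive the stream; with true it
  // must be new[]-allocated so the preprocessor can delete[] it when done.
  //
  //   SmallVector<Token, 4> Toks;
  //   // ... fill Toks ...
  //   PP.EnterTokenStream(Toks.data(), Toks.size(),
  //                       /*DisableMacroExpansion=*/true, /*OwnsTokens=*/false);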
545
546  /// RemoveTopOfLexerStack - Pop the current lexer/macro exp off the top of the
547  /// lexer stack.  This should only be used in situations where the current
548  /// state of the top-of-stack lexer is known.
549  void RemoveTopOfLexerStack();
550
551  /// EnableBacktrackAtThisPos - From the point that this method is called, and
552  /// until CommitBacktrackedTokens() or Backtrack() is called, the Preprocessor
553  /// keeps track of the lexed tokens so that a subsequent Backtrack() call will
554  /// make the Preprocessor re-lex the same tokens.
555  ///
556  /// Nested backtracks are allowed, meaning that EnableBacktrackAtThisPos can
557  /// be called multiple times and CommitBacktrackedTokens/Backtrack calls will
558  /// be combined with the EnableBacktrackAtThisPos calls in reverse order.
559  ///
560  /// NOTE: *DO NOT* forget to call either CommitBacktrackedTokens or Backtrack
561  /// at some point after EnableBacktrackAtThisPos. If you don't, caching of
562  /// tokens will continue indefinitely.
563  ///
564  void EnableBacktrackAtThisPos();
565
566  /// CommitBacktrackedTokens - Disable the last EnableBacktrackAtThisPos call.
567  void CommitBacktrackedTokens();
568
569  /// Backtrack - Make Preprocessor re-lex the tokens that were lexed since
570  /// EnableBacktrackAtThisPos() was previously called.
571  void Backtrack();
572
573  /// isBacktrackEnabled - True if EnableBacktrackAtThisPos() was called and
574  /// caching of tokens is on.
575  bool isBacktrackEnabled() const { return !BacktrackPositions.empty(); }
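
  // Sketch of the backtracking protocol described above ('PP' assumed;
  // 'LooksLikeWhatWeWanted' is a hypothetical predicate).  Every
  // EnableBacktrackAtThisPos() call must be matched by exactly one
  // CommitBacktrackedTokens() or Backtrack() call.
  //
  //   PP.EnableBacktrackAtThisPos();
  //   Token Tok;
  //   PP.Lex(Tok);
  //   if (LooksLikeWhatWeWanted(Tok))
  //     PP.CommitBacktrackedTokens();   // keep the tokens we consumed
  //   else
  //     PP.Backtrack();                 // re-lex the same tokens later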
576
577  /// Lex - To lex a token from the preprocessor, just pull a token from the
578  /// current lexer or macro object.
579  void Lex(Token &Result) {
580    switch (CurLexerKind) {
581    case CLK_Lexer: CurLexer->Lex(Result); break;
582    case CLK_PTHLexer: CurPTHLexer->Lex(Result); break;
583    case CLK_TokenLexer: CurTokenLexer->Lex(Result); break;
584    case CLK_CachingLexer: CachingLex(Result); break;
585    case CLK_LexAfterModuleImport: LexAfterModuleImport(Result); break;
586    }
587  }
588
589  void LexAfterModuleImport(Token &Result);
590
591  /// LexNonComment - Lex a token.  If it's a comment, keep lexing until we get
592  /// something not a comment.  This is useful in -E -C mode where comments
593  /// would foul up preprocessor directive handling.
594  void LexNonComment(Token &Result) {
595    do
596      Lex(Result);
597    while (Result.getKind() == tok::comment);
598  }
599
600  /// LexUnexpandedToken - This is just like Lex, but this disables macro
601  /// expansion of identifier tokens.
602  void LexUnexpandedToken(Token &Result) {
603    // Disable macro expansion.
604    bool OldVal = DisableMacroExpansion;
605    DisableMacroExpansion = true;
606    // Lex the token.
607    Lex(Result);
608
609    // Reenable it.
610    DisableMacroExpansion = OldVal;
611  }
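
  // Sketch: directive handling reads the operand of '#ifdef FOO' with
  // LexUnexpandedToken() so that FOO is seen as an identifier rather than
  // being macro-expanded; a plain Lex() here could expand it.
  //
  //   Token MacroNameTok;
  //   PP.LexUnexpandedToken(MacroNameTok);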
612
613  /// LexUnexpandedNonComment - Like LexNonComment, but this disables macro
614  /// expansion of identifier tokens.
615  void LexUnexpandedNonComment(Token &Result) {
616    do
617      LexUnexpandedToken(Result);
618    while (Result.getKind() == tok::comment);
619  }
620
621  /// LookAhead - This peeks ahead N tokens and returns that token without
622  /// consuming any tokens.  LookAhead(0) returns the next token that would be
623  /// returned by Lex(), LookAhead(1) returns the token after it, etc.  This
624  /// returns normal tokens after phase 5.  As such, it is equivalent to using
625  /// 'Lex', not 'LexUnexpandedToken'.
626  const Token &LookAhead(unsigned N) {
627    if (CachedLexPos + N < CachedTokens.size())
628      return CachedTokens[CachedLexPos+N];
629    else
630      return PeekAhead(N+1);
631  }
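
  // Sketch: peeking at upcoming tokens without consuming them ('PP' assumed).
  //
  //   const Token &Next = PP.LookAhead(0);       // what Lex() would return
  //   const Token &AfterNext = PP.LookAhead(1);
  //   if (Next.is(tok::l_paren) && AfterNext.is(tok::identifier)) {
  //     // ... decide how to proceed before consuming anything ...
  //   }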
632
633  /// RevertCachedTokens - When backtracking is enabled and tokens are cached,
634  /// this allows reverting a specific number of tokens.
635  /// Note that the number of tokens being reverted should be up to the last
636  /// backtrack position, not more.
637  void RevertCachedTokens(unsigned N) {
638    assert(isBacktrackEnabled() &&
639           "Should only be called when tokens are cached for backtracking");
640    assert(signed(CachedLexPos) - signed(N) >= signed(BacktrackPositions.back())
641         && "Should revert tokens up to the last backtrack position, not more");
642    assert(signed(CachedLexPos) - signed(N) >= 0 &&
643           "Corrupted backtrack positions ?");
644    CachedLexPos -= N;
645  }
646
647  /// EnterToken - Enters a token in the token stream to be lexed next. If
648  /// BackTrack() is called afterwards, the token will remain at the insertion
649  /// point.
650  void EnterToken(const Token &Tok) {
651    EnterCachingLexMode();
652    CachedTokens.insert(CachedTokens.begin()+CachedLexPos, Tok);
653  }
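
  // Sketch: pushing a previously lexed token 'Tok' back so that the next
  // Lex() call returns it again, e.g. after peeking further than needed.
  //
  //   PP.EnterToken(Tok);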
654
655  /// AnnotateCachedTokens - We notify the Preprocessor that if it is caching
656  /// tokens (because backtrack is enabled) it should replace the most recent
657  /// cached tokens with the given annotation token. This function has no effect
658  /// if backtracking is not enabled.
659  ///
660  /// Note that the use of this function is just an optimization, so that the
661  /// cached tokens don't get re-parsed and re-resolved after a backtrack is
662  /// invoked.
663  void AnnotateCachedTokens(const Token &Tok) {
664    assert(Tok.isAnnotation() && "Expected annotation token");
665    if (CachedLexPos != 0 && isBacktrackEnabled())
666      AnnotatePreviousCachedTokens(Tok);
667  }
668
669  /// \brief Replace the last token with an annotation token.
670  ///
671  /// Like AnnotateCachedTokens(), this routine replaces an
672  /// already-parsed (and resolved) token with an annotation
673  /// token. However, this routine only replaces the last token with
674  /// the annotation token; it does not affect any other cached
675  /// tokens. This function has no effect if backtracking is not
676  /// enabled.
677  void ReplaceLastTokenWithAnnotation(const Token &Tok) {
678    assert(Tok.isAnnotation() && "Expected annotation token");
679    if (CachedLexPos != 0 && isBacktrackEnabled())
680      CachedTokens[CachedLexPos-1] = Tok;
681  }
682
683  /// \brief Specify the point at which code-completion will be performed.
684  ///
685  /// \param File the file in which code completion should occur. If
686  /// this file is included multiple times, code-completion will
687  /// perform completion the first time it is included. If NULL, this
688  /// function clears out the code-completion point.
689  ///
690  /// \param Line the line at which code completion should occur
691  /// (1-based).
692  ///
693  /// \param Column the column at which code completion should occur
694  /// (1-based).
695  ///
696  /// \returns true if an error occurred, false otherwise.
697  bool SetCodeCompletionPoint(const FileEntry *File,
698                              unsigned Line, unsigned Column);
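
  // Sketch of arming code completion before preprocessing starts.  The file
  // name is hypothetical; Line and Column are 1-based.
  //
  //   const FileEntry *File = PP.getFileManager().getFile("input.c");
  //   if (!File || PP.SetCodeCompletionPoint(File, /*Line=*/10, /*Column=*/4)) {
  //     // an error occurred setting the completion point
  //   }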
699
700  /// \brief Determine if we are performing code completion.
701  bool isCodeCompletionEnabled() const { return CodeCompletionFile != 0; }
702
703  /// \brief Returns the location of the code-completion point.
704  /// Returns an invalid location if code-completion is not enabled or the file
705  /// containing the code-completion point has not been lexed yet.
706  SourceLocation getCodeCompletionLoc() const { return CodeCompletionLoc; }
707
708  /// \brief Returns the start location of the file of code-completion point.
709  /// Returns an invalid location if code-completion is not enabled or the file
710  /// containing the code-completion point has not been lexed yet.
711  SourceLocation getCodeCompletionFileLoc() const {
712    return CodeCompletionFileLoc;
713  }
714
715  /// \brief Returns true if code-completion is enabled and we have hit the
716  /// code-completion point.
717  bool isCodeCompletionReached() const { return CodeCompletionReached; }
718
719  /// \brief Note that we hit the code-completion point.
720  void setCodeCompletionReached() {
721    assert(isCodeCompletionEnabled() && "Code-completion not enabled!");
722    CodeCompletionReached = true;
723    // Silence any diagnostics that occur after we hit the code-completion.
724    getDiagnostics().setSuppressAllDiagnostics(true);
725  }
726
727  /// \brief The location of the currently-active #pragma clang
728  /// arc_cf_code_audited begin.  Returns an invalid location if there
729  /// is no such pragma active.
730  SourceLocation getPragmaARCCFCodeAuditedLoc() const {
731    return PragmaARCCFCodeAuditedLoc;
732  }
733
734  /// \brief Set the location of the currently-active #pragma clang
735  /// arc_cf_code_audited begin.  An invalid location ends the pragma.
736  void setPragmaARCCFCodeAuditedLoc(SourceLocation Loc) {
737    PragmaARCCFCodeAuditedLoc = Loc;
738  }
739
740  /// \brief Instruct the preprocessor to skip part of the main
741  /// source file.
742  ///
743  /// \param Bytes The number of bytes in the preamble to skip.
744  ///
745  /// \param StartOfLine Whether skipping these bytes puts the lexer at the
746  /// start of a line.
747  void setSkipMainFilePreamble(unsigned Bytes, bool StartOfLine) {
748    SkipMainFilePreamble.first = Bytes;
749    SkipMainFilePreamble.second = StartOfLine;
750  }
751
752  /// Diag - Forwarding function for diagnostics.  This emits a diagnostic at
753  /// the specified source location or token location, translating the token's
754  /// start position in the current buffer into a SourceLocation for rendering.
755  DiagnosticBuilder Diag(SourceLocation Loc, unsigned DiagID) {
756    return Diags->Report(Loc, DiagID);
757  }
758
759  DiagnosticBuilder Diag(const Token &Tok, unsigned DiagID) {
760    return Diags->Report(Tok.getLocation(), DiagID);
761  }
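
  // Sketch: emitting a diagnostic at a token, streaming an argument onto the
  // returned DiagnosticBuilder.  The diagnostic ID is hypothetical.
  //
  //   PP.Diag(Tok, diag::err_example_unexpected_token) << Tok.getName();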
762
763  /// getSpelling() - Return the 'spelling' of the token at the given
764  /// location; does not go up to the spelling location or down to the
765  /// expansion location.
766  ///
767  /// \param buffer A buffer which will be used only if the token requires
768  ///   "cleaning", e.g. if it contains trigraphs or escaped newlines
769  /// \param invalid If non-null, will be set \c true if an error occurs.
770  StringRef getSpelling(SourceLocation loc,
771                              SmallVectorImpl<char> &buffer,
772                              bool *invalid = 0) const {
773    return Lexer::getSpelling(loc, buffer, SourceMgr, Features, invalid);
774  }
775
776  /// getSpelling() - Return the 'spelling' of the Tok token.  The spelling of a
777  /// token is the characters used to represent the token in the source file
778  /// after trigraph expansion and escaped-newline folding.  In particular, this
779  /// wants to get the true, uncanonicalized, spelling of things like digraphs,
780  /// UCNs, etc.
781  ///
782  /// \param Invalid If non-null, will be set \c true if an error occurs.
783  std::string getSpelling(const Token &Tok, bool *Invalid = 0) const {
784    return Lexer::getSpelling(Tok, SourceMgr, Features, Invalid);
785  }
786
787  /// getSpelling - This method is used to get the spelling of a token into a
788  /// preallocated buffer, instead of as an std::string.  The caller is required
789  /// to allocate enough space for the token, which is guaranteed to be at least
790  /// Tok.getLength() bytes long.  The length of the actual result is returned.
791  ///
792  /// Note that this method may do two possible things: it may either fill in
793  /// the buffer specified with characters, or it may *change the input pointer*
794  /// to point to a constant buffer with the data already in it (avoiding a
795  /// copy).  The caller is not allowed to modify the returned buffer pointer
796  /// if an internal buffer is returned.
797  unsigned getSpelling(const Token &Tok, const char *&Buffer,
798                       bool *Invalid = 0) const {
799    return Lexer::getSpelling(Tok, Buffer, SourceMgr, Features, Invalid);
800  }
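
  // Sketch of the pointer-swap behavior described above: after the call, 'Ptr'
  // may point into 'SpellingBuf' or into a read-only internal buffer ('Tok' is
  // a previously lexed Token).
  //
  //   llvm::SmallString<64> SpellingBuf;
  //   SpellingBuf.resize(Tok.getLength());
  //   const char *Ptr = SpellingBuf.data();
  //   unsigned Len = PP.getSpelling(Tok, Ptr);
  //   StringRef Spelling(Ptr, Len);   // do not write through Ptr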
801
802  /// getSpelling - This method is used to get the spelling of a token into a
803  /// SmallVector. Note that the returned StringRef may not point to the
804  /// supplied buffer if a copy can be avoided.
805  StringRef getSpelling(const Token &Tok,
806                              SmallVectorImpl<char> &Buffer,
807                              bool *Invalid = 0) const;
808
809  /// getSpellingOfSingleCharacterNumericConstant - Tok is a numeric constant
810  /// with length 1, return the character.
811  char getSpellingOfSingleCharacterNumericConstant(const Token &Tok,
812                                                   bool *Invalid = 0) const {
813    assert(Tok.is(tok::numeric_constant) &&
814           Tok.getLength() == 1 && "Called on unsupported token");
815    assert(!Tok.needsCleaning() && "Token can't need cleaning with length 1");
816
817    // If the token is carrying a literal data pointer, just use it.
818    if (const char *D = Tok.getLiteralData())
819      return *D;
820
821    // Otherwise, fall back on getCharacterData, which is slower, but always
822    // works.
823    return *SourceMgr.getCharacterData(Tok.getLocation(), Invalid);
824  }
825
826  /// CreateString - Plop the specified string into a scratch buffer and set the
827  /// specified token's location and length to it.  If specified, the source
828  /// location provides a location of the expansion point of the token.
829  void CreateString(const char *Buf, unsigned Len, Token &Tok,
830                    SourceLocation ExpansionLocStart = SourceLocation(),
831                    SourceLocation ExpansionLocEnd = SourceLocation());
832
833  /// \brief Computes the source location just past the end of the
834  /// token at this source location.
835  ///
836  /// This routine can be used to produce a source location that
837  /// points just past the end of the token referenced by \p Loc, and
838  /// is generally used when a diagnostic needs to point just after a
839  /// token where it expected something different from what it received. If
840  /// the returned source location would not be meaningful (e.g., if
841  /// it points into a macro), this routine returns an invalid
842  /// source location.
843  ///
844  /// \param Offset an offset from the end of the token, where the source
845  /// location should refer to. The default offset (0) produces a source
846  /// location pointing just past the end of the token; an offset of 1 produces
847  /// a source location pointing to the last character in the token, etc.
848  SourceLocation getLocForEndOfToken(SourceLocation Loc, unsigned Offset = 0) {
849    return Lexer::getLocForEndOfToken(Loc, Offset, SourceMgr, Features);
850  }
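
  // Sketch: pointing a diagnostic just past a token, e.g. to attach a fix-it
  // hint (the diagnostic ID is hypothetical).
  //
  //   SourceLocation After = PP.getLocForEndOfToken(Tok.getLocation());
  //   if (After.isValid())
  //     PP.Diag(After, diag::err_example_expected_semi)
  //       << FixItHint::CreateInsertion(After, ";");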
851
852  /// \brief Returns true if the given MacroID location points at the first
853  /// token of the macro expansion.
854  bool isAtStartOfMacroExpansion(SourceLocation loc) const {
855    return Lexer::isAtStartOfMacroExpansion(loc, SourceMgr, Features);
856  }
857
858  /// \brief Returns true if the given MacroID location points at the last
859  /// token of the macro expansion.
860  bool isAtEndOfMacroExpansion(SourceLocation loc) const {
861    return Lexer::isAtEndOfMacroExpansion(loc, SourceMgr, Features);
862  }
863
864  /// DumpToken - Print the token to stderr, used for debugging.
865  ///
866  void DumpToken(const Token &Tok, bool DumpFlags = false) const;
867  void DumpLocation(SourceLocation Loc) const;
868  void DumpMacro(const MacroInfo &MI) const;
869
870  /// AdvanceToTokenCharacter - Given a location that specifies the start of a
871  /// token, return a new location that specifies a character within the token.
872  SourceLocation AdvanceToTokenCharacter(SourceLocation TokStart,
873                                         unsigned Char) const {
874    return Lexer::AdvanceToTokenCharacter(TokStart, Char, SourceMgr, Features);
875  }
876
877  /// IncrementPasteCounter - Increment the counters for the number of token
878  /// paste operations performed.  If fast was specified, this is a 'fast paste'
879  /// case we handled.
880  ///
881  void IncrementPasteCounter(bool isFast) {
882    if (isFast)
883      ++NumFastTokenPaste;
884    else
885      ++NumTokenPaste;
886  }
887
888  void PrintStats();
889
890  size_t getTotalMemory() const;
891
892  /// HandleMicrosoftCommentPaste - When the macro expander pastes together a
893  /// comment (/##/) in Microsoft mode, this method handles updating the current
894  /// state, returning the token on the next source line.
895  void HandleMicrosoftCommentPaste(Token &Tok);
896
897  //===--------------------------------------------------------------------===//
898  // Preprocessor callback methods.  These are invoked by a lexer as various
899  // directives and events are found.
900
901  /// LookUpIdentifierInfo - Given a tok::raw_identifier token, look up the
902  /// identifier information for the token and install it into the token,
903  /// updating the token kind accordingly.
904  IdentifierInfo *LookUpIdentifierInfo(Token &Identifier) const;
905
906private:
907  llvm::DenseMap<IdentifierInfo*,unsigned> PoisonReasons;
908
909public:
910
911  // SetPoisonReason - Call this function to indicate the reason for
912  // poisoning an identifier. If that identifier is accessed while
913  // poisoned, then this reason will be used instead of the default
914  // "poisoned" diagnostic.
915  void SetPoisonReason(IdentifierInfo *II, unsigned DiagID);
916
917  // HandlePoisonedIdentifier - Display reason for poisoned
918  // identifier.
919  void HandlePoisonedIdentifier(Token & Tok);
920
921  void MaybeHandlePoisonedIdentifier(Token &Identifier) {
922    if (IdentifierInfo *II = Identifier.getIdentifierInfo()) {
923      if (II->isPoisoned()) {
924        HandlePoisonedIdentifier(Identifier);
925      }
926    }
927  }
928
929private:
930  /// Identifiers used for SEH handling in Borland. These are only
931  /// allowed in particular circumstances.
932  IdentifierInfo *Ident__exception_code, *Ident___exception_code, *Ident_GetExceptionCode; // __except block
933  IdentifierInfo *Ident__exception_info, *Ident___exception_info, *Ident_GetExceptionInfo; // __except filter expression
934  IdentifierInfo *Ident__abnormal_termination, *Ident___abnormal_termination, *Ident_AbnormalTermination; // __finally
935public:
936  void PoisonSEHIdentifiers(bool Poison = true); // Borland
937
938  /// HandleIdentifier - This callback is invoked when the lexer reads an
939  /// identifier and has filled in the token's IdentifierInfo member.  This
940  /// callback potentially macro-expands it or turns it into a named token (like
941  /// 'for').
942  void HandleIdentifier(Token &Identifier);
943
944
945  /// HandleEndOfFile - This callback is invoked when the lexer hits the end of
946  /// the current file.  This either sets Result to the EOF token and returns
947  /// true, or pops a level off the include stack and returns false, at which
948  /// point the client should call Lex again.
949  bool HandleEndOfFile(Token &Result, bool isEndOfMacro = false);
950
951  /// HandleEndOfTokenLexer - This callback is invoked when the current
952  /// TokenLexer hits the end of its token stream.
953  bool HandleEndOfTokenLexer(Token &Result);
954
955  /// HandleDirective - This callback is invoked when the lexer sees a # token
956  /// at the start of a line.  This consumes the directive, modifies the
957  /// lexer/preprocessor state, and advances the lexer(s) so that the next token
958  /// read is the correct one.
959  void HandleDirective(Token &Result);
960
961  /// CheckEndOfDirective - Ensure that the next token is a tok::eod token.  If
962  /// not, emit a diagnostic and consume up until the eod.  If EnableMacros is
963  /// true, then we consider macros that expand to zero tokens as being ok.
964  void CheckEndOfDirective(const char *Directive, bool EnableMacros = false);
965
966  /// DiscardUntilEndOfDirective - Read and discard all tokens remaining on the
967  /// current line until the tok::eod token is found.
968  void DiscardUntilEndOfDirective();
969
970  /// SawDateOrTime - This returns true if the preprocessor has seen a use of
971  /// __DATE__ or __TIME__ in the file so far.
972  bool SawDateOrTime() const {
973    return DATELoc != SourceLocation() || TIMELoc != SourceLocation();
974  }
975  unsigned getCounterValue() const { return CounterValue; }
976  void setCounterValue(unsigned V) { CounterValue = V; }
977
978  /// AllocateMacroInfo - Allocate a new MacroInfo object with the provided
979  ///  SourceLocation.
980  MacroInfo *AllocateMacroInfo(SourceLocation L);
981
982  /// CloneMacroInfo - Allocate a new MacroInfo object which is a clone of MI.
983  MacroInfo *CloneMacroInfo(const MacroInfo &MI);
984
985  /// GetIncludeFilenameSpelling - Turn the specified lexer token into a fully
986  /// checked and spelled filename, e.g. as an operand of #include. This returns
987  /// true if the input filename was in <>'s or false if it was in ""'s.  The
988  /// caller is expected to provide a buffer that is large enough to hold the
989  /// spelling of the filename, but is also expected to handle the case when
990  /// this method decides to use a different buffer.
991  bool GetIncludeFilenameSpelling(SourceLocation Loc,StringRef &Filename);
992
993  /// LookupFile - Given a "foo" or <foo> reference, look up the indicated file,
994  /// returning null on failure.  isAngled indicates whether the file reference is
995  /// for system #include's or not (i.e. using <> instead of "").
996  const FileEntry *LookupFile(StringRef Filename,
997                              bool isAngled, const DirectoryLookup *FromDir,
998                              const DirectoryLookup *&CurDir,
999                              SmallVectorImpl<char> *SearchPath,
1000                              SmallVectorImpl<char> *RelativePath,
1001                              StringRef *SuggestedModule);
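
  // Sketch of resolving an include name much as HandleIncludeDirective does
  // ('PP' assumed; the trailing out-parameters are optional and may be null):
  //
  //   const DirectoryLookup *CurDir;
  //   const FileEntry *FE = PP.LookupFile("vector", /*isAngled=*/true,
  //                                       /*FromDir=*/0, CurDir,
  //                                       /*SearchPath=*/0, /*RelativePath=*/0,
  //                                       /*SuggestedModule=*/0);
  //   if (!FE) {
  //     // not found; the caller is responsible for diagnosing
  //   }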
1002
1003  /// GetCurDirLookup - The DirectoryLookup structure used to find the current
1004  /// FileEntry, if CurLexer is non-null and if applicable.  This allows us to
1005  /// implement #include_next and find directory-specific properties.
1006  const DirectoryLookup *GetCurDirLookup() { return CurDirLookup; }
1007
1008  /// isInPrimaryFile - Return true if we're in the top-level file, not in a
1009  /// #include.
1010  bool isInPrimaryFile() const;
1011
1012  /// ConcatenateIncludeName - Handle cases where the #include name is expanded
1013  /// from a macro as multiple tokens, which need to be glued together.  This
1014  /// occurs for code like:
1015  ///    #define FOO <a/b.h>
1016  ///    #include FOO
1017  /// because in this case, "<a/b.h>" is returned as 7 tokens, not one.
1018  ///
1019  /// This code concatenates and consumes tokens up to the '>' token.  It
1020  /// returns false if the '>' was found; otherwise it returns true after finding
1021  /// and consuming the EOD marker.
1022  bool ConcatenateIncludeName(llvm::SmallString<128> &FilenameBuffer,
1023                              SourceLocation &End);
1024
1025  /// LexOnOffSwitch - Lex an on-off-switch (C99 6.10.6p2) and verify that it is
1026  /// followed by EOD.  Return true if the token is not a valid on-off-switch.
1027  bool LexOnOffSwitch(tok::OnOffSwitch &OOS);
1028
1029private:
1030
1031  void PushIncludeMacroStack() {
1032    IncludeMacroStack.push_back(IncludeStackInfo(CurLexerKind,
1033                                                 CurLexer.take(),
1034                                                 CurPTHLexer.take(),
1035                                                 CurPPLexer,
1036                                                 CurTokenLexer.take(),
1037                                                 CurDirLookup));
1038    CurPPLexer = 0;
1039  }
1040
1041  void PopIncludeMacroStack() {
1042    CurLexer.reset(IncludeMacroStack.back().TheLexer);
1043    CurPTHLexer.reset(IncludeMacroStack.back().ThePTHLexer);
1044    CurPPLexer = IncludeMacroStack.back().ThePPLexer;
1045    CurTokenLexer.reset(IncludeMacroStack.back().TheTokenLexer);
1046    CurDirLookup  = IncludeMacroStack.back().TheDirLookup;
1047    CurLexerKind = IncludeMacroStack.back().CurLexerKind;
1048    IncludeMacroStack.pop_back();
1049  }
1050
1051  /// AllocateMacroInfo - Allocate a new MacroInfo object.
1052  MacroInfo *AllocateMacroInfo();
1053
1054  /// ReleaseMacroInfo - Release the specified MacroInfo.  This memory will
1055  ///  be reused for allocating new MacroInfo objects.
1056  void ReleaseMacroInfo(MacroInfo* MI);
1057
1058  /// ReadMacroName - Lex and validate a macro name, which occurs after a
1059  /// #define or #undef.  This emits a diagnostic, sets the token kind to eod,
1060  /// and discards the rest of the macro line if the macro name is invalid.
1061  void ReadMacroName(Token &MacroNameTok, char isDefineUndef = 0);
1062
1063  /// ReadMacroDefinitionArgList - The ( starting an argument list of a macro
1064  /// definition has just been read.  Lex the rest of the arguments and the
1065  /// closing ), updating MI with what we learn.  Return true if an error occurs
1066  /// parsing the arg list.
1067  bool ReadMacroDefinitionArgList(MacroInfo *MI);
1068
1069  /// SkipExcludedConditionalBlock - We just read a #if or related directive and
1070  /// decided that the subsequent tokens are in the #if'd out portion of the
1071  /// file.  Lex the rest of the file, until we see an #endif.  If
1072  /// FoundNonSkipPortion is true, then we have already emitted code for part of
1073  /// this #if directive, so #else/#elif blocks should never be entered. If
1074  /// FoundElse is false, then #else directives are ok, if not, then we have
1075  /// already seen one so a #else directive is a duplicate.  When this returns,
1076  /// the caller can lex the first valid token.
1077  void SkipExcludedConditionalBlock(SourceLocation IfTokenLoc,
1078                                    bool FoundNonSkipPortion, bool FoundElse,
1079                                    SourceLocation ElseLoc = SourceLocation());
1080
1081  /// PTHSkipExcludedConditionalBlock - A fast PTH version of
1082  ///  SkipExcludedConditionalBlock.
1083  void PTHSkipExcludedConditionalBlock();
1084
1085  /// EvaluateDirectiveExpression - Evaluate an integer constant expression that
1086  /// may occur after a #if or #elif directive and return it as a bool.  If the
1087  /// expression is equivalent to "!defined(X)" return X in IfNDefMacro.
1088  bool EvaluateDirectiveExpression(IdentifierInfo *&IfNDefMacro);
1089
1090  /// RegisterBuiltinPragmas - Install the standard preprocessor pragmas:
1091  /// #pragma GCC poison/system_header/dependency and #pragma once.
1092  void RegisterBuiltinPragmas();
1093
1094  /// RegisterBuiltinMacros - Register builtin macros, such as __LINE__ with the
1095  /// identifier table.
1096  void RegisterBuiltinMacros();
1097
1098  /// HandleMacroExpandedIdentifier - If an identifier token is read that is to
1099  /// be expanded as a macro, handle it and return the next token as 'Tok'.  If
1100  /// the macro should not be expanded return true, otherwise return false.
1101  bool HandleMacroExpandedIdentifier(Token &Tok, MacroInfo *MI);
1102
1103  /// \brief Cache macro expanded tokens for TokenLexers.
1104  ///
1105  /// Works like a stack; a TokenLexer adds the macro-expanded tokens that it is
1106  /// going to lex into the cache, and when it finishes, the tokens are removed
1107  /// from the end of the cache.
1108  Token *cacheMacroExpandedTokens(TokenLexer *tokLexer,
1109                                  ArrayRef<Token> tokens);
1110  void removeCachedMacroExpandedTokensOfLastLexer();
1111  friend void TokenLexer::ExpandFunctionArguments();
1112
1113  /// isNextPPTokenLParen - Determine whether the next preprocessor token to be
1114  /// lexed is a '('.  If so, consume the token and return true, if not, this
1115  /// method should have no observable side-effect on the lexed tokens.
1116  bool isNextPPTokenLParen();
1117
1118  /// ReadFunctionLikeMacroArgs - After reading "MACRO(", this method is
1119  /// invoked to read all of the formal arguments specified for the macro
1120  /// invocation.  This returns null on error.
1121  MacroArgs *ReadFunctionLikeMacroArgs(Token &MacroName, MacroInfo *MI,
1122                                       SourceLocation &ExpansionEnd);
1123
1124  /// ExpandBuiltinMacro - If an identifier token is read that is to be expanded
1125  /// as a builtin macro, handle it and return the next token as 'Tok'.
1126  void ExpandBuiltinMacro(Token &Tok);
1127
1128  /// Handle_Pragma - Read a _Pragma directive, slice it up, process it, then
1129  /// return the first token after the directive.  The _Pragma token has just
1130  /// been read into 'Tok'.
1131  void Handle_Pragma(Token &Tok);
1132
1133  /// HandleMicrosoft__pragma - Like Handle_Pragma except the pragma text
1134  /// is not enclosed within a string literal.
1135  void HandleMicrosoft__pragma(Token &Tok);
1136
1137  /// EnterSourceFileWithLexer - Add a lexer to the top of the include stack and
1138  /// start lexing tokens from it instead of the current buffer.
1139  void EnterSourceFileWithLexer(Lexer *TheLexer, const DirectoryLookup *Dir);
1140
1141  /// EnterSourceFileWithPTH - Add a lexer to the top of the include stack and
1142  /// start getting tokens from it using the PTH cache.
1143  void EnterSourceFileWithPTH(PTHLexer *PL, const DirectoryLookup *Dir);
1144
1145  /// IsFileLexer - Returns true if we are lexing from a file and not a
1146  ///  pragma or a macro.
1147  static bool IsFileLexer(const Lexer* L, const PreprocessorLexer* P) {
1148    return L ? !L->isPragmaLexer() : P != 0;
1149  }
1150
1151  static bool IsFileLexer(const IncludeStackInfo& I) {
1152    return IsFileLexer(I.TheLexer, I.ThePPLexer);
1153  }
1154
1155  bool IsFileLexer() const {
1156    return IsFileLexer(CurLexer.get(), CurPPLexer);
1157  }
1158
1159  //===--------------------------------------------------------------------===//
1160  // Caching stuff.
1161  void CachingLex(Token &Result);
1162  bool InCachingLexMode() const {
1163    // If the Lexer pointers are 0 and IncludeMacroStack is empty, it means
1164    // that we are past EOF, not that we are in CachingLex mode.
1165    return CurPPLexer == 0 && CurTokenLexer == 0 && CurPTHLexer == 0 &&
1166           !IncludeMacroStack.empty();
1167  }
1168  void EnterCachingLexMode();
1169  void ExitCachingLexMode() {
1170    if (InCachingLexMode())
1171      RemoveTopOfLexerStack();
1172  }
1173  const Token &PeekAhead(unsigned N);
1174  void AnnotatePreviousCachedTokens(const Token &Tok);
1175
1176  //===--------------------------------------------------------------------===//
1177  /// Handle*Directive - implement the various preprocessor directives.  These
1178  /// should side-effect the current preprocessor object so that the next call
1179  /// to Lex() will return the appropriate token.
1180  void HandleLineDirective(Token &Tok);
1181  void HandleDigitDirective(Token &Tok);
1182  void HandleUserDiagnosticDirective(Token &Tok, bool isWarning);
1183  void HandleIdentSCCSDirective(Token &Tok);
1184  void HandleMacroExportDirective(Token &Tok);
1185
1186  // File inclusion.
1187  void HandleIncludeDirective(SourceLocation HashLoc,
1188                              Token &Tok,
1189                              const DirectoryLookup *LookupFrom = 0,
1190                              bool isImport = false);
1191  void HandleIncludeNextDirective(SourceLocation HashLoc, Token &Tok);
1192  void HandleIncludeMacrosDirective(SourceLocation HashLoc, Token &Tok);
1193  void HandleImportDirective(SourceLocation HashLoc, Token &Tok);
1194
1195  // Macro handling.
1196  void HandleDefineDirective(Token &Tok);
1197  void HandleUndefDirective(Token &Tok);
1198
1199  // Conditional Inclusion.
1200  void HandleIfdefDirective(Token &Tok, bool isIfndef,
1201                            bool ReadAnyTokensBeforeDirective);
1202  void HandleIfDirective(Token &Tok, bool ReadAnyTokensBeforeDirective);
1203  void HandleEndifDirective(Token &Tok);
1204  void HandleElseDirective(Token &Tok);
1205  void HandleElifDirective(Token &Tok);
1206
1207  // Pragmas.
1208  void HandlePragmaDirective(unsigned Introducer);
1209public:
1210  void HandlePragmaOnce(Token &OnceTok);
1211  void HandlePragmaMark();
1212  void HandlePragmaPoison(Token &PoisonTok);
1213  void HandlePragmaSystemHeader(Token &SysHeaderTok);
1214  void HandlePragmaDependency(Token &DependencyTok);
1215  void HandlePragmaComment(Token &CommentTok);
1216  void HandlePragmaMessage(Token &MessageTok);
1217  void HandlePragmaPushMacro(Token &Tok);
1218  void HandlePragmaPopMacro(Token &Tok);
1219  IdentifierInfo *ParsePragmaPushOrPopMacro(Token &Tok);
1220
1221  // Return true and store the first token only if any CommentHandler
1222  // has inserted some tokens and getCommentRetentionState() is false.
1223  bool HandleComment(Token &Token, SourceRange Comment);
1224
1225  /// \brief A macro is used, update information about macros that need unused
1226  /// warnings.
1227  void markMacroAsUsed(MacroInfo *MI);
1228};
1229
1230/// \brief Abstract base class that describes a handler that will receive
1231/// source ranges for each of the comments encountered in the source file.
1232class CommentHandler {
1233public:
1234  virtual ~CommentHandler();
1235
1236  // The handler shall return true if it has pushed any tokens
1237  // to be read using e.g. EnterToken or EnterTokenStream.
1238  virtual bool HandleComment(Preprocessor &PP, SourceRange Comment) = 0;
1239};
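
// Illustrative sketch of a client comment handler; the class and its behavior
// are hypothetical, only the HandleComment signature comes from the interface
// above.
//
//   class CollectCommentsHandler : public CommentHandler {
//     std::vector<SourceRange> Comments;
//   public:
//     virtual bool HandleComment(Preprocessor &PP, SourceRange Comment) {
//       Comments.push_back(Comment);
//       return false;   // no tokens were pushed into the preprocessor
//     }
//   };
//
//   // Usage: register before preprocessing, remove when done.
//   //   CollectCommentsHandler H;
//   //   PP.AddCommentHandler(&H);
//   //   ... preprocess ...
//   //   PP.RemoveCommentHandler(&H);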
1240
1241}  // end namespace clang
1242
1243#endif
1244