Preprocessor.h revision 10285d9113c14d1e523f86a55b193eb752638ea5
1//===--- Preprocessor.h - C Language Family Preprocessor --------*- C++ -*-===//
2//
3//                     The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10//  This file defines the Preprocessor interface.
11//
12//===----------------------------------------------------------------------===//
13
14#ifndef LLVM_CLANG_LEX_PREPROCESSOR_H
15#define LLVM_CLANG_LEX_PREPROCESSOR_H
16
17#include "clang/Lex/MacroInfo.h"
18#include "clang/Lex/Lexer.h"
19#include "clang/Lex/PTHLexer.h"
20#include "clang/Lex/PPCallbacks.h"
21#include "clang/Lex/TokenLexer.h"
22#include "clang/Lex/PTHManager.h"
23#include "clang/Basic/Builtins.h"
24#include "clang/Basic/Diagnostic.h"
25#include "clang/Basic/IdentifierTable.h"
26#include "clang/Basic/SourceLocation.h"
27#include "llvm/ADT/DenseMap.h"
28#include "llvm/ADT/IntrusiveRefCntPtr.h"
29#include "llvm/ADT/SmallPtrSet.h"
30#include "llvm/ADT/OwningPtr.h"
31#include "llvm/ADT/SmallVector.h"
32#include "llvm/ADT/ArrayRef.h"
33#include "llvm/Support/Allocator.h"
34#include <vector>
35
36namespace llvm {
37  template<unsigned InternalLen> class SmallString;
38}
39
40namespace clang {
41
42class SourceManager;
43class ExternalPreprocessorSource;
44class FileManager;
45class FileEntry;
46class HeaderSearch;
47class PragmaNamespace;
48class PragmaHandler;
49class CommentHandler;
50class ScratchBuffer;
51class TargetInfo;
52class PPCallbacks;
53class CodeCompletionHandler;
54class DirectoryLookup;
55class PreprocessingRecord;
56class ModuleLoader;
57
58/// Preprocessor - This object engages in a tight little dance with the lexer to
59/// efficiently preprocess tokens.  Lexers know only about tokens within a
60/// single source file, and don't know anything about preprocessor-level issues
61/// like the #include stack, token expansion, etc.
62///
63class Preprocessor : public RefCountedBase<Preprocessor> {
64  DiagnosticsEngine        *Diags;
65  LangOptions       &Features;
66  const TargetInfo  *Target;
67  FileManager       &FileMgr;
68  SourceManager     &SourceMgr;
69  ScratchBuffer     *ScratchBuf;
70  HeaderSearch      &HeaderInfo;
71  ModuleLoader      &TheModuleLoader;
72
73  /// \brief External source of macros.
74  ExternalPreprocessorSource *ExternalSource;
75
76
77  /// PTH - An optional PTHManager object used for getting tokens from
78  ///  a token cache rather than lexing the original source file.
79  OwningPtr<PTHManager> PTH;
80
81  /// BP - A BumpPtrAllocator object used to quickly allocate and release
82  ///  objects internal to the Preprocessor.
83  llvm::BumpPtrAllocator BP;
84
85  /// Identifiers for builtin macros and other builtins.
86  IdentifierInfo *Ident__LINE__, *Ident__FILE__;   // __LINE__, __FILE__
87  IdentifierInfo *Ident__DATE__, *Ident__TIME__;   // __DATE__, __TIME__
88  IdentifierInfo *Ident__INCLUDE_LEVEL__;          // __INCLUDE_LEVEL__
89  IdentifierInfo *Ident__BASE_FILE__;              // __BASE_FILE__
90  IdentifierInfo *Ident__TIMESTAMP__;              // __TIMESTAMP__
91  IdentifierInfo *Ident__COUNTER__;                // __COUNTER__
92  IdentifierInfo *Ident_Pragma, *Ident__pragma;    // _Pragma, __pragma
93  IdentifierInfo *Ident__VA_ARGS__;                // __VA_ARGS__
94  IdentifierInfo *Ident__has_feature;              // __has_feature
95  IdentifierInfo *Ident__has_extension;            // __has_extension
96  IdentifierInfo *Ident__has_builtin;              // __has_builtin
97  IdentifierInfo *Ident__has_attribute;            // __has_attribute
98  IdentifierInfo *Ident__has_include;              // __has_include
99  IdentifierInfo *Ident__has_include_next;         // __has_include_next
100  IdentifierInfo *Ident__has_warning;              // __has_warning
101
102  SourceLocation DATELoc, TIMELoc;
103  unsigned CounterValue;  // Next __COUNTER__ value.
104
105  enum {
106    /// MaxAllowedIncludeStackDepth - Maximum depth of #includes.
107    MaxAllowedIncludeStackDepth = 200
108  };
109
110  // State that is set before the preprocessor begins.
111  bool KeepComments : 1;
112  bool KeepMacroComments : 1;
113  bool SuppressIncludeNotFoundError : 1;
114
115  // State that changes while the preprocessor runs:
116  bool InMacroArgs : 1;            // True if parsing fn macro invocation args.
117
118  /// Whether the preprocessor owns the header search object.
119  bool OwnsHeaderSearch : 1;
120
121  /// DisableMacroExpansion - True if macro expansion is disabled.
122  bool DisableMacroExpansion : 1;
123
124  /// \brief Whether we have already loaded macros from the external source.
125  mutable bool ReadMacrosFromExternalSource : 1;
126
127  /// Identifiers - This is mapping/lookup information for all identifiers in
128  /// the program, including program keywords.
129  mutable IdentifierTable Identifiers;
130
131  /// Selectors - This table contains all the selectors in the program. Unlike
132  /// IdentifierTable above, this table *isn't* populated by the preprocessor.
133  /// It is declared/expanded here because its role/lifetime is conceptually
134  /// similar to that of the IdentifierTable. In addition, the current control
135  /// flow (in clang::ParseAST()) makes it convenient to put it here.
136  /// FIXME: Make sure the lifetime of Identifiers/Selectors *isn't* tied to
137  /// the lifetime of the preprocessor.
138  SelectorTable Selectors;
139
140  /// BuiltinInfo - Information about builtins.
141  Builtin::Context BuiltinInfo;
142
143  /// PragmaHandlers - This tracks all of the pragmas that the client registered
144  /// with this preprocessor.
145  PragmaNamespace *PragmaHandlers;
146
147  /// \brief Tracks all of the comment handlers that the client registered
148  /// with this preprocessor.
149  std::vector<CommentHandler *> CommentHandlers;
150
151  /// \brief The code-completion handler.
152  CodeCompletionHandler *CodeComplete;
153
154  /// \brief The file that we're performing code-completion for, if any.
155  const FileEntry *CodeCompletionFile;
156
157  /// \brief The offset in file for the code-completion point.
158  unsigned CodeCompletionOffset;
159
160  /// \brief The location for the code-completion point. This gets instantiated
161  /// when the CodeCompletionFile gets #include'ed for preprocessing.
162  SourceLocation CodeCompletionLoc;
163
164  /// \brief The start location for the file of the code-completion point.
165  /// This gets instantiated when the CodeCompletionFile gets #include'ed
166  /// for preprocessing.
167  SourceLocation CodeCompletionFileLoc;
168
169  /// \brief The source location of the 'import' contextual keyword we just
170  /// lexed, if any.
171  SourceLocation ModuleImportLoc;
172
173  /// \brief The module import path that we're currently processing.
174  llvm::SmallVector<std::pair<IdentifierInfo *, SourceLocation>, 2>
175    ModuleImportPath;
176
177  /// \brief Whether the module import expects an identifier next. Otherwise,
178  /// it expects a '.' or ';'.
179  bool ModuleImportExpectsIdentifier;
180
181  /// \brief The source location of the currently-active
182  /// #pragma clang arc_cf_code_audited begin.
183  SourceLocation PragmaARCCFCodeAuditedLoc;
184
185  /// \brief True if we hit the code-completion point.
186  bool CodeCompletionReached;
187
188  /// \brief The number of bytes that we will initially skip when entering the
189  /// main file, which is used when loading a precompiled preamble, along
190  /// with a flag that indicates whether skipping this number of bytes will
191  /// place the lexer at the start of a line.
192  std::pair<unsigned, bool> SkipMainFilePreamble;
193
194  /// CurLexer - This is the current top of the stack that we're lexing from if
195  /// not expanding a macro and we are lexing directly from source code.
196  ///  Only one of CurLexer, CurPTHLexer, or CurTokenLexer will be non-null.
197  OwningPtr<Lexer> CurLexer;
198
199  /// CurPTHLexer - This is the current top of stack that we're lexing from if
200  ///  not expanding from a macro and we are lexing from a PTH cache.
201  ///  Only one of CurLexer, CurPTHLexer, or CurTokenLexer will be non-null.
202  OwningPtr<PTHLexer> CurPTHLexer;
203
204  /// CurPPLexer - This is the current top of the stack that we're lexing from
205  ///  if not expanding a macro.  This is an alias for either CurLexer or
206  ///  CurPTHLexer.
207  PreprocessorLexer *CurPPLexer;
208
209  /// CurDirLookup - The DirectoryLookup structure used to find the current
210  /// FileEntry, if CurLexer is non-null and if applicable.  This allows us to
211  /// implement #include_next and find directory-specific properties.
212  const DirectoryLookup *CurDirLookup;
213
214  /// CurTokenLexer - This is the current macro we are expanding, if we are
215  /// expanding a macro.  One of CurLexer and CurTokenLexer must be null.
216  OwningPtr<TokenLexer> CurTokenLexer;
217
218  /// \brief The kind of lexer we're currently working with.
219  enum CurLexerKind {
220    CLK_Lexer,
221    CLK_PTHLexer,
222    CLK_TokenLexer,
223    CLK_CachingLexer,
224    CLK_LexAfterModuleImport
225  } CurLexerKind;
226
227  /// IncludeMacroStack - This keeps track of the stack of files currently
228  /// #included, and macros currently being expanded from, not counting
229  /// CurLexer/CurTokenLexer.
230  struct IncludeStackInfo {
231    enum CurLexerKind     CurLexerKind;
232    Lexer                 *TheLexer;
233    PTHLexer              *ThePTHLexer;
234    PreprocessorLexer     *ThePPLexer;
235    TokenLexer            *TheTokenLexer;
236    const DirectoryLookup *TheDirLookup;
237
238    IncludeStackInfo(enum CurLexerKind K, Lexer *L, PTHLexer* P,
239                     PreprocessorLexer* PPL,
240                     TokenLexer* TL, const DirectoryLookup *D)
241      : CurLexerKind(K), TheLexer(L), ThePTHLexer(P), ThePPLexer(PPL),
242        TheTokenLexer(TL), TheDirLookup(D) {}
243  };
244  std::vector<IncludeStackInfo> IncludeMacroStack;
245
246  /// Callbacks - These are actions invoked when some preprocessor activity is
247  /// encountered (e.g. a file is #included, etc).
248  PPCallbacks *Callbacks;
249
250  /// Macros - For each IdentifierInfo with 'HasMacro' set, we keep a mapping
251  /// to the actual definition of the macro.
252  llvm::DenseMap<IdentifierInfo*, MacroInfo*> Macros;
253
254  /// \brief Macros that we want to warn about because they are not used by the
255  /// end of the translation unit; we store just their SourceLocations instead
256  /// of something like MacroInfo*. The benefit of this is that when we are
257  /// deserializing from PCH, we don't need to deserialize identifiers & macros
258  /// just so that we can report that they are unused; we just warn using
259  /// the SourceLocations of this set (which will be filled in by the ASTReader).
260  /// We are using a SmallPtrSet instead of a vector for faster removal.
261  typedef llvm::SmallPtrSet<SourceLocation, 32> WarnUnusedMacroLocsTy;
262  WarnUnusedMacroLocsTy WarnUnusedMacroLocs;
263
264  /// MacroArgCache - This is a "freelist" of MacroArg objects that can be
265  /// reused for quick allocation.
266  MacroArgs *MacroArgCache;
267  friend class MacroArgs;
268
269  /// PragmaPushMacroInfo - For each IdentifierInfo used in a #pragma
270  /// push_macro directive, we keep a MacroInfo stack used to restore
271  /// previous macro value.
272  llvm::DenseMap<IdentifierInfo*, std::vector<MacroInfo*> > PragmaPushMacroInfo;
273
274  // Various statistics we track for performance analysis.
275  unsigned NumDirectives, NumIncluded, NumDefined, NumUndefined, NumPragma;
276  unsigned NumIf, NumElse, NumEndif;
277  unsigned NumEnteredSourceFiles, MaxIncludeStackDepth;
278  unsigned NumMacroExpanded, NumFnMacroExpanded, NumBuiltinMacroExpanded;
279  unsigned NumFastMacroExpanded, NumTokenPaste, NumFastTokenPaste;
280  unsigned NumSkipped;
281
282  /// Predefines - This string is the predefined macros that preprocessor
283  /// should use from the command line etc.
284  std::string Predefines;
285
286  /// TokenLexerCache - Cache macro expanders to reduce malloc traffic.
287  enum { TokenLexerCacheSize = 8 };
288  unsigned NumCachedTokenLexers;
289  TokenLexer *TokenLexerCache[TokenLexerCacheSize];
290
291  /// \brief Keeps macro-expanded tokens for TokenLexers.
292  ///
293  /// Works like a stack; a TokenLexer adds the macro-expanded tokens that it is
294  /// going to lex to the cache, and when it finishes, those tokens are removed
295  /// from the end of the cache.
296  SmallVector<Token, 16> MacroExpandedTokens;
297  std::vector<std::pair<TokenLexer *, size_t> > MacroExpandingLexersStack;
298
299  /// \brief A record of the macro definitions and expansions that
300  /// occurred during preprocessing.
301  ///
302  /// This is an optional side structure that can be enabled with
303  /// \c createPreprocessingRecord() prior to preprocessing.
304  PreprocessingRecord *Record;
305
306private:  // Cached tokens state.
307  typedef SmallVector<Token, 1> CachedTokensTy;
308
309  /// CachedTokens - Cached tokens are stored here when we do backtracking or
310  /// lookahead. They are "lexed" by the CachingLex() method.
311  CachedTokensTy CachedTokens;
312
313  /// CachedLexPos - The position of the cached token that CachingLex() should
314  /// "lex" next. If it points beyond the CachedTokens vector, it means that
315  /// a normal Lex() should be invoked.
316  CachedTokensTy::size_type CachedLexPos;
317
318  /// BacktrackPositions - Stack of backtrack positions, allowing nested
319  /// backtracks. The EnableBacktrackAtThisPos() method pushes a position to
320  /// indicate where CachedLexPos should be set when the BackTrack() method is
321  /// invoked (at which point the last position is popped).
322  std::vector<CachedTokensTy::size_type> BacktrackPositions;
323
324  struct MacroInfoChain {
325    MacroInfo MI;
326    MacroInfoChain *Next;
327    MacroInfoChain *Prev;
328  };
329
330  /// MacroInfos are managed as a chain for easy disposal.  This is the head
331  /// of that list.
332  MacroInfoChain *MIChainHead;
333
334  /// MICache - A "freelist" of MacroInfo objects that can be reused for quick
335  /// allocation.
336  MacroInfoChain *MICache;
337
338  MacroInfo *getInfoForMacro(IdentifierInfo *II) const;
339
340public:
341  Preprocessor(DiagnosticsEngine &diags, LangOptions &opts,
342               const TargetInfo *target,
343               SourceManager &SM, HeaderSearch &Headers,
344               ModuleLoader &TheModuleLoader,
345               IdentifierInfoLookup *IILookup = 0,
346               bool OwnsHeaderSearch = false,
347               bool DelayInitialization = false);
348
349  ~Preprocessor();
350
351  /// \brief Initialize the preprocessor, if the constructor did not already
352  /// perform the initialization.
353  ///
354  /// \param Target Information about the target.
355  void Initialize(const TargetInfo &Target);
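  // Illustrative sketch (not part of the interface): constructing a Preprocessor
  // with delayed initialization and supplying the target afterwards.  Diags,
  // Opts, SM, HS, ML, and Target stand for already-constructed objects of the
  // corresponding parameter types.
  //
  //   Preprocessor PP(Diags, Opts, /*target=*/0, SM, HS, ML,
  //                   /*IILookup=*/0, /*OwnsHeaderSearch=*/false,
  //                   /*DelayInitialization=*/true);
  //   PP.Initialize(Target);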
356
357  DiagnosticsEngine &getDiagnostics() const { return *Diags; }
358  void setDiagnostics(DiagnosticsEngine &D) { Diags = &D; }
359
360  const LangOptions &getLangOptions() const { return Features; }
361  const TargetInfo &getTargetInfo() const { return *Target; }
362  FileManager &getFileManager() const { return FileMgr; }
363  SourceManager &getSourceManager() const { return SourceMgr; }
364  HeaderSearch &getHeaderSearchInfo() const { return HeaderInfo; }
365
366  IdentifierTable &getIdentifierTable() { return Identifiers; }
367  SelectorTable &getSelectorTable() { return Selectors; }
368  Builtin::Context &getBuiltinInfo() { return BuiltinInfo; }
369  llvm::BumpPtrAllocator &getPreprocessorAllocator() { return BP; }
370
371  void setPTHManager(PTHManager* pm);
372
373  PTHManager *getPTHManager() { return PTH.get(); }
374
375  void setExternalSource(ExternalPreprocessorSource *Source) {
376    ExternalSource = Source;
377  }
378
379  ExternalPreprocessorSource *getExternalSource() const {
380    return ExternalSource;
381  }
382
383  /// \brief Retrieve the module loader associated with this preprocessor.
384  ModuleLoader &getModuleLoader() const { return TheModuleLoader; }
385
386  /// SetCommentRetentionState - Control whether or not the preprocessor retains
387  /// comments in output.
388  void SetCommentRetentionState(bool KeepComments, bool KeepMacroComments) {
389    this->KeepComments = KeepComments | KeepMacroComments;
390    this->KeepMacroComments = KeepMacroComments;
391  }
392
393  bool getCommentRetentionState() const { return KeepComments; }
394
395  void SetSuppressIncludeNotFoundError(bool Suppress) {
396    SuppressIncludeNotFoundError = Suppress;
397  }
398
399  bool GetSuppressIncludeNotFoundError() {
400    return SuppressIncludeNotFoundError;
401  }
402
403  /// isCurrentLexer - Return true if we are lexing directly from the specified
404  /// lexer.
405  bool isCurrentLexer(const PreprocessorLexer *L) const {
406    return CurPPLexer == L;
407  }
408
409  /// getCurrentLexer - Return the current lexer being lexed from.  Note
410  /// that this ignores any potentially active macro expansions and _Pragma
411  /// expansions going on at the time.
412  PreprocessorLexer *getCurrentLexer() const { return CurPPLexer; }
413
414  /// getCurrentFileLexer - Return the current file lexer being lexed from.
415  /// Note that this ignores any potentially active macro expansions and _Pragma
416  /// expansions going on at the time.
417  PreprocessorLexer *getCurrentFileLexer() const;
418
419  /// getPPCallbacks/addPPCallbacks - Accessors for preprocessor callbacks.
420  /// Note that this class takes ownership of any PPCallbacks object given to
421  /// it.
422  PPCallbacks *getPPCallbacks() const { return Callbacks; }
423  void addPPCallbacks(PPCallbacks *C) {
424    if (Callbacks)
425      C = new PPChainedCallbacks(C, Callbacks);
426    Callbacks = C;
427  }
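  // Illustrative sketch: registering a callback object.  MyPPCallbacks stands
  // for a hypothetical subclass of PPCallbacks; ownership of the object passes
  // to the Preprocessor (chained with any previously installed callbacks).
  //
  //   PP.addPPCallbacks(new MyPPCallbacks());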
428
429  /// getMacroInfo - Given an identifier, return the MacroInfo it is #defined to
430  /// or null if it isn't #define'd.
431  MacroInfo *getMacroInfo(IdentifierInfo *II) const {
432    if (!II->hasMacroDefinition())
433      return 0;
434
435    return getInfoForMacro(II);
436  }
437
438  /// setMacroInfo - Specify a macro for this identifier.
439  ///
440  void setMacroInfo(IdentifierInfo *II, MacroInfo *MI,
441                    bool LoadedFromAST = false);
442
443  /// macro_iterator/macro_begin/macro_end - This allows you to walk the current
444  /// state of the macro table.  This visits every currently-defined macro.
445  typedef llvm::DenseMap<IdentifierInfo*,
446                         MacroInfo*>::const_iterator macro_iterator;
447  macro_iterator macro_begin(bool IncludeExternalMacros = true) const;
448  macro_iterator macro_end(bool IncludeExternalMacros = true) const;
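  // Illustrative sketch: walking every currently-defined macro through the
  // iterator interface above ('PP' is assumed to be a Preprocessor instance).
  //
  //   for (Preprocessor::macro_iterator I = PP.macro_begin(),
  //                                     E = PP.macro_end(); I != E; ++I) {
  //     IdentifierInfo *Name = I->first;
  //     MacroInfo *MI = I->second;
  //     // ... inspect Name/MI ...
  //   }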
449
450  const std::string &getPredefines() const { return Predefines; }
451  /// setPredefines - Set the predefines for this Preprocessor.  These
452  /// predefines are automatically injected when parsing the main file.
453  void setPredefines(const char *P) { Predefines = P; }
454  void setPredefines(const std::string &P) { Predefines = P; }
455
456  /// getIdentifierInfo - Return information about the specified preprocessor
457  /// identifier token.  The identifier is added to the IdentifierTable if it is
458  /// not already present, so a valid IdentifierInfo is always returned.
461  IdentifierInfo *getIdentifierInfo(StringRef Name) const {
462    return &Identifiers.get(Name);
463  }
464
465  /// AddPragmaHandler - Add the specified pragma handler to the preprocessor.
466  /// If 'Namespace' is non-empty, then it is a token required to exist on the
467  /// pragma line before the pragma string starts, e.g. "STDC" or "GCC".
468  void AddPragmaHandler(StringRef Namespace, PragmaHandler *Handler);
469  void AddPragmaHandler(PragmaHandler *Handler) {
470    AddPragmaHandler(StringRef(), Handler);
471  }
472
473  /// RemovePragmaHandler - Remove the specific pragma handler from
474  /// the preprocessor. If \arg Namespace is non-empty, then it should
475  /// be the namespace that \arg Handler was added to. It is an error
476  /// to remove a handler that has not been registered.
477  void RemovePragmaHandler(StringRef Namespace, PragmaHandler *Handler);
478  void RemovePragmaHandler(PragmaHandler *Handler) {
479    RemovePragmaHandler(StringRef(), Handler);
480  }
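  // Illustrative sketch: registering and later removing a custom handler for
  // "#pragma myns mypragma".  MyPragmaHandler stands for a hypothetical
  // subclass of PragmaHandler.
  //
  //   PragmaHandler *H = new MyPragmaHandler("mypragma");
  //   PP.AddPragmaHandler("myns", H);
  //   // ... preprocess ...
  //   PP.RemovePragmaHandler("myns", H);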
481
482  /// \brief Add the specified comment handler to the preprocessor.
483  void AddCommentHandler(CommentHandler *Handler);
484
485  /// \brief Remove the specified comment handler.
486  ///
487  /// It is an error to remove a handler that has not been registered.
488  void RemoveCommentHandler(CommentHandler *Handler);
489
490  /// \brief Set the code completion handler to the given object.
491  void setCodeCompletionHandler(CodeCompletionHandler &Handler) {
492    CodeComplete = &Handler;
493  }
494
495  /// \brief Retrieve the current code-completion handler.
496  CodeCompletionHandler *getCodeCompletionHandler() const {
497    return CodeComplete;
498  }
499
500  /// \brief Clear out the code completion handler.
501  void clearCodeCompletionHandler() {
502    CodeComplete = 0;
503  }
504
505  /// \brief Hook used by the lexer to invoke the "natural language" code
506  /// completion point.
507  void CodeCompleteNaturalLanguage();
508
509  /// \brief Retrieve the preprocessing record, or NULL if there is no
510  /// preprocessing record.
511  PreprocessingRecord *getPreprocessingRecord() const { return Record; }
512
513  /// \brief Create a new preprocessing record, which will keep track of
514  /// all macro expansions, macro definitions, etc.
515  void createPreprocessingRecord();
516
517  /// EnterMainSourceFile - Enter the main source file (as determined by the
518  /// SourceManager), which implicitly adds the builtin defines etc.
519  void EnterMainSourceFile();
520
521  /// EndSourceFile - Inform the preprocessor callbacks that processing is
522  /// complete.
523  void EndSourceFile();
524
525  /// EnterSourceFile - Add a source file to the top of the include stack and
526  /// start lexing tokens from it instead of the current buffer.  If an error
527  /// occurs, emit a diagnostic and don't enter the file.
528  void EnterSourceFile(FileID CurFileID, const DirectoryLookup *Dir,
529                       SourceLocation Loc);
530
531  /// EnterMacro - Add a Macro to the top of the include stack and start lexing
532  /// tokens from it instead of the current buffer.  Args specifies the
533  /// tokens input to a function-like macro.
534  ///
535  /// ILEnd specifies the location of the ')' for a function-like macro or the
536  /// identifier for an object-like macro.
537  void EnterMacro(Token &Identifier, SourceLocation ILEnd, MacroArgs *Args);
538
539  /// EnterTokenStream - Add a "macro" context to the top of the include stack,
540  /// which will cause the lexer to start returning the specified tokens.
541  ///
542  /// If DisableMacroExpansion is true, tokens lexed from the token stream will
543  /// not be subject to further macro expansion.  Otherwise, these tokens will
544  /// be re-macro-expanded when/if expansion is enabled.
545  ///
546  /// If OwnsTokens is false, this method assumes that the specified stream of
547  /// tokens has a permanent owner somewhere, so they do not need to be copied.
548  /// If it is true, it assumes the array of tokens is allocated with new[] and
549  /// must be freed.
550  ///
551  void EnterTokenStream(const Token *Toks, unsigned NumToks,
552                        bool DisableMacroExpansion, bool OwnsTokens);
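  // Illustrative sketch: re-injecting a previously recorded token sequence
  // (here 'Toks', assumed to be a SmallVector<Token, 4>) so it is lexed next,
  // without re-expanding macros and without transferring ownership.
  //
  //   PP.EnterTokenStream(Toks.data(), Toks.size(),
  //                       /*DisableMacroExpansion=*/true, /*OwnsTokens=*/false);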
553
554  /// RemoveTopOfLexerStack - Pop the current lexer/macro exp off the top of the
555  /// lexer stack.  This should only be used in situations where the current
556  /// state of the top-of-stack lexer is known.
557  void RemoveTopOfLexerStack();
558
559  /// EnableBacktrackAtThisPos - From the point that this method is called, and
560  /// until CommitBacktrackedTokens() or Backtrack() is called, the Preprocessor
561  /// keeps track of the lexed tokens so that a subsequent Backtrack() call will
562  /// make the Preprocessor re-lex the same tokens.
563  ///
564  /// Nested backtracks are allowed, meaning that EnableBacktrackAtThisPos can
565  /// be called multiple times and CommitBacktrackedTokens/Backtrack calls will
566  /// be combined with the EnableBacktrackAtThisPos calls in reverse order.
567  ///
568  /// NOTE: *DO NOT* forget to call either CommitBacktrackedTokens or Backtrack
569  /// at some point after EnableBacktrackAtThisPos. If you don't, caching of
570  /// tokens will continue indefinitely.
571  ///
572  void EnableBacktrackAtThisPos();
573
574  /// CommitBacktrackedTokens - Disable the last EnableBacktrackAtThisPos call.
575  void CommitBacktrackedTokens();
576
577  /// Backtrack - Make Preprocessor re-lex the tokens that were lexed since
578  /// EnableBacktrackAtThisPos() was previously called.
579  void Backtrack();
580
581  /// isBacktrackEnabled - True if EnableBacktrackAtThisPos() was called and
582  /// caching of tokens is on.
583  bool isBacktrackEnabled() const { return !BacktrackPositions.empty(); }
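  // Illustrative sketch: speculative lexing with the backtracking interface.
  // Every EnableBacktrackAtThisPos() call must be matched by either Backtrack()
  // or CommitBacktrackedTokens().
  //
  //   PP.EnableBacktrackAtThisPos();
  //   Token Tok;
  //   PP.Lex(Tok);
  //   if (Tok.is(tok::l_paren))
  //     PP.CommitBacktrackedTokens();   // Keep the tokens we just lexed.
  //   else
  //     PP.Backtrack();                 // Re-lex the same tokens later.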
584
585  /// Lex - To lex a token from the preprocessor, just pull a token from the
586  /// current lexer or macro object.
587  void Lex(Token &Result) {
588    switch (CurLexerKind) {
589    case CLK_Lexer: CurLexer->Lex(Result); break;
590    case CLK_PTHLexer: CurPTHLexer->Lex(Result); break;
591    case CLK_TokenLexer: CurTokenLexer->Lex(Result); break;
592    case CLK_CachingLexer: CachingLex(Result); break;
593    case CLK_LexAfterModuleImport: LexAfterModuleImport(Result); break;
594    }
595  }
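  // Illustrative sketch: the typical client loop that drains all tokens from a
  // preprocessor whose main file has already been entered.
  //
  //   PP.EnterMainSourceFile();
  //   Token Tok;
  //   do {
  //     PP.Lex(Tok);
  //   } while (Tok.isNot(tok::eof));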
596
597  void LexAfterModuleImport(Token &Result);
598
599  /// LexNonComment - Lex a token.  If it's a comment, keep lexing until we get
600  /// something not a comment.  This is useful in -E -C mode where comments
601  /// would foul up preprocessor directive handling.
602  void LexNonComment(Token &Result) {
603    do
604      Lex(Result);
605    while (Result.getKind() == tok::comment);
606  }
607
608  /// LexUnexpandedToken - This is just like Lex, but this disables macro
609  /// expansion of identifier tokens.
610  void LexUnexpandedToken(Token &Result) {
611    // Disable macro expansion.
612    bool OldVal = DisableMacroExpansion;
613    DisableMacroExpansion = true;
614    // Lex the token.
615    Lex(Result);
616
617    // Reenable it.
618    DisableMacroExpansion = OldVal;
619  }
620
621  /// LexUnexpandedNonComment - Like LexNonComment, but this disables macro
622  /// expansion of identifier tokens.
623  void LexUnexpandedNonComment(Token &Result) {
624    do
625      LexUnexpandedToken(Result);
626    while (Result.getKind() == tok::comment);
627  }
628
629  /// LookAhead - This peeks ahead N tokens and returns that token without
630  /// consuming any tokens.  LookAhead(0) returns the next token that would be
631  /// returned by Lex(), LookAhead(1) returns the token after it, etc.  This
632  /// returns normal tokens after phase 5.  As such, it is equivalent to using
633  /// 'Lex', not 'LexUnexpandedToken'.
634  const Token &LookAhead(unsigned N) {
635    if (CachedLexPos + N < CachedTokens.size())
636      return CachedTokens[CachedLexPos+N];
637    else
638      return PeekAhead(N+1);
639  }
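  // Illustrative sketch: peeking at the next two tokens without consuming them.
  //
  //   const Token &Next = PP.LookAhead(0);
  //   const Token &AfterNext = PP.LookAhead(1);
  //   if (Next.is(tok::identifier) && AfterNext.is(tok::colon)) {
  //     // ... handle without having consumed anything ...
  //   }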
640
641  /// RevertCachedTokens - When backtracking is enabled and tokens are cached,
642  /// this allows reverting a specific number of tokens.
643  /// Note that the number of tokens being reverted should be up to the last
644  /// backtrack position, not more.
645  void RevertCachedTokens(unsigned N) {
646    assert(isBacktrackEnabled() &&
647           "Should only be called when tokens are cached for backtracking");
648    assert(signed(CachedLexPos) - signed(N) >= signed(BacktrackPositions.back())
649         && "Should revert tokens up to the last backtrack position, not more");
650    assert(signed(CachedLexPos) - signed(N) >= 0 &&
651           "Corrupted backtrack positions ?");
652    CachedLexPos -= N;
653  }
654
655  /// EnterToken - Enters a token in the token stream to be lexed next. If
656  /// BackTrack() is called afterwards, the token will remain at the insertion
657  /// point.
658  void EnterToken(const Token &Tok) {
659    EnterCachingLexMode();
660    CachedTokens.insert(CachedTokens.begin()+CachedLexPos, Tok);
661  }
662
663  /// AnnotateCachedTokens - We notify the Preprocessor that if it is caching
664  /// tokens (because backtrack is enabled) it should replace the most recent
665  /// cached tokens with the given annotation token. This function has no effect
666  /// if backtracking is not enabled.
667  ///
668  /// Note that this function is purely an optimization: it ensures that the
669  /// cached tokens don't get re-parsed and re-resolved after a backtrack is
670  /// invoked.
671  void AnnotateCachedTokens(const Token &Tok) {
672    assert(Tok.isAnnotation() && "Expected annotation token");
673    if (CachedLexPos != 0 && isBacktrackEnabled())
674      AnnotatePreviousCachedTokens(Tok);
675  }
676
677  /// \brief Replace the last token with an annotation token.
678  ///
679  /// Like AnnotateCachedTokens(), this routine replaces an
680  /// already-parsed (and resolved) token with an annotation
681  /// token. However, this routine only replaces the last token with
682  /// the annotation token; it does not affect any other cached
683  /// tokens. This function has no effect if backtracking is not
684  /// enabled.
685  void ReplaceLastTokenWithAnnotation(const Token &Tok) {
686    assert(Tok.isAnnotation() && "Expected annotation token");
687    if (CachedLexPos != 0 && isBacktrackEnabled())
688      CachedTokens[CachedLexPos-1] = Tok;
689  }
690
691  /// \brief Recompute the current lexer kind based on the CurLexer/CurPTHLexer/
692  /// CurTokenLexer pointers.
693  void recomputeCurLexerKind();
694
695  /// \brief Specify the point at which code-completion will be performed.
696  ///
697  /// \param File the file in which code completion should occur. If
698  /// this file is included multiple times, code-completion will
699  /// perform completion the first time it is included. If NULL, this
700  /// function clears out the code-completion point.
701  ///
702  /// \param Line the line at which code completion should occur
703  /// (1-based).
704  ///
705  /// \param Column the column at which code completion should occur
706  /// (1-based).
707  ///
708  /// \returns true if an error occurred, false otherwise.
709  bool SetCodeCompletionPoint(const FileEntry *File,
710                              unsigned Line, unsigned Column);
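  // Illustrative sketch: enabling code completion at a given position.  The
  // file name, line, and column are placeholders supplied by the client.
  //
  //   if (const FileEntry *File = PP.getFileManager().getFile("input.c"))
  //     PP.SetCodeCompletionPoint(File, /*Line=*/10, /*Column=*/4);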
711
712  /// \brief Determine if we are performing code completion.
713  bool isCodeCompletionEnabled() const { return CodeCompletionFile != 0; }
714
715  /// \brief Returns the location of the code-completion point.
716  /// Returns an invalid location if code-completion is not enabled or the file
717  /// containing the code-completion point has not been lexed yet.
718  SourceLocation getCodeCompletionLoc() const { return CodeCompletionLoc; }
719
720  /// \brief Returns the start location of the file of code-completion point.
721  /// Returns an invalid location if code-completion is not enabled or the file
722  /// containing the code-completion point has not been lexed yet.
723  SourceLocation getCodeCompletionFileLoc() const {
724    return CodeCompletionFileLoc;
725  }
726
727  /// \brief Returns true if code-completion is enabled and we have hit the
728  /// code-completion point.
729  bool isCodeCompletionReached() const { return CodeCompletionReached; }
730
731  /// \brief Note that we hit the code-completion point.
732  void setCodeCompletionReached() {
733    assert(isCodeCompletionEnabled() && "Code-completion not enabled!");
734    CodeCompletionReached = true;
735    // Silence any diagnostics that occur after we hit the code-completion.
736    getDiagnostics().setSuppressAllDiagnostics(true);
737  }
738
739  /// \brief The location of the currently-active #pragma clang
740  /// arc_cf_code_audited begin.  Returns an invalid location if there
741  /// is no such pragma active.
742  SourceLocation getPragmaARCCFCodeAuditedLoc() const {
743    return PragmaARCCFCodeAuditedLoc;
744  }
745
746  /// \brief Set the location of the currently-active #pragma clang
747  /// arc_cf_code_audited begin.  An invalid location ends the pragma.
748  void setPragmaARCCFCodeAuditedLoc(SourceLocation Loc) {
749    PragmaARCCFCodeAuditedLoc = Loc;
750  }
751
752  /// \brief Instruct the preprocessor to skip part of the main
753  /// source file.
754  ///
755  /// \param Bytes The number of bytes in the preamble to skip.
756  ///
757  /// \param StartOfLine Whether skipping these bytes puts the lexer at the
758  /// start of a line.
759  void setSkipMainFilePreamble(unsigned Bytes, bool StartOfLine) {
760    SkipMainFilePreamble.first = Bytes;
761    SkipMainFilePreamble.second = StartOfLine;
762  }
763
764  /// Diag - Forwarding function for diagnostics.  This emits a diagnostic at
765  /// the specified source location (or the given Token's location), forwarding
766  /// it to the associated DiagnosticsEngine for rendering.
767  DiagnosticBuilder Diag(SourceLocation Loc, unsigned DiagID) const {
768    return Diags->Report(Loc, DiagID);
769  }
770
771  DiagnosticBuilder Diag(const Token &Tok, unsigned DiagID) const {
772    return Diags->Report(Tok.getLocation(), DiagID);
773  }
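  // Illustrative sketch: emitting a diagnostic at a token's location.  MyDiagID
  // stands for a real diagnostic ID from the generated Diagnostic*.inc tables;
  // extra arguments are streamed into the returned DiagnosticBuilder.
  //
  //   PP.Diag(Tok, MyDiagID) << PP.getSpelling(Tok);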
774
775  /// getSpelling() - Return the 'spelling' of the token at the given
776  /// location; does not go up to the spelling location or down to the
777  /// expansion location.
778  ///
779  /// \param buffer A buffer which will be used only if the token requires
780  ///   "cleaning", e.g. if it contains trigraphs or escaped newlines
781  /// \param invalid If non-null, will be set \c true if an error occurs.
782  StringRef getSpelling(SourceLocation loc,
783                              SmallVectorImpl<char> &buffer,
784                              bool *invalid = 0) const {
785    return Lexer::getSpelling(loc, buffer, SourceMgr, Features, invalid);
786  }
787
788  /// getSpelling() - Return the 'spelling' of the Tok token.  The spelling of a
789  /// token is the characters used to represent the token in the source file
790  /// after trigraph expansion and escaped-newline folding.  In particular, this
791  /// wants to get the true, uncanonicalized spelling of things like digraphs,
792  /// UCNs, etc.
793  ///
794  /// \param Invalid If non-null, will be set \c true if an error occurs.
795  std::string getSpelling(const Token &Tok, bool *Invalid = 0) const {
796    return Lexer::getSpelling(Tok, SourceMgr, Features, Invalid);
797  }
798
799  /// getSpelling - This method is used to get the spelling of a token into a
800  /// preallocated buffer, instead of as an std::string.  The caller is required
801  /// to allocate enough space for the token, which is guaranteed to be at least
802  /// Tok.getLength() bytes long.  The length of the actual result is returned.
803  ///
804  /// Note that this method may do two possible things: it may either fill in
805  /// the buffer specified with characters, or it may *change the input pointer*
806  /// to point to a constant buffer with the data already in it (avoiding a
807  /// copy).  The caller is not allowed to modify the returned buffer pointer
808  /// if an internal buffer is returned.
809  unsigned getSpelling(const Token &Tok, const char *&Buffer,
810                       bool *Invalid = 0) const {
811    return Lexer::getSpelling(Tok, Buffer, SourceMgr, Features, Invalid);
812  }
813
814  /// getSpelling - This method is used to get the spelling of a token into a
815  /// SmallVector. Note that the returned StringRef may not point to the
816  /// supplied buffer if a copy can be avoided.
817  StringRef getSpelling(const Token &Tok,
818                        SmallVectorImpl<char> &Buffer,
819                        bool *Invalid = 0) const;
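  // Illustrative sketch: getting a token's spelling without a heap allocation in
  // the common case; the returned StringRef may point into the source buffer
  // rather than into 'Buffer'.
  //
  //   SmallString<64> Buffer;
  //   bool Invalid = false;
  //   StringRef Spelling = PP.getSpelling(Tok, Buffer, &Invalid);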
820
821  /// getSpellingOfSingleCharacterNumericConstant - Tok is a numeric constant
822  /// with length 1, return the character.
823  char getSpellingOfSingleCharacterNumericConstant(const Token &Tok,
824                                                   bool *Invalid = 0) const {
825    assert(Tok.is(tok::numeric_constant) &&
826           Tok.getLength() == 1 && "Called on unsupported token");
827    assert(!Tok.needsCleaning() && "Token can't need cleaning with length 1");
828
829    // If the token is carrying a literal data pointer, just use it.
830    if (const char *D = Tok.getLiteralData())
831      return *D;
832
833    // Otherwise, fall back on getCharacterData, which is slower, but always
834    // works.
835    return *SourceMgr.getCharacterData(Tok.getLocation(), Invalid);
836  }
837
838  /// \brief Retrieve the name of the immediate macro expansion.
839  ///
840  /// This routine starts from a source location, and finds the name of the macro
841  /// responsible for its immediate expansion. It looks through any intervening
842  /// macro argument expansions to compute this. It returns a StringRef which
843  /// refers to the SourceManager-owned buffer of the source where that macro
844  /// name is spelled. Thus, the result should not outlive the SourceManager.
845  StringRef getImmediateMacroName(SourceLocation Loc) {
846    return Lexer::getImmediateMacroName(Loc, SourceMgr, getLangOptions());
847  }
848
849  /// CreateString - Plop the specified string into a scratch buffer and set the
850  /// specified token's location and length to it.  If specified, the source
851  /// location provides a location of the expansion point of the token.
852  void CreateString(const char *Buf, unsigned Len, Token &Tok,
853                    SourceLocation ExpansionLocStart = SourceLocation(),
854                    SourceLocation ExpansionLocEnd = SourceLocation());
855
856  /// \brief Computes the source location just past the end of the
857  /// token at this source location.
858  ///
859  /// This routine can be used to produce a source location that
860  /// points just past the end of the token referenced by \p Loc, and
861  /// is generally used when a diagnostic needs to point just after a
862  /// token where it expected something different from what it received. If
863  /// the returned source location would not be meaningful (e.g., if
864  /// it points into a macro), this routine returns an invalid
865  /// source location.
866  ///
867  /// \param Offset an offset from the end of the token, where the source
868  /// location should refer to. The default offset (0) produces a source
869  /// location pointing just past the end of the token; an offset of 1 produces
870  /// a source location pointing to the last character in the token, etc.
871  SourceLocation getLocForEndOfToken(SourceLocation Loc, unsigned Offset = 0) {
872    return Lexer::getLocForEndOfToken(Loc, Offset, SourceMgr, Features);
873  }
874
875  /// \brief Returns true if the given MacroID location points at the first
876  /// token of the macro expansion.
877  ///
878  /// \param MacroBegin If non-null and the function returns true, it is set to
879  /// the begin location of the macro.
880  bool isAtStartOfMacroExpansion(SourceLocation loc,
881                                 SourceLocation *MacroBegin = 0) const {
882    return Lexer::isAtStartOfMacroExpansion(loc, SourceMgr, Features,
883                                            MacroBegin);
884  }
885
886  /// \brief Returns true if the given MacroID location points at the last
887  /// token of the macro expansion.
888  ///
889  /// \param MacroEnd If non-null and the function returns true, it is set to
890  /// the end location of the macro.
891  bool isAtEndOfMacroExpansion(SourceLocation loc,
892                               SourceLocation *MacroEnd = 0) const {
893    return Lexer::isAtEndOfMacroExpansion(loc, SourceMgr, Features, MacroEnd);
894  }
895
896  /// DumpToken - Print the token to stderr, used for debugging.
897  ///
898  void DumpToken(const Token &Tok, bool DumpFlags = false) const;
899  void DumpLocation(SourceLocation Loc) const;
900  void DumpMacro(const MacroInfo &MI) const;
901
902  /// AdvanceToTokenCharacter - Given a location that specifies the start of a
903  /// token, return a new location that specifies a character within the token.
904  SourceLocation AdvanceToTokenCharacter(SourceLocation TokStart,
905                                         unsigned Char) const {
906    return Lexer::AdvanceToTokenCharacter(TokStart, Char, SourceMgr, Features);
907  }
908
909  /// IncrementPasteCounter - Increment the counters for the number of token
910  /// paste operations performed.  If fast was specified, this is a 'fast paste'
911  /// case we handled.
912  ///
913  void IncrementPasteCounter(bool isFast) {
914    if (isFast)
915      ++NumFastTokenPaste;
916    else
917      ++NumTokenPaste;
918  }
919
920  void PrintStats();
921
922  size_t getTotalMemory() const;
923
924  /// HandleMicrosoftCommentPaste - When the macro expander pastes together a
925  /// comment (/##/) in Microsoft mode, this method handles updating the current
926  /// state, returning the token on the next source line.
927  void HandleMicrosoftCommentPaste(Token &Tok);
928
929  //===--------------------------------------------------------------------===//
930  // Preprocessor callback methods.  These are invoked by a lexer as various
931  // directives and events are found.
932
933  /// LookUpIdentifierInfo - Given a tok::raw_identifier token, look up the
934  /// identifier information for the token and install it into the token,
935  /// updating the token kind accordingly.
936  IdentifierInfo *LookUpIdentifierInfo(Token &Identifier) const;
937
938private:
939  llvm::DenseMap<IdentifierInfo*,unsigned> PoisonReasons;
940
941public:
942
943  // SetPoisonReason - Call this function to indicate the reason for
944  // poisoning an identifier. If that identifier is accessed while
945  // poisoned, then this reason will be used instead of the default
946  // "poisoned" diagnostic.
947  void SetPoisonReason(IdentifierInfo *II, unsigned DiagID);
948
949  // HandlePoisonedIdentifier - Display reason for poisoned
950  // identifier.
951  void HandlePoisonedIdentifier(Token & Tok);
952
953  void MaybeHandlePoisonedIdentifier(Token &Identifier) {
954    if (IdentifierInfo *II = Identifier.getIdentifierInfo()) {
955      if (II->isPoisoned()) {
956        HandlePoisonedIdentifier(Identifier);
957      }
958    }
959  }
960
961private:
962  /// Identifiers used for SEH handling in Borland. These are only
963  /// allowed in particular circumstances.
964  // __except block
965  IdentifierInfo *Ident__exception_code,
966                 *Ident___exception_code,
967                 *Ident_GetExceptionCode;
968  // __except filter expression
969  IdentifierInfo *Ident__exception_info,
970                 *Ident___exception_info,
971                 *Ident_GetExceptionInfo;
972  // __finally
973  IdentifierInfo *Ident__abnormal_termination,
974                 *Ident___abnormal_termination,
975                 *Ident_AbnormalTermination;
976public:
977  void PoisonSEHIdentifiers(bool Poison = true); // Borland
978
979  /// HandleIdentifier - This callback is invoked when the lexer reads an
980  /// identifier and has filled in the token's IdentifierInfo member.  This
981  /// callback potentially macro expands it or turns it into a named token (like
982  /// 'for').
983  void HandleIdentifier(Token &Identifier);
984
985
986  /// HandleEndOfFile - This callback is invoked when the lexer hits the end of
987  /// the current file.  This either returns the EOF token and returns true, or
988  /// pops a level off the include stack and returns false, at which point the
989  /// client should call lex again.
990  bool HandleEndOfFile(Token &Result, bool isEndOfMacro = false);
991
992  /// HandleEndOfTokenLexer - This callback is invoked when the current
993  /// TokenLexer hits the end of its token stream.
994  bool HandleEndOfTokenLexer(Token &Result);
995
996  /// HandleDirective - This callback is invoked when the lexer sees a # token
997  /// at the start of a line.  This consumes the directive, modifies the
998  /// lexer/preprocessor state, and advances the lexer(s) so that the next token
999  /// read is the correct one.
1000  void HandleDirective(Token &Result);
1001
1002  /// CheckEndOfDirective - Ensure that the next token is a tok::eod token.  If
1003  /// not, emit a diagnostic and consume up until the eod.  If EnableMacros is
1004  /// true, then we consider macros that expand to zero tokens as being ok.
1005  void CheckEndOfDirective(const char *Directive, bool EnableMacros = false);
1006
1007  /// DiscardUntilEndOfDirective - Read and discard all tokens remaining on the
1008  /// current line until the tok::eod token is found.
1009  void DiscardUntilEndOfDirective();
1010
1011  /// SawDateOrTime - This returns true if the preprocessor has seen a use of
1012  /// __DATE__ or __TIME__ in the file so far.
1013  bool SawDateOrTime() const {
1014    return DATELoc != SourceLocation() || TIMELoc != SourceLocation();
1015  }
1016  unsigned getCounterValue() const { return CounterValue; }
1017  void setCounterValue(unsigned V) { CounterValue = V; }
1018
1019  /// \brief Retrieves the module that we're currently building, if any.
1020  Module *getCurrentModule();
1021
1022  /// AllocateMacroInfo - Allocate a new MacroInfo object with the provided
1023  ///  SourceLocation.
1024  MacroInfo *AllocateMacroInfo(SourceLocation L);
1025
1026  /// CloneMacroInfo - Allocate a new MacroInfo object which is a clone of MI.
1027  MacroInfo *CloneMacroInfo(const MacroInfo &MI);
1028
1029  /// GetIncludeFilenameSpelling - Turn the specified lexer token into a fully
1030  /// checked and spelled filename, e.g. as an operand of #include. This returns
1031  /// true if the input filename was in <>'s or false if it was in ""'s.  The
1032  /// caller is expected to provide a buffer that is large enough to hold the
1033  /// spelling of the filename, but is also expected to handle the case when
1034  /// this method decides to use a different buffer.
1035  bool GetIncludeFilenameSpelling(SourceLocation Loc, StringRef &Filename);
1036
1037  /// LookupFile - Given a "foo" or <foo> reference, look up the indicated file,
1038  /// return null on failure.  isAngled indicates whether the file reference is
1039  /// for system #include's or not (i.e. using <> instead of "").
1040  const FileEntry *LookupFile(StringRef Filename,
1041                              bool isAngled, const DirectoryLookup *FromDir,
1042                              const DirectoryLookup *&CurDir,
1043                              SmallVectorImpl<char> *SearchPath,
1044                              SmallVectorImpl<char> *RelativePath,
1045                              Module **SuggestedModule,
1046                              bool SkipCache = false);
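  // Illustrative sketch: resolving an angled include name the way #include <...>
  // would, ignoring the optional search-path and module outputs.  "stdio.h" is
  // just a placeholder file name.
  //
  //   const DirectoryLookup *CurDir = 0;
  //   const FileEntry *FE = PP.LookupFile("stdio.h", /*isAngled=*/true,
  //                                       /*FromDir=*/0, CurDir,
  //                                       /*SearchPath=*/0, /*RelativePath=*/0,
  //                                       /*SuggestedModule=*/0);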
1047
1048  /// GetCurDirLookup - The DirectoryLookup structure used to find the current
1049  /// FileEntry, if CurLexer is non-null and if applicable.  This allows us to
1050  /// implement #include_next and find directory-specific properties.
1051  const DirectoryLookup *GetCurDirLookup() { return CurDirLookup; }
1052
1053  /// isInPrimaryFile - Return true if we're in the top-level file, not in a
1054  /// #include.
1055  bool isInPrimaryFile() const;
1056
1057  /// ConcatenateIncludeName - Handle cases where the #include name is expanded
1058  /// from a macro as multiple tokens, which need to be glued together.  This
1059  /// occurs for code like:
1060  ///    #define FOO <a/b.h>
1061  ///    #include FOO
1062  /// because in this case, "<a/b.h>" is returned as 7 tokens, not one.
1063  ///
1064  /// This code concatenates and consumes tokens up to the '>' token.  It
1065  /// returns false if the > was found, otherwise it returns true if it finds
1066  /// and consumes the EOD marker.
1067  bool ConcatenateIncludeName(SmallString<128> &FilenameBuffer,
1068                              SourceLocation &End);
1069
1070  /// LexOnOffSwitch - Lex an on-off-switch (C99 6.10.6p2) and verify that it is
1071  /// followed by EOD.  Return true if the token is not a valid on-off-switch.
1072  bool LexOnOffSwitch(tok::OnOffSwitch &OOS);
1073
1074private:
1075
1076  void PushIncludeMacroStack() {
1077    IncludeMacroStack.push_back(IncludeStackInfo(CurLexerKind,
1078                                                 CurLexer.take(),
1079                                                 CurPTHLexer.take(),
1080                                                 CurPPLexer,
1081                                                 CurTokenLexer.take(),
1082                                                 CurDirLookup));
1083    CurPPLexer = 0;
1084  }
1085
1086  void PopIncludeMacroStack() {
1087    CurLexer.reset(IncludeMacroStack.back().TheLexer);
1088    CurPTHLexer.reset(IncludeMacroStack.back().ThePTHLexer);
1089    CurPPLexer = IncludeMacroStack.back().ThePPLexer;
1090    CurTokenLexer.reset(IncludeMacroStack.back().TheTokenLexer);
1091    CurDirLookup  = IncludeMacroStack.back().TheDirLookup;
1092    CurLexerKind = IncludeMacroStack.back().CurLexerKind;
1093    IncludeMacroStack.pop_back();
1094  }
1095
1096  /// AllocateMacroInfo - Allocate a new MacroInfo object.
1097  MacroInfo *AllocateMacroInfo();
1098
1099  /// ReleaseMacroInfo - Release the specified MacroInfo.  This memory will
1100  ///  be reused for allocating new MacroInfo objects.
1101  void ReleaseMacroInfo(MacroInfo* MI);
1102
1103  /// ReadMacroName - Lex and validate a macro name, which occurs after a
1104  /// #define or #undef.  This emits a diagnostic, sets the token kind to eod,
1105  /// and discards the rest of the macro line if the macro name is invalid.
1106  void ReadMacroName(Token &MacroNameTok, char isDefineUndef = 0);
1107
1108  /// ReadMacroDefinitionArgList - The ( starting an argument list of a macro
1109  /// definition has just been read.  Lex the rest of the arguments and the
1110  /// closing ), updating MI with what we learn.  Return true if an error occurs
1111  /// parsing the arg list.
1112  bool ReadMacroDefinitionArgList(MacroInfo *MI);
1113
1114  /// SkipExcludedConditionalBlock - We just read a #if or related directive and
1115  /// decided that the subsequent tokens are in the #if'd out portion of the
1116  /// file.  Lex the rest of the file, until we see an #endif.  If
1117  /// FoundNonSkipPortion is true, then we have already emitted code for part of
1118  /// this #if directive, so #else/#elif blocks should never be entered. If
1119  /// FoundElse is false, then #else directives are ok, if not, then we have
1120  /// already seen one so a #else directive is a duplicate.  When this returns,
1121  /// the caller can lex the first valid token.
1122  void SkipExcludedConditionalBlock(SourceLocation IfTokenLoc,
1123                                    bool FoundNonSkipPortion, bool FoundElse,
1124                                    SourceLocation ElseLoc = SourceLocation());
1125
1126  /// PTHSkipExcludedConditionalBlock - A fast PTH version of
1127  ///  SkipExcludedConditionalBlock.
1128  void PTHSkipExcludedConditionalBlock();
1129
1130  /// EvaluateDirectiveExpression - Evaluate an integer constant expression that
1131  /// may occur after a #if or #elif directive and return it as a bool.  If the
1132  /// expression is equivalent to "!defined(X)" return X in IfNDefMacro.
1133  bool EvaluateDirectiveExpression(IdentifierInfo *&IfNDefMacro);
1134
1135  /// RegisterBuiltinPragmas - Install the standard preprocessor pragmas:
1136  /// #pragma GCC poison/system_header/dependency and #pragma once.
1137  void RegisterBuiltinPragmas();
1138
1139  /// RegisterBuiltinMacros - Register builtin macros, such as __LINE__ with the
1140  /// identifier table.
1141  void RegisterBuiltinMacros();
1142
1143  /// HandleMacroExpandedIdentifier - If an identifier token is read that is to
1144  /// be expanded as a macro, handle it and return the next token as 'Tok'.  If
1145  /// the macro should not be expanded return true, otherwise return false.
1146  bool HandleMacroExpandedIdentifier(Token &Tok, MacroInfo *MI);
1147
1148  /// \brief Cache macro-expanded tokens for TokenLexers.
1149  ///
1150  /// Works like a stack; a TokenLexer adds the macro-expanded tokens that it is
1151  /// going to lex to the cache, and when it finishes, those tokens are removed
1152  /// from the end of the cache.
1153  Token *cacheMacroExpandedTokens(TokenLexer *tokLexer,
1154                                  ArrayRef<Token> tokens);
1155  void removeCachedMacroExpandedTokensOfLastLexer();
1156  friend void TokenLexer::ExpandFunctionArguments();
1157
1158  /// isNextPPTokenLParen - Determine whether the next preprocessor token to be
1159  /// lexed is a '('.  If so, consume the token and return true, if not, this
1160  /// method should have no observable side-effect on the lexed tokens.
1161  bool isNextPPTokenLParen();
1162
1163  /// ReadFunctionLikeMacroArgs - After reading "MACRO(", this method is
1164  /// invoked to read all of the formal arguments specified for the macro
1165  /// invocation.  This returns null on error.
1166  MacroArgs *ReadFunctionLikeMacroArgs(Token &MacroName, MacroInfo *MI,
1167                                       SourceLocation &ExpansionEnd);
1168
1169  /// ExpandBuiltinMacro - If an identifier token is read that is to be expanded
1170  /// as a builtin macro, handle it and return the next token as 'Tok'.
1171  void ExpandBuiltinMacro(Token &Tok);
1172
1173  /// Handle_Pragma - Read a _Pragma directive, slice it up, process it, then
1174  /// return the first token after the directive.  The _Pragma token has just
1175  /// been read into 'Tok'.
1176  void Handle_Pragma(Token &Tok);
1177
1178  /// HandleMicrosoft__pragma - Like Handle_Pragma except the pragma text
1179  /// is not enclosed within a string literal.
1180  void HandleMicrosoft__pragma(Token &Tok);
1181
1182  /// EnterSourceFileWithLexer - Add a lexer to the top of the include stack and
1183  /// start lexing tokens from it instead of the current buffer.
1184  void EnterSourceFileWithLexer(Lexer *TheLexer, const DirectoryLookup *Dir);
1185
1186  /// EnterSourceFileWithPTH - Add a lexer to the top of the include stack and
1187  /// start getting tokens from it using the PTH cache.
1188  void EnterSourceFileWithPTH(PTHLexer *PL, const DirectoryLookup *Dir);
1189
1190  /// IsFileLexer - Returns true if we are lexing from a file and not a
1191  ///  pragma or a macro.
1192  static bool IsFileLexer(const Lexer* L, const PreprocessorLexer* P) {
1193    return L ? !L->isPragmaLexer() : P != 0;
1194  }
1195
1196  static bool IsFileLexer(const IncludeStackInfo& I) {
1197    return IsFileLexer(I.TheLexer, I.ThePPLexer);
1198  }
1199
1200  bool IsFileLexer() const {
1201    return IsFileLexer(CurLexer.get(), CurPPLexer);
1202  }
1203
1204  //===--------------------------------------------------------------------===//
1205  // Caching stuff.
1206  void CachingLex(Token &Result);
1207  bool InCachingLexMode() const {
1208    // If the Lexer pointers are 0 and IncludeMacroStack is empty, it means
1209    // that we are past EOF, not that we are in CachingLex mode.
1210    return CurPPLexer == 0 && CurTokenLexer == 0 && CurPTHLexer == 0 &&
1211           !IncludeMacroStack.empty();
1212  }
1213  void EnterCachingLexMode();
1214  void ExitCachingLexMode() {
1215    if (InCachingLexMode())
1216      RemoveTopOfLexerStack();
1217  }
1218  const Token &PeekAhead(unsigned N);
1219  void AnnotatePreviousCachedTokens(const Token &Tok);
1220
1221  //===--------------------------------------------------------------------===//
1222  /// Handle*Directive - implement the various preprocessor directives.  These
1223  /// should side-effect the current preprocessor object so that the next call
1224  /// to Lex() will return the appropriate token next.
1225  void HandleLineDirective(Token &Tok);
1226  void HandleDigitDirective(Token &Tok);
1227  void HandleUserDiagnosticDirective(Token &Tok, bool isWarning);
1228  void HandleIdentSCCSDirective(Token &Tok);
1229  void HandleMacroPublicDirective(Token &Tok);
1230  void HandleMacroPrivateDirective(Token &Tok);
1231
1232  // File inclusion.
1233  void HandleIncludeDirective(SourceLocation HashLoc,
1234                              Token &Tok,
1235                              const DirectoryLookup *LookupFrom = 0,
1236                              bool isImport = false);
1237  void HandleIncludeNextDirective(SourceLocation HashLoc, Token &Tok);
1238  void HandleIncludeMacrosDirective(SourceLocation HashLoc, Token &Tok);
1239  void HandleImportDirective(SourceLocation HashLoc, Token &Tok);
1240
1241  // Macro handling.
1242  void HandleDefineDirective(Token &Tok);
1243  void HandleUndefDirective(Token &Tok);
1244
1245  // Conditional Inclusion.
1246  void HandleIfdefDirective(Token &Tok, bool isIfndef,
1247                            bool ReadAnyTokensBeforeDirective);
1248  void HandleIfDirective(Token &Tok, bool ReadAnyTokensBeforeDirective);
1249  void HandleEndifDirective(Token &Tok);
1250  void HandleElseDirective(Token &Tok);
1251  void HandleElifDirective(Token &Tok);
1252
1253  // Pragmas.
1254  void HandlePragmaDirective(unsigned Introducer);
1255public:
1256  void HandlePragmaOnce(Token &OnceTok);
1257  void HandlePragmaMark();
1258  void HandlePragmaPoison(Token &PoisonTok);
1259  void HandlePragmaSystemHeader(Token &SysHeaderTok);
1260  void HandlePragmaDependency(Token &DependencyTok);
1261  void HandlePragmaComment(Token &CommentTok);
1262  void HandlePragmaMessage(Token &MessageTok);
1263  void HandlePragmaPushMacro(Token &Tok);
1264  void HandlePragmaPopMacro(Token &Tok);
1265  IdentifierInfo *ParsePragmaPushOrPopMacro(Token &Tok);
1266
1267  // Return true and store the first token only if any CommentHandler
1268  // has inserted some tokens and getCommentRetentionState() is false.
1269  bool HandleComment(Token &Token, SourceRange Comment);
1270
1271  /// \brief A macro is used, update information about macros that need unused
1272  /// warnings.
1273  void markMacroAsUsed(MacroInfo *MI);
1274};
1275
1276/// \brief Abstract base class that describes a handler that will receive
1277/// source ranges for each of the comments encountered in the source file.
1278class CommentHandler {
1279public:
1280  virtual ~CommentHandler();
1281
1282  // The handler shall return true if it has pushed any tokens
1283  // to be read using e.g. EnterToken or EnterTokenStream.
1284  virtual bool HandleComment(Preprocessor &PP, SourceRange Comment) = 0;
1285};
1286
1287}  // end namespace clang
1288
1289#endif
1290