Preprocessor.h revision 3e25b990f1e2ba2a9a63dde5fd111492a7f6194a
1//===--- Preprocessor.h - C Language Family Preprocessor --------*- C++ -*-===//
2//
3//                     The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10//  This file defines the Preprocessor interface.
11//
12//===----------------------------------------------------------------------===//
13
14#ifndef LLVM_CLANG_LEX_PREPROCESSOR_H
15#define LLVM_CLANG_LEX_PREPROCESSOR_H
16
17#include "clang/Basic/Builtins.h"
18#include "clang/Basic/Diagnostic.h"
19#include "clang/Basic/IdentifierTable.h"
20#include "clang/Basic/SourceLocation.h"
21#include "clang/Lex/Lexer.h"
22#include "clang/Lex/MacroInfo.h"
23#include "clang/Lex/PPCallbacks.h"
24#include "clang/Lex/PTHLexer.h"
25#include "clang/Lex/PTHManager.h"
26#include "clang/Lex/TokenLexer.h"
27#include "llvm/ADT/ArrayRef.h"
28#include "llvm/ADT/DenseMap.h"
29#include "llvm/ADT/IntrusiveRefCntPtr.h"
30#include "llvm/ADT/OwningPtr.h"
31#include "llvm/ADT/SmallPtrSet.h"
32#include "llvm/ADT/SmallVector.h"
33#include "llvm/Support/Allocator.h"
34#include <vector>
35
36namespace llvm {
37  template<unsigned InternalLen> class SmallString;
38}
39
40namespace clang {
41
42class SourceManager;
43class ExternalPreprocessorSource;
44class FileManager;
45class FileEntry;
46class HeaderSearch;
47class PragmaNamespace;
48class PragmaHandler;
49class CommentHandler;
50class ScratchBuffer;
51class TargetInfo;
52class PPCallbacks;
53class CodeCompletionHandler;
54class DirectoryLookup;
55class PreprocessingRecord;
56class ModuleLoader;
57class PreprocessorOptions;
58
59/// \brief Stores token information for comparing actual tokens with
60/// predefined values.  Only handles simple tokens and identifiers.
61class TokenValue {
62  tok::TokenKind Kind;
63  IdentifierInfo *II;
64
65public:
66  TokenValue(tok::TokenKind Kind) : Kind(Kind), II(0) {
67    assert(Kind != tok::raw_identifier && "Raw identifiers are not supported.");
68    assert(Kind != tok::identifier &&
69           "Identifiers should be created by TokenValue(IdentifierInfo *)");
70    assert(!tok::isLiteral(Kind) && "Literals are not supported.");
71    assert(!tok::isAnnotation(Kind) && "Annotations are not supported.");
72  }
73  TokenValue(IdentifierInfo *II) : Kind(tok::identifier), II(II) {}
74  bool operator==(const Token &Tok) const {
75    return Tok.getKind() == Kind &&
76        (!II || II == Tok.getIdentifierInfo());
77  }
78};
79
80/// Preprocessor - This object engages in a tight little dance with the lexer to
81/// efficiently preprocess tokens.  Lexers know only about tokens within a
82/// single source file, and don't know anything about preprocessor-level issues
83/// like the \#include stack, token expansion, etc.
84///
85class Preprocessor : public RefCountedBase<Preprocessor> {
86  IntrusiveRefCntPtr<PreprocessorOptions> PPOpts;
87  DiagnosticsEngine        *Diags;
88  LangOptions       &LangOpts;
89  const TargetInfo  *Target;
90  FileManager       &FileMgr;
91  SourceManager     &SourceMgr;
92  ScratchBuffer     *ScratchBuf;
93  HeaderSearch      &HeaderInfo;
94  ModuleLoader      &TheModuleLoader;
95
96  /// \brief External source of macros.
97  ExternalPreprocessorSource *ExternalSource;
98
99
100  /// PTH - An optional PTHManager object used for getting tokens from
101  ///  a token cache rather than lexing the original source file.
102  OwningPtr<PTHManager> PTH;
103
104  /// BP - A BumpPtrAllocator object used to quickly allocate and release
105  ///  objects internal to the Preprocessor.
106  llvm::BumpPtrAllocator BP;
107
108  /// Identifiers for builtin macros and other builtins.
109  IdentifierInfo *Ident__LINE__, *Ident__FILE__;   // __LINE__, __FILE__
110  IdentifierInfo *Ident__DATE__, *Ident__TIME__;   // __DATE__, __TIME__
111  IdentifierInfo *Ident__INCLUDE_LEVEL__;          // __INCLUDE_LEVEL__
112  IdentifierInfo *Ident__BASE_FILE__;              // __BASE_FILE__
113  IdentifierInfo *Ident__TIMESTAMP__;              // __TIMESTAMP__
114  IdentifierInfo *Ident__COUNTER__;                // __COUNTER__
115  IdentifierInfo *Ident_Pragma, *Ident__pragma;    // _Pragma, __pragma
116  IdentifierInfo *Ident__VA_ARGS__;                // __VA_ARGS__
117  IdentifierInfo *Ident__has_feature;              // __has_feature
118  IdentifierInfo *Ident__has_extension;            // __has_extension
119  IdentifierInfo *Ident__has_builtin;              // __has_builtin
120  IdentifierInfo *Ident__has_attribute;            // __has_attribute
121  IdentifierInfo *Ident__has_include;              // __has_include
122  IdentifierInfo *Ident__has_include_next;         // __has_include_next
123  IdentifierInfo *Ident__has_warning;              // __has_warning
124  IdentifierInfo *Ident__building_module;          // __building_module
125  IdentifierInfo *Ident__MODULE__;                 // __MODULE__
126
127  SourceLocation DATELoc, TIMELoc;
128  unsigned CounterValue;  // Next __COUNTER__ value.
129
130  enum {
131    /// MaxIncludeStackDepth - Maximum depth of \#includes.
132    MaxAllowedIncludeStackDepth = 200
133  };
134
135  // State that is set before the preprocessor begins.
136  bool KeepComments : 1;
137  bool KeepMacroComments : 1;
138  bool SuppressIncludeNotFoundError : 1;
139
140  // State that changes while the preprocessor runs:
141  bool InMacroArgs : 1;            // True if parsing fn macro invocation args.
142
143  /// Whether the preprocessor owns the header search object.
144  bool OwnsHeaderSearch : 1;
145
146  /// DisableMacroExpansion - True if macro expansion is disabled.
147  bool DisableMacroExpansion : 1;
148
149  /// MacroExpansionInDirectivesOverride - Temporarily disables
150  /// DisableMacroExpansion (i.e. enables expansion) when parsing preprocessor
151  /// directives.
152  bool MacroExpansionInDirectivesOverride : 1;
153
154  class ResetMacroExpansionHelper;
155
156  /// \brief Whether we have already loaded macros from the external source.
157  mutable bool ReadMacrosFromExternalSource : 1;
158
159  /// \brief True if pragmas are enabled.
160  bool PragmasEnabled : 1;
161
162  /// \brief True if the current build action is a preprocessing action.
163  bool PreprocessedOutput : 1;
164
165  /// \brief True if we are currently preprocessing a #if or #elif directive
166  bool ParsingIfOrElifDirective;
167
168  /// \brief True if we are pre-expanding macro arguments.
169  bool InMacroArgPreExpansion;
170
171  /// Identifiers - This is mapping/lookup information for all identifiers in
172  /// the program, including program keywords.
173  mutable IdentifierTable Identifiers;
174
175  /// Selectors - This table contains all the selectors in the program. Unlike
176  /// IdentifierTable above, this table *isn't* populated by the preprocessor.
177  /// It is declared/expanded here because its role/lifetime is
178  /// conceptually similar to the IdentifierTable's. In addition, the current
179  /// control flow (in clang::ParseAST()) makes it convenient to put it here.
180  /// FIXME: Make sure the lifetime of Identifiers/Selectors *isn't* tied to
181  /// the lifetime of the preprocessor.
182  SelectorTable Selectors;
183
184  /// BuiltinInfo - Information about builtins.
185  Builtin::Context BuiltinInfo;
186
187  /// PragmaHandlers - This tracks all of the pragma handlers that the client registered
188  /// with this preprocessor.
189  PragmaNamespace *PragmaHandlers;
190
191  /// \brief Tracks all of the comment handlers that the client registered
192  /// with this preprocessor.
193  std::vector<CommentHandler *> CommentHandlers;
194
195  /// \brief True if we want to ignore the EOF token and continue later on
196  /// (thus avoiding tearing down the Lexer, etc.).
197  bool IncrementalProcessing;
198
199  /// \brief The code-completion handler.
200  CodeCompletionHandler *CodeComplete;
201
202  /// \brief The file that we're performing code-completion for, if any.
203  const FileEntry *CodeCompletionFile;
204
205  /// \brief The offset in file for the code-completion point.
206  unsigned CodeCompletionOffset;
207
208  /// \brief The location for the code-completion point. This gets instantiated
209  /// when the CodeCompletionFile gets \#include'ed for preprocessing.
210  SourceLocation CodeCompletionLoc;
211
212  /// \brief The start location for the file of the code-completion point.
213  ///
214  /// This gets instantiated when the CodeCompletionFile gets \#include'ed
215  /// for preprocessing.
216  SourceLocation CodeCompletionFileLoc;
217
218  /// \brief The source location of the 'import' contextual keyword we just
219  /// lexed, if any.
220  SourceLocation ModuleImportLoc;
221
222  /// \brief The module import path that we're currently processing.
223  SmallVector<std::pair<IdentifierInfo *, SourceLocation>, 2> ModuleImportPath;
224
225  /// \brief Whether the module import expects an identifier next. Otherwise,
226  /// it expects a '.' or ';'.
227  bool ModuleImportExpectsIdentifier;
228
229  /// \brief The source location of the currently-active
230  /// #pragma clang arc_cf_code_audited begin.
231  SourceLocation PragmaARCCFCodeAuditedLoc;
232
233  /// \brief True if we hit the code-completion point.
234  bool CodeCompletionReached;
235
236  /// \brief The number of bytes that we will initially skip when entering the
237  /// main file, which is used when loading a precompiled preamble, along
238  /// with a flag that indicates whether skipping this number of bytes will
239  /// place the lexer at the start of a line.
240  std::pair<unsigned, bool> SkipMainFilePreamble;
241
242  /// CurLexer - This is the current top of the stack that we're lexing from if
243  /// not expanding a macro and we are lexing directly from source code.
244  ///  Only one of CurLexer, CurPTHLexer, or CurTokenLexer will be non-null.
245  OwningPtr<Lexer> CurLexer;
246
247  /// CurPTHLexer - This is the current top of stack that we're lexing from if
248  ///  not expanding from a macro and we are lexing from a PTH cache.
249  ///  Only one of CurLexer, CurPTHLexer, or CurTokenLexer will be non-null.
250  OwningPtr<PTHLexer> CurPTHLexer;
251
252  /// CurPPLexer - This is the current top of the stack that we're lexing from
253  ///  if not expanding a macro.  This is an alias for either CurLexer or
254  ///  CurPTHLexer.
255  PreprocessorLexer *CurPPLexer;
256
257  /// CurDirLookup - The DirectoryLookup structure used to find the current
258  /// FileEntry, if CurLexer is non-null and if applicable.  This allows us to
259  /// implement \#include_next and find directory-specific properties.
260  const DirectoryLookup *CurDirLookup;
261
262  /// CurTokenLexer - This is the current macro we are expanding, if we are
263  /// expanding a macro.  One of CurLexer and CurTokenLexer must be null.
264  OwningPtr<TokenLexer> CurTokenLexer;
265
266  /// \brief The kind of lexer we're currently working with.
267  enum CurLexerKind {
268    CLK_Lexer,
269    CLK_PTHLexer,
270    CLK_TokenLexer,
271    CLK_CachingLexer,
272    CLK_LexAfterModuleImport
273  } CurLexerKind;
274
275  /// IncludeMacroStack - This keeps track of the stack of files currently
276  /// \#included, and macros currently being expanded from, not counting
277  /// CurLexer/CurTokenLexer.
278  struct IncludeStackInfo {
279    enum CurLexerKind     CurLexerKind;
280    Lexer                 *TheLexer;
281    PTHLexer              *ThePTHLexer;
282    PreprocessorLexer     *ThePPLexer;
283    TokenLexer            *TheTokenLexer;
284    const DirectoryLookup *TheDirLookup;
285
286    IncludeStackInfo(enum CurLexerKind K, Lexer *L, PTHLexer* P,
287                     PreprocessorLexer* PPL,
288                     TokenLexer* TL, const DirectoryLookup *D)
289      : CurLexerKind(K), TheLexer(L), ThePTHLexer(P), ThePPLexer(PPL),
290        TheTokenLexer(TL), TheDirLookup(D) {}
291  };
292  std::vector<IncludeStackInfo> IncludeMacroStack;
293
294  /// Callbacks - These are actions invoked when some preprocessor activity is
295  /// encountered (e.g. a file is \#included, etc).
296  PPCallbacks *Callbacks;
297
298  struct MacroExpandsInfo {
299    Token Tok;
300    MacroDirective *MD;
301    SourceRange Range;
302    MacroExpandsInfo(Token Tok, MacroDirective *MD, SourceRange Range)
303      : Tok(Tok), MD(MD), Range(Range) { }
304  };
305  SmallVector<MacroExpandsInfo, 2> DelayedMacroExpandsCallbacks;
306
307  /// Macros - For each IdentifierInfo that was associated with a macro, we
308  /// keep a mapping to the history of all macro definitions and #undefs in
309  /// the reverse order (the latest one is in the head of the list).
310  llvm::DenseMap<const IdentifierInfo*, MacroDirective*> Macros;
311  friend class ASTReader;
312
313  /// \brief Macros that we want to warn about because they are not used by the
314  /// end of the translation unit; we store just their SourceLocations instead
315  /// of something like MacroInfo*. The benefit of this is that when we are
316  /// deserializing from a PCH, we don't need to deserialize identifiers and
317  /// macros just to report that they are unused; we simply warn using the
318  /// SourceLocations in this set (which will be filled in by the ASTReader).
319  /// We use a SmallPtrSet instead of a vector for faster removal.
320  typedef llvm::SmallPtrSet<SourceLocation, 32> WarnUnusedMacroLocsTy;
321  WarnUnusedMacroLocsTy WarnUnusedMacroLocs;
322
323  /// MacroArgCache - This is a "freelist" of MacroArg objects that can be
324  /// reused for quick allocation.
325  MacroArgs *MacroArgCache;
326  friend class MacroArgs;
327
328  /// PragmaPushMacroInfo - For each IdentifierInfo used in a #pragma
329  /// push_macro directive, we keep a MacroInfo stack used to restore
330  /// previous macro value.
331  llvm::DenseMap<IdentifierInfo*, std::vector<MacroInfo*> > PragmaPushMacroInfo;
332
333  // Various statistics we track for performance analysis.
334  unsigned NumDirectives, NumIncluded, NumDefined, NumUndefined, NumPragma;
335  unsigned NumIf, NumElse, NumEndif;
336  unsigned NumEnteredSourceFiles, MaxIncludeStackDepth;
337  unsigned NumMacroExpanded, NumFnMacroExpanded, NumBuiltinMacroExpanded;
338  unsigned NumFastMacroExpanded, NumTokenPaste, NumFastTokenPaste;
339  unsigned NumSkipped;
340
341  /// Predefines - This string contains the predefined macros that the
342  /// preprocessor should use, from the command line etc.
343  std::string Predefines;
344
345  /// \brief The file ID for the preprocessor predefines.
346  FileID PredefinesFileID;
347
348  /// TokenLexerCache - Cache macro expanders to reduce malloc traffic.
349  enum { TokenLexerCacheSize = 8 };
350  unsigned NumCachedTokenLexers;
351  TokenLexer *TokenLexerCache[TokenLexerCacheSize];
352
353  /// \brief Keeps macro expanded tokens for TokenLexers.
354  ///
355  /// Works like a stack; a TokenLexer adds the macro-expanded tokens that it
356  /// is going to lex into the cache, and when it finishes the tokens are
357  /// removed from the end of the cache.
358  SmallVector<Token, 16> MacroExpandedTokens;
359  std::vector<std::pair<TokenLexer *, size_t> > MacroExpandingLexersStack;
360
361  /// \brief A record of the macro definitions and expansions that
362  /// occurred during preprocessing.
363  ///
364  /// This is an optional side structure that can be enabled with
365  /// \c createPreprocessingRecord() prior to preprocessing.
366  PreprocessingRecord *Record;
367
368private:  // Cached tokens state.
369  typedef SmallVector<Token, 1> CachedTokensTy;
370
371  /// CachedTokens - Cached tokens are stored here when we do backtracking or
372  /// lookahead. They are "lexed" by the CachingLex() method.
373  CachedTokensTy CachedTokens;
374
375  /// CachedLexPos - The position of the cached token that CachingLex() should
376  /// "lex" next. If it points beyond the CachedTokens vector, it means that
377  /// a normal Lex() should be invoked.
378  CachedTokensTy::size_type CachedLexPos;
379
380  /// BacktrackPositions - Stack of backtrack positions, allowing nested
381  /// backtracks. The EnableBacktrackAtThisPos() method pushes a position to
382  /// indicate where CachedLexPos should be set when the BackTrack() method is
383  /// invoked (at which point the last position is popped).
384  std::vector<CachedTokensTy::size_type> BacktrackPositions;
385
386  struct MacroInfoChain {
387    MacroInfo MI;
388    MacroInfoChain *Next;
389    MacroInfoChain *Prev;
390  };
391
392  /// MacroInfos are managed as a chain for easy disposal.  This is the head
393  /// of that list.
394  MacroInfoChain *MIChainHead;
395
396  /// MICache - A "freelist" of MacroInfo objects that can be reused for quick
397  /// allocation.
398  MacroInfoChain *MICache;
399
400  struct DeserializedMacroInfoChain {
401    MacroInfo MI;
402    unsigned OwningModuleID; // MUST be immediately after the MacroInfo object
403                     // so it can be accessed by MacroInfo::getOwningModuleID().
404    DeserializedMacroInfoChain *Next;
405  };
406  DeserializedMacroInfoChain *DeserialMIChainHead;
407
408public:
409  Preprocessor(IntrusiveRefCntPtr<PreprocessorOptions> PPOpts,
410               DiagnosticsEngine &diags, LangOptions &opts,
411               const TargetInfo *target,
412               SourceManager &SM, HeaderSearch &Headers,
413               ModuleLoader &TheModuleLoader,
414               IdentifierInfoLookup *IILookup = 0,
415               bool OwnsHeaderSearch = false,
416               bool DelayInitialization = false,
417               bool IncrProcessing = false);
418
419  ~Preprocessor();
420
421  /// \brief Initialize the preprocessor, if the constructor did not already
422  /// perform the initialization.
423  ///
424  /// \param Target Information about the target.
425  void Initialize(const TargetInfo &Target);
426
427  /// \brief Retrieve the preprocessor options used to initialize this
428  /// preprocessor.
429  PreprocessorOptions &getPreprocessorOpts() const { return *PPOpts; }
430
431  DiagnosticsEngine &getDiagnostics() const { return *Diags; }
432  void setDiagnostics(DiagnosticsEngine &D) { Diags = &D; }
433
434  const LangOptions &getLangOpts() const { return LangOpts; }
435  const TargetInfo &getTargetInfo() const { return *Target; }
436  FileManager &getFileManager() const { return FileMgr; }
437  SourceManager &getSourceManager() const { return SourceMgr; }
438  HeaderSearch &getHeaderSearchInfo() const { return HeaderInfo; }
439
440  IdentifierTable &getIdentifierTable() { return Identifiers; }
441  SelectorTable &getSelectorTable() { return Selectors; }
442  Builtin::Context &getBuiltinInfo() { return BuiltinInfo; }
443  llvm::BumpPtrAllocator &getPreprocessorAllocator() { return BP; }
444
445  void setPTHManager(PTHManager* pm);
446
447  PTHManager *getPTHManager() { return PTH.get(); }
448
449  void setExternalSource(ExternalPreprocessorSource *Source) {
450    ExternalSource = Source;
451  }
452
453  ExternalPreprocessorSource *getExternalSource() const {
454    return ExternalSource;
455  }
456
457  /// \brief Retrieve the module loader associated with this preprocessor.
458  ModuleLoader &getModuleLoader() const { return TheModuleLoader; }
459
460  /// \brief True if we are currently preprocessing a #if or #elif directive
461  bool isParsingIfOrElifDirective() const {
462    return ParsingIfOrElifDirective;
463  }
464
465  /// SetCommentRetentionState - Control whether or not the preprocessor retains
466  /// comments in output.
467  void SetCommentRetentionState(bool KeepComments, bool KeepMacroComments) {
468    this->KeepComments = KeepComments | KeepMacroComments;
469    this->KeepMacroComments = KeepMacroComments;
470  }
471
472  bool getCommentRetentionState() const { return KeepComments; }
473
474  void setPragmasEnabled(bool Enabled) { PragmasEnabled = Enabled; }
475  bool getPragmasEnabled() const { return PragmasEnabled; }
476
477  void SetSuppressIncludeNotFoundError(bool Suppress) {
478    SuppressIncludeNotFoundError = Suppress;
479  }
480
481  bool GetSuppressIncludeNotFoundError() {
482    return SuppressIncludeNotFoundError;
483  }
484
485  /// Sets whether the preprocessor is responsible for producing output or if
486  /// it is producing tokens to be consumed by Parse and Sema.
487  void setPreprocessedOutput(bool IsPreprocessedOutput) {
488    PreprocessedOutput = IsPreprocessedOutput;
489  }
490
491  /// Returns true if the preprocessor is responsible for generating output,
492  /// false if it is producing tokens to be consumed by Parse and Sema.
493  bool isPreprocessedOutput() const { return PreprocessedOutput; }
494
495  /// isCurrentLexer - Return true if we are lexing directly from the specified
496  /// lexer.
497  bool isCurrentLexer(const PreprocessorLexer *L) const {
498    return CurPPLexer == L;
499  }
500
501  /// getCurrentLexer - Return the current lexer being lexed from.  Note
502  /// that this ignores any potentially active macro expansions and _Pragma
503  /// expansions going on at the time.
504  PreprocessorLexer *getCurrentLexer() const { return CurPPLexer; }
505
506  /// getCurrentFileLexer - Return the current file lexer being lexed from.
507  /// Note that this ignores any potentially active macro expansions and _Pragma
508  /// expansions going on at the time.
509  PreprocessorLexer *getCurrentFileLexer() const;
510
511  /// \brief Returns the file ID for the preprocessor predefines.
512  FileID getPredefinesFileID() const { return PredefinesFileID; }
513
514  /// getPPCallbacks/addPPCallbacks - Accessors for preprocessor callbacks.
515  /// Note that this class takes ownership of any PPCallbacks object given to
516  /// it.
517  PPCallbacks *getPPCallbacks() const { return Callbacks; }
518  void addPPCallbacks(PPCallbacks *C) {
519    if (Callbacks)
520      C = new PPChainedCallbacks(C, Callbacks);
521    Callbacks = C;
522  }
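
  // Illustrative usage sketch (not from the header itself): registering a
  // callback object.  "IncludeLogger" is a hypothetical PPCallbacks subclass
  // and PP an already-constructed Preprocessor; the Preprocessor takes
  // ownership of the pointer, and multiple callbacks are chained together via
  // PPChainedCallbacks.
  //
  //   PP.addPPCallbacks(new IncludeLogger());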
523
524  /// \brief Given an identifier, return its latest MacroDirective if it is
525  /// \#defined or null if it isn't \#define'd.
526  MacroDirective *getMacroDirective(IdentifierInfo *II) const {
527    if (!II->hasMacroDefinition())
528      return 0;
529
530    MacroDirective *MD = getMacroDirectiveHistory(II);
531    assert(MD->isDefined() && "Macro is undefined!");
532    return MD;
533  }
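
  // Illustrative usage sketch (PP is assumed to be an initialized Preprocessor):
  //
  //   IdentifierInfo *II = PP.getIdentifierInfo("NDEBUG");
  //   if (MacroDirective *MD = PP.getMacroDirective(II))
  //     SourceLocation DefLoc = MD->getMacroInfo()->getDefinitionLoc();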
534
535  const MacroInfo *getMacroInfo(IdentifierInfo *II) const {
536    return const_cast<Preprocessor*>(this)->getMacroInfo(II);
537  }
538
539  MacroInfo *getMacroInfo(IdentifierInfo *II) {
540    if (MacroDirective *MD = getMacroDirective(II))
541      return MD->getMacroInfo();
542    return 0;
543  }
544
545  /// \brief Given an identifier, return the (probably #undef'd) MacroDirective
546  /// representing the most recent directive for that macro. One can iterate over
547  /// all previous directives from it. This method should only be called for
548  /// identifiers that hadMacroDefinition().
549  MacroDirective *getMacroDirectiveHistory(const IdentifierInfo *II) const;
550
551  /// \brief Add a directive to the macro directive history for this identifier.
552  void appendMacroDirective(IdentifierInfo *II, MacroDirective *MD);
553  DefMacroDirective *appendDefMacroDirective(IdentifierInfo *II, MacroInfo *MI,
554                                             SourceLocation Loc,
555                                             bool isImported) {
556    DefMacroDirective *MD = AllocateDefMacroDirective(MI, Loc, isImported);
557    appendMacroDirective(II, MD);
558    return MD;
559  }
560  DefMacroDirective *appendDefMacroDirective(IdentifierInfo *II, MacroInfo *MI){
561    return appendDefMacroDirective(II, MI, MI->getDefinitionLoc(), false);
562  }
563  /// \brief Set a MacroDirective that was loaded from a PCH file.
564  void setLoadedMacroDirective(IdentifierInfo *II, MacroDirective *MD);
565
566  /// macro_iterator/macro_begin/macro_end - This allows you to walk the macro
567  /// history table. Currently defined macros have
568  /// IdentifierInfo::hasMacroDefinition() set and an empty
569  /// MacroInfo::getUndefLoc() at the head of the list.
570  typedef llvm::DenseMap<const IdentifierInfo *,
571                         MacroDirective*>::const_iterator macro_iterator;
572  macro_iterator macro_begin(bool IncludeExternalMacros = true) const;
573  macro_iterator macro_end(bool IncludeExternalMacros = true) const;
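
  // Illustrative sketch: walking every macro currently known to the
  // preprocessor (PP), including ones loaded from an external source.
  //
  //   for (Preprocessor::macro_iterator I = PP.macro_begin(),
  //                                     E = PP.macro_end();
  //        I != E; ++I)
  //     llvm::errs() << I->first->getName() << '\n';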
574
575  /// \brief Return the name of the macro defined before \p Loc that has
576  /// spelling \p Tokens.  If there are multiple macros with same spelling,
577  /// return the last one defined.
578  StringRef getLastMacroWithSpelling(SourceLocation Loc,
579                                     ArrayRef<TokenValue> Tokens) const;
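
  // Illustrative sketch: asking whether some macro defined before Loc expands
  // to the single token "__attribute__" (the names here are hypothetical).
  //
  //   TokenValue Spelling[] = { PP.getIdentifierInfo("__attribute__") };
  //   StringRef MacroName = PP.getLastMacroWithSpelling(Loc, Spelling);
  //   // An empty StringRef would indicate that no matching macro was found.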
580
581  const std::string &getPredefines() const { return Predefines; }
582  /// setPredefines - Set the predefines for this Preprocessor.  These
583  /// predefines are automatically injected when parsing the main file.
584  void setPredefines(const char *P) { Predefines = P; }
585  void setPredefines(const std::string &P) { Predefines = P; }
586
587  /// Return information about the specified preprocessor
588  /// identifier token.
589  IdentifierInfo *getIdentifierInfo(StringRef Name) const {
590    return &Identifiers.get(Name);
591  }
592
593  /// AddPragmaHandler - Add the specified pragma handler to the preprocessor.
594  /// If 'Namespace' is non-empty, then it is a token required to exist on the
595  /// pragma line before the pragma string starts, e.g. "STDC" or "GCC".
596  void AddPragmaHandler(StringRef Namespace, PragmaHandler *Handler);
597  void AddPragmaHandler(PragmaHandler *Handler) {
598    AddPragmaHandler(StringRef(), Handler);
599  }
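
  // Illustrative sketch: registering and later removing a handler for
  // "#pragma clang ...".  MyPragmaHandler is a hypothetical PragmaHandler
  // subclass; the same namespace must be used for removal.
  //
  //   MyPragmaHandler *Handler = new MyPragmaHandler();
  //   PP.AddPragmaHandler("clang", Handler);
  //   ...
  //   PP.RemovePragmaHandler("clang", Handler);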
600
601  /// RemovePragmaHandler - Remove the specified pragma handler from
602  /// the preprocessor. If \p Namespace is non-empty, then it should
603  /// be the namespace that \p Handler was added to. It is an error
604  /// to remove a handler that has not been registered.
605  void RemovePragmaHandler(StringRef Namespace, PragmaHandler *Handler);
606  void RemovePragmaHandler(PragmaHandler *Handler) {
607    RemovePragmaHandler(StringRef(), Handler);
608  }
609
610  /// \brief Add the specified comment handler to the preprocessor.
611  void addCommentHandler(CommentHandler *Handler);
612
613  /// \brief Remove the specified comment handler.
614  ///
615  /// It is an error to remove a handler that has not been registered.
616  void removeCommentHandler(CommentHandler *Handler);
617
618  /// \brief Set the code completion handler to the given object.
619  void setCodeCompletionHandler(CodeCompletionHandler &Handler) {
620    CodeComplete = &Handler;
621  }
622
623  /// \brief Retrieve the current code-completion handler.
624  CodeCompletionHandler *getCodeCompletionHandler() const {
625    return CodeComplete;
626  }
627
628  /// \brief Clear out the code completion handler.
629  void clearCodeCompletionHandler() {
630    CodeComplete = 0;
631  }
632
633  /// \brief Hook used by the lexer to invoke the "natural language" code
634  /// completion point.
635  void CodeCompleteNaturalLanguage();
636
637  /// \brief Retrieve the preprocessing record, or NULL if there is no
638  /// preprocessing record.
639  PreprocessingRecord *getPreprocessingRecord() const { return Record; }
640
641  /// \brief Create a new preprocessing record, which will keep track of
642  /// all macro expansions, macro definitions, etc.
643  void createPreprocessingRecord();
644
645  /// EnterMainSourceFile - Enter the specified FileID as the main source file,
646  /// which implicitly adds the builtin defines etc.
647  void EnterMainSourceFile();
648
649  /// EndSourceFile - Inform the preprocessor callbacks that processing is
650  /// complete.
651  void EndSourceFile();
652
653  /// EnterSourceFile - Add a source file to the top of the include stack and
654  /// start lexing tokens from it instead of the current buffer.  On error,
655  /// emit a diagnostic and do not enter the file.
656  void EnterSourceFile(FileID CurFileID, const DirectoryLookup *Dir,
657                       SourceLocation Loc);
658
659  /// EnterMacro - Add a Macro to the top of the include stack and start lexing
660  /// tokens from it instead of the current buffer.  Args specifies the
661  /// tokens input to a function-like macro.
662  ///
663  /// ILEnd specifies the location of the ')' for a function-like macro or the
664  /// identifier for an object-like macro.
665  void EnterMacro(Token &Identifier, SourceLocation ILEnd, MacroInfo *Macro,
666                  MacroArgs *Args);
667
668  /// EnterTokenStream - Add a "macro" context to the top of the include stack,
669  /// which will cause the lexer to start returning the specified tokens.
670  ///
671  /// If DisableMacroExpansion is true, tokens lexed from the token stream will
672  /// not be subject to further macro expansion.  Otherwise, these tokens will
673  /// be re-macro-expanded when/if expansion is enabled.
674  ///
675  /// If OwnsTokens is false, this method assumes that the specified stream of
676  /// tokens has a permanent owner somewhere, so they do not need to be copied.
677  /// If it is true, it assumes the array of tokens is allocated with new[] and
678  /// must be freed.
679  ///
680  void EnterTokenStream(const Token *Toks, unsigned NumToks,
681                        bool DisableMacroExpansion, bool OwnsTokens);
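
  // Illustrative sketch: re-lexing a saved sequence of tokens.  Because
  // OwnsTokens is false here, Toks must stay alive until the stream has been
  // consumed; with OwnsTokens true the array must be allocated with new[] and
  // is freed by the preprocessor.
  //
  //   PP.EnterTokenStream(Toks.data(), Toks.size(),
  //                       /*DisableMacroExpansion=*/true,
  //                       /*OwnsTokens=*/false);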
682
683  /// RemoveTopOfLexerStack - Pop the current lexer/macro exp off the top of the
684  /// lexer stack.  This should only be used in situations where the current
685  /// state of the top-of-stack lexer is known.
686  void RemoveTopOfLexerStack();
687
688  /// EnableBacktrackAtThisPos - From the point that this method is called, and
689  /// until CommitBacktrackedTokens() or Backtrack() is called, the Preprocessor
690  /// keeps track of the lexed tokens so that a subsequent Backtrack() call will
691  /// make the Preprocessor re-lex the same tokens.
692  ///
693  /// Nested backtracks are allowed, meaning that EnableBacktrackAtThisPos can
694  /// be called multiple times and CommitBacktrackedTokens/Backtrack calls will
695  /// be combined with the EnableBacktrackAtThisPos calls in reverse order.
696  ///
697  /// NOTE: *DO NOT* forget to call either CommitBacktrackedTokens or Backtrack
698  /// at some point after EnableBacktrackAtThisPos. If you don't, caching of
699  /// tokens will continue indefinitely.
700  ///
701  void EnableBacktrackAtThisPos();
702
703  /// CommitBacktrackedTokens - Disable the last EnableBacktrackAtThisPos call.
704  void CommitBacktrackedTokens();
705
706  /// Backtrack - Make Preprocessor re-lex the tokens that were lexed since
707  /// EnableBacktrackAtThisPos() was previously called.
708  void Backtrack();
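
  // Illustrative sketch of the backtracking protocol (Tok is a Token and
  // TryParseThing a hypothetical helper):
  //
  //   PP.EnableBacktrackAtThisPos();
  //   PP.Lex(Tok);
  //   if (TryParseThing(Tok))
  //     PP.CommitBacktrackedTokens();  // keep going from here
  //   else
  //     PP.Backtrack();                // re-lex the cached tokens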
709
710  /// isBacktrackEnabled - True if EnableBacktrackAtThisPos() was called and
711  /// caching of tokens is on.
712  bool isBacktrackEnabled() const { return !BacktrackPositions.empty(); }
713
714  /// Lex - To lex a token from the preprocessor, just pull a token from the
715  /// current lexer or macro object.
716  void Lex(Token &Result) {
717    switch (CurLexerKind) {
718    case CLK_Lexer: CurLexer->Lex(Result); break;
719    case CLK_PTHLexer: CurPTHLexer->Lex(Result); break;
720    case CLK_TokenLexer: CurTokenLexer->Lex(Result); break;
721    case CLK_CachingLexer: CachingLex(Result); break;
722    case CLK_LexAfterModuleImport: LexAfterModuleImport(Result); break;
723    }
724  }
725
726  void LexAfterModuleImport(Token &Result);
727
728  /// \brief Lex a string literal, which may be the concatenation of multiple
729  /// string literals and may even come from macro expansion.
730  /// \returns true on success, false if an error diagnostic has been generated.
731  bool LexStringLiteral(Token &Result, std::string &String,
732                        const char *DiagnosticTag, bool AllowMacroExpansion) {
733    if (AllowMacroExpansion)
734      Lex(Result);
735    else
736      LexUnexpandedToken(Result);
737    return FinishLexStringLiteral(Result, String, DiagnosticTag,
738                                  AllowMacroExpansion);
739  }
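
  // Illustrative sketch: reading the string argument of a directive such as a
  // pragma.  The DiagnosticTag names the construct in any diagnostics emitted;
  // the surrounding code is hypothetical.
  //
  //   std::string Message;
  //   Token Tok;
  //   if (!PP.LexStringLiteral(Tok, Message, "pragma message",
  //                            /*AllowMacroExpansion=*/true))
  //     return;  // a diagnostic has already been emitted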
740
741  /// \brief Complete the lexing of a string literal where the first token has
742  /// already been lexed (see LexStringLiteral).
743  bool FinishLexStringLiteral(Token &Result, std::string &String,
744                              const char *DiagnosticTag,
745                              bool AllowMacroExpansion);
746
747  /// LexNonComment - Lex a token.  If it's a comment, keep lexing until we get
748  /// something not a comment.  This is useful in -E -C mode where comments
749  /// would foul up preprocessor directive handling.
750  void LexNonComment(Token &Result) {
751    do
752      Lex(Result);
753    while (Result.getKind() == tok::comment);
754  }
755
756  /// LexUnexpandedToken - This is just like Lex, but this disables macro
757  /// expansion of identifier tokens.
758  void LexUnexpandedToken(Token &Result) {
759    // Disable macro expansion.
760    bool OldVal = DisableMacroExpansion;
761    DisableMacroExpansion = true;
762    // Lex the token.
763    Lex(Result);
764
765    // Reenable it.
766    DisableMacroExpansion = OldVal;
767  }
768
769  /// LexUnexpandedNonComment - Like LexNonComment, but this disables macro
770  /// expansion of identifier tokens.
771  void LexUnexpandedNonComment(Token &Result) {
772    do
773      LexUnexpandedToken(Result);
774    while (Result.getKind() == tok::comment);
775  }
776
777  /// Disables macro expansion everywhere except for preprocessor directives.
778  void SetMacroExpansionOnlyInDirectives() {
779    DisableMacroExpansion = true;
780    MacroExpansionInDirectivesOverride = true;
781  }
782
783  /// LookAhead - This peeks ahead N tokens and returns that token without
784  /// consuming any tokens.  LookAhead(0) returns the next token that would be
785  /// returned by Lex(), LookAhead(1) returns the token after it, etc.  This
786  /// returns normal tokens after phase 5.  As such, it is equivalent to using
787  /// 'Lex', not 'LexUnexpandedToken'.
788  const Token &LookAhead(unsigned N) {
789    if (CachedLexPos + N < CachedTokens.size())
790      return CachedTokens[CachedLexPos+N];
791    else
792      return PeekAhead(N+1);
793  }
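
  // Illustrative sketch: peeking at upcoming tokens without consuming them.
  //
  //   const Token &Next = PP.LookAhead(0);   // what the next Lex() will return
  //   const Token &After = PP.LookAhead(1);  // the token after that
  //   if (Next.is(tok::l_paren) && After.is(tok::r_paren))
  //     ...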
794
795  /// RevertCachedTokens - When backtracking is enabled and tokens are cached,
796  /// this allows reverting a specific number of tokens.
797  /// Note that the number of tokens being reverted should not exceed the
798  /// last backtrack position.
799  void RevertCachedTokens(unsigned N) {
800    assert(isBacktrackEnabled() &&
801           "Should only be called when tokens are cached for backtracking");
802    assert(signed(CachedLexPos) - signed(N) >= signed(BacktrackPositions.back())
803         && "Should revert tokens up to the last backtrack position, not more");
804    assert(signed(CachedLexPos) - signed(N) >= 0 &&
805           "Corrupted backtrack positions ?");
806    CachedLexPos -= N;
807  }
808
809  /// EnterToken - Enters a token in the token stream to be lexed next. If
810  /// BackTrack() is called afterwards, the token will remain at the insertion
811  /// point.
812  void EnterToken(const Token &Tok) {
813    EnterCachingLexMode();
814    CachedTokens.insert(CachedTokens.begin()+CachedLexPos, Tok);
815  }
816
817  /// AnnotateCachedTokens - We notify the Preprocessor that if it is caching
818  /// tokens (because backtrack is enabled) it should replace the most recent
819  /// cached tokens with the given annotation token. This function has no effect
820  /// if backtracking is not enabled.
821  ///
822  /// Note that the use of this function is purely an optimization, so that the
823  /// cached tokens don't get re-parsed and re-resolved after a backtrack is
824  /// invoked.
825  void AnnotateCachedTokens(const Token &Tok) {
826    assert(Tok.isAnnotation() && "Expected annotation token");
827    if (CachedLexPos != 0 && isBacktrackEnabled())
828      AnnotatePreviousCachedTokens(Tok);
829  }
830
831  /// \brief Replace the last token with an annotation token.
832  ///
833  /// Like AnnotateCachedTokens(), this routine replaces an
834  /// already-parsed (and resolved) token with an annotation
835  /// token. However, this routine only replaces the last token with
836  /// the annotation token; it does not affect any other cached
837  /// tokens. This function has no effect if backtracking is not
838  /// enabled.
839  void ReplaceLastTokenWithAnnotation(const Token &Tok) {
840    assert(Tok.isAnnotation() && "Expected annotation token");
841    if (CachedLexPos != 0 && isBacktrackEnabled())
842      CachedTokens[CachedLexPos-1] = Tok;
843  }
844
845  /// TypoCorrectToken - Update the current token to represent the provided
846  /// identifier, in order to cache an action performed by typo correction.
847  void TypoCorrectToken(const Token &Tok) {
848    assert(Tok.getIdentifierInfo() && "Expected identifier token");
849    if (CachedLexPos != 0 && isBacktrackEnabled())
850      CachedTokens[CachedLexPos-1] = Tok;
851  }
852
853  /// \brief Recompute the current lexer kind based on the CurLexer/CurPTHLexer/
854  /// CurTokenLexer pointers.
855  void recomputeCurLexerKind();
856
857  /// \brief Returns true if incremental processing is enabled
858  bool isIncrementalProcessingEnabled() const { return IncrementalProcessing; }
859
860  /// \brief Enables or disables incremental processing.
861  void enableIncrementalProcessing(bool value = true) {
862    IncrementalProcessing = value;
863  }
864
865  /// \brief Specify the point at which code-completion will be performed.
866  ///
867  /// \param File the file in which code completion should occur. If
868  /// this file is included multiple times, code-completion will
869  /// perform completion the first time it is included. If NULL, this
870  /// function clears out the code-completion point.
871  ///
872  /// \param Line the line at which code completion should occur
873  /// (1-based).
874  ///
875  /// \param Column the column at which code completion should occur
876  /// (1-based).
877  ///
878  /// \returns true if an error occurred, false otherwise.
879  bool SetCodeCompletionPoint(const FileEntry *File,
880                              unsigned Line, unsigned Column);
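
  // Illustrative sketch: requesting completion at line 10, column 4 of a file
  // (File is a const FileEntry* obtained from the FileManager; the values are
  // hypothetical).
  //
  //   if (PP.SetCodeCompletionPoint(File, /*Line=*/10, /*Column=*/4))
  //     ...report failure...  // returns true on error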
881
882  /// \brief Determine if we are performing code completion.
883  bool isCodeCompletionEnabled() const { return CodeCompletionFile != 0; }
884
885  /// \brief Returns the location of the code-completion point.
886  /// Returns an invalid location if code-completion is not enabled or the file
887  /// containing the code-completion point has not been lexed yet.
888  SourceLocation getCodeCompletionLoc() const { return CodeCompletionLoc; }
889
890  /// \brief Returns the start location of the file containing the code-completion point.
891  /// Returns an invalid location if code-completion is not enabled or the file
892  /// containing the code-completion point has not been lexed yet.
893  SourceLocation getCodeCompletionFileLoc() const {
894    return CodeCompletionFileLoc;
895  }
896
897  /// \brief Returns true if code-completion is enabled and we have hit the
898  /// code-completion point.
899  bool isCodeCompletionReached() const { return CodeCompletionReached; }
900
901  /// \brief Note that we hit the code-completion point.
902  void setCodeCompletionReached() {
903    assert(isCodeCompletionEnabled() && "Code-completion not enabled!");
904    CodeCompletionReached = true;
905    // Silence any diagnostics that occur after we hit the code-completion.
906    getDiagnostics().setSuppressAllDiagnostics(true);
907  }
908
909  /// \brief The location of the currently-active \#pragma clang
910  /// arc_cf_code_audited begin.  Returns an invalid location if there
911  /// is no such pragma active.
912  SourceLocation getPragmaARCCFCodeAuditedLoc() const {
913    return PragmaARCCFCodeAuditedLoc;
914  }
915
916  /// \brief Set the location of the currently-active \#pragma clang
917  /// arc_cf_code_audited begin.  An invalid location ends the pragma.
918  void setPragmaARCCFCodeAuditedLoc(SourceLocation Loc) {
919    PragmaARCCFCodeAuditedLoc = Loc;
920  }
921
922  /// \brief Instruct the preprocessor to skip part of the main source file.
923  ///
924  /// \param Bytes The number of bytes in the preamble to skip.
925  ///
926  /// \param StartOfLine Whether skipping these bytes puts the lexer at the
927  /// start of a line.
928  void setSkipMainFilePreamble(unsigned Bytes, bool StartOfLine) {
929    SkipMainFilePreamble.first = Bytes;
930    SkipMainFilePreamble.second = StartOfLine;
931  }
932
933  /// Diag - Forwarding function for diagnostics.  This emits a diagnostic at
934  /// the specified Token's location, translating the token's start
935  /// position in the current buffer into a SourceLocation object for rendering.
936  DiagnosticBuilder Diag(SourceLocation Loc, unsigned DiagID) const {
937    return Diags->Report(Loc, DiagID);
938  }
939
940  DiagnosticBuilder Diag(const Token &Tok, unsigned DiagID) const {
941    return Diags->Report(Tok.getLocation(), DiagID);
942  }
943
944  /// getSpelling() - Return the 'spelling' of the token at the given
945  /// location; does not go up to the spelling location or down to the
946  /// expansion location.
947  ///
948  /// \param buffer A buffer which will be used only if the token requires
949  ///   "cleaning", e.g. if it contains trigraphs or escaped newlines
950  /// \param invalid If non-null, will be set \c true if an error occurs.
951  StringRef getSpelling(SourceLocation loc,
952                        SmallVectorImpl<char> &buffer,
953                        bool *invalid = 0) const {
954    return Lexer::getSpelling(loc, buffer, SourceMgr, LangOpts, invalid);
955  }
956
957  /// getSpelling() - Return the 'spelling' of the Tok token.  The spelling of a
958  /// token is the characters used to represent the token in the source file
959  /// after trigraph expansion and escaped-newline folding.  In particular, this
960  /// wants to get the true, uncanonicalized, spelling of things like digraphs,
961  /// UCNs, etc.
962  ///
963  /// \param Invalid If non-null, will be set \c true if an error occurs.
964  std::string getSpelling(const Token &Tok, bool *Invalid = 0) const {
965    return Lexer::getSpelling(Tok, SourceMgr, LangOpts, Invalid);
966  }
967
968  /// getSpelling - This method is used to get the spelling of a token into a
969  /// preallocated buffer, instead of as an std::string.  The caller is required
970  /// to allocate enough space for the token, which is guaranteed to be at least
971  /// Tok.getLength() bytes long.  The length of the actual result is returned.
972  ///
973  /// Note that this method may do two possible things: it may either fill in
974  /// the buffer specified with characters, or it may *change the input pointer*
975  /// to point to a constant buffer with the data already in it (avoiding a
976  /// copy).  The caller is not allowed to modify the returned buffer pointer
977  /// if an internal buffer is returned.
978  unsigned getSpelling(const Token &Tok, const char *&Buffer,
979                       bool *Invalid = 0) const {
980    return Lexer::getSpelling(Tok, Buffer, SourceMgr, LangOpts, Invalid);
981  }
982
983  /// getSpelling - This method is used to get the spelling of a token into a
984  /// SmallVector. Note that the returned StringRef may not point to the
985  /// supplied buffer if a copy can be avoided.
986  StringRef getSpelling(const Token &Tok,
987                        SmallVectorImpl<char> &Buffer,
988                        bool *Invalid = 0) const;
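
  // Illustrative sketch: getting a token's spelling without allocating a
  // std::string; the returned StringRef may point into Buffer or into the
  // original source buffer.
  //
  //   SmallString<32> Buffer;
  //   bool Invalid = false;
  //   StringRef Spelling = PP.getSpelling(Tok, Buffer, &Invalid);
  //   if (!Invalid)
  //     ...use Spelling...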
989
990  /// \brief Relex the token at the specified location.
991  /// \returns true if there was a failure, false on success.
992  bool getRawToken(SourceLocation Loc, Token &Result) {
993    return Lexer::getRawToken(Loc, Result, SourceMgr, LangOpts);
994  }
995
996  /// getSpellingOfSingleCharacterNumericConstant - Tok is a numeric constant
997  /// with length 1, return the character.
998  char getSpellingOfSingleCharacterNumericConstant(const Token &Tok,
999                                                   bool *Invalid = 0) const {
1000    assert(Tok.is(tok::numeric_constant) &&
1001           Tok.getLength() == 1 && "Called on unsupported token");
1002    assert(!Tok.needsCleaning() && "Token can't need cleaning with length 1");
1003
1004    // If the token is carrying a literal data pointer, just use it.
1005    if (const char *D = Tok.getLiteralData())
1006      return *D;
1007
1008    // Otherwise, fall back on getCharacterData, which is slower, but always
1009    // works.
1010    return *SourceMgr.getCharacterData(Tok.getLocation(), Invalid);
1011  }
1012
1013  /// \brief Retrieve the name of the immediate macro expansion.
1014  ///
1015  /// This routine starts from a source location, and finds the name of the macro
1016  /// responsible for its immediate expansion. It looks through any intervening
1017  /// macro argument expansions to compute this. It returns a StringRef which
1018  /// refers to the SourceManager-owned buffer of the source where that macro
1019  /// name is spelled. Thus, the result shouldn't out-live the SourceManager.
1020  StringRef getImmediateMacroName(SourceLocation Loc) {
1021    return Lexer::getImmediateMacroName(Loc, SourceMgr, getLangOpts());
1022  }
1023
1024  /// CreateString - Plop the specified string into a scratch buffer and set the
1025  /// specified token's location and length to it.  If specified, the source
1026  /// location provides a location of the expansion point of the token.
1027  void CreateString(StringRef Str, Token &Tok,
1028                    SourceLocation ExpansionLocStart = SourceLocation(),
1029                    SourceLocation ExpansionLocEnd = SourceLocation());
1030
1031  /// \brief Computes the source location just past the end of the
1032  /// token at this source location.
1033  ///
1034  /// This routine can be used to produce a source location that
1035  /// points just past the end of the token referenced by \p Loc, and
1036  /// is generally used when a diagnostic needs to point just after a
1037  /// token where it expected something different from what it received. If
1038  /// the returned source location would not be meaningful (e.g., if
1039  /// it points into a macro), this routine returns an invalid
1040  /// source location.
1041  ///
1042  /// \param Offset an offset from the end of the token, where the source
1043  /// location should refer to. The default offset (0) produces a source
1044  /// location pointing just past the end of the token; an offset of 1 produces
1045  /// a source location pointing to the last character in the token, etc.
1046  SourceLocation getLocForEndOfToken(SourceLocation Loc, unsigned Offset = 0) {
1047    return Lexer::getLocForEndOfToken(Loc, Offset, SourceMgr, LangOpts);
1048  }
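
  // Illustrative sketch: pointing a diagnostic just past a token, e.g. to
  // suggest inserting text after it (DiagID is a hypothetical diagnostic ID).
  //
  //   SourceLocation After = PP.getLocForEndOfToken(Tok.getLocation());
  //   if (After.isValid())
  //     PP.Diag(After, DiagID) << FixItHint::CreateInsertion(After, ";");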
1049
1050  /// \brief Returns true if the given MacroID location points at the first
1051  /// token of the macro expansion.
1052  ///
1053  /// \param MacroBegin If non-null and function returns true, it is set to
1054  /// begin location of the macro.
1055  bool isAtStartOfMacroExpansion(SourceLocation loc,
1056                                 SourceLocation *MacroBegin = 0) const {
1057    return Lexer::isAtStartOfMacroExpansion(loc, SourceMgr, LangOpts,
1058                                            MacroBegin);
1059  }
1060
1061  /// \brief Returns true if the given MacroID location points at the last
1062  /// token of the macro expansion.
1063  ///
1064  /// \param MacroEnd If non-null and function returns true, it is set to
1065  /// end location of the macro.
1066  bool isAtEndOfMacroExpansion(SourceLocation loc,
1067                               SourceLocation *MacroEnd = 0) const {
1068    return Lexer::isAtEndOfMacroExpansion(loc, SourceMgr, LangOpts, MacroEnd);
1069  }
1070
1071  /// DumpToken - Print the token to stderr, used for debugging.
1072  ///
1073  void DumpToken(const Token &Tok, bool DumpFlags = false) const;
1074  void DumpLocation(SourceLocation Loc) const;
1075  void DumpMacro(const MacroInfo &MI) const;
1076
1077  /// AdvanceToTokenCharacter - Given a location that specifies the start of a
1078  /// token, return a new location that specifies a character within the token.
1079  SourceLocation AdvanceToTokenCharacter(SourceLocation TokStart,
1080                                         unsigned Char) const {
1081    return Lexer::AdvanceToTokenCharacter(TokStart, Char, SourceMgr, LangOpts);
1082  }
1083
1084  /// IncrementPasteCounter - Increment the counters for the number of token
1085  /// paste operations performed.  If fast was specified, this is a 'fast paste'
1086  /// case we handled.
1087  ///
1088  void IncrementPasteCounter(bool isFast) {
1089    if (isFast)
1090      ++NumFastTokenPaste;
1091    else
1092      ++NumTokenPaste;
1093  }
1094
1095  void PrintStats();
1096
1097  size_t getTotalMemory() const;
1098
1099  /// HandleMicrosoftCommentPaste - When the macro expander pastes together a
1100  /// comment (/##/) in microsoft mode, this method handles updating the current
1101  /// state, returning the token on the next source line.
1102  void HandleMicrosoftCommentPaste(Token &Tok);
1103
1104  //===--------------------------------------------------------------------===//
1105  // Preprocessor callback methods.  These are invoked by a lexer as various
1106  // directives and events are found.
1107
1108  /// LookUpIdentifierInfo - Given a tok::raw_identifier token, look up the
1109  /// identifier information for the token and install it into the token,
1110  /// updating the token kind accordingly.
1111  IdentifierInfo *LookUpIdentifierInfo(Token &Identifier) const;
1112
1113private:
1114  llvm::DenseMap<IdentifierInfo*,unsigned> PoisonReasons;
1115
1116public:
1117
1118  // SetPoisonReason - Call this function to indicate the reason for
1119  // poisoning an identifier. If that identifier is accessed while
1120  // poisoned, then this reason will be used instead of the default
1121  // "poisoned" diagnostic.
1122  void SetPoisonReason(IdentifierInfo *II, unsigned DiagID);
1123
1124  // HandlePoisonedIdentifier - Display reason for poisoned
1125  // identifier.
1126  void HandlePoisonedIdentifier(Token & Tok);
1127
1128  void MaybeHandlePoisonedIdentifier(Token &Identifier) {
1129    if (IdentifierInfo *II = Identifier.getIdentifierInfo()) {
1130      if (II->isPoisoned()) {
1131        HandlePoisonedIdentifier(Identifier);
1132      }
1133    }
1134  }
1135
1136private:
1137  /// Identifiers used for SEH handling in Borland. These are only
1138  /// allowed in particular circumstances.
1139  // __except block
1140  IdentifierInfo *Ident__exception_code,
1141                 *Ident___exception_code,
1142                 *Ident_GetExceptionCode;
1143  // __except filter expression
1144  IdentifierInfo *Ident__exception_info,
1145                 *Ident___exception_info,
1146                 *Ident_GetExceptionInfo;
1147  // __finally
1148  IdentifierInfo *Ident__abnormal_termination,
1149                 *Ident___abnormal_termination,
1150                 *Ident_AbnormalTermination;
1151public:
1152  void PoisonSEHIdentifiers(bool Poison = true); // Borland
1153
1154  /// HandleIdentifier - This callback is invoked when the lexer reads an
1155  /// identifier and has filled in the token's IdentifierInfo member.  This
1156  /// callback potentially macro expands it or turns it into a named token (like
1157  /// 'for').
1158  void HandleIdentifier(Token &Identifier);
1159
1160
1161  /// HandleEndOfFile - This callback is invoked when the lexer hits the end of
1162  /// the current file.  This either returns the EOF token and returns true, or
1163  /// pops a level off the include stack and returns false, at which point the
1164  /// client should call lex again.
1165  bool HandleEndOfFile(Token &Result, bool isEndOfMacro = false);
1166
1167  /// HandleEndOfTokenLexer - This callback is invoked when the current
1168  /// TokenLexer hits the end of its token stream.
1169  bool HandleEndOfTokenLexer(Token &Result);
1170
1171  /// HandleDirective - This callback is invoked when the lexer sees a # token
1172  /// at the start of a line.  This consumes the directive, modifies the
1173  /// lexer/preprocessor state, and advances the lexer(s) so that the next token
1174  /// read is the correct one.
1175  void HandleDirective(Token &Result);
1176
1177  /// CheckEndOfDirective - Ensure that the next token is a tok::eod token.  If
1178  /// not, emit a diagnostic and consume up until the eod.  If EnableMacros is
1179  /// true, then we consider macros that expand to zero tokens as being ok.
1180  void CheckEndOfDirective(const char *Directive, bool EnableMacros = false);
1181
1182  /// DiscardUntilEndOfDirective - Read and discard all tokens remaining on the
1183  /// current line until the tok::eod token is found.
1184  void DiscardUntilEndOfDirective();
1185
1186  /// SawDateOrTime - This returns true if the preprocessor has seen a use of
1187  /// __DATE__ or __TIME__ in the file so far.
1188  bool SawDateOrTime() const {
1189    return DATELoc != SourceLocation() || TIMELoc != SourceLocation();
1190  }
1191  unsigned getCounterValue() const { return CounterValue; }
1192  void setCounterValue(unsigned V) { CounterValue = V; }
1193
1194  /// \brief Retrieves the module that we're currently building, if any.
1195  Module *getCurrentModule();
1196
1197  /// \brief Allocate a new MacroInfo object with the provided SourceLocation.
1198  MacroInfo *AllocateMacroInfo(SourceLocation L);
1199
1200  /// \brief Allocate a new MacroInfo object loaded from an AST file.
1201  MacroInfo *AllocateDeserializedMacroInfo(SourceLocation L,
1202                                           unsigned SubModuleID);
1203
1204  /// \brief Turn the specified lexer token into a fully checked and spelled
1205  /// filename, e.g. as an operand of \#include.
1206  ///
1207  /// The caller is expected to provide a buffer that is large enough to hold
1208  /// the spelling of the filename, but is also expected to handle the case
1209  /// when this method decides to use a different buffer.
1210  ///
1211  /// \returns true if the input filename was in <>'s or false if it was
1212  /// in ""'s.
1213  bool GetIncludeFilenameSpelling(SourceLocation Loc,StringRef &Filename);
1214
1215  /// \brief Given a "foo" or \<foo> reference, look up the indicated file.
1216  ///
1217  /// Returns null on failure.  \p isAngled indicates whether the file
1218  /// reference is for system \#include's or not (i.e. using <> instead of "").
1219  const FileEntry *LookupFile(StringRef Filename,
1220                              bool isAngled, const DirectoryLookup *FromDir,
1221                              const DirectoryLookup *&CurDir,
1222                              SmallVectorImpl<char> *SearchPath,
1223                              SmallVectorImpl<char> *RelativePath,
1224                              Module **SuggestedModule,
1225                              bool SkipCache = false);
1226
1227  /// GetCurDirLookup - The DirectoryLookup structure used to find the current
1228  /// FileEntry, if CurLexer is non-null and if applicable.  This allows us to
1229  /// implement \#include_next and find directory-specific properties.
1230  const DirectoryLookup *GetCurDirLookup() { return CurDirLookup; }
1231
1232  /// \brief Return true if we're in the top-level file, not in a \#include.
1233  bool isInPrimaryFile() const;
1234
1235  /// ConcatenateIncludeName - Handle cases where the \#include name is expanded
1236  /// from a macro as multiple tokens, which need to be glued together.  This
1237  /// occurs for code like:
1238  /// \code
1239  ///    \#define FOO <x/y.h>
1240  ///    \#include FOO
1241  /// \endcode
1242  /// because in this case, "<x/y.h>" is returned as 7 tokens, not one.
1243  ///
1244  /// This code concatenates and consumes tokens up to the '>' token.  It
1245  /// returns false if the > was found, otherwise it returns true if it finds
1246  /// and consumes the EOD marker.
1247  bool ConcatenateIncludeName(SmallString<128> &FilenameBuffer,
1248                              SourceLocation &End);
1249
1250  /// LexOnOffSwitch - Lex an on-off-switch (C99 6.10.6p2) and verify that it is
1251  /// followed by EOD.  Return true if the token is not a valid on-off-switch.
1252  bool LexOnOffSwitch(tok::OnOffSwitch &OOS);
1253
1254private:
1255
  void PushIncludeMacroStack() {
    IncludeMacroStack.push_back(IncludeStackInfo(CurLexerKind,
                                                 CurLexer.take(),
                                                 CurPTHLexer.take(),
                                                 CurPPLexer,
                                                 CurTokenLexer.take(),
                                                 CurDirLookup));
    CurPPLexer = 0;
  }

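  /// \brief Restore the lexer state saved by the most recent
  /// PushIncludeMacroStack() and pop it off the include stack.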
  void PopIncludeMacroStack() {
    CurLexer.reset(IncludeMacroStack.back().TheLexer);
    CurPTHLexer.reset(IncludeMacroStack.back().ThePTHLexer);
    CurPPLexer = IncludeMacroStack.back().ThePPLexer;
    CurTokenLexer.reset(IncludeMacroStack.back().TheTokenLexer);
    CurDirLookup  = IncludeMacroStack.back().TheDirLookup;
    CurLexerKind = IncludeMacroStack.back().CurLexerKind;
    IncludeMacroStack.pop_back();
  }

  /// \brief Allocate a new MacroInfo object.
  MacroInfo *AllocateMacroInfo();

  DefMacroDirective *AllocateDefMacroDirective(MacroInfo *MI,
                                               SourceLocation Loc,
                                               bool isImported);
  UndefMacroDirective *AllocateUndefMacroDirective(SourceLocation UndefLoc);
  VisibilityMacroDirective *AllocateVisibilityMacroDirective(SourceLocation Loc,
                                                             bool isPublic);

  /// \brief Release the specified MacroInfo for re-use.
  ///
  /// This memory will be reused for allocating new MacroInfo objects.
  void ReleaseMacroInfo(MacroInfo* MI);

  /// ReadMacroName - Lex and validate a macro name, which occurs after a
  /// \#define or \#undef.  This emits a diagnostic, sets the token kind to eod,
  /// and discards the rest of the macro line if the macro name is invalid.
  void ReadMacroName(Token &MacroNameTok, char isDefineUndef = 0);

  /// ReadMacroDefinitionArgList - The ( starting an argument list of a macro
  /// definition has just been read.  Lex the rest of the arguments and the
  /// closing ), updating MI with what we learn and saving in LastTok the
  /// last token read.
  /// Return true if an error occurs parsing the arg list.
  bool ReadMacroDefinitionArgList(MacroInfo *MI, Token& LastTok);

  /// We just read a \#if or related directive and decided that the
  /// subsequent tokens are in the \#if'd out portion of the
  /// file.  Lex the rest of the file, until we see an \#endif.  If \p
  /// FoundNonSkipPortion is true, then we have already emitted code for part of
  /// this \#if directive, so \#else/\#elif blocks should never be entered.  If
  /// \p FoundElse is false, then \#else directives are ok; otherwise we have
  /// already seen one, so a \#else directive is a duplicate.  When this returns,
  /// the caller can lex the first valid token.
  void SkipExcludedConditionalBlock(SourceLocation IfTokenLoc,
                                    bool FoundNonSkipPortion, bool FoundElse,
                                    SourceLocation ElseLoc = SourceLocation());

  /// \brief A fast PTH version of SkipExcludedConditionalBlock.
  void PTHSkipExcludedConditionalBlock();

  /// EvaluateDirectiveExpression - Evaluate an integer constant expression that
  /// may occur after a \#if or \#elif directive and return it as a bool.  If the
  /// expression is equivalent to "!defined(X)" return X in IfNDefMacro.
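  ///
  /// For example (MY_GUARD_H is an illustrative macro name), for the directive
  /// \code
  ///   \#if !defined(MY_GUARD_H)
  /// \endcode
  /// the macro MY_GUARD_H is returned in IfNDefMacro along with the result.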
  bool EvaluateDirectiveExpression(IdentifierInfo *&IfNDefMacro);

  /// RegisterBuiltinPragmas - Install the standard preprocessor pragmas:
  /// \#pragma GCC poison/system_header/dependency and \#pragma once.
  void RegisterBuiltinPragmas();

  /// \brief Register builtin macros such as __LINE__ with the identifier table.
  void RegisterBuiltinMacros();

  /// HandleMacroExpandedIdentifier - If an identifier token is read that is to
  /// be expanded as a macro, handle it and return the next token as 'Tok'.  If
  /// the macro should not be expanded return true, otherwise return false.
  bool HandleMacroExpandedIdentifier(Token &Tok, MacroDirective *MD);

  /// \brief Cache macro expanded tokens for TokenLexers.
  ///
  /// Works like a stack; a TokenLexer adds the macro-expanded tokens that it is
  /// going to lex into the cache, and when it finishes the tokens are removed
  /// from the end of the cache.
  Token *cacheMacroExpandedTokens(TokenLexer *tokLexer,
                                  ArrayRef<Token> tokens);
  void removeCachedMacroExpandedTokensOfLastLexer();
  friend void TokenLexer::ExpandFunctionArguments();

  /// isNextPPTokenLParen - Determine whether the next preprocessor token to be
  /// lexed is a '('.  If so, consume the token and return true; if not, this
  /// method should have no observable side-effect on the lexed tokens.
  bool isNextPPTokenLParen();

  /// ReadFunctionLikeMacroArgs - After reading "MACRO(", this method is
  /// invoked to read all of the formal arguments specified for the macro
  /// invocation.  This returns null on error.
  MacroArgs *ReadFunctionLikeMacroArgs(Token &MacroName, MacroInfo *MI,
                                       SourceLocation &ExpansionEnd);

  /// ExpandBuiltinMacro - If an identifier token is read that is to be expanded
  /// as a builtin macro, handle it and return the next token as 'Tok'.
  void ExpandBuiltinMacro(Token &Tok);

  /// Handle_Pragma - Read a _Pragma directive, slice it up, process it, then
  /// return the first token after the directive.  The _Pragma token has just
  /// been read into 'Tok'.
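  ///
  /// For example (C99 6.10.9), the pragma text is carried in a string literal:
  /// \code
  ///   _Pragma("GCC system_header")
  /// \endcode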
  void Handle_Pragma(Token &Tok);

  /// HandleMicrosoft__pragma - Like Handle_Pragma except the pragma text
  /// is not enclosed within a string literal.
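  ///
  /// For example (illustrative of the Microsoft extension):
  /// \code
  ///   __pragma(warning(disable: 4996))
  /// \endcode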
  void HandleMicrosoft__pragma(Token &Tok);

  /// EnterSourceFileWithLexer - Add a lexer to the top of the include stack and
  /// start lexing tokens from it instead of the current buffer.
  void EnterSourceFileWithLexer(Lexer *TheLexer, const DirectoryLookup *Dir);

  /// EnterSourceFileWithPTH - Add a lexer to the top of the include stack and
  /// start getting tokens from it using the PTH cache.
  void EnterSourceFileWithPTH(PTHLexer *PL, const DirectoryLookup *Dir);

  /// \brief Set the file ID for the preprocessor predefines.
  void setPredefinesFileID(FileID FID) {
    assert(PredefinesFileID.isInvalid() && "PredefinesFileID already set!");
    PredefinesFileID = FID;
  }

  /// IsFileLexer - Returns true if we are lexing from a file and not a
  ///  pragma or a macro.
  static bool IsFileLexer(const Lexer* L, const PreprocessorLexer* P) {
    return L ? !L->isPragmaLexer() : P != 0;
  }

  static bool IsFileLexer(const IncludeStackInfo& I) {
    return IsFileLexer(I.TheLexer, I.ThePPLexer);
  }

  bool IsFileLexer() const {
    return IsFileLexer(CurLexer.get(), CurPPLexer);
  }

  //===--------------------------------------------------------------------===//
  // Caching stuff.
  void CachingLex(Token &Result);
  bool InCachingLexMode() const {
    // If the Lexer pointers are 0 and IncludeMacroStack is empty, it means
    // that we are past EOF, not that we are in CachingLex mode.
    return CurPPLexer == 0 && CurTokenLexer == 0 && CurPTHLexer == 0 &&
           !IncludeMacroStack.empty();
  }
  void EnterCachingLexMode();
  void ExitCachingLexMode() {
    if (InCachingLexMode())
      RemoveTopOfLexerStack();
  }
  const Token &PeekAhead(unsigned N);
  void AnnotatePreviousCachedTokens(const Token &Tok);

  //===--------------------------------------------------------------------===//
  /// Handle*Directive - implement the various preprocessor directives.  These
  /// should side-effect the current preprocessor object so that the next call
  /// to Lex() will return the appropriate token.
  void HandleLineDirective(Token &Tok);
  void HandleDigitDirective(Token &Tok);
  void HandleUserDiagnosticDirective(Token &Tok, bool isWarning);
  void HandleIdentSCCSDirective(Token &Tok);
  void HandleMacroPublicDirective(Token &Tok);
  void HandleMacroPrivateDirective(Token &Tok);

  // File inclusion.
  void HandleIncludeDirective(SourceLocation HashLoc,
                              Token &Tok,
                              const DirectoryLookup *LookupFrom = 0,
                              bool isImport = false);
  void HandleIncludeNextDirective(SourceLocation HashLoc, Token &Tok);
  void HandleIncludeMacrosDirective(SourceLocation HashLoc, Token &Tok);
  void HandleImportDirective(SourceLocation HashLoc, Token &Tok);
  void HandleMicrosoftImportDirective(Token &Tok);

  // Macro handling.
  void HandleDefineDirective(Token &Tok);
  void HandleUndefDirective(Token &Tok);

  // Conditional Inclusion.
  void HandleIfdefDirective(Token &Tok, bool isIfndef,
                            bool ReadAnyTokensBeforeDirective);
  void HandleIfDirective(Token &Tok, bool ReadAnyTokensBeforeDirective);
  void HandleEndifDirective(Token &Tok);
  void HandleElseDirective(Token &Tok);
  void HandleElifDirective(Token &Tok);

  // Pragmas.
  void HandlePragmaDirective(unsigned Introducer);
public:
  void HandlePragmaOnce(Token &OnceTok);
  void HandlePragmaMark();
  void HandlePragmaPoison(Token &PoisonTok);
  void HandlePragmaSystemHeader(Token &SysHeaderTok);
  void HandlePragmaDependency(Token &DependencyTok);
  void HandlePragmaComment(Token &CommentTok);
  void HandlePragmaPushMacro(Token &Tok);
  void HandlePragmaPopMacro(Token &Tok);
  void HandlePragmaIncludeAlias(Token &Tok);
  IdentifierInfo *ParsePragmaPushOrPopMacro(Token &Tok);

  // Return true and store the first token only if any CommentHandler
  // has inserted some tokens and getCommentRetentionState() is false.
  bool HandleComment(Token &Token, SourceRange Comment);

  /// \brief A macro is used; update information about macros that need unused
  /// warnings.
  void markMacroAsUsed(MacroInfo *MI);
};

/// \brief Abstract base class that describes a handler that will receive
/// source ranges for each of the comments encountered in the source file.
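///
/// A minimal sketch of a handler (the class name and body are illustrative,
/// not part of Clang):
/// \code
///   class LoggingCommentHandler : public CommentHandler {
///   public:
///     virtual bool HandleComment(Preprocessor &PP, SourceRange Comment) {
///       // Inspect or record the comment's source range here.  Return false
///       // because no tokens were pushed with EnterToken/EnterTokenStream.
///       return false;
///     }
///   };
/// \endcode
/// Handlers of this kind are typically registered with the preprocessor's
/// addCommentHandler() and removed with removeCommentHandler().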
class CommentHandler {
public:
  virtual ~CommentHandler();

  // The handler shall return true if it has pushed any tokens
  // to be read using e.g. EnterToken or EnterTokenStream.
  virtual bool HandleComment(Preprocessor &PP, SourceRange Comment) = 0;
};

}  // end namespace clang

#endif
