Preprocessor.h revision 9484c1dcf084452228aae439edcd00e1b4ce7470
1//===--- Preprocessor.h - C Language Family Preprocessor --------*- C++ -*-===//
2//
3//                     The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10//  This file defines the Preprocessor interface.
11//
12//===----------------------------------------------------------------------===//
13
14#ifndef LLVM_CLANG_LEX_PREPROCESSOR_H
15#define LLVM_CLANG_LEX_PREPROCESSOR_H
16
17#include "clang/Basic/Builtins.h"
18#include "clang/Basic/Diagnostic.h"
19#include "clang/Basic/IdentifierTable.h"
20#include "clang/Basic/SourceLocation.h"
21#include "clang/Lex/Lexer.h"
22#include "clang/Lex/MacroInfo.h"
23#include "clang/Lex/PPCallbacks.h"
24#include "clang/Lex/PPMutationListener.h"
25#include "clang/Lex/PTHLexer.h"
26#include "clang/Lex/PTHManager.h"
27#include "clang/Lex/TokenLexer.h"
28#include "llvm/ADT/ArrayRef.h"
29#include "llvm/ADT/DenseMap.h"
30#include "llvm/ADT/IntrusiveRefCntPtr.h"
31#include "llvm/ADT/OwningPtr.h"
32#include "llvm/ADT/SmallPtrSet.h"
33#include "llvm/ADT/SmallVector.h"
34#include "llvm/Support/Allocator.h"
35#include <vector>
36
37namespace llvm {
38  template<unsigned InternalLen> class SmallString;
39}
40
41namespace clang {
42
43class SourceManager;
44class ExternalPreprocessorSource;
45class FileManager;
46class FileEntry;
47class HeaderSearch;
48class PragmaNamespace;
49class PragmaHandler;
50class CommentHandler;
51class ScratchBuffer;
52class TargetInfo;
53class PPCallbacks;
54class CodeCompletionHandler;
55class DirectoryLookup;
56class PreprocessingRecord;
57class ModuleLoader;
58class PreprocessorOptions;
59
60/// \brief Stores token information for comparing actual tokens with
61/// predefined values.  Only handles simple tokens and identifiers.
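///
/// For illustration only, a sketch of building a spelling to compare tokens
/// against (\c PP and \c Tok are assumed to be a Preprocessor and a Token):
/// \code
///   TokenValue ExpectedSpelling[] = { PP.getIdentifierInfo("once") };
///   // ExpectedSpelling[0] == Tok holds iff Tok is the identifier 'once'.
/// \endcode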
62class TokenValue {
63  tok::TokenKind Kind;
64  IdentifierInfo *II;
65
66public:
67  TokenValue(tok::TokenKind Kind) : Kind(Kind), II(0) {
68    assert(Kind != tok::raw_identifier && "Raw identifiers are not supported.");
69    assert(Kind != tok::identifier &&
70           "Identifiers should be created by TokenValue(IdentifierInfo *)");
71    assert(!tok::isLiteral(Kind) && "Literals are not supported.");
72    assert(!tok::isAnnotation(Kind) && "Annotations are not supported.");
73  }
74  TokenValue(IdentifierInfo *II) : Kind(tok::identifier), II(II) {}
75  bool operator==(const Token &Tok) const {
76    return Tok.getKind() == Kind &&
77        (!II || II == Tok.getIdentifierInfo());
78  }
79};
80
81/// Preprocessor - This object engages in a tight little dance with the lexer to
82/// efficiently preprocess tokens.  Lexers know only about tokens within a
83/// single source file, and don't know anything about preprocessor-level issues
84/// like the \#include stack, token expansion, etc.
85///
86class Preprocessor : public RefCountedBase<Preprocessor> {
87  IntrusiveRefCntPtr<PreprocessorOptions> PPOpts;
88  DiagnosticsEngine        *Diags;
89  LangOptions       &LangOpts;
90  const TargetInfo  *Target;
91  FileManager       &FileMgr;
92  SourceManager     &SourceMgr;
93  ScratchBuffer     *ScratchBuf;
94  HeaderSearch      &HeaderInfo;
95  ModuleLoader      &TheModuleLoader;
96
97  /// \brief External source of macros.
98  ExternalPreprocessorSource *ExternalSource;
99
100
101  /// PTH - An optional PTHManager object used for getting tokens from
102  ///  a token cache rather than lexing the original source file.
103  OwningPtr<PTHManager> PTH;
104
105  /// BP - A BumpPtrAllocator object used to quickly allocate and release
106  ///  objects internal to the Preprocessor.
107  llvm::BumpPtrAllocator BP;
108
109  /// Identifiers for builtin macros and other builtins.
110  IdentifierInfo *Ident__LINE__, *Ident__FILE__;   // __LINE__, __FILE__
111  IdentifierInfo *Ident__DATE__, *Ident__TIME__;   // __DATE__, __TIME__
112  IdentifierInfo *Ident__INCLUDE_LEVEL__;          // __INCLUDE_LEVEL__
113  IdentifierInfo *Ident__BASE_FILE__;              // __BASE_FILE__
114  IdentifierInfo *Ident__TIMESTAMP__;              // __TIMESTAMP__
115  IdentifierInfo *Ident__COUNTER__;                // __COUNTER__
116  IdentifierInfo *Ident_Pragma, *Ident__pragma;    // _Pragma, __pragma
117  IdentifierInfo *Ident__VA_ARGS__;                // __VA_ARGS__
118  IdentifierInfo *Ident__has_feature;              // __has_feature
119  IdentifierInfo *Ident__has_extension;            // __has_extension
120  IdentifierInfo *Ident__has_builtin;              // __has_builtin
121  IdentifierInfo *Ident__has_attribute;            // __has_attribute
122  IdentifierInfo *Ident__has_include;              // __has_include
123  IdentifierInfo *Ident__has_include_next;         // __has_include_next
124  IdentifierInfo *Ident__has_warning;              // __has_warning
125  IdentifierInfo *Ident__building_module;          // __building_module
126  IdentifierInfo *Ident__MODULE__;                 // __MODULE__
127
128  SourceLocation DATELoc, TIMELoc;
129  unsigned CounterValue;  // Next __COUNTER__ value.
130
131  enum {
132    /// MaxAllowedIncludeStackDepth - Maximum depth of \#includes.
133    MaxAllowedIncludeStackDepth = 200
134  };
135
136  // State that is set before the preprocessor begins.
137  bool KeepComments : 1;
138  bool KeepMacroComments : 1;
139  bool SuppressIncludeNotFoundError : 1;
140
141  // State that changes while the preprocessor runs:
142  bool InMacroArgs : 1;            // True if parsing fn macro invocation args.
143
144  /// Whether the preprocessor owns the header search object.
145  bool OwnsHeaderSearch : 1;
146
147  /// DisableMacroExpansion - True if macro expansion is disabled.
148  bool DisableMacroExpansion : 1;
149
150  /// MacroExpansionInDirectivesOverride - Temporarily disables
151  /// DisableMacroExpansion (i.e. enables expansion) when parsing preprocessor
152  /// directives.
153  bool MacroExpansionInDirectivesOverride : 1;
154
155  class ResetMacroExpansionHelper;
156
157  /// \brief Whether we have already loaded macros from the external source.
158  mutable bool ReadMacrosFromExternalSource : 1;
159
160  /// \brief True if pragmas are enabled.
161  bool PragmasEnabled : 1;
162
163  /// \brief True if the current build action is a preprocessing action.
164  bool PreprocessedOutput : 1;
165
166  /// \brief True if we are currently preprocessing a #if or #elif directive
167  bool ParsingIfOrElifDirective;
168
169  /// \brief True if we are pre-expanding macro arguments.
170  bool InMacroArgPreExpansion;
171
172  /// Identifiers - This is mapping/lookup information for all identifiers in
173  /// the program, including program keywords.
174  mutable IdentifierTable Identifiers;
175
176  /// Selectors - This table contains all the selectors in the program. Unlike
177  /// IdentifierTable above, this table *isn't* populated by the preprocessor.
178  /// It is declared/expanded here because its role/lifetime is
179  /// conceptually similar to the IdentifierTable. In addition, the current
180  /// control flow (in clang::ParseAST()) makes it convenient to put it here.
181  /// FIXME: Make sure the lifetime of Identifiers/Selectors *isn't* tied to
182  /// the lifetime of the preprocessor.
183  SelectorTable Selectors;
184
185  /// BuiltinInfo - Information about builtins.
186  Builtin::Context BuiltinInfo;
187
188  /// PragmaHandlers - This tracks all of the pragma handlers that the client
189  /// registered with this preprocessor.
190  PragmaNamespace *PragmaHandlers;
191
192  /// \brief Tracks all of the comment handlers that the client registered
193  /// with this preprocessor.
194  std::vector<CommentHandler *> CommentHandlers;
195
196  /// \brief True if we want to ignore the EOF token and continue later on
197  /// (thus avoiding tearing down the Lexer, etc.).
198  bool IncrementalProcessing;
199
200  /// \brief The code-completion handler.
201  CodeCompletionHandler *CodeComplete;
202
203  /// \brief The file that we're performing code-completion for, if any.
204  const FileEntry *CodeCompletionFile;
205
206  /// \brief The offset in file for the code-completion point.
207  unsigned CodeCompletionOffset;
208
209  /// \brief The location for the code-completion point. This gets instantiated
210  /// when the CodeCompletionFile gets \#include'ed for preprocessing.
211  SourceLocation CodeCompletionLoc;
212
213  /// \brief The start location for the file of the code-completion point.
214  ///
215  /// This gets instantiated when the CodeCompletionFile gets \#include'ed
216  /// for preprocessing.
217  SourceLocation CodeCompletionFileLoc;
218
219  /// \brief The source location of the 'import' contextual keyword we just
220  /// lexed, if any.
221  SourceLocation ModuleImportLoc;
222
223  /// \brief The module import path that we're currently processing.
224  SmallVector<std::pair<IdentifierInfo *, SourceLocation>, 2> ModuleImportPath;
225
226  /// \brief Whether the module import expects an identifier next. Otherwise,
227  /// it expects a '.' or ';'.
228  bool ModuleImportExpectsIdentifier;
229
230  /// \brief The source location of the currently-active
231  /// #pragma clang arc_cf_code_audited begin.
232  SourceLocation PragmaARCCFCodeAuditedLoc;
233
234  /// \brief True if we hit the code-completion point.
235  bool CodeCompletionReached;
236
237  /// \brief The number of bytes that we will initially skip when entering the
238  /// main file, which is used when loading a precompiled preamble, along
239  /// with a flag that indicates whether skipping this number of bytes will
240  /// place the lexer at the start of a line.
241  std::pair<unsigned, bool> SkipMainFilePreamble;
242
243  /// CurLexer - This is the current top of the stack that we're lexing from if
244  /// not expanding a macro and we are lexing directly from source code.
245  ///  Only one of CurLexer, CurPTHLexer, or CurTokenLexer will be non-null.
246  OwningPtr<Lexer> CurLexer;
247
248  /// CurPTHLexer - This is the current top of stack that we're lexing from if
249  ///  not expanding from a macro and we are lexing from a PTH cache.
250  ///  Only one of CurLexer, CurPTHLexer, or CurTokenLexer will be non-null.
251  OwningPtr<PTHLexer> CurPTHLexer;
252
253  /// CurPPLexer - This is the current top of the stack that we're lexing from
254  ///  if not expanding a macro.  This is an alias for either CurLexer or
255  ///  CurPTHLexer.
256  PreprocessorLexer *CurPPLexer;
257
258  /// CurDirLookup - The DirectoryLookup structure used to find the current
259  /// FileEntry, if CurLexer is non-null and if applicable.  This allows us to
260  /// implement \#include_next and find directory-specific properties.
261  const DirectoryLookup *CurDirLookup;
262
263  /// CurTokenLexer - This is the current macro we are expanding, if we are
264  /// expanding a macro.  One of CurLexer and CurTokenLexer must be null.
265  OwningPtr<TokenLexer> CurTokenLexer;
266
267  /// \brief The kind of lexer we're currently working with.
268  enum CurLexerKind {
269    CLK_Lexer,
270    CLK_PTHLexer,
271    CLK_TokenLexer,
272    CLK_CachingLexer,
273    CLK_LexAfterModuleImport
274  } CurLexerKind;
275
276  /// IncludeMacroStack - This keeps track of the stack of files currently
277  /// \#included, and macros currently being expanded from, not counting
278  /// CurLexer/CurTokenLexer.
279  struct IncludeStackInfo {
280    enum CurLexerKind     CurLexerKind;
281    Lexer                 *TheLexer;
282    PTHLexer              *ThePTHLexer;
283    PreprocessorLexer     *ThePPLexer;
284    TokenLexer            *TheTokenLexer;
285    const DirectoryLookup *TheDirLookup;
286
287    IncludeStackInfo(enum CurLexerKind K, Lexer *L, PTHLexer* P,
288                     PreprocessorLexer* PPL,
289                     TokenLexer* TL, const DirectoryLookup *D)
290      : CurLexerKind(K), TheLexer(L), ThePTHLexer(P), ThePPLexer(PPL),
291        TheTokenLexer(TL), TheDirLookup(D) {}
292  };
293  std::vector<IncludeStackInfo> IncludeMacroStack;
294
295  /// Callbacks - These are actions invoked when some preprocessor activity is
296  /// encountered (e.g. a file is \#included, etc).
297  PPCallbacks *Callbacks;
298
299  /// \brief Listener whose actions are invoked when an entity in the
300  /// preprocessor (e.g., a macro) that was loaded from an AST file is
301  /// later mutated.
302  PPMutationListener *Listener;
303
304  struct MacroExpandsInfo {
305    Token Tok;
306    MacroInfo *MI;
307    SourceRange Range;
308    MacroExpandsInfo(Token Tok, MacroInfo *MI, SourceRange Range)
309      : Tok(Tok), MI(MI), Range(Range) { }
310  };
311  SmallVector<MacroExpandsInfo, 2> DelayedMacroExpandsCallbacks;
312
313  /// Macros - For each IdentifierInfo that was associated with a macro, we
314  /// keep a mapping to the history of all macro definitions and #undefs in
315  /// reverse order (the latest one is at the head of the list).
316  llvm::DenseMap<const IdentifierInfo*, MacroInfo*> Macros;
317  friend class ASTReader;
318
319  /// \brief Macros that we want to warn about because they are unused at the
320  /// end of the translation unit; we store just their SourceLocations instead
321  /// of something like MacroInfo*. The benefit of this is that when we are
322  /// deserializing from PCH, we don't need to deserialize identifiers & macros
323  /// just so that we can report that they are unused; we just warn using
324  /// the SourceLocations in this set (which will be filled by the ASTReader).
325  /// We are using a SmallPtrSet instead of a vector for faster removal.
326  typedef llvm::SmallPtrSet<SourceLocation, 32> WarnUnusedMacroLocsTy;
327  WarnUnusedMacroLocsTy WarnUnusedMacroLocs;
328
329  /// MacroArgCache - This is a "freelist" of MacroArg objects that can be
330  /// reused for quick allocation.
331  MacroArgs *MacroArgCache;
332  friend class MacroArgs;
333
334  /// PragmaPushMacroInfo - For each IdentifierInfo used in a #pragma
335  /// push_macro directive, we keep a MacroInfo stack used to restore
336  /// the previous macro value.
337  llvm::DenseMap<IdentifierInfo*, std::vector<MacroInfo*> > PragmaPushMacroInfo;
338
339  // Various statistics we track for performance analysis.
340  unsigned NumDirectives, NumIncluded, NumDefined, NumUndefined, NumPragma;
341  unsigned NumIf, NumElse, NumEndif;
342  unsigned NumEnteredSourceFiles, MaxIncludeStackDepth;
343  unsigned NumMacroExpanded, NumFnMacroExpanded, NumBuiltinMacroExpanded;
344  unsigned NumFastMacroExpanded, NumTokenPaste, NumFastTokenPaste;
345  unsigned NumSkipped;
346
347  /// Predefines - This string contains the predefined macros that the
348  /// preprocessor should use from the command line, etc.
349  std::string Predefines;
350
351  /// \brief The file ID for the preprocessor predefines.
352  FileID PredefinesFileID;
353
354  /// TokenLexerCache - Cache macro expanders to reduce malloc traffic.
355  enum { TokenLexerCacheSize = 8 };
356  unsigned NumCachedTokenLexers;
357  TokenLexer *TokenLexerCache[TokenLexerCacheSize];
358
359  /// \brief Keeps macro expanded tokens for TokenLexers.
360  ///
361  /// Works like a stack; a TokenLexer adds the macro expanded tokens that it is
362  /// going to lex into the cache, and when it finishes, the tokens are removed
363  /// from the end of the cache.
364  SmallVector<Token, 16> MacroExpandedTokens;
365  std::vector<std::pair<TokenLexer *, size_t> > MacroExpandingLexersStack;
366
367  /// \brief A record of the macro definitions and expansions that
368  /// occurred during preprocessing.
369  ///
370  /// This is an optional side structure that can be enabled with
371  /// \c createPreprocessingRecord() prior to preprocessing.
372  PreprocessingRecord *Record;
373
374private:  // Cached tokens state.
375  typedef SmallVector<Token, 1> CachedTokensTy;
376
377  /// CachedTokens - Cached tokens are stored here when we do backtracking or
378  /// lookahead. They are "lexed" by the CachingLex() method.
379  CachedTokensTy CachedTokens;
380
381  /// CachedLexPos - The position of the cached token that CachingLex() should
382  /// "lex" next. If it points beyond the CachedTokens vector, it means that
383  /// a normal Lex() should be invoked.
384  CachedTokensTy::size_type CachedLexPos;
385
386  /// BacktrackPositions - Stack of backtrack positions, allowing nested
387  /// backtracks. The EnableBacktrackAtThisPos() method pushes a position to
388  /// indicate where CachedLexPos should be set when the BackTrack() method is
389  /// invoked (at which point the last position is popped).
390  std::vector<CachedTokensTy::size_type> BacktrackPositions;
391
392  struct MacroInfoChain {
393    MacroInfo MI;
394    MacroInfoChain *Next;
395    MacroInfoChain *Prev;
396  };
397
398  /// MacroInfos are managed as a chain for easy disposal.  This is the head
399  /// of that list.
400  MacroInfoChain *MIChainHead;
401
402  /// MICache - A "freelist" of MacroInfo objects that can be reused for quick
403  /// allocation.
404  MacroInfoChain *MICache;
405
406public:
407  Preprocessor(IntrusiveRefCntPtr<PreprocessorOptions> PPOpts,
408               DiagnosticsEngine &diags, LangOptions &opts,
409               const TargetInfo *target,
410               SourceManager &SM, HeaderSearch &Headers,
411               ModuleLoader &TheModuleLoader,
412               IdentifierInfoLookup *IILookup = 0,
413               bool OwnsHeaderSearch = false,
414               bool DelayInitialization = false,
415               bool IncrProcessing = false);
416
417  ~Preprocessor();
418
419  /// \brief Initialize the preprocessor, if the constructor did not already
420  /// perform the initialization.
421  ///
422  /// \param Target Information about the target.
423  void Initialize(const TargetInfo &Target);
424
425  /// \brief Retrieve the preprocessor options used to initialize this
426  /// preprocessor.
427  PreprocessorOptions &getPreprocessorOpts() const { return *PPOpts; }
428
429  DiagnosticsEngine &getDiagnostics() const { return *Diags; }
430  void setDiagnostics(DiagnosticsEngine &D) { Diags = &D; }
431
432  const LangOptions &getLangOpts() const { return LangOpts; }
433  const TargetInfo &getTargetInfo() const { return *Target; }
434  FileManager &getFileManager() const { return FileMgr; }
435  SourceManager &getSourceManager() const { return SourceMgr; }
436  HeaderSearch &getHeaderSearchInfo() const { return HeaderInfo; }
437
438  IdentifierTable &getIdentifierTable() { return Identifiers; }
439  SelectorTable &getSelectorTable() { return Selectors; }
440  Builtin::Context &getBuiltinInfo() { return BuiltinInfo; }
441  llvm::BumpPtrAllocator &getPreprocessorAllocator() { return BP; }
442
443  void setPTHManager(PTHManager* pm);
444
445  PTHManager *getPTHManager() { return PTH.get(); }
446
447  void setExternalSource(ExternalPreprocessorSource *Source) {
448    ExternalSource = Source;
449  }
450
451  ExternalPreprocessorSource *getExternalSource() const {
452    return ExternalSource;
453  }
454
455  /// \brief Retrieve the module loader associated with this preprocessor.
456  ModuleLoader &getModuleLoader() const { return TheModuleLoader; }
457
458  /// \brief True if we are currently preprocessing a #if or #elif directive
459  bool isParsingIfOrElifDirective() const {
460    return ParsingIfOrElifDirective;
461  }
462
463  /// SetCommentRetentionState - Control whether or not the preprocessor retains
464  /// comments in output.
465  void SetCommentRetentionState(bool KeepComments, bool KeepMacroComments) {
466    this->KeepComments = KeepComments | KeepMacroComments;
467    this->KeepMacroComments = KeepMacroComments;
468  }
469
470  bool getCommentRetentionState() const { return KeepComments; }
471
472  void setPragmasEnabled(bool Enabled) { PragmasEnabled = Enabled; }
473  bool getPragmasEnabled() const { return PragmasEnabled; }
474
475  void SetSuppressIncludeNotFoundError(bool Suppress) {
476    SuppressIncludeNotFoundError = Suppress;
477  }
478
479  bool GetSuppressIncludeNotFoundError() {
480    return SuppressIncludeNotFoundError;
481  }
482
483  /// Sets whether the preprocessor is responsible for producing output or if
484  /// it is producing tokens to be consumed by Parse and Sema.
485  void setPreprocessedOutput(bool IsPreprocessedOutput) {
486    PreprocessedOutput = IsPreprocessedOutput;
487  }
488
489  /// Returns true if the preprocessor is responsible for generating output,
490  /// false if it is producing tokens to be consumed by Parse and Sema.
491  bool isPreprocessedOutput() const { return PreprocessedOutput; }
492
493  /// isCurrentLexer - Return true if we are lexing directly from the specified
494  /// lexer.
495  bool isCurrentLexer(const PreprocessorLexer *L) const {
496    return CurPPLexer == L;
497  }
498
499  /// getCurrentLexer - Return the current lexer being lexed from.  Note
500  /// that this ignores any potentially active macro expansions and _Pragma
501  /// expansions going on at the time.
502  PreprocessorLexer *getCurrentLexer() const { return CurPPLexer; }
503
504  /// getCurrentFileLexer - Return the current file lexer being lexed from.
505  /// Note that this ignores any potentially active macro expansions and _Pragma
506  /// expansions going on at the time.
507  PreprocessorLexer *getCurrentFileLexer() const;
508
509  /// \brief Returns the file ID for the preprocessor predefines.
510  FileID getPredefinesFileID() const { return PredefinesFileID; }
511
512  /// getPPCallbacks/addPPCallbacks - Accessors for preprocessor callbacks.
513  /// Note that this class takes ownership of any PPCallbacks object given to
514  /// it.
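  ///
  /// A sketch of registering a callback object (MyPPCallbacks is a
  /// hypothetical PPCallbacks subclass; the Preprocessor takes ownership):
  /// \code
  ///   PP.addPPCallbacks(new MyPPCallbacks());
  /// \endcode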
515  PPCallbacks *getPPCallbacks() const { return Callbacks; }
516  void addPPCallbacks(PPCallbacks *C) {
517    if (Callbacks)
518      C = new PPChainedCallbacks(C, Callbacks);
519    Callbacks = C;
520  }
521
522  /// \brief Attach a preprocessor mutation listener to the preprocessor.
523  ///
524  /// The preprocessor mutation listener provides the ability to track
525  /// modifications to the preprocessor entities committed after they were
526  /// initially created.
527  void setPPMutationListener(PPMutationListener *Listener) {
528    this->Listener = Listener;
529  }
530
531  /// \brief Retrieve a pointer to the preprocessor mutation listener
532  /// associated with this preprocessor, if any.
533  PPMutationListener *getPPMutationListener() const { return Listener; }
534
535  /// \brief Given an identifier, return the MacroInfo it is \#defined to
536  /// or null if it isn't \#define'd.
537  MacroInfo *getMacroInfo(IdentifierInfo *II) const {
538    if (!II->hasMacroDefinition())
539      return 0;
540
541    MacroInfo *MI = getMacroInfoHistory(II);
542    assert(MI->getUndefLoc().isInvalid() && "Macro is undefined!");
543    return MI;
544  }
545
546  /// \brief Given an identifier, return the (probably #undef'd) MacroInfo
547  /// representing the most recent macro definition. One can iterate over all
548  /// previous macro definitions from it. This method should only be called for
549  /// identifiers that hadMacroDefinition().
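  ///
  /// For example, a sketch of walking the definition history, assuming \c II
  /// satisfies hadMacroDefinition() and that MacroInfo exposes the previous
  /// definition via getPreviousDefinition():
  /// \code
  ///   for (MacroInfo *MI = PP.getMacroInfoHistory(II); MI;
  ///        MI = MI->getPreviousDefinition())
  ///     noteDefinition(MI->getDefinitionLoc());  // hypothetical helper
  /// \endcode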
550  MacroInfo *getMacroInfoHistory(const IdentifierInfo *II) const;
551
552  /// \brief Specify a macro for this identifier.
553  void setMacroInfo(IdentifierInfo *II, MacroInfo *MI);
554  /// \brief Add a MacroInfo that was loaded from an AST file.
555  void addLoadedMacroInfo(IdentifierInfo *II, MacroInfo *MI,
556                          MacroInfo *Hint = 0);
557  /// \brief Make the given MacroInfo, that was loaded from an AST file and
558  /// previously hidden, visible.
559  void makeLoadedMacroInfoVisible(IdentifierInfo *II, MacroInfo *MI);
560  /// \brief Undefine a macro for this identifier.
561  void clearMacroInfo(IdentifierInfo *II);
562
563  /// macro_iterator/macro_begin/macro_end - This allows you to walk the macro
564  /// history table. Currently defined macros have
565  /// IdentifierInfo::hasMacroDefinition() set and an empty
566  /// MacroInfo::getUndefLoc() at the head of the list.
567  typedef llvm::DenseMap<const IdentifierInfo *,
568                         MacroInfo*>::const_iterator macro_iterator;
569  macro_iterator macro_begin(bool IncludeExternalMacros = true) const;
570  macro_iterator macro_end(bool IncludeExternalMacros = true) const;
571
572  /// \brief Return the name of the macro defined before \p Loc that has
573  /// spelling \p Tokens.  If there are multiple macros with the same spelling,
574  /// return the last one defined.
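  ///
  /// For instance, a sketch of finding a macro whose body is spelled
  /// "__builtin_trap()" (\c Loc is an assumed SourceLocation):
  /// \code
  ///   TokenValue Spelling[] = { PP.getIdentifierInfo("__builtin_trap"),
  ///                             tok::l_paren, tok::r_paren };
  ///   StringRef Name = PP.getLastMacroWithSpelling(Loc, Spelling);
  /// \endcode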
575  StringRef getLastMacroWithSpelling(SourceLocation Loc,
576                                     ArrayRef<TokenValue> Tokens) const;
577
578  const std::string &getPredefines() const { return Predefines; }
579  /// setPredefines - Set the predefines for this Preprocessor.  These
580  /// predefines are automatically injected when parsing the main file.
581  void setPredefines(const char *P) { Predefines = P; }
582  void setPredefines(const std::string &P) { Predefines = P; }
583
584  /// Return information about the specified preprocessor
585  /// identifier token.
586  IdentifierInfo *getIdentifierInfo(StringRef Name) const {
587    return &Identifiers.get(Name);
588  }
589
590  /// AddPragmaHandler - Add the specified pragma handler to the preprocessor.
591  /// If 'Namespace' is non-empty, then it is a token required to exist on the
592  /// pragma line before the pragma string starts, e.g. "STDC" or "GCC".
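  ///
  /// For example, a sketch of registering a handler that fires on
  /// '#pragma clang my_pragma ...' (MyPragmaHandler is a hypothetical
  /// PragmaHandler subclass):
  /// \code
  ///   PP.AddPragmaHandler("clang", new MyPragmaHandler());
  /// \endcode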
593  void AddPragmaHandler(StringRef Namespace, PragmaHandler *Handler);
594  void AddPragmaHandler(PragmaHandler *Handler) {
595    AddPragmaHandler(StringRef(), Handler);
596  }
597
598  /// RemovePragmaHandler - Remove the specified pragma handler from
599  /// the preprocessor. If \p Namespace is non-empty, then it should
600  /// be the namespace that \p Handler was added to. It is an error
601  /// to remove a handler that has not been registered.
602  void RemovePragmaHandler(StringRef Namespace, PragmaHandler *Handler);
603  void RemovePragmaHandler(PragmaHandler *Handler) {
604    RemovePragmaHandler(StringRef(), Handler);
605  }
606
607  /// \brief Add the specified comment handler to the preprocessor.
608  void addCommentHandler(CommentHandler *Handler);
609
610  /// \brief Remove the specified comment handler.
611  ///
612  /// It is an error to remove a handler that has not been registered.
613  void removeCommentHandler(CommentHandler *Handler);
614
615  /// \brief Set the code completion handler to the given object.
616  void setCodeCompletionHandler(CodeCompletionHandler &Handler) {
617    CodeComplete = &Handler;
618  }
619
620  /// \brief Retrieve the current code-completion handler.
621  CodeCompletionHandler *getCodeCompletionHandler() const {
622    return CodeComplete;
623  }
624
625  /// \brief Clear out the code completion handler.
626  void clearCodeCompletionHandler() {
627    CodeComplete = 0;
628  }
629
630  /// \brief Hook used by the lexer to invoke the "natural language" code
631  /// completion point.
632  void CodeCompleteNaturalLanguage();
633
634  /// \brief Retrieve the preprocessing record, or NULL if there is no
635  /// preprocessing record.
636  PreprocessingRecord *getPreprocessingRecord() const { return Record; }
637
638  /// \brief Create a new preprocessing record, which will keep track of
639  /// all macro expansions, macro definitions, etc.
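  ///
  /// A sketch of the intended usage: enable the record before preprocessing,
  /// then inspect it afterwards (visitRecord is a hypothetical consumer):
  /// \code
  ///   PP.createPreprocessingRecord();
  ///   // ... run the preprocessor / parser ...
  ///   if (PreprocessingRecord *PPRec = PP.getPreprocessingRecord())
  ///     visitRecord(*PPRec);
  /// \endcode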
640  void createPreprocessingRecord();
641
642  /// EnterMainSourceFile - Enter the specified FileID as the main source file,
643  /// which implicitly adds the builtin defines etc.
644  void EnterMainSourceFile();
645
646  /// EndSourceFile - Inform the preprocessor callbacks that processing is
647  /// complete.
648  void EndSourceFile();
649
650  /// EnterSourceFile - Add a source file to the top of the include stack and
651  /// start lexing tokens from it instead of the current buffer.  If an error
652  /// occurs, emit a diagnostic and don't enter the file.
653  void EnterSourceFile(FileID CurFileID, const DirectoryLookup *Dir,
654                       SourceLocation Loc);
655
656  /// EnterMacro - Add a Macro to the top of the include stack and start lexing
657  /// tokens from it instead of the current buffer.  Args specifies the
658  /// tokens input to a function-like macro.
659  ///
660  /// ILEnd specifies the location of the ')' for a function-like macro or the
661  /// identifier for an object-like macro.
662  void EnterMacro(Token &Identifier, SourceLocation ILEnd, MacroInfo *Macro,
663                  MacroArgs *Args);
664
665  /// EnterTokenStream - Add a "macro" context to the top of the include stack,
666  /// which will cause the lexer to start returning the specified tokens.
667  ///
668  /// If DisableMacroExpansion is true, tokens lexed from the token stream will
669  /// not be subject to further macro expansion.  Otherwise, these tokens will
670  /// be re-macro-expanded when/if expansion is enabled.
671  ///
672  /// If OwnsTokens is false, this method assumes that the specified stream of
673  /// tokens has a permanent owner somewhere, so they do not need to be copied.
674  /// If it is true, it assumes the array of tokens is allocated with new[] and
675  /// must be freed.
676  ///
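  /// A sketch of re-lexing a saved token sequence, where SavedToks is an
  /// assumed SmallVector<Token, N> that the caller keeps ownership of (so
  /// OwnsTokens is false):
  /// \code
  ///   PP.EnterTokenStream(SavedToks.data(), SavedToks.size(),
  ///                       /*DisableMacroExpansion=*/true,
  ///                       /*OwnsTokens=*/false);
  /// \endcode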
677  void EnterTokenStream(const Token *Toks, unsigned NumToks,
678                        bool DisableMacroExpansion, bool OwnsTokens);
679
680  /// RemoveTopOfLexerStack - Pop the current lexer/macro exp off the top of the
681  /// lexer stack.  This should only be used in situations where the current
682  /// state of the top-of-stack lexer is known.
683  void RemoveTopOfLexerStack();
684
685  /// EnableBacktrackAtThisPos - From the point that this method is called, and
686  /// until CommitBacktrackedTokens() or Backtrack() is called, the Preprocessor
687  /// keeps track of the lexed tokens so that a subsequent Backtrack() call will
688  /// make the Preprocessor re-lex the same tokens.
689  ///
690  /// Nested backtracks are allowed, meaning that EnableBacktrackAtThisPos can
691  /// be called multiple times and CommitBacktrackedTokens/Backtrack calls will
692  /// be combined with the EnableBacktrackAtThisPos calls in reverse order.
693  ///
694  /// NOTE: *DO NOT* forget to call either CommitBacktrackedTokens or Backtrack
695  /// at some point after EnableBacktrackAtThisPos. If you don't, caching of
696  /// tokens will continue indefinitely.
697  ///
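  /// A typical (hypothetical) tentative-lexing pattern, where isWhatWeWanted
  /// is an assumed predicate:
  /// \code
  ///   PP.EnableBacktrackAtThisPos();
  ///   PP.Lex(Tok);
  ///   if (isWhatWeWanted(Tok))
  ///     PP.CommitBacktrackedTokens();
  ///   else
  ///     PP.Backtrack();             // the same tokens will be re-lexed
  /// \endcode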
698  void EnableBacktrackAtThisPos();
699
700  /// CommitBacktrackedTokens - Disable the last EnableBacktrackAtThisPos call.
701  void CommitBacktrackedTokens();
702
703  /// Backtrack - Make Preprocessor re-lex the tokens that were lexed since
704  /// EnableBacktrackAtThisPos() was previously called.
705  void Backtrack();
706
707  /// isBacktrackEnabled - True if EnableBacktrackAtThisPos() was called and
708  /// caching of tokens is on.
709  bool isBacktrackEnabled() const { return !BacktrackPositions.empty(); }
710
711  /// Lex - To lex a token from the preprocessor, just pull a token from the
712  /// current lexer or macro object.
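  ///
  /// A minimal (hypothetical) loop driving the preprocessor over the main
  /// file, where process() is an assumed consumer:
  /// \code
  ///   PP.EnterMainSourceFile();
  ///   Token Tok;
  ///   do {
  ///     PP.Lex(Tok);
  ///     process(Tok);
  ///   } while (Tok.isNot(tok::eof));
  ///   PP.EndSourceFile();
  /// \endcode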
713  void Lex(Token &Result) {
714    switch (CurLexerKind) {
715    case CLK_Lexer: CurLexer->Lex(Result); break;
716    case CLK_PTHLexer: CurPTHLexer->Lex(Result); break;
717    case CLK_TokenLexer: CurTokenLexer->Lex(Result); break;
718    case CLK_CachingLexer: CachingLex(Result); break;
719    case CLK_LexAfterModuleImport: LexAfterModuleImport(Result); break;
720    }
721  }
722
723  void LexAfterModuleImport(Token &Result);
724
725  /// \brief Lex a string literal, which may be the concatenation of multiple
726  /// string literals and may even come from macro expansion.
727  /// \returns true on success, false if an error diagnostic has been generated.
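  ///
  /// For example, a sketch of reading a string operand (the diagnostic tag is
  /// only the name used in error messages; handleMessage is hypothetical):
  /// \code
  ///   std::string Message;
  ///   if (PP.LexStringLiteral(Tok, Message, "pragma message", true))
  ///     handleMessage(Message);
  /// \endcode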
728  bool LexStringLiteral(Token &Result, std::string &String,
729                        const char *DiagnosticTag, bool AllowMacroExpansion) {
730    if (AllowMacroExpansion)
731      Lex(Result);
732    else
733      LexUnexpandedToken(Result);
734    return FinishLexStringLiteral(Result, String, DiagnosticTag,
735                                  AllowMacroExpansion);
736  }
737
738  /// \brief Complete the lexing of a string literal where the first token has
739  /// already been lexed (see LexStringLiteral).
740  bool FinishLexStringLiteral(Token &Result, std::string &String,
741                              const char *DiagnosticTag,
742                              bool AllowMacroExpansion);
743
744  /// LexNonComment - Lex a token.  If it's a comment, keep lexing until we get
745  /// something not a comment.  This is useful in -E -C mode where comments
746  /// would foul up preprocessor directive handling.
747  void LexNonComment(Token &Result) {
748    do
749      Lex(Result);
750    while (Result.getKind() == tok::comment);
751  }
752
753  /// LexUnexpandedToken - This is just like Lex, but this disables macro
754  /// expansion of identifier tokens.
755  void LexUnexpandedToken(Token &Result) {
756    // Disable macro expansion.
757    bool OldVal = DisableMacroExpansion;
758    DisableMacroExpansion = true;
759    // Lex the token.
760    Lex(Result);
761
762    // Reenable it.
763    DisableMacroExpansion = OldVal;
764  }
765
766  /// LexUnexpandedNonComment - Like LexNonComment, but this disables macro
767  /// expansion of identifier tokens.
768  void LexUnexpandedNonComment(Token &Result) {
769    do
770      LexUnexpandedToken(Result);
771    while (Result.getKind() == tok::comment);
772  }
773
774  /// Disables macro expansion everywhere except for preprocessor directives.
775  void SetMacroExpansionOnlyInDirectives() {
776    DisableMacroExpansion = true;
777    MacroExpansionInDirectivesOverride = true;
778  }
779
780  /// LookAhead - This peeks ahead N tokens and returns that token without
781  /// consuming any tokens.  LookAhead(0) returns the next token that would be
782  /// returned by Lex(), LookAhead(1) returns the token after it, etc.  This
783  /// returns normal tokens after phase 5.  As such, it is equivalent to using
784  /// 'Lex', not 'LexUnexpandedToken'.
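  ///
  /// For instance, a sketch of peeking two tokens ahead (handleEmptyParens is
  /// hypothetical):
  /// \code
  ///   if (PP.LookAhead(0).is(tok::l_paren) &&
  ///       PP.LookAhead(1).is(tok::r_paren))
  ///     handleEmptyParens();
  /// \endcode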
785  const Token &LookAhead(unsigned N) {
786    if (CachedLexPos + N < CachedTokens.size())
787      return CachedTokens[CachedLexPos+N];
788    else
789      return PeekAhead(N+1);
790  }
791
792  /// RevertCachedTokens - When backtracking is enabled and tokens are cached,
793  /// this allows reverting a specific number of tokens.
794  /// Note that the number of tokens being reverted should be up to the last
795  /// backtrack position, not more.
796  void RevertCachedTokens(unsigned N) {
797    assert(isBacktrackEnabled() &&
798           "Should only be called when tokens are cached for backtracking");
799    assert(signed(CachedLexPos) - signed(N) >= signed(BacktrackPositions.back())
800         && "Should revert tokens up to the last backtrack position, not more");
801    assert(signed(CachedLexPos) - signed(N) >= 0 &&
802           "Corrupted backtrack positions ?");
803    CachedLexPos -= N;
804  }
805
806  /// EnterToken - Enters a token in the token stream to be lexed next. If
807  /// BackTrack() is called afterwards, the token will remain at the insertion
808  /// point.
809  void EnterToken(const Token &Tok) {
810    EnterCachingLexMode();
811    CachedTokens.insert(CachedTokens.begin()+CachedLexPos, Tok);
812  }
813
814  /// AnnotateCachedTokens - We notify the Preprocessor that if it is caching
815  /// tokens (because backtracking is enabled) it should replace the most recent
816  /// cached tokens with the given annotation token. This function has no effect
817  /// if backtracking is not enabled.
818  ///
819  /// Note that the use of this function is just an optimization, so that the
820  /// cached tokens don't get re-parsed and re-resolved after a backtrack is
821  /// invoked.
822  void AnnotateCachedTokens(const Token &Tok) {
823    assert(Tok.isAnnotation() && "Expected annotation token");
824    if (CachedLexPos != 0 && isBacktrackEnabled())
825      AnnotatePreviousCachedTokens(Tok);
826  }
827
828  /// \brief Replace the last token with an annotation token.
829  ///
830  /// Like AnnotateCachedTokens(), this routine replaces an
831  /// already-parsed (and resolved) token with an annotation
832  /// token. However, this routine only replaces the last token with
833  /// the annotation token; it does not affect any other cached
834  /// tokens. This function has no effect if backtracking is not
835  /// enabled.
836  void ReplaceLastTokenWithAnnotation(const Token &Tok) {
837    assert(Tok.isAnnotation() && "Expected annotation token");
838    if (CachedLexPos != 0 && isBacktrackEnabled())
839      CachedTokens[CachedLexPos-1] = Tok;
840  }
841
842  /// TypoCorrectToken - Update the current token to represent the provided
843  /// identifier, in order to cache an action performed by typo correction.
844  void TypoCorrectToken(const Token &Tok) {
845    assert(Tok.getIdentifierInfo() && "Expected identifier token");
846    if (CachedLexPos != 0 && isBacktrackEnabled())
847      CachedTokens[CachedLexPos-1] = Tok;
848  }
849
850  /// \brief Recompute the current lexer kind based on the CurLexer/CurPTHLexer/
851  /// CurTokenLexer pointers.
852  void recomputeCurLexerKind();
853
854  /// \brief Returns true if incremental processing is enabled
855  bool isIncrementalProcessingEnabled() const { return IncrementalProcessing; }
856
857  /// \brief Enables or disables incremental processing.
858  void enableIncrementalProcessing(bool value = true) {
859    IncrementalProcessing = value;
860  }
861
862  /// \brief Specify the point at which code-completion will be performed.
863  ///
864  /// \param File the file in which code completion should occur. If
865  /// this file is included multiple times, code-completion will
866  /// perform completion the first time it is included. If NULL, this
867  /// function clears out the code-completion point.
868  ///
869  /// \param Line the line at which code completion should occur
870  /// (1-based).
871  ///
872  /// \param Column the column at which code completion should occur
873  /// (1-based).
874  ///
875  /// \returns true if an error occurred, false otherwise.
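  ///
  /// A sketch of enabling completion at line 10, column 4 of a file (the file
  /// name and reportSetupError are illustrative):
  /// \code
  ///   const FileEntry *File = PP.getFileManager().getFile("input.c");
  ///   if (File && PP.SetCodeCompletionPoint(File, 10, 4))
  ///     reportSetupError();
  /// \endcode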
876  bool SetCodeCompletionPoint(const FileEntry *File,
877                              unsigned Line, unsigned Column);
878
879  /// \brief Determine if we are performing code completion.
880  bool isCodeCompletionEnabled() const { return CodeCompletionFile != 0; }
881
882  /// \brief Returns the location of the code-completion point.
883  /// Returns an invalid location if code-completion is not enabled or the file
884  /// containing the code-completion point has not been lexed yet.
885  SourceLocation getCodeCompletionLoc() const { return CodeCompletionLoc; }
886
887  /// \brief Returns the start location of the file of code-completion point.
888  /// Returns an invalid location if code-completion is not enabled or the file
889  /// containing the code-completion point has not been lexed yet.
890  SourceLocation getCodeCompletionFileLoc() const {
891    return CodeCompletionFileLoc;
892  }
893
894  /// \brief Returns true if code-completion is enabled and we have hit the
895  /// code-completion point.
896  bool isCodeCompletionReached() const { return CodeCompletionReached; }
897
898  /// \brief Note that we hit the code-completion point.
899  void setCodeCompletionReached() {
900    assert(isCodeCompletionEnabled() && "Code-completion not enabled!");
901    CodeCompletionReached = true;
902    // Silence any diagnostics that occur after we hit the code-completion.
903    getDiagnostics().setSuppressAllDiagnostics(true);
904  }
905
906  /// \brief The location of the currently-active \#pragma clang
907  /// arc_cf_code_audited begin.  Returns an invalid location if there
908  /// is no such pragma active.
909  SourceLocation getPragmaARCCFCodeAuditedLoc() const {
910    return PragmaARCCFCodeAuditedLoc;
911  }
912
913  /// \brief Set the location of the currently-active \#pragma clang
914  /// arc_cf_code_audited begin.  An invalid location ends the pragma.
915  void setPragmaARCCFCodeAuditedLoc(SourceLocation Loc) {
916    PragmaARCCFCodeAuditedLoc = Loc;
917  }
918
919  /// \brief Instruct the preprocessor to skip part of the main source file.
920  ///
921  /// \param Bytes The number of bytes in the preamble to skip.
922  ///
923  /// \param StartOfLine Whether skipping these bytes puts the lexer at the
924  /// start of a line.
925  void setSkipMainFilePreamble(unsigned Bytes, bool StartOfLine) {
926    SkipMainFilePreamble.first = Bytes;
927    SkipMainFilePreamble.second = StartOfLine;
928  }
929
930  /// Diag - Forwarding function for diagnostics.  This emits a diagnostic at
931  /// the specified Token's location, translating the token's start
932  /// position in the current buffer into a SourcePosition object for rendering.
933  DiagnosticBuilder Diag(SourceLocation Loc, unsigned DiagID) const {
934    return Diags->Report(Loc, DiagID);
935  }
936
937  DiagnosticBuilder Diag(const Token &Tok, unsigned DiagID) const {
938    return Diags->Report(Tok.getLocation(), DiagID);
939  }
940
941  /// getSpelling() - Return the 'spelling' of the token at the given
942  /// location; does not go up to the spelling location or down to the
943  /// expansion location.
944  ///
945  /// \param buffer A buffer which will be used only if the token requires
946  ///   "cleaning", e.g. if it contains trigraphs or escaped newlines
947  /// \param invalid If non-null, will be set \c true if an error occurs.
948  StringRef getSpelling(SourceLocation loc,
949                              SmallVectorImpl<char> &buffer,
950                              bool *invalid = 0) const {
951    return Lexer::getSpelling(loc, buffer, SourceMgr, LangOpts, invalid);
952  }
953
954  /// getSpelling() - Return the 'spelling' of the Tok token.  The spelling of a
955  /// token is the characters used to represent the token in the source file
956  /// after trigraph expansion and escaped-newline folding.  In particular, this
957  /// wants to get the true, uncanonicalized, spelling of things like digraphs
958  /// UCNs, etc.
959  ///
960  /// \param Invalid If non-null, will be set \c true if an error occurs.
961  std::string getSpelling(const Token &Tok, bool *Invalid = 0) const {
962    return Lexer::getSpelling(Tok, SourceMgr, LangOpts, Invalid);
963  }
964
965  /// getSpelling - This method is used to get the spelling of a token into a
966  /// preallocated buffer, instead of as an std::string.  The caller is required
967  /// to allocate enough space for the token, which is guaranteed to be at least
968  /// Tok.getLength() bytes long.  The length of the actual result is returned.
969  ///
970  /// Note that this method may do two possible things: it may either fill in
971  /// the buffer specified with characters, or it may *change the input pointer*
972  /// to point to a constant buffer with the data already in it (avoiding a
973  /// copy).  The caller is not allowed to modify the returned buffer pointer
974  /// if an internal buffer is returned.
975  unsigned getSpelling(const Token &Tok, const char *&Buffer,
976                       bool *Invalid = 0) const {
977    return Lexer::getSpelling(Tok, Buffer, SourceMgr, LangOpts, Invalid);
978  }
979
980  /// getSpelling - This method is used to get the spelling of a token into a
981  /// SmallVector. Note that the returned StringRef may not point to the
982  /// supplied buffer if a copy can be avoided.
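  ///
  /// A sketch of the intended calling pattern:
  /// \code
  ///   SmallString<64> Buffer;
  ///   StringRef Spelling = PP.getSpelling(Tok, Buffer);
  ///   // Spelling may point into Buffer or into the original source buffer.
  /// \endcode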
983  StringRef getSpelling(const Token &Tok,
984                        SmallVectorImpl<char> &Buffer,
985                        bool *Invalid = 0) const;
986
987  /// \brief Relex the token at the specified location.
988  /// \returns true if there was a failure, false on success.
989  bool getRawToken(SourceLocation Loc, Token &Result) {
990    return Lexer::getRawToken(Loc, Result, SourceMgr, LangOpts);
991  }
992
993  /// getSpellingOfSingleCharacterNumericConstant - Tok is a numeric constant
994  /// with length 1, return the character.
995  char getSpellingOfSingleCharacterNumericConstant(const Token &Tok,
996                                                   bool *Invalid = 0) const {
997    assert(Tok.is(tok::numeric_constant) &&
998           Tok.getLength() == 1 && "Called on unsupported token");
999    assert(!Tok.needsCleaning() && "Token can't need cleaning with length 1");
1000
1001    // If the token is carrying a literal data pointer, just use it.
1002    if (const char *D = Tok.getLiteralData())
1003      return *D;
1004
1005    // Otherwise, fall back on getCharacterData, which is slower, but always
1006    // works.
1007    return *SourceMgr.getCharacterData(Tok.getLocation(), Invalid);
1008  }
1009
1010  /// \brief Retrieve the name of the immediate macro expansion.
1011  ///
1012  /// This routine starts from a source location, and finds the name of the macro
1013  /// responsible for its immediate expansion. It looks through any intervening
1014  /// macro argument expansions to compute this. It returns a StringRef which
1015  /// refers to the SourceManager-owned buffer of the source where that macro
1016  /// name is spelled. Thus, the result shouldn't outlive the SourceManager.
1017  StringRef getImmediateMacroName(SourceLocation Loc) {
1018    return Lexer::getImmediateMacroName(Loc, SourceMgr, getLangOpts());
1019  }
1020
1021  /// CreateString - Plop the specified string into a scratch buffer and set the
1022  /// specified token's location and length to it.  If specified, the source
1023  /// location provides a location of the expansion point of the token.
1024  void CreateString(StringRef Str, Token &Tok,
1025                    SourceLocation ExpansionLocStart = SourceLocation(),
1026                    SourceLocation ExpansionLocEnd = SourceLocation());
1027
1028  /// \brief Computes the source location just past the end of the
1029  /// token at this source location.
1030  ///
1031  /// This routine can be used to produce a source location that
1032  /// points just past the end of the token referenced by \p Loc, and
1033  /// is generally used when a diagnostic needs to point just after a
1034  /// token where it expected something different from what it received. If
1035  /// the returned source location would not be meaningful (e.g., if
1036  /// it points into a macro), this routine returns an invalid
1037  /// source location.
1038  ///
1039  /// \param Offset an offset from the end of the token, where the source
1040  /// location should refer to. The default offset (0) produces a source
1041  /// location pointing just past the end of the token; an offset of 1 produces
1042  /// a source location pointing to the last character in the token, etc.
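  ///
  /// A sketch of pointing a diagnostic just past a token (DiagID is an
  /// assumed diagnostic ID):
  /// \code
  ///   SourceLocation After = PP.getLocForEndOfToken(Tok.getLocation());
  ///   if (After.isValid())
  ///     PP.Diag(After, DiagID);
  /// \endcode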
1043  SourceLocation getLocForEndOfToken(SourceLocation Loc, unsigned Offset = 0) {
1044    return Lexer::getLocForEndOfToken(Loc, Offset, SourceMgr, LangOpts);
1045  }
1046
1047  /// \brief Returns true if the given MacroID location points at the first
1048  /// token of the macro expansion.
1049  ///
1050  /// \param MacroBegin If non-null and function returns true, it is set to
1051  /// begin location of the macro.
1052  bool isAtStartOfMacroExpansion(SourceLocation loc,
1053                                 SourceLocation *MacroBegin = 0) const {
1054    return Lexer::isAtStartOfMacroExpansion(loc, SourceMgr, LangOpts,
1055                                            MacroBegin);
1056  }
1057
1058  /// \brief Returns true if the given MacroID location points at the last
1059  /// token of the macro expansion.
1060  ///
1061  /// \param MacroEnd If non-null and function returns true, it is set to
1062  /// end location of the macro.
1063  bool isAtEndOfMacroExpansion(SourceLocation loc,
1064                               SourceLocation *MacroEnd = 0) const {
1065    return Lexer::isAtEndOfMacroExpansion(loc, SourceMgr, LangOpts, MacroEnd);
1066  }
1067
1068  /// DumpToken - Print the token to stderr, used for debugging.
1069  ///
1070  void DumpToken(const Token &Tok, bool DumpFlags = false) const;
1071  void DumpLocation(SourceLocation Loc) const;
1072  void DumpMacro(const MacroInfo &MI) const;
1073
1074  /// AdvanceToTokenCharacter - Given a location that specifies the start of a
1075  /// token, return a new location that specifies a character within the token.
1076  SourceLocation AdvanceToTokenCharacter(SourceLocation TokStart,
1077                                         unsigned Char) const {
1078    return Lexer::AdvanceToTokenCharacter(TokStart, Char, SourceMgr, LangOpts);
1079  }
1080
1081  /// IncrementPasteCounter - Increment the counters for the number of token
1082  /// paste operations performed.  If fast was specified, this is a 'fast paste'
1083  /// case we handled.
1084  ///
1085  void IncrementPasteCounter(bool isFast) {
1086    if (isFast)
1087      ++NumFastTokenPaste;
1088    else
1089      ++NumTokenPaste;
1090  }
1091
1092  void PrintStats();
1093
1094  size_t getTotalMemory() const;
1095
1096  /// HandleMicrosoftCommentPaste - When the macro expander pastes together a
1097  /// comment (/##/) in Microsoft mode, this method handles updating the current
1098  /// state, returning the token on the next source line.
1099  void HandleMicrosoftCommentPaste(Token &Tok);
1100
1101  //===--------------------------------------------------------------------===//
1102  // Preprocessor callback methods.  These are invoked by a lexer as various
1103  // directives and events are found.
1104
1105  /// LookUpIdentifierInfo - Given a tok::raw_identifier token, look up the
1106  /// identifier information for the token and install it into the token,
1107  /// updating the token kind accordingly.
1108  IdentifierInfo *LookUpIdentifierInfo(Token &Identifier) const;
1109
1110private:
1111  llvm::DenseMap<IdentifierInfo*,unsigned> PoisonReasons;
1112
1113public:
1114
1115  // SetPoisonReason - Call this function to indicate the reason for
1116  // poisoning an identifier. If that identifier is accessed while
1117  // poisoned, then this reason will be used instead of the default
1118  // "poisoned" diagnostic.
1119  void SetPoisonReason(IdentifierInfo *II, unsigned DiagID);
1120
1121  // HandlePoisonedIdentifier - Display reason for poisoned
1122  // identifier.
1123  void HandlePoisonedIdentifier(Token & Tok);
1124
1125  void MaybeHandlePoisonedIdentifier(Token & Identifier) {
1126    if(IdentifierInfo * II = Identifier.getIdentifierInfo()) {
1127      if(II->isPoisoned()) {
1128        HandlePoisonedIdentifier(Identifier);
1129      }
1130    }
1131  }
1132
1133private:
1134  /// Identifiers used for SEH handling in Borland. These are only
1135  /// allowed in particular circumstances.
1136  // __except block
1137  IdentifierInfo *Ident__exception_code,
1138                 *Ident___exception_code,
1139                 *Ident_GetExceptionCode;
1140  // __except filter expression
1141  IdentifierInfo *Ident__exception_info,
1142                 *Ident___exception_info,
1143                 *Ident_GetExceptionInfo;
1144  // __finally
1145  IdentifierInfo *Ident__abnormal_termination,
1146                 *Ident___abnormal_termination,
1147                 *Ident_AbnormalTermination;
1148public:
1149  void PoisonSEHIdentifiers(bool Poison = true); // Borland
1150
1151  /// HandleIdentifier - This callback is invoked when the lexer reads an
1152  /// identifier and has filled in the token's IdentifierInfo member.  This
1153  /// callback potentially macro expands it or turns it into a named token (like
1154  /// 'for').
1155  void HandleIdentifier(Token &Identifier);
1156
1157
1158  /// HandleEndOfFile - This callback is invoked when the lexer hits the end of
1159  /// the current file.  This either returns the EOF token and returns true, or
1160  /// pops a level off the include stack and returns false, at which point the
1161  /// client should call lex again.
1162  bool HandleEndOfFile(Token &Result, bool isEndOfMacro = false);
1163
1164  /// HandleEndOfTokenLexer - This callback is invoked when the current
1165  /// TokenLexer hits the end of its token stream.
1166  bool HandleEndOfTokenLexer(Token &Result);
1167
1168  /// HandleDirective - This callback is invoked when the lexer sees a # token
1169  /// at the start of a line.  This consumes the directive, modifies the
1170  /// lexer/preprocessor state, and advances the lexer(s) so that the next token
1171  /// read is the correct one.
1172  void HandleDirective(Token &Result);
1173
1174  /// CheckEndOfDirective - Ensure that the next token is a tok::eod token.  If
1175  /// not, emit a diagnostic and consume up until the eod.  If EnableMacros is
1176  /// true, then we consider macros that expand to zero tokens as being ok.
1177  void CheckEndOfDirective(const char *Directive, bool EnableMacros = false);
1178
1179  /// DiscardUntilEndOfDirective - Read and discard all tokens remaining on the
1180  /// current line until the tok::eod token is found.
1181  void DiscardUntilEndOfDirective();
1182
1183  /// SawDateOrTime - This returns true if the preprocessor has seen a use of
1184  /// __DATE__ or __TIME__ in the file so far.
1185  bool SawDateOrTime() const {
1186    return DATELoc != SourceLocation() || TIMELoc != SourceLocation();
1187  }
1188  unsigned getCounterValue() const { return CounterValue; }
1189  void setCounterValue(unsigned V) { CounterValue = V; }
1190
1191  /// \brief Retrieves the module that we're currently building, if any.
1192  Module *getCurrentModule();
1193
1194  /// \brief Allocate a new MacroInfo object with the provided SourceLocation.
1195  MacroInfo *AllocateMacroInfo(SourceLocation L);
1196
1197  /// \brief Allocate a new MacroInfo object which is clone of \p MI.
1198  MacroInfo *CloneMacroInfo(const MacroInfo &MI);
1199
1200  /// \brief Turn the specified lexer token into a fully checked and spelled
1201  /// filename, e.g. as an operand of \#include.
1202  ///
1203  /// The caller is expected to provide a buffer that is large enough to hold
1204  /// the spelling of the filename, but is also expected to handle the case
1205  /// when this method decides to use a different buffer.
1206  ///
1207  /// \returns true if the input filename was in <>'s or false if it was
1208  /// in ""'s.
1209  bool GetIncludeFilenameSpelling(SourceLocation Loc, StringRef &Filename);
1210
1211  /// \brief Given a "foo" or \<foo> reference, look up the indicated file.
1212  ///
1213  /// Returns null on failure.  \p isAngled indicates whether the file
1214  /// reference is for system \#include's or not (i.e. using <> instead of "").
1215  const FileEntry *LookupFile(StringRef Filename,
1216                              bool isAngled, const DirectoryLookup *FromDir,
1217                              const DirectoryLookup *&CurDir,
1218                              SmallVectorImpl<char> *SearchPath,
1219                              SmallVectorImpl<char> *RelativePath,
1220                              Module **SuggestedModule,
1221                              bool SkipCache = false);
1222
1223  /// GetCurDirLookup - The DirectoryLookup structure used to find the current
1224  /// FileEntry, if CurLexer is non-null and if applicable.  This allows us to
1225  /// implement \#include_next and find directory-specific properties.
1226  const DirectoryLookup *GetCurDirLookup() { return CurDirLookup; }
1227
1228  /// \brief Return true if we're in the top-level file, not in a \#include.
1229  bool isInPrimaryFile() const;
1230
1231  /// ConcatenateIncludeName - Handle cases where the \#include name is expanded
1232  /// from a macro as multiple tokens, which need to be glued together.  This
1233  /// occurs for code like:
1234  /// \code
1235  ///    \#define FOO <x/y.h>
1236  ///    \#include FOO
1237  /// \endcode
1238  /// because in this case, "<x/y.h>" is returned as 7 tokens, not one.
1239  ///
1240  /// This code concatenates and consumes tokens up to the '>' token.  It
1241  /// returns false if the > was found, otherwise it returns true if it finds
1242  /// and consumes the EOD marker.
1243  bool ConcatenateIncludeName(SmallString<128> &FilenameBuffer,
1244                              SourceLocation &End);
1245
1246  /// LexOnOffSwitch - Lex an on-off-switch (C99 6.10.6p2) and verify that it is
1247  /// followed by EOD.  Return true if the token is not a valid on-off-switch.
1248  bool LexOnOffSwitch(tok::OnOffSwitch &OOS);
1249
1250private:
1251
1252  void PushIncludeMacroStack() {
1253    IncludeMacroStack.push_back(IncludeStackInfo(CurLexerKind,
1254                                                 CurLexer.take(),
1255                                                 CurPTHLexer.take(),
1256                                                 CurPPLexer,
1257                                                 CurTokenLexer.take(),
1258                                                 CurDirLookup));
1259    CurPPLexer = 0;
1260  }
1261
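  /// \brief Restore the lexer state saved by the matching
  /// PushIncludeMacroStack().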
1262  void PopIncludeMacroStack() {
1263    CurLexer.reset(IncludeMacroStack.back().TheLexer);
1264    CurPTHLexer.reset(IncludeMacroStack.back().ThePTHLexer);
1265    CurPPLexer = IncludeMacroStack.back().ThePPLexer;
1266    CurTokenLexer.reset(IncludeMacroStack.back().TheTokenLexer);
1267    CurDirLookup  = IncludeMacroStack.back().TheDirLookup;
1268    CurLexerKind = IncludeMacroStack.back().CurLexerKind;
1269    IncludeMacroStack.pop_back();
1270  }
1271
1272  /// \brief Allocate a new MacroInfo object.
1273  MacroInfo *AllocateMacroInfo();
1274
1275  /// \brief Release the specified MacroInfo for re-use.
1276  ///
1277  /// This memory will be reused for allocating new MacroInfo objects.
1278  void ReleaseMacroInfo(MacroInfo* MI);
1279
1280  /// ReadMacroName - Lex and validate a macro name, which occurs after a
1281  /// \#define or \#undef.  This emits a diagnostic, sets the token kind to eod,
1282  /// and discards the rest of the macro line if the macro name is invalid.
1283  void ReadMacroName(Token &MacroNameTok, char isDefineUndef = 0);
1284
1285  /// ReadMacroDefinitionArgList - The ( starting an argument list of a macro
1286  /// definition has just been read.  Lex the rest of the arguments and the
1287  /// closing ), updating MI with what we learn and saving in LastTok the
1288  /// last token read.
1289  /// Return true if an error occurs parsing the arg list.
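  ///
  /// For illustration (source-level sketch), given:
  /// \code
  ///   \#define ADD(x, y) ((x) + (y))
  /// \endcode
  /// this is called after the '(' and reads "x", "y" and the closing ')',
  /// recording them in \p MI.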
1290  bool ReadMacroDefinitionArgList(MacroInfo *MI, Token& LastTok);
1291
1292  /// We just read a \#if or related directive and decided that the
1293  /// subsequent tokens are in the \#if'd out portion of the
1294  /// file.  Lex the rest of the file, until we see an \#endif.  If \p
1295  /// FoundNonSkipPortion is true, then we have already emitted code for part of
1296  /// this \#if directive, so \#else/\#elif blocks should never be entered. If
1297  /// \p FoundElse is false, then \#else directives are ok; if not, we have
1298  /// already seen one, so a \#else directive is a duplicate.  When this returns,
1299  /// the caller can lex the first valid token.
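  ///
  /// For illustration (source-level sketch), given:
  /// \code
  ///   \#if 0        // IfTokenLoc
  ///     int skipped;
  ///   \#else
  ///     int kept;
  ///   \#endif
  /// \endcode
  /// a call with \p FoundNonSkipPortion and \p FoundElse false skips the first
  /// block and returns so the caller can lex the tokens of the \#else branch.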
1300  void SkipExcludedConditionalBlock(SourceLocation IfTokenLoc,
1301                                    bool FoundNonSkipPortion, bool FoundElse,
1302                                    SourceLocation ElseLoc = SourceLocation());
1303
1304  /// \brief A fast PTH version of SkipExcludedConditionalBlock.
1305  void PTHSkipExcludedConditionalBlock();
1306
1307  /// EvaluateDirectiveExpression - Evaluate an integer constant expression that
1308  /// may occur after a \#if or \#elif directive and return it as a bool.  If the
1309  /// expression is equivalent to "!defined(X)" return X in IfNDefMacro.
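  ///
  /// For illustration (source-level sketch; MY_GUARD_H is a made-up name):
  /// \code
  ///   \#if !defined(MY_GUARD_H)   // result returned; IfNDefMacro set to MY_GUARD_H
  ///   \#if (FOO + 1) > 2          // only the evaluated result is returned
  /// \endcode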
1310  bool EvaluateDirectiveExpression(IdentifierInfo *&IfNDefMacro);
1311
1312  /// RegisterBuiltinPragmas - Install the standard preprocessor pragmas:
1313  /// \#pragma GCC poison/system_header/dependency and \#pragma once.
1314  void RegisterBuiltinPragmas();
1315
1316  /// \brief Register builtin macros such as __LINE__ with the identifier table.
1317  void RegisterBuiltinMacros();
1318
1319  /// HandleMacroExpandedIdentifier - If an identifier token is read that is to
1320  /// be expanded as a macro, handle it and return the next token as 'Tok'.  If
1321  /// the macro should not be expanded return true, otherwise return false.
1322  bool HandleMacroExpandedIdentifier(Token &Tok, MacroInfo *MI);
1323
1324  /// \brief Cache macro expanded tokens for TokenLexers.
1325  ///
1326  /// Works like a stack; a TokenLexer adds the macro-expanded tokens that it is
1327  /// going to lex to the cache, and when it finishes, the tokens are removed
1328  /// from the end of the cache.
1329  Token *cacheMacroExpandedTokens(TokenLexer *tokLexer,
1330                                  ArrayRef<Token> tokens);
1331  void removeCachedMacroExpandedTokensOfLastLexer();
1332  friend void TokenLexer::ExpandFunctionArguments();
1333
1334  /// isNextPPTokenLParen - Determine whether the next preprocessor token to be
1335  /// lexed is a '('.  If so, consume the token and return true; if not, this
1336  /// method should have no observable side-effect on the lexed tokens.
1337  bool isNextPPTokenLParen();
1338
1339  /// ReadFunctionLikeMacroArgs - After reading "MACRO(", this method is
1340  /// invoked to read all of the formal arguments specified for the macro
1341  /// invocation.  This returns null on error.
1342  MacroArgs *ReadFunctionLikeMacroArgs(Token &MacroName, MacroInfo *MI,
1343                                       SourceLocation &ExpansionEnd);
1344
1345  /// ExpandBuiltinMacro - If an identifier token is read that is to be expanded
1346  /// as a builtin macro, handle it and return the next token as 'Tok'.
1347  void ExpandBuiltinMacro(Token &Tok);
1348
1349  /// Handle_Pragma - Read a _Pragma directive, slice it up, process it, then
1350  /// return the first token after the directive.  The _Pragma token has just
1351  /// been read into 'Tok'.
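  ///
  /// For illustration, both spellings below reach the same pragma handling; the
  /// first (the C99 _Pragma operator) is the form this method destringizes
  /// (source-level sketch):
  /// \code
  ///   _Pragma("GCC system_header")
  ///   \#pragma GCC system_header
  /// \endcode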
1352  void Handle_Pragma(Token &Tok);
1353
1354  /// HandleMicrosoft__pragma - Like Handle_Pragma except the pragma text
1355  /// is not enclosed within a string literal.
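  ///
  /// For illustration (Microsoft extension, source-level sketch):
  /// \code
  ///   __pragma(warning(disable : 4996))
  /// \endcode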
1356  void HandleMicrosoft__pragma(Token &Tok);
1357
1358  /// EnterSourceFileWithLexer - Add a lexer to the top of the include stack and
1359  /// start lexing tokens from it instead of the current buffer.
1360  void EnterSourceFileWithLexer(Lexer *TheLexer, const DirectoryLookup *Dir);
1361
1362  /// EnterSourceFileWithPTH - Add a lexer to the top of the include stack and
1363  /// start getting tokens from it using the PTH cache.
1364  void EnterSourceFileWithPTH(PTHLexer *PL, const DirectoryLookup *Dir);
1365
1366  /// \brief Set the file ID for the preprocessor predefines.
1367  void setPredefinesFileID(FileID FID) {
1368    assert(PredefinesFileID.isInvalid() && "PredefinesFileID already set!");
1369    PredefinesFileID = FID;
1370  }
1371
1372  /// IsFileLexer - Returns true if we are lexing from a file and not a
1373  ///  pragma or a macro.
1374  static bool IsFileLexer(const Lexer* L, const PreprocessorLexer* P) {
1375    return L ? !L->isPragmaLexer() : P != 0;
1376  }
1377
1378  static bool IsFileLexer(const IncludeStackInfo& I) {
1379    return IsFileLexer(I.TheLexer, I.ThePPLexer);
1380  }
1381
1382  bool IsFileLexer() const {
1383    return IsFileLexer(CurLexer.get(), CurPPLexer);
1384  }
1385
1386  //===--------------------------------------------------------------------===//
1387  // Caching stuff.
1388  void CachingLex(Token &Result);
1389  bool InCachingLexMode() const {
1390    // If the Lexer pointers are 0 and IncludeMacroStack is empty, it means
1391    // that we are past EOF, not that we are in CachingLex mode.
1392    return CurPPLexer == 0 && CurTokenLexer == 0 && CurPTHLexer == 0 &&
1393           !IncludeMacroStack.empty();
1394  }
1395  void EnterCachingLexMode();
1396  void ExitCachingLexMode() {
1397    if (InCachingLexMode())
1398      RemoveTopOfLexerStack();
1399  }
1400  const Token &PeekAhead(unsigned N);
1401  void AnnotatePreviousCachedTokens(const Token &Tok);
1402
1403  //===--------------------------------------------------------------------===//
1404  /// Handle*Directive - implement the various preprocessor directives.  These
1405  /// should side-effect the current preprocessor object so that the next call
1406  /// to Lex() will return the appropriate token.
1407  void HandleLineDirective(Token &Tok);
1408  void HandleDigitDirective(Token &Tok);
1409  void HandleUserDiagnosticDirective(Token &Tok, bool isWarning);
1410  void HandleIdentSCCSDirective(Token &Tok);
1411  void HandleMacroPublicDirective(Token &Tok);
1412  void HandleMacroPrivateDirective(Token &Tok);
1413
1414  // File inclusion.
1415  void HandleIncludeDirective(SourceLocation HashLoc,
1416                              Token &Tok,
1417                              const DirectoryLookup *LookupFrom = 0,
1418                              bool isImport = false);
1419  void HandleIncludeNextDirective(SourceLocation HashLoc, Token &Tok);
1420  void HandleIncludeMacrosDirective(SourceLocation HashLoc, Token &Tok);
1421  void HandleImportDirective(SourceLocation HashLoc, Token &Tok);
1422  void HandleMicrosoftImportDirective(Token &Tok);
1423
1424  // Macro handling.
1425  void HandleDefineDirective(Token &Tok);
1426  void HandleUndefDirective(Token &Tok);
1427  void UndefineMacro(IdentifierInfo *II, MacroInfo *MI,
1428                     SourceLocation UndefLoc);
1429
1430  // Conditional Inclusion.
1431  void HandleIfdefDirective(Token &Tok, bool isIfndef,
1432                            bool ReadAnyTokensBeforeDirective);
1433  void HandleIfDirective(Token &Tok, bool ReadAnyTokensBeforeDirective);
1434  void HandleEndifDirective(Token &Tok);
1435  void HandleElseDirective(Token &Tok);
1436  void HandleElifDirective(Token &Tok);
1437
1438  // Pragmas.
1439  void HandlePragmaDirective(unsigned Introducer);
1440public:
1441  void HandlePragmaOnce(Token &OnceTok);
1442  void HandlePragmaMark();
1443  void HandlePragmaPoison(Token &PoisonTok);
1444  void HandlePragmaSystemHeader(Token &SysHeaderTok);
1445  void HandlePragmaDependency(Token &DependencyTok);
1446  void HandlePragmaComment(Token &CommentTok);
1447  void HandlePragmaMessage(Token &MessageTok);
1448  void HandlePragmaPushMacro(Token &Tok);
1449  void HandlePragmaPopMacro(Token &Tok);
1450  void HandlePragmaIncludeAlias(Token &Tok);
1451  IdentifierInfo *ParsePragmaPushOrPopMacro(Token &Tok);
1452
1453  // Return true, storing the first of the inserted tokens in Token, only if
1454  // some CommentHandler has inserted tokens and getCommentRetentionState() is false.
1455  bool HandleComment(Token &Token, SourceRange Comment);
1456
1457  /// \brief A macro is used, update information about macros that need unused
1458  /// warnings.
1459  void markMacroAsUsed(MacroInfo *MI);
1460};
1461
1462/// \brief Abstract base class that describes a handler that will receive
1463/// source ranges for each of the comments encountered in the source file.
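///
/// A minimal sketch of a handler (PrintingCommentHandler is a made-up name;
/// registration assumes the Preprocessor's addCommentHandler() method declared
/// earlier in this file):
/// \code
///   class PrintingCommentHandler : public CommentHandler {
///   public:
///     virtual bool HandleComment(Preprocessor &PP, SourceRange Comment) {
///       // Inspect or record the comment's SourceRange here.
///       return false; // no tokens were pushed back into the preprocessor
///     }
///   };
///
///   PrintingCommentHandler Handler;
///   PP.addCommentHandler(&Handler);
/// \endcode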
1464class CommentHandler {
1465public:
1466  virtual ~CommentHandler();
1467
1468  // The handler shall return true if it has pushed any tokens
1469  // to be read using e.g. EnterToken or EnterTokenStream.
1470  virtual bool HandleComment(Preprocessor &PP, SourceRange Comment) = 0;
1471};
1472
1473}  // end namespace clang
1474
1475#endif
1476