Preprocessor.h revision e40c4238a572bf8241a04e0005f70550cbfc1cfb
1//===--- Preprocessor.h - C Language Family Preprocessor --------*- C++ -*-===//
2//
3//                     The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10//  This file defines the Preprocessor interface.
11//
12//===----------------------------------------------------------------------===//
13
14#ifndef LLVM_CLANG_LEX_PREPROCESSOR_H
15#define LLVM_CLANG_LEX_PREPROCESSOR_H
16
17#include "clang/Lex/MacroInfo.h"
18#include "clang/Lex/Lexer.h"
19#include "clang/Lex/PTHLexer.h"
20#include "clang/Lex/PPCallbacks.h"
21#include "clang/Lex/TokenLexer.h"
22#include "clang/Lex/PTHManager.h"
23#include "clang/Basic/Builtins.h"
24#include "clang/Basic/Diagnostic.h"
25#include "clang/Basic/IdentifierTable.h"
26#include "clang/Basic/SourceLocation.h"
27#include "llvm/ADT/DenseMap.h"
28#include "llvm/ADT/IntrusiveRefCntPtr.h"
29#include "llvm/ADT/SmallPtrSet.h"
30#include "llvm/ADT/OwningPtr.h"
31#include "llvm/ADT/SmallVector.h"
32#include "llvm/ADT/ArrayRef.h"
33#include "llvm/Support/Allocator.h"
34#include <vector>
35
36namespace llvm {
37  template<unsigned InternalLen> class SmallString;
38}
39
40namespace clang {
41
42class SourceManager;
43class ExternalPreprocessorSource;
44class FileManager;
45class FileEntry;
46class HeaderSearch;
47class PragmaNamespace;
48class PragmaHandler;
49class CommentHandler;
50class ScratchBuffer;
51class TargetInfo;
52class PPCallbacks;
53class CodeCompletionHandler;
54class DirectoryLookup;
55class PreprocessingRecord;
56class ModuleLoader;
57
58/// Preprocessor - This object engages in a tight little dance with the lexer to
59/// efficiently preprocess tokens.  Lexers know only about tokens within a
60/// single source file, and don't know anything about preprocessor-level issues
61/// like the \#include stack, token expansion, etc.
62///
63class Preprocessor : public RefCountedBase<Preprocessor> {
64  DiagnosticsEngine        *Diags;
65  LangOptions       &LangOpts;
66  const TargetInfo  *Target;
67  FileManager       &FileMgr;
68  SourceManager     &SourceMgr;
69  ScratchBuffer     *ScratchBuf;
70  HeaderSearch      &HeaderInfo;
71  ModuleLoader      &TheModuleLoader;
72
73  /// \brief External source of macros.
74  ExternalPreprocessorSource *ExternalSource;
75
76
77  /// PTH - An optional PTHManager object used for getting tokens from
78  ///  a token cache rather than lexing the original source file.
79  OwningPtr<PTHManager> PTH;
80
81  /// BP - A BumpPtrAllocator object used to quickly allocate and release
82  ///  objects internal to the Preprocessor.
83  llvm::BumpPtrAllocator BP;
84
85  /// Identifiers for builtin macros and other builtins.
86  IdentifierInfo *Ident__LINE__, *Ident__FILE__;   // __LINE__, __FILE__
87  IdentifierInfo *Ident__DATE__, *Ident__TIME__;   // __DATE__, __TIME__
88  IdentifierInfo *Ident__INCLUDE_LEVEL__;          // __INCLUDE_LEVEL__
89  IdentifierInfo *Ident__BASE_FILE__;              // __BASE_FILE__
90  IdentifierInfo *Ident__TIMESTAMP__;              // __TIMESTAMP__
91  IdentifierInfo *Ident__COUNTER__;                // __COUNTER__
92  IdentifierInfo *Ident_Pragma, *Ident__pragma;    // _Pragma, __pragma
93  IdentifierInfo *Ident__VA_ARGS__;                // __VA_ARGS__
94  IdentifierInfo *Ident__has_feature;              // __has_feature
95  IdentifierInfo *Ident__has_extension;            // __has_extension
96  IdentifierInfo *Ident__has_builtin;              // __has_builtin
97  IdentifierInfo *Ident__has_attribute;            // __has_attribute
98  IdentifierInfo *Ident__has_include;              // __has_include
99  IdentifierInfo *Ident__has_include_next;         // __has_include_next
100  IdentifierInfo *Ident__has_warning;              // __has_warning
101
102  SourceLocation DATELoc, TIMELoc;
103  unsigned CounterValue;  // Next __COUNTER__ value.
104
105  enum {
106    /// MaxAllowedIncludeStackDepth - Maximum allowed depth of \#includes.
107    MaxAllowedIncludeStackDepth = 200
108  };
109
110  // State that is set before the preprocessor begins.
111  bool KeepComments : 1;
112  bool KeepMacroComments : 1;
113  bool SuppressIncludeNotFoundError : 1;
114
115  // State that changes while the preprocessor runs:
116  bool InMacroArgs : 1;            // True if parsing fn macro invocation args.
117
118  /// Whether the preprocessor owns the header search object.
119  bool OwnsHeaderSearch : 1;
120
121  /// DisableMacroExpansion - True if macro expansion is disabled.
122  bool DisableMacroExpansion : 1;
123
124  /// MacroExpansionInDirectivesOverride - Temporarily disables
125  /// DisableMacroExpansion (i.e. enables expansion) when parsing preprocessor
126  /// directives.
127  bool MacroExpansionInDirectivesOverride : 1;
128
129  class ResetMacroExpansionHelper;
130
131  /// \brief Whether we have already loaded macros from the external source.
132  mutable bool ReadMacrosFromExternalSource : 1;
133
134  /// \brief True if pragmas are enabled.
135  bool PragmasEnabled : 1;
136
137  /// \brief True if we are pre-expanding macro arguments.
138  bool InMacroArgPreExpansion;
139
140  /// Identifiers - This is mapping/lookup information for all identifiers in
141  /// the program, including program keywords.
142  mutable IdentifierTable Identifiers;
143
144  /// Selectors - This table contains all the selectors in the program. Unlike
145  /// IdentifierTable above, this table *isn't* populated by the preprocessor.
146  /// It is declared/expanded here because its role/lifetime is
147  /// conceptually similar to the IdentifierTable. In addition, the current
148  /// control flow (in clang::ParseAST()) makes it convenient to put it here.
149  /// FIXME: Make sure the lifetime of Identifiers/Selectors *isn't* tied to
150  /// the lifetime of the preprocessor.
151  SelectorTable Selectors;
152
153  /// BuiltinInfo - Information about builtins.
154  Builtin::Context BuiltinInfo;
155
156  /// PragmaHandlers - This tracks all of the pragmas that the client registered
157  /// with this preprocessor.
158  PragmaNamespace *PragmaHandlers;
159
160  /// \brief Tracks all of the comment handlers that the client registered
161  /// with this preprocessor.
162  std::vector<CommentHandler *> CommentHandlers;
163
164  /// \brief True if we want to ignore the EOF token and continue later on
165  /// (thus avoiding tearing down the Lexer, etc.).
166  bool IncrementalProcessing;
167
168  /// \brief The code-completion handler.
169  CodeCompletionHandler *CodeComplete;
170
171  /// \brief The file that we're performing code-completion for, if any.
172  const FileEntry *CodeCompletionFile;
173
174  /// \brief The offset in file for the code-completion point.
175  unsigned CodeCompletionOffset;
176
177  /// \brief The location for the code-completion point. This gets instantiated
178  /// when the CodeCompletionFile gets \#include'ed for preprocessing.
179  SourceLocation CodeCompletionLoc;
180
181  /// \brief The start location for the file of the code-completion point.
182  ///
183  /// This gets instantiated when the CodeCompletionFile gets \#include'ed
184  /// for preprocessing.
185  SourceLocation CodeCompletionFileLoc;
186
187  /// \brief The source location of the 'import' contextual keyword we just
188  /// lexed, if any.
189  SourceLocation ModuleImportLoc;
190
191  /// \brief The module import path that we're currently processing.
192  llvm::SmallVector<std::pair<IdentifierInfo *, SourceLocation>, 2>
193    ModuleImportPath;
194
195  /// \brief Whether the module import expects an identifier next. Otherwise,
196  /// it expects a '.' or ';'.
197  bool ModuleImportExpectsIdentifier;
198
199  /// \brief The source location of the currently-active
200  /// #pragma clang arc_cf_code_audited begin.
201  SourceLocation PragmaARCCFCodeAuditedLoc;
202
203  /// \brief True if we hit the code-completion point.
204  bool CodeCompletionReached;
205
206  /// \brief The number of bytes that we will initially skip when entering the
207  /// main file, which is used when loading a precompiled preamble, along
208  /// with a flag that indicates whether skipping this number of bytes will
209  /// place the lexer at the start of a line.
210  std::pair<unsigned, bool> SkipMainFilePreamble;
211
212  /// CurLexer - This is the current top of the stack that we're lexing from if
213  /// not expanding a macro and we are lexing directly from source code.
214  ///  Only one of CurLexer, CurPTHLexer, or CurTokenLexer will be non-null.
215  OwningPtr<Lexer> CurLexer;
216
217  /// CurPTHLexer - This is the current top of stack that we're lexing from if
218  ///  not expanding from a macro and we are lexing from a PTH cache.
219  ///  Only one of CurLexer, CurPTHLexer, or CurTokenLexer will be non-null.
220  OwningPtr<PTHLexer> CurPTHLexer;
221
222  /// CurPPLexer - This is the current top of the stack that we're lexing from
223  ///  if not expanding a macro.  This is an alias for either CurLexer or
224  ///  CurPTHLexer.
225  PreprocessorLexer *CurPPLexer;
226
227  /// CurDirLookup - The DirectoryLookup structure used to find the current
228  /// FileEntry, if CurLexer is non-null and if applicable.  This allows us to
229  /// implement \#include_next and find directory-specific properties.
230  const DirectoryLookup *CurDirLookup;
231
232  /// CurTokenLexer - This is the current macro we are expanding, if we are
233  /// expanding a macro.  One of CurLexer and CurTokenLexer must be null.
234  OwningPtr<TokenLexer> CurTokenLexer;
235
236  /// \brief The kind of lexer we're currently working with.
237  enum CurLexerKind {
238    CLK_Lexer,
239    CLK_PTHLexer,
240    CLK_TokenLexer,
241    CLK_CachingLexer,
242    CLK_LexAfterModuleImport
243  } CurLexerKind;
244
245  /// IncludeMacroStack - This keeps track of the stack of files currently
246  /// \#included, and macros currently being expanded from, not counting
247  /// CurLexer/CurTokenLexer.
248  struct IncludeStackInfo {
249    enum CurLexerKind     CurLexerKind;
250    Lexer                 *TheLexer;
251    PTHLexer              *ThePTHLexer;
252    PreprocessorLexer     *ThePPLexer;
253    TokenLexer            *TheTokenLexer;
254    const DirectoryLookup *TheDirLookup;
255
256    IncludeStackInfo(enum CurLexerKind K, Lexer *L, PTHLexer* P,
257                     PreprocessorLexer* PPL,
258                     TokenLexer* TL, const DirectoryLookup *D)
259      : CurLexerKind(K), TheLexer(L), ThePTHLexer(P), ThePPLexer(PPL),
260        TheTokenLexer(TL), TheDirLookup(D) {}
261  };
262  std::vector<IncludeStackInfo> IncludeMacroStack;
263
264  /// Callbacks - These are actions invoked when some preprocessor activity is
265  /// encountered (e.g. a file is \#included, etc).
266  PPCallbacks *Callbacks;
267
268  struct MacroExpandsInfo {
269    Token Tok;
270    MacroInfo *MI;
271    SourceRange Range;
272    MacroExpandsInfo(Token Tok, MacroInfo *MI, SourceRange Range)
273      : Tok(Tok), MI(MI), Range(Range) { }
274  };
275  SmallVector<MacroExpandsInfo, 2> DelayedMacroExpandsCallbacks;
276
277  /// Macros - For each IdentifierInfo that was associated with a macro, we
278  /// keep a mapping to the history of all macro definitions and #undefs in
279  /// reverse order (the latest one is at the head of the list).
280  llvm::DenseMap<IdentifierInfo*, MacroInfo*> Macros;
281
282  /// \brief Macros that we want to warn about because they are unused at the
283  /// end of the translation unit; we store just their SourceLocations instead
284  /// of something like MacroInfo*. The benefit of this is that when we are
285  /// deserializing from PCH, we don't need to deserialize identifiers and
286  /// macros just to report that they are unused; we just warn using the
287  /// SourceLocations in this set (which will be filled in by the ASTReader).
288  /// We are using SmallPtrSet instead of a vector for faster removal.
289  typedef llvm::SmallPtrSet<SourceLocation, 32> WarnUnusedMacroLocsTy;
290  WarnUnusedMacroLocsTy WarnUnusedMacroLocs;
291
292  /// MacroArgCache - This is a "freelist" of MacroArg objects that can be
293  /// reused for quick allocation.
294  MacroArgs *MacroArgCache;
295  friend class MacroArgs;
296
297  /// PragmaPushMacroInfo - For each IdentifierInfo used in a #pragma
298  /// push_macro directive, we keep a MacroInfo stack used to restore
299  /// the previous macro value.
300  llvm::DenseMap<IdentifierInfo*, std::vector<MacroInfo*> > PragmaPushMacroInfo;
301
302  // Various statistics we track for performance analysis.
303  unsigned NumDirectives, NumIncluded, NumDefined, NumUndefined, NumPragma;
304  unsigned NumIf, NumElse, NumEndif;
305  unsigned NumEnteredSourceFiles, MaxIncludeStackDepth;
306  unsigned NumMacroExpanded, NumFnMacroExpanded, NumBuiltinMacroExpanded;
307  unsigned NumFastMacroExpanded, NumTokenPaste, NumFastTokenPaste;
308  unsigned NumSkipped;
309
310  /// Predefines - This string contains the predefined macros that the
311  /// preprocessor should use from the command line, etc.
312  std::string Predefines;
313
314  /// TokenLexerCache - Cache macro expanders to reduce malloc traffic.
315  enum { TokenLexerCacheSize = 8 };
316  unsigned NumCachedTokenLexers;
317  TokenLexer *TokenLexerCache[TokenLexerCacheSize];
318
319  /// \brief Keeps macro expanded tokens for TokenLexers.
320  ///
321  /// Works like a stack; a TokenLexer adds the macro-expanded tokens that it
322  /// is going to lex into the cache, and when it finishes, the tokens are
323  /// removed from the end of the cache.
324  SmallVector<Token, 16> MacroExpandedTokens;
325  std::vector<std::pair<TokenLexer *, size_t> > MacroExpandingLexersStack;
326
327  /// \brief A record of the macro definitions and expansions that
328  /// occurred during preprocessing.
329  ///
330  /// This is an optional side structure that can be enabled with
331  /// \c createPreprocessingRecord() prior to preprocessing.
332  PreprocessingRecord *Record;
333
334private:  // Cached tokens state.
335  typedef SmallVector<Token, 1> CachedTokensTy;
336
337  /// CachedTokens - Cached tokens are stored here when we do backtracking or
338  /// lookahead. They are "lexed" by the CachingLex() method.
339  CachedTokensTy CachedTokens;
340
341  /// CachedLexPos - The position of the cached token that CachingLex() should
342  /// "lex" next. If it points beyond the CachedTokens vector, it means that
343  /// a normal Lex() should be invoked.
344  CachedTokensTy::size_type CachedLexPos;
345
346  /// BacktrackPositions - Stack of backtrack positions, allowing nested
347  /// backtracks. The EnableBacktrackAtThisPos() method pushes a position to
348  /// indicate where CachedLexPos should be set when the Backtrack() method is
349  /// invoked (at which point the last position is popped).
350  std::vector<CachedTokensTy::size_type> BacktrackPositions;
351
352  struct MacroInfoChain {
353    MacroInfo MI;
354    MacroInfoChain *Next;
355    MacroInfoChain *Prev;
356  };
357
358  /// MacroInfos are managed as a chain for easy disposal.  This is the head
359  /// of that list.
360  MacroInfoChain *MIChainHead;
361
362  /// MICache - A "freelist" of MacroInfo objects that can be reused for quick
363  /// allocation.
364  MacroInfoChain *MICache;
365
366  MacroInfo *getInfoForMacro(IdentifierInfo *II) const;
367
368public:
369  Preprocessor(DiagnosticsEngine &diags, LangOptions &opts,
370               const TargetInfo *target,
371               SourceManager &SM, HeaderSearch &Headers,
372               ModuleLoader &TheModuleLoader,
373               IdentifierInfoLookup *IILookup = 0,
374               bool OwnsHeaderSearch = false,
375               bool DelayInitialization = false,
376               bool IncrProcessing = false);
377
378  ~Preprocessor();
379
380  /// \brief Initialize the preprocessor, if the constructor did not already
381  /// perform the initialization.
382  ///
383  /// \param Target Information about the target.
384  void Initialize(const TargetInfo &Target);
385
386  DiagnosticsEngine &getDiagnostics() const { return *Diags; }
387  void setDiagnostics(DiagnosticsEngine &D) { Diags = &D; }
388
389  const LangOptions &getLangOpts() const { return LangOpts; }
390  const TargetInfo &getTargetInfo() const { return *Target; }
391  FileManager &getFileManager() const { return FileMgr; }
392  SourceManager &getSourceManager() const { return SourceMgr; }
393  HeaderSearch &getHeaderSearchInfo() const { return HeaderInfo; }
394
395  IdentifierTable &getIdentifierTable() { return Identifiers; }
396  SelectorTable &getSelectorTable() { return Selectors; }
397  Builtin::Context &getBuiltinInfo() { return BuiltinInfo; }
398  llvm::BumpPtrAllocator &getPreprocessorAllocator() { return BP; }
399
400  void setPTHManager(PTHManager* pm);
401
402  PTHManager *getPTHManager() { return PTH.get(); }
403
404  void setExternalSource(ExternalPreprocessorSource *Source) {
405    ExternalSource = Source;
406  }
407
408  ExternalPreprocessorSource *getExternalSource() const {
409    return ExternalSource;
410  }
411
412  /// \brief Retrieve the module loader associated with this preprocessor.
413  ModuleLoader &getModuleLoader() const { return TheModuleLoader; }
414
415  /// SetCommentRetentionState - Control whether or not the preprocessor retains
416  /// comments in output.
417  void SetCommentRetentionState(bool KeepComments, bool KeepMacroComments) {
418    this->KeepComments = KeepComments | KeepMacroComments;
419    this->KeepMacroComments = KeepMacroComments;
420  }
421
422  bool getCommentRetentionState() const { return KeepComments; }
423
424  void setPragmasEnabled(bool Enabled) { PragmasEnabled = Enabled; }
425  bool getPragmasEnabled() const { return PragmasEnabled; }
426
427  void SetSuppressIncludeNotFoundError(bool Suppress) {
428    SuppressIncludeNotFoundError = Suppress;
429  }
430
431  bool GetSuppressIncludeNotFoundError() {
432    return SuppressIncludeNotFoundError;
433  }
434
435  /// isCurrentLexer - Return true if we are lexing directly from the specified
436  /// lexer.
437  bool isCurrentLexer(const PreprocessorLexer *L) const {
438    return CurPPLexer == L;
439  }
440
441  /// getCurrentLexer - Return the current lexer being lexed from.  Note
442  /// that this ignores any potentially active macro expansions and _Pragma
443  /// expansions going on at the time.
444  PreprocessorLexer *getCurrentLexer() const { return CurPPLexer; }
445
446  /// getCurrentFileLexer - Return the current file lexer being lexed from.
447  /// Note that this ignores any potentially active macro expansions and _Pragma
448  /// expansions going on at the time.
449  PreprocessorLexer *getCurrentFileLexer() const;
450
451  /// getPPCallbacks/addPPCallbacks - Accessors for preprocessor callbacks.
452  /// Note that this class takes ownership of any PPCallbacks object given to
453  /// it.
454  PPCallbacks *getPPCallbacks() const { return Callbacks; }
455  void addPPCallbacks(PPCallbacks *C) {
456    if (Callbacks)
457      C = new PPChainedCallbacks(C, Callbacks);
458    Callbacks = C;
459  }
460
461  /// \brief Given an identifier, return the MacroInfo it is \#defined to
462  /// or null if it isn't \#define'd.
463  MacroInfo *getMacroInfo(IdentifierInfo *II) const {
464    if (!II->hasMacroDefinition())
465      return 0;
466
467    return getInfoForMacro(II);
468  }
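
  /// For example (a minimal sketch; 'PP' is an assumed Preprocessor instance
  /// and 'II' is an assumed IdentifierInfo*):
  /// \code
  ///   if (MacroInfo *MI = PP.getMacroInfo(II)) {
  ///     // II is currently \#defined; inspect MI here.
  ///   }
  /// \endcode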
469
470  /// \brief Specify a macro for this identifier.
471  void setMacroInfo(IdentifierInfo *II, MacroInfo *MI,
472                    bool LoadedFromAST = false);
473  /// \brief Undefine a macro for this identifier.
474  void clearMacroInfo(IdentifierInfo *II);
475
476  /// macro_iterator/macro_begin/macro_end - This allows you to walk the macro
477  /// history table. Currently defined macros have
478  /// IdentifierInfo::hasMacroDefinition() set and an empty
479  /// MacroInfo::getUndefLoc() at the head of the list.
480  typedef llvm::DenseMap<IdentifierInfo*,
481                         MacroInfo*>::const_iterator macro_iterator;
482  macro_iterator macro_begin(bool IncludeExternalMacros = true) const;
483  macro_iterator macro_end(bool IncludeExternalMacros = true) const;
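
  /// Iterating over the macro history table (a sketch; 'PP' is an assumed
  /// Preprocessor instance):
  /// \code
  ///   for (Preprocessor::macro_iterator I = PP.macro_begin(),
  ///                                     E = PP.macro_end();
  ///        I != E; ++I) {
  ///     IdentifierInfo *II = I->first;
  ///     MacroInfo *MI = I->second;
  ///     // ...
  ///   }
  /// \endcode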
484
485  const std::string &getPredefines() const { return Predefines; }
486  /// setPredefines - Set the predefines for this Preprocessor.  These
487  /// predefines are automatically injected when parsing the main file.
488  void setPredefines(const char *P) { Predefines = P; }
489  void setPredefines(const std::string &P) { Predefines = P; }
490
491  /// getIdentifierInfo - Return information about the specified preprocessor
492  /// identifier token.  Passing a StringRef is preferred over first
493  /// constructing a std::string, since it avoids allocating and copying
494  /// memory just to build the string (the StringRef can point directly at
495  /// the original characters).
496  IdentifierInfo *getIdentifierInfo(StringRef Name) const {
497    return &Identifiers.get(Name);
498  }
499
500  /// AddPragmaHandler - Add the specified pragma handler to the preprocessor.
501  /// If 'Namespace' is non-empty, then it is a token required to exist on the
502  /// pragma line before the pragma string starts, e.g. "STDC" or "GCC".
503  void AddPragmaHandler(StringRef Namespace, PragmaHandler *Handler);
504  void AddPragmaHandler(PragmaHandler *Handler) {
505    AddPragmaHandler(StringRef(), Handler);
506  }
507
508  /// RemovePragmaHandler - Remove the specified pragma handler from
509  /// the preprocessor. If \arg Namespace is non-empty, then it should
510  /// be the namespace that \arg Handler was added to. It is an error
511  /// to remove a handler that has not been registered.
512  void RemovePragmaHandler(StringRef Namespace, PragmaHandler *Handler);
513  void RemovePragmaHandler(PragmaHandler *Handler) {
514    RemovePragmaHandler(StringRef(), Handler);
515  }
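
  /// A hedged registration sketch (illustrative only; 'MyPragmaHandler' is an
  /// assumed subclass of PragmaHandler, and handler ownership is managed by
  /// the caller as appropriate):
  /// \code
  ///   PragmaHandler *Handler = new MyPragmaHandler("my_pragma");
  ///   PP.AddPragmaHandler("clang", Handler);
  ///   // ... preprocess ...
  ///   PP.RemovePragmaHandler("clang", Handler);
  /// \endcode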
516
517  /// \brief Add the specified comment handler to the preprocessor.
518  void addCommentHandler(CommentHandler *Handler);
519
520  /// \brief Remove the specified comment handler.
521  ///
522  /// It is an error to remove a handler that has not been registered.
523  void removeCommentHandler(CommentHandler *Handler);
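
  /// A hedged registration sketch (illustrative; 'MyCommentHandler' is an
  /// assumed subclass of the CommentHandler interface declared at the end of
  /// this file):
  /// \code
  ///   MyCommentHandler Handler;
  ///   PP.addCommentHandler(&Handler);
  ///   // ... run the preprocessor ...
  ///   PP.removeCommentHandler(&Handler);
  /// \endcode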
524
525  /// \brief Set the code completion handler to the given object.
526  void setCodeCompletionHandler(CodeCompletionHandler &Handler) {
527    CodeComplete = &Handler;
528  }
529
530  /// \brief Retrieve the current code-completion handler.
531  CodeCompletionHandler *getCodeCompletionHandler() const {
532    return CodeComplete;
533  }
534
535  /// \brief Clear out the code completion handler.
536  void clearCodeCompletionHandler() {
537    CodeComplete = 0;
538  }
539
540  /// \brief Hook used by the lexer to invoke the "natural language" code
541  /// completion point.
542  void CodeCompleteNaturalLanguage();
543
544  /// \brief Retrieve the preprocessing record, or NULL if there is no
545  /// preprocessing record.
546  PreprocessingRecord *getPreprocessingRecord() const { return Record; }
547
548  /// \brief Create a new preprocessing record, which will keep track of
549  /// all macro expansions, macro definitions, etc.
550  void createPreprocessingRecord(bool RecordConditionalDirectives);
551
552  /// EnterMainSourceFile - Enter the specified FileID as the main source file,
553  /// which implicitly adds the builtin defines etc.
554  void EnterMainSourceFile();
555
556  /// EndSourceFile - Inform the preprocessor callbacks that processing is
557  /// complete.
558  void EndSourceFile();
559
560  /// EnterSourceFile - Add a source file to the top of the include stack and
561  /// start lexing tokens from it instead of the current buffer.  If an error
562  /// occurs, a diagnostic is emitted and the file is not entered.
563  void EnterSourceFile(FileID CurFileID, const DirectoryLookup *Dir,
564                       SourceLocation Loc);
565
566  /// EnterMacro - Add a Macro to the top of the include stack and start lexing
567  /// tokens from it instead of the current buffer.  Args specifies the
568  /// tokens input to a function-like macro.
569  ///
570  /// ILEnd specifies the location of the ')' for a function-like macro or the
571  /// identifier for an object-like macro.
572  void EnterMacro(Token &Identifier, SourceLocation ILEnd, MacroArgs *Args);
573
574  /// EnterTokenStream - Add a "macro" context to the top of the include stack,
575  /// which will cause the lexer to start returning the specified tokens.
576  ///
577  /// If DisableMacroExpansion is true, tokens lexed from the token stream will
578  /// not be subject to further macro expansion.  Otherwise, these tokens will
579  /// be re-macro-expanded when/if expansion is enabled.
580  ///
581  /// If OwnsTokens is false, this method assumes that the specified stream of
582  /// tokens has a permanent owner somewhere, so they do not need to be copied.
583  /// If it is true, it assumes the array of tokens is allocated with new[] and
584  /// must be freed.
585  ///
586  void EnterTokenStream(const Token *Toks, unsigned NumToks,
587                        bool DisableMacroExpansion, bool OwnsTokens);
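
  /// A minimal sketch of pushing a token stream (illustrative; 'Toks' and
  /// 'NumToks' stand for an assumed array of already-lexed tokens owned by
  /// the caller):
  /// \code
  ///   PP.EnterTokenStream(Toks, NumToks,
  ///                       /*DisableMacroExpansion=*/true,
  ///                       /*OwnsTokens=*/false); // tokens owned elsewhere
  /// \endcode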
588
589  /// RemoveTopOfLexerStack - Pop the current lexer/macro exp off the top of the
590  /// lexer stack.  This should only be used in situations where the current
591  /// state of the top-of-stack lexer is known.
592  void RemoveTopOfLexerStack();
593
594  /// EnableBacktrackAtThisPos - From the point that this method is called, and
595  /// until CommitBacktrackedTokens() or Backtrack() is called, the Preprocessor
596  /// keeps track of the lexed tokens so that a subsequent Backtrack() call will
597  /// make the Preprocessor re-lex the same tokens.
598  ///
599  /// Nested backtracks are allowed, meaning that EnableBacktrackAtThisPos can
600  /// be called multiple times and CommitBacktrackedTokens/Backtrack calls will
601  /// be combined with the EnableBacktrackAtThisPos calls in reverse order.
602  ///
603  /// NOTE: *DO NOT* forget to call either CommitBacktrackedTokens or Backtrack
604  /// at some point after EnableBacktrackAtThisPos. If you don't, caching of
605  /// tokens will continue indefinitely.
606  ///
607  void EnableBacktrackAtThisPos();
608
609  /// CommitBacktrackedTokens - Disable the last EnableBacktrackAtThisPos call.
610  void CommitBacktrackedTokens();
611
612  /// Backtrack - Make Preprocessor re-lex the tokens that were lexed since
613  /// EnableBacktrackAtThisPos() was previously called.
614  void Backtrack();
615
616  /// isBacktrackEnabled - True if EnableBacktrackAtThisPos() was called and
617  /// caching of tokens is on.
618  bool isBacktrackEnabled() const { return !BacktrackPositions.empty(); }
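
  /// A minimal usage sketch for the backtracking API above (illustrative
  /// only; 'PP' names an assumed Preprocessor instance):
  /// \code
  ///   PP.EnableBacktrackAtThisPos();
  ///   Token Tok;
  ///   PP.Lex(Tok);                    // speculatively lex some tokens
  ///   if (Tok.is(tok::l_paren))
  ///     PP.CommitBacktrackedTokens(); // keep the tokens we lexed
  ///   else
  ///     PP.Backtrack();               // re-lex the same tokens later
  /// \endcode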
619
620  /// Lex - To lex a token from the preprocessor, just pull a token from the
621  /// current lexer or macro object.
622  void Lex(Token &Result) {
623    switch (CurLexerKind) {
624    case CLK_Lexer: CurLexer->Lex(Result); break;
625    case CLK_PTHLexer: CurPTHLexer->Lex(Result); break;
626    case CLK_TokenLexer: CurTokenLexer->Lex(Result); break;
627    case CLK_CachingLexer: CachingLex(Result); break;
628    case CLK_LexAfterModuleImport: LexAfterModuleImport(Result); break;
629    }
630  }
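
  /// A typical client drives the preprocessor by calling Lex() in a loop
  /// until an end-of-file token is returned (a minimal sketch; 'PP' is an
  /// assumed Preprocessor instance):
  /// \code
  ///   Token Tok;
  ///   do {
  ///     PP.Lex(Tok);
  ///     // ... process Tok ...
  ///   } while (Tok.isNot(tok::eof));
  /// \endcode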
631
632  void LexAfterModuleImport(Token &Result);
633
634  /// LexNonComment - Lex a token.  If it's a comment, keep lexing until we get
635  /// something not a comment.  This is useful in -E -C mode where comments
636  /// would foul up preprocessor directive handling.
637  void LexNonComment(Token &Result) {
638    do
639      Lex(Result);
640    while (Result.getKind() == tok::comment);
641  }
642
643  /// LexUnexpandedToken - This is just like Lex, but this disables macro
644  /// expansion of identifier tokens.
645  void LexUnexpandedToken(Token &Result) {
646    // Disable macro expansion.
647    bool OldVal = DisableMacroExpansion;
648    DisableMacroExpansion = true;
649    // Lex the token.
650    Lex(Result);
651
652    // Reenable it.
653    DisableMacroExpansion = OldVal;
654  }
655
656  /// LexUnexpandedNonComment - Like LexNonComment, but this disables macro
657  /// expansion of identifier tokens.
658  void LexUnexpandedNonComment(Token &Result) {
659    do
660      LexUnexpandedToken(Result);
661    while (Result.getKind() == tok::comment);
662  }
663
664  /// Disables macro expansion everywhere except for preprocessor directives.
665  void SetMacroExpansionOnlyInDirectives() {
666    DisableMacroExpansion = true;
667    MacroExpansionInDirectivesOverride = true;
668  }
669
670  /// LookAhead - This peeks ahead N tokens and returns that token without
671  /// consuming any tokens.  LookAhead(0) returns the next token that would be
672  /// returned by Lex(), LookAhead(1) returns the token after it, etc.  This
673  /// returns normal tokens after phase 5.  As such, it is equivalent to using
674  /// 'Lex', not 'LexUnexpandedToken'.
675  const Token &LookAhead(unsigned N) {
676    if (CachedLexPos + N < CachedTokens.size())
677      return CachedTokens[CachedLexPos+N];
678    else
679      return PeekAhead(N+1);
680  }
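
  /// For example, a client can peek at upcoming tokens without consuming
  /// them (a sketch; 'PP' is an assumed Preprocessor instance):
  /// \code
  ///   Token Next  = PP.LookAhead(0); // the token Lex() would return next
  ///   Token After = PP.LookAhead(1); // the token after that
  ///   if (Next.is(tok::identifier) && After.is(tok::colon)) {
  ///     // ... handle an identifier followed by a colon ...
  ///   }
  /// \endcode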
681
682  /// RevertCachedTokens - When backtracking is enabled and tokens are cached,
683  /// this allows reverting a specific number of tokens.
684  /// Note that the number of tokens being reverted should be up to the last
685  /// backtrack position, not more.
686  void RevertCachedTokens(unsigned N) {
687    assert(isBacktrackEnabled() &&
688           "Should only be called when tokens are cached for backtracking");
689    assert(signed(CachedLexPos) - signed(N) >= signed(BacktrackPositions.back())
690         && "Should revert tokens up to the last backtrack position, not more");
691    assert(signed(CachedLexPos) - signed(N) >= 0 &&
692           "Corrupted backtrack positions ?");
693    CachedLexPos -= N;
694  }
695
696  /// EnterToken - Enters a token in the token stream to be lexed next. If
697  /// Backtrack() is called afterwards, the token will remain at the insertion
698  /// point.
699  void EnterToken(const Token &Tok) {
700    EnterCachingLexMode();
701    CachedTokens.insert(CachedTokens.begin()+CachedLexPos, Tok);
702  }
703
704  /// AnnotateCachedTokens - We notify the Preprocessor that if it is caching
705  /// tokens (because backtrack is enabled) it should replace the most recent
706  /// cached tokens with the given annotation token. This function has no effect
707  /// if backtracking is not enabled.
708  ///
709  /// Note that the use of this function is just an optimization, so that the
710  /// cached tokens don't get re-parsed and re-resolved after a backtrack is
711  /// invoked.
712  void AnnotateCachedTokens(const Token &Tok) {
713    assert(Tok.isAnnotation() && "Expected annotation token");
714    if (CachedLexPos != 0 && isBacktrackEnabled())
715      AnnotatePreviousCachedTokens(Tok);
716  }
717
718  /// \brief Replace the last token with an annotation token.
719  ///
720  /// Like AnnotateCachedTokens(), this routine replaces an
721  /// already-parsed (and resolved) token with an annotation
722  /// token. However, this routine only replaces the last token with
723  /// the annotation token; it does not affect any other cached
724  /// tokens. This function has no effect if backtracking is not
725  /// enabled.
726  void ReplaceLastTokenWithAnnotation(const Token &Tok) {
727    assert(Tok.isAnnotation() && "Expected annotation token");
728    if (CachedLexPos != 0 && isBacktrackEnabled())
729      CachedTokens[CachedLexPos-1] = Tok;
730  }
731
732  /// TypoCorrectToken - Update the current token to represent the provided
733  /// identifier, in order to cache an action performed by typo correction.
734  void TypoCorrectToken(const Token &Tok) {
735    assert(Tok.getIdentifierInfo() && "Expected identifier token");
736    if (CachedLexPos != 0 && isBacktrackEnabled())
737      CachedTokens[CachedLexPos-1] = Tok;
738  }
739
740  /// \brief Recompute the current lexer kind based on the CurLexer/CurPTHLexer/
741  /// CurTokenLexer pointers.
742  void recomputeCurLexerKind();
743
744  /// \brief Returns true if incremental processing is enabled.
745  bool isIncrementalProcessingEnabled() const { return IncrementalProcessing; }
746
747  /// \brief Enables or disables incremental processing.
748  void enableIncrementalProcessing(bool value = true) {
749    IncrementalProcessing = value;
750  }
751
752  /// \brief Specify the point at which code-completion will be performed.
753  ///
754  /// \param File the file in which code completion should occur. If
755  /// this file is included multiple times, code-completion will
756  /// perform completion the first time it is included. If NULL, this
757  /// function clears out the code-completion point.
758  ///
759  /// \param Line the line at which code completion should occur
760  /// (1-based).
761  ///
762  /// \param Column the column at which code completion should occur
763  /// (1-based).
764  ///
765  /// \returns true if an error occurred, false otherwise.
766  bool SetCodeCompletionPoint(const FileEntry *File,
767                              unsigned Line, unsigned Column);
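
  /// For example (a sketch; 'File' is an assumed FileEntry* for the file in
  /// which completion should occur):
  /// \code
  ///   if (PP.SetCodeCompletionPoint(File, /*Line=*/10, /*Column=*/4)) {
  ///     // an error occurred; the completion point was not set
  ///   }
  /// \endcode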
768
769  /// \brief Determine if we are performing code completion.
770  bool isCodeCompletionEnabled() const { return CodeCompletionFile != 0; }
771
772  /// \brief Returns the location of the code-completion point.
773  /// Returns an invalid location if code-completion is not enabled or the file
774  /// containing the code-completion point has not been lexed yet.
775  SourceLocation getCodeCompletionLoc() const { return CodeCompletionLoc; }
776
777  /// \brief Returns the start location of the file of code-completion point.
778  /// Returns an invalid location if code-completion is not enabled or the file
779  /// containing the code-completion point has not been lexed yet.
780  SourceLocation getCodeCompletionFileLoc() const {
781    return CodeCompletionFileLoc;
782  }
783
784  /// \brief Returns true if code-completion is enabled and we have hit the
785  /// code-completion point.
786  bool isCodeCompletionReached() const { return CodeCompletionReached; }
787
788  /// \brief Note that we hit the code-completion point.
789  void setCodeCompletionReached() {
790    assert(isCodeCompletionEnabled() && "Code-completion not enabled!");
791    CodeCompletionReached = true;
792    // Silence any diagnostics that occur after we hit the code-completion.
793    getDiagnostics().setSuppressAllDiagnostics(true);
794  }
795
796  /// \brief The location of the currently-active \#pragma clang
797  /// arc_cf_code_audited begin.  Returns an invalid location if there
798  /// is no such pragma active.
799  SourceLocation getPragmaARCCFCodeAuditedLoc() const {
800    return PragmaARCCFCodeAuditedLoc;
801  }
802
803  /// \brief Set the location of the currently-active \#pragma clang
804  /// arc_cf_code_audited begin.  An invalid location ends the pragma.
805  void setPragmaARCCFCodeAuditedLoc(SourceLocation Loc) {
806    PragmaARCCFCodeAuditedLoc = Loc;
807  }
808
809  /// \brief Instruct the preprocessor to skip part of the main source file.
810  ///
811  /// \param Bytes The number of bytes in the preamble to skip.
812  ///
813  /// \param StartOfLine Whether skipping these bytes puts the lexer at the
814  /// start of a line.
815  void setSkipMainFilePreamble(unsigned Bytes, bool StartOfLine) {
816    SkipMainFilePreamble.first = Bytes;
817    SkipMainFilePreamble.second = StartOfLine;
818  }
819
820  /// Diag - Forwarding function for diagnostics.  This emits a diagnostic at
821  /// the specified source location (for the Token overload below, the
822  /// token's location), which is then used to render the diagnostic.
823  DiagnosticBuilder Diag(SourceLocation Loc, unsigned DiagID) const {
824    return Diags->Report(Loc, DiagID);
825  }
826
827  DiagnosticBuilder Diag(const Token &Tok, unsigned DiagID) const {
828    return Diags->Report(Tok.getLocation(), DiagID);
829  }
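
  /// For example (a sketch; 'diag::some_diag_id' stands in for a real
  /// diagnostic ID):
  /// \code
  ///   PP.Diag(Tok, diag::some_diag_id) << Tok.getName();
  /// \endcode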
830
831  /// getSpelling() - Return the 'spelling' of the token at the given
832  /// location; does not go up to the spelling location or down to the
833  /// expansion location.
834  ///
835  /// \param buffer A buffer which will be used only if the token requires
836  ///   "cleaning", e.g. if it contains trigraphs or escaped newlines
837  /// \param invalid If non-null, will be set \c true if an error occurs.
838  StringRef getSpelling(SourceLocation loc,
839                              SmallVectorImpl<char> &buffer,
840                              bool *invalid = 0) const {
841    return Lexer::getSpelling(loc, buffer, SourceMgr, LangOpts, invalid);
842  }
843
844  /// getSpelling() - Return the 'spelling' of the Tok token.  The spelling of a
845  /// token is the characters used to represent the token in the source file
846  /// after trigraph expansion and escaped-newline folding.  In particular, this
847  /// wants to get the true, uncanonicalized, spelling of things like digraphs,
848  /// UCNs, etc.
849  ///
850  /// \param Invalid If non-null, will be set \c true if an error occurs.
851  std::string getSpelling(const Token &Tok, bool *Invalid = 0) const {
852    return Lexer::getSpelling(Tok, SourceMgr, LangOpts, Invalid);
853  }
854
855  /// getSpelling - This method is used to get the spelling of a token into a
856  /// preallocated buffer, instead of as an std::string.  The caller is required
857  /// to allocate enough space for the token, which is guaranteed to be at least
858  /// Tok.getLength() bytes long.  The length of the actual result is returned.
859  ///
860  /// Note that this method may do two possible things: it may either fill in
861  /// the buffer specified with characters, or it may *change the input pointer*
862  /// to point to a constant buffer with the data already in it (avoiding a
863  /// copy).  The caller is not allowed to modify the returned buffer pointer
864  /// if an internal buffer is returned.
865  unsigned getSpelling(const Token &Tok, const char *&Buffer,
866                       bool *Invalid = 0) const {
867    return Lexer::getSpelling(Tok, Buffer, SourceMgr, LangOpts, Invalid);
868  }
869
870  /// getSpelling - This method is used to get the spelling of a token into a
871  /// SmallVector. Note that the returned StringRef may not point to the
872  /// supplied buffer if a copy can be avoided.
873  StringRef getSpelling(const Token &Tok,
874                        SmallVectorImpl<char> &Buffer,
875                        bool *Invalid = 0) const;
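
  /// A usage sketch for the buffer-based overload (illustrative only; the
  /// returned StringRef may point into 'Buffer' or into the source buffer):
  /// \code
  ///   SmallString<64> Buffer;
  ///   bool Invalid = false;
  ///   StringRef Spelling = PP.getSpelling(Tok, Buffer, &Invalid);
  ///   if (!Invalid) {
  ///     // use Spelling; do not assume it points into Buffer
  ///   }
  /// \endcode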
876
877  /// getSpellingOfSingleCharacterNumericConstant - Tok is a numeric constant
878  /// with length 1; return the character.
879  char getSpellingOfSingleCharacterNumericConstant(const Token &Tok,
880                                                   bool *Invalid = 0) const {
881    assert(Tok.is(tok::numeric_constant) &&
882           Tok.getLength() == 1 && "Called on unsupported token");
883    assert(!Tok.needsCleaning() && "Token can't need cleaning with length 1");
884
885    // If the token is carrying a literal data pointer, just use it.
886    if (const char *D = Tok.getLiteralData())
887      return *D;
888
889    // Otherwise, fall back on getCharacterData, which is slower, but always
890    // works.
891    return *SourceMgr.getCharacterData(Tok.getLocation(), Invalid);
892  }
893
894  /// \brief Retrieve the name of the immediate macro expansion.
895  ///
896  /// This routine starts from a source location, and finds the name of the macro
897  /// responsible for its immediate expansion. It looks through any intervening
898  /// macro argument expansions to compute this. It returns a StringRef which
899  /// refers to the SourceManager-owned buffer of the source where that macro
900  /// name is spelled. Thus, the result shouldn't outlive the SourceManager.
901  StringRef getImmediateMacroName(SourceLocation Loc) {
902    return Lexer::getImmediateMacroName(Loc, SourceMgr, getLangOpts());
903  }
904
905  /// CreateString - Plop the specified string into a scratch buffer and set the
906  /// specified token's location and length to it.  If specified, the source
907  /// location provides a location of the expansion point of the token.
908  void CreateString(const char *Buf, unsigned Len, Token &Tok,
909                    SourceLocation ExpansionLocStart = SourceLocation(),
910                    SourceLocation ExpansionLocEnd = SourceLocation());
911
912  /// \brief Computes the source location just past the end of the
913  /// token at this source location.
914  ///
915  /// This routine can be used to produce a source location that
916  /// points just past the end of the token referenced by \p Loc, and
917  /// is generally used when a diagnostic needs to point just after a
918  /// token where it expected something different from what it received. If
919  /// the returned source location would not be meaningful (e.g., if
920  /// it points into a macro), this routine returns an invalid
921  /// source location.
922  ///
923  /// \param Offset an offset from the end of the token, where the source
924  /// location should refer to. The default offset (0) produces a source
925  /// location pointing just past the end of the token; an offset of 1 produces
926  /// a source location pointing to the last character in the token, etc.
927  SourceLocation getLocForEndOfToken(SourceLocation Loc, unsigned Offset = 0) {
928    return Lexer::getLocForEndOfToken(Loc, Offset, SourceMgr, LangOpts);
929  }
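
  /// This is commonly used to place a diagnostic just past a token (a sketch;
  /// 'TokLoc' and 'DiagID' are assumed to exist in the caller):
  /// \code
  ///   SourceLocation AfterTok = PP.getLocForEndOfToken(TokLoc);
  ///   if (AfterTok.isValid())
  ///     PP.Diag(AfterTok, DiagID);
  /// \endcode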
930
931  /// \brief Returns true if the given MacroID location points at the first
932  /// token of the macro expansion.
933  ///
934  /// \param MacroBegin If non-null and the function returns true, it is set to
935  /// the begin location of the macro.
936  bool isAtStartOfMacroExpansion(SourceLocation loc,
937                                 SourceLocation *MacroBegin = 0) const {
938    return Lexer::isAtStartOfMacroExpansion(loc, SourceMgr, LangOpts,
939                                            MacroBegin);
940  }
941
942  /// \brief Returns true if the given MacroID location points at the last
943  /// token of the macro expansion.
944  ///
945  /// \param MacroEnd If non-null and the function returns true, it is set to
946  /// the end location of the macro.
947  bool isAtEndOfMacroExpansion(SourceLocation loc,
948                               SourceLocation *MacroEnd = 0) const {
949    return Lexer::isAtEndOfMacroExpansion(loc, SourceMgr, LangOpts, MacroEnd);
950  }
951
952  /// DumpToken - Print the token to stderr, used for debugging.
953  ///
954  void DumpToken(const Token &Tok, bool DumpFlags = false) const;
955  void DumpLocation(SourceLocation Loc) const;
956  void DumpMacro(const MacroInfo &MI) const;
957
958  /// AdvanceToTokenCharacter - Given a location that specifies the start of a
959  /// token, return a new location that specifies a character within the token.
960  SourceLocation AdvanceToTokenCharacter(SourceLocation TokStart,
961                                         unsigned Char) const {
962    return Lexer::AdvanceToTokenCharacter(TokStart, Char, SourceMgr, LangOpts);
963  }
964
965  /// IncrementPasteCounter - Increment the counters for the number of token
966  /// paste operations performed.  If fast was specified, this is a 'fast paste'
967  /// case we handled.
968  ///
969  void IncrementPasteCounter(bool isFast) {
970    if (isFast)
971      ++NumFastTokenPaste;
972    else
973      ++NumTokenPaste;
974  }
975
976  void PrintStats();
977
978  size_t getTotalMemory() const;
979
980  /// HandleMicrosoftCommentPaste - When the macro expander pastes together a
981  /// comment (/##/) in Microsoft mode, this method handles updating the current
982  /// state, returning the token on the next source line.
983  void HandleMicrosoftCommentPaste(Token &Tok);
984
985  //===--------------------------------------------------------------------===//
986  // Preprocessor callback methods.  These are invoked by a lexer as various
987  // directives and events are found.
988
989  /// LookUpIdentifierInfo - Given a tok::raw_identifier token, look up the
990  /// identifier information for the token and install it into the token,
991  /// updating the token kind accordingly.
992  IdentifierInfo *LookUpIdentifierInfo(Token &Identifier) const;
993
994private:
995  llvm::DenseMap<IdentifierInfo*,unsigned> PoisonReasons;
996
997public:
998
999  // SetPoisonReason - Call this function to indicate the reason for
1000  // poisoning an identifier. If that identifier is accessed while
1001  // poisoned, then this reason will be used instead of the default
1002  // "poisoned" diagnostic.
1003  void SetPoisonReason(IdentifierInfo *II, unsigned DiagID);
1004
1005  // HandlePoisonedIdentifier - Display reason for poisoned
1006  // identifier.
1007  void HandlePoisonedIdentifier(Token & Tok);
1008
1009  void MaybeHandlePoisonedIdentifier(Token &Identifier) {
1010    if (IdentifierInfo *II = Identifier.getIdentifierInfo()) {
1011      if (II->isPoisoned()) {
1012        HandlePoisonedIdentifier(Identifier);
1013      }
1014    }
1015  }
1016
1017private:
1018  /// Identifiers used for SEH handling in Borland. These are only
1019  /// allowed in particular circumstances.
1020  // __except block
1021  IdentifierInfo *Ident__exception_code,
1022                 *Ident___exception_code,
1023                 *Ident_GetExceptionCode;
1024  // __except filter expression
1025  IdentifierInfo *Ident__exception_info,
1026                 *Ident___exception_info,
1027                 *Ident_GetExceptionInfo;
1028  // __finally
1029  IdentifierInfo *Ident__abnormal_termination,
1030                 *Ident___abnormal_termination,
1031                 *Ident_AbnormalTermination;
1032public:
1033  void PoisonSEHIdentifiers(bool Poison = true); // Borland
1034
1035  /// HandleIdentifier - This callback is invoked when the lexer reads an
1036  /// identifier and has filled in the token's IdentifierInfo member.  This
1037  /// callback potentially macro expands it or turns it into a named token (like
1038  /// 'for').
1039  void HandleIdentifier(Token &Identifier);
1040
1041
1042  /// HandleEndOfFile - This callback is invoked when the lexer hits the end of
1043  /// the current file.  This either returns the EOF token and returns true, or
1044  /// pops a level off the include stack and returns false, at which point the
1045  /// client should call Lex() again.
1046  bool HandleEndOfFile(Token &Result, bool isEndOfMacro = false);
1047
1048  /// HandleEndOfTokenLexer - This callback is invoked when the current
1049  /// TokenLexer hits the end of its token stream.
1050  bool HandleEndOfTokenLexer(Token &Result);
1051
1052  /// HandleDirective - This callback is invoked when the lexer sees a # token
1053  /// at the start of a line.  This consumes the directive, modifies the
1054  /// lexer/preprocessor state, and advances the lexer(s) so that the next token
1055  /// read is the correct one.
1056  void HandleDirective(Token &Result);
1057
1058  /// CheckEndOfDirective - Ensure that the next token is a tok::eod token.  If
1059  /// not, emit a diagnostic and consume up until the eod.  If EnableMacros is
1060  /// true, then we consider macros that expand to zero tokens as being ok.
1061  void CheckEndOfDirective(const char *Directive, bool EnableMacros = false);
1062
1063  /// DiscardUntilEndOfDirective - Read and discard all tokens remaining on the
1064  /// current line until the tok::eod token is found.
1065  void DiscardUntilEndOfDirective();
1066
1067  /// SawDateOrTime - This returns true if the preprocessor has seen a use of
1068  /// __DATE__ or __TIME__ in the file so far.
1069  bool SawDateOrTime() const {
1070    return DATELoc != SourceLocation() || TIMELoc != SourceLocation();
1071  }
1072  unsigned getCounterValue() const { return CounterValue; }
1073  void setCounterValue(unsigned V) { CounterValue = V; }
1074
1075  /// \brief Retrieves the module that we're currently building, if any.
1076  Module *getCurrentModule();
1077
1078  /// \brief Allocate a new MacroInfo object with the provided SourceLocation.
1079  MacroInfo *AllocateMacroInfo(SourceLocation L);
1080
1081  /// \brief Allocate a new MacroInfo object which is clone of \p MI.
1082  MacroInfo *CloneMacroInfo(const MacroInfo &MI);
1083
1084  /// \brief Turn the specified lexer token into a fully checked and spelled
1085  /// filename, e.g. as an operand of \#include.
1086  ///
1087  /// The caller is expected to provide a buffer that is large enough to hold
1088  /// the spelling of the filename, but is also expected to handle the case
1089  /// when this method decides to use a different buffer.
1090  ///
1091  /// \returns true if the input filename was in <>'s or false if it was
1092  /// in ""'s.
1093  bool GetIncludeFilenameSpelling(SourceLocation Loc,StringRef &Filename);
1094
1095  /// \brief Given a "foo" or \<foo> reference, look up the indicated file.
1096  ///
1097  /// Returns null on failure.  \p isAngled indicates whether the file
1098  /// reference is for system \#include's or not (i.e. using <> instead of "").
1099  const FileEntry *LookupFile(StringRef Filename,
1100                              bool isAngled, const DirectoryLookup *FromDir,
1101                              const DirectoryLookup *&CurDir,
1102                              SmallVectorImpl<char> *SearchPath,
1103                              SmallVectorImpl<char> *RelativePath,
1104                              Module **SuggestedModule,
1105                              bool SkipCache = false);
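
  /// A hedged lookup sketch (illustrative only; error handling and
  /// header-search details are omitted):
  /// \code
  ///   const DirectoryLookup *CurDir = 0;
  ///   Module *SuggestedModule = 0;
  ///   const FileEntry *FE =
  ///       PP.LookupFile("vector", /*isAngled=*/true, /*FromDir=*/0, CurDir,
  ///                     /*SearchPath=*/0, /*RelativePath=*/0,
  ///                     &SuggestedModule);
  ///   if (!FE) {
  ///     // the file was not found
  ///   }
  /// \endcode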
1106
1107  /// GetCurDirLookup - The DirectoryLookup structure used to find the current
1108  /// FileEntry, if CurLexer is non-null and if applicable.  This allows us to
1109  /// implement \#include_next and find directory-specific properties.
1110  const DirectoryLookup *GetCurDirLookup() { return CurDirLookup; }
1111
1112  /// \brief Return true if we're in the top-level file, not in a \#include.
1113  bool isInPrimaryFile() const;
1114
1115  /// ConcatenateIncludeName - Handle cases where the \#include name is expanded
1116  /// from a macro as multiple tokens, which need to be glued together.  This
1117  /// occurs for code like:
1118  /// \code
1119  ///    \#define FOO <x/y.h>
1120  ///    \#include FOO
1121  /// \endcode
1122  /// because in this case, "<x/y.h>" is returned as 7 tokens, not one.
1123  ///
1124  /// This code concatenates and consumes tokens up to the '>' token.  It
1125  /// returns false if the '>' was found; otherwise it returns true after
1126  /// finding and consuming the EOD marker.
1127  bool ConcatenateIncludeName(SmallString<128> &FilenameBuffer,
1128                              SourceLocation &End);
1129
1130  /// LexOnOffSwitch - Lex an on-off-switch (C99 6.10.6p2) and verify that it is
1131  /// followed by EOD.  Return true if the token is not a valid on-off-switch.
1132  bool LexOnOffSwitch(tok::OnOffSwitch &OOS);
1133
1134private:
1135
1136  void PushIncludeMacroStack() {
1137    IncludeMacroStack.push_back(IncludeStackInfo(CurLexerKind,
1138                                                 CurLexer.take(),
1139                                                 CurPTHLexer.take(),
1140                                                 CurPPLexer,
1141                                                 CurTokenLexer.take(),
1142                                                 CurDirLookup));
1143    CurPPLexer = 0;
1144  }
1145
1146  void PopIncludeMacroStack() {
1147    CurLexer.reset(IncludeMacroStack.back().TheLexer);
1148    CurPTHLexer.reset(IncludeMacroStack.back().ThePTHLexer);
1149    CurPPLexer = IncludeMacroStack.back().ThePPLexer;
1150    CurTokenLexer.reset(IncludeMacroStack.back().TheTokenLexer);
1151    CurDirLookup  = IncludeMacroStack.back().TheDirLookup;
1152    CurLexerKind = IncludeMacroStack.back().CurLexerKind;
1153    IncludeMacroStack.pop_back();
1154  }
1155
1156  /// \brief Allocate a new MacroInfo object.
1157  MacroInfo *AllocateMacroInfo();
1158
1159  /// \brief Release the specified MacroInfo for re-use.
1160  ///
1161  /// This memory will be reused for allocating new MacroInfo objects.
1162  void ReleaseMacroInfo(MacroInfo* MI);
1163
1164  /// ReadMacroName - Lex and validate a macro name, which occurs after a
1165  /// \#define or \#undef.  This emits a diagnostic, sets the token kind to eod,
1166  /// and discards the rest of the macro line if the macro name is invalid.
1167  void ReadMacroName(Token &MacroNameTok, char isDefineUndef = 0);
1168
1169  /// ReadMacroDefinitionArgList - The ( starting an argument list of a macro
1170  /// definition has just been read.  Lex the rest of the arguments and the
1171  /// closing ), updating MI with what we learn and saving in LastTok the
1172  /// last token read.
1173  /// Return true if an error occurs parsing the arg list.
1174  bool ReadMacroDefinitionArgList(MacroInfo *MI, Token& LastTok);
1175
1176  /// We just read a \#if or related directive and decided that the
1177  /// subsequent tokens are in the \#if'd out portion of the
1178  /// file.  Lex the rest of the file, until we see an \#endif.  If \p
1179  /// FoundNonSkipPortion is true, then we have already emitted code for part of
1180  /// this \#if directive, so \#else/\#elif blocks should never be entered. If
1181  /// \p FoundElse is false, then \#else directives are ok, if not, then we have
1182  /// already seen one so a \#else directive is a duplicate.  When this returns,
1183  /// the caller can lex the first valid token.
1184  void SkipExcludedConditionalBlock(SourceLocation IfTokenLoc,
1185                                    bool FoundNonSkipPortion, bool FoundElse,
1186                                    SourceLocation ElseLoc = SourceLocation());
1187
1188  /// \brief A fast PTH version of SkipExcludedConditionalBlock.
1189  void PTHSkipExcludedConditionalBlock();
1190
1191  /// EvaluateDirectiveExpression - Evaluate an integer constant expression that
1192  /// may occur after a #if or #elif directive and return it as a bool.  If the
1193  /// expression is equivalent to "!defined(X)" return X in IfNDefMacro.
1194  bool EvaluateDirectiveExpression(IdentifierInfo *&IfNDefMacro);
1195
1196  /// RegisterBuiltinPragmas - Install the standard preprocessor pragmas:
1197  /// \#pragma GCC poison/system_header/dependency and \#pragma once.
1198  void RegisterBuiltinPragmas();
1199
1200  /// \brief Register builtin macros such as __LINE__ with the identifier table.
1201  void RegisterBuiltinMacros();
1202
1203  /// HandleMacroExpandedIdentifier - If an identifier token is read that is to
1204  /// be expanded as a macro, handle it and return the next token as 'Tok'.  If
1205  /// the macro should not be expanded return true, otherwise return false.
1206  bool HandleMacroExpandedIdentifier(Token &Tok, MacroInfo *MI);
1207
1208  /// \brief Cache macro expanded tokens for TokenLexers.
1209  ///
1210  /// Works like a stack; a TokenLexer adds the macro-expanded tokens that it
1211  /// is going to lex into the cache, and when it finishes, the tokens are
1212  /// removed from the end of the cache.
1213  Token *cacheMacroExpandedTokens(TokenLexer *tokLexer,
1214                                  ArrayRef<Token> tokens);
1215  void removeCachedMacroExpandedTokensOfLastLexer();
1216  friend void TokenLexer::ExpandFunctionArguments();
1217
1218  /// isNextPPTokenLParen - Determine whether the next preprocessor token to be
1219  /// lexed is a '('.  If so, consume the token and return true; if not, this
1220  /// method should have no observable side-effect on the lexed tokens.
1221  bool isNextPPTokenLParen();
1222
1223  /// ReadFunctionLikeMacroArgs - After reading "MACRO(", this method is
1224  /// invoked to read all of the actual arguments specified for the macro
1225  /// invocation.  This returns null on error.
1226  MacroArgs *ReadFunctionLikeMacroArgs(Token &MacroName, MacroInfo *MI,
1227                                       SourceLocation &ExpansionEnd);
1228
1229  /// ExpandBuiltinMacro - If an identifier token is read that is to be expanded
1230  /// as a builtin macro, handle it and return the next token as 'Tok'.
1231  void ExpandBuiltinMacro(Token &Tok);
1232
1233  /// Handle_Pragma - Read a _Pragma directive, slice it up, process it, then
1234  /// return the first token after the directive.  The _Pragma token has just
1235  /// been read into 'Tok'.
1236  void Handle_Pragma(Token &Tok);
1237
1238  /// HandleMicrosoft__pragma - Like Handle_Pragma except the pragma text
1239  /// is not enclosed within a string literal.
1240  void HandleMicrosoft__pragma(Token &Tok);
1241
1242  /// EnterSourceFileWithLexer - Add a lexer to the top of the include stack and
1243  /// start lexing tokens from it instead of the current buffer.
1244  void EnterSourceFileWithLexer(Lexer *TheLexer, const DirectoryLookup *Dir);
1245
1246  /// EnterSourceFileWithPTH - Add a lexer to the top of the include stack and
1247  /// start getting tokens from it using the PTH cache.
1248  void EnterSourceFileWithPTH(PTHLexer *PL, const DirectoryLookup *Dir);
1249
1250  /// IsFileLexer - Returns true if we are lexing from a file and not a
1251  ///  pragma or a macro.
1252  static bool IsFileLexer(const Lexer* L, const PreprocessorLexer* P) {
1253    return L ? !L->isPragmaLexer() : P != 0;
1254  }
1255
1256  static bool IsFileLexer(const IncludeStackInfo& I) {
1257    return IsFileLexer(I.TheLexer, I.ThePPLexer);
1258  }
1259
1260  bool IsFileLexer() const {
1261    return IsFileLexer(CurLexer.get(), CurPPLexer);
1262  }
1263
1264  //===--------------------------------------------------------------------===//
1265  // Caching stuff.
1266  void CachingLex(Token &Result);
1267  bool InCachingLexMode() const {
1268    // If the Lexer pointers are 0 and IncludeMacroStack is empty, it means
1269    // that we are past EOF, not that we are in CachingLex mode.
1270    return CurPPLexer == 0 && CurTokenLexer == 0 && CurPTHLexer == 0 &&
1271           !IncludeMacroStack.empty();
1272  }
1273  void EnterCachingLexMode();
1274  void ExitCachingLexMode() {
1275    if (InCachingLexMode())
1276      RemoveTopOfLexerStack();
1277  }
1278  const Token &PeekAhead(unsigned N);
1279  void AnnotatePreviousCachedTokens(const Token &Tok);
1280
1281  //===--------------------------------------------------------------------===//
1282  /// Handle*Directive - implement the various preprocessor directives.  These
1283  /// should side-effect the current preprocessor object so that the next call
1284  /// to Lex() will return the appropriate token.
1285  void HandleLineDirective(Token &Tok);
1286  void HandleDigitDirective(Token &Tok);
1287  void HandleUserDiagnosticDirective(Token &Tok, bool isWarning);
1288  void HandleIdentSCCSDirective(Token &Tok);
1289  void HandleMacroPublicDirective(Token &Tok);
1290  void HandleMacroPrivateDirective(Token &Tok);
1291
1292  // File inclusion.
1293  void HandleIncludeDirective(SourceLocation HashLoc,
1294                              Token &Tok,
1295                              const DirectoryLookup *LookupFrom = 0,
1296                              bool isImport = false);
1297  void HandleIncludeNextDirective(SourceLocation HashLoc, Token &Tok);
1298  void HandleIncludeMacrosDirective(SourceLocation HashLoc, Token &Tok);
1299  void HandleImportDirective(SourceLocation HashLoc, Token &Tok);
1300  void HandleMicrosoftImportDirective(Token &Tok);
1301
1302  // Macro handling.
1303  void HandleDefineDirective(Token &Tok);
1304  void HandleUndefDirective(Token &Tok);
1305
1306  // Conditional Inclusion.
1307  void HandleIfdefDirective(Token &Tok, bool isIfndef,
1308                            bool ReadAnyTokensBeforeDirective);
1309  void HandleIfDirective(Token &Tok, bool ReadAnyTokensBeforeDirective);
1310  void HandleEndifDirective(Token &Tok);
1311  void HandleElseDirective(Token &Tok);
1312  void HandleElifDirective(Token &Tok);
1313
1314  // Pragmas.
1315  void HandlePragmaDirective(unsigned Introducer);
1316public:
1317  void HandlePragmaOnce(Token &OnceTok);
1318  void HandlePragmaMark();
1319  void HandlePragmaPoison(Token &PoisonTok);
1320  void HandlePragmaSystemHeader(Token &SysHeaderTok);
1321  void HandlePragmaDependency(Token &DependencyTok);
1322  void HandlePragmaComment(Token &CommentTok);
1323  void HandlePragmaMessage(Token &MessageTok);
1324  void HandlePragmaPushMacro(Token &Tok);
1325  void HandlePragmaPopMacro(Token &Tok);
1326  void HandlePragmaIncludeAlias(Token &Tok);
1327  IdentifierInfo *ParsePragmaPushOrPopMacro(Token &Tok);
1328
1329  // Return true and store the first token only if any CommentHandler
1330  // has inserted some tokens and getCommentRetentionState() is false.
1331  bool HandleComment(Token &Token, SourceRange Comment);
1332
1333  /// \brief A macro is used, update information about macros that need unused
1334  /// warnings.
1335  void markMacroAsUsed(MacroInfo *MI);
1336};
1337
1338/// \brief Abstract base class that describes a handler that will receive
1339/// source ranges for each of the comments encountered in the source file.
1340class CommentHandler {
1341public:
1342  virtual ~CommentHandler();
1343
1344  // The handler shall return true if it has pushed any tokens
1345  // to be read using e.g. EnterToken or EnterTokenStream.
1346  virtual bool HandleComment(Preprocessor &PP, SourceRange Comment) = 0;
1347};
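
/// A hedged example of a concrete handler (illustrative only; not part of
/// the original interface):
/// \code
///   class PrintingCommentHandler : public CommentHandler {
///   public:
///     virtual bool HandleComment(Preprocessor &PP, SourceRange Comment) {
///       // Inspect or record the comment range here.
///       return false; // no tokens were pushed back into the stream
///     }
///   };
/// \endcode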
1348
1349}  // end namespace clang
1350
1351#endif
1352