Preprocessor.h revision 3437f1f1294499d4ef306c1089fcb3e29ec2aa68
1//===--- Preprocessor.h - C Language Family Preprocessor --------*- C++ -*-===//
2//
3//                     The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10//  This file defines the Preprocessor interface.
11//
12//===----------------------------------------------------------------------===//
13
14#ifndef LLVM_CLANG_LEX_PREPROCESSOR_H
15#define LLVM_CLANG_LEX_PREPROCESSOR_H
16
17#include "clang/Lex/MacroInfo.h"
18#include "clang/Lex/Lexer.h"
19#include "clang/Lex/PTHLexer.h"
20#include "clang/Lex/PPCallbacks.h"
21#include "clang/Lex/TokenLexer.h"
22#include "clang/Lex/PTHManager.h"
23#include "clang/Basic/Builtins.h"
24#include "clang/Basic/Diagnostic.h"
25#include "clang/Basic/IdentifierTable.h"
26#include "clang/Basic/SourceLocation.h"
27#include "llvm/ADT/DenseMap.h"
28#include "llvm/ADT/SmallPtrSet.h"
29#include "llvm/ADT/OwningPtr.h"
30#include "llvm/ADT/SmallVector.h"
31#include "llvm/Support/Allocator.h"
32#include <vector>
33
34namespace clang {
35
36class SourceManager;
37class ExternalPreprocessorSource;
38class FileManager;
39class FileEntry;
40class HeaderSearch;
41class PragmaNamespace;
42class PragmaHandler;
43class CommentHandler;
44class ScratchBuffer;
45class TargetInfo;
46class PPCallbacks;
47class CodeCompletionHandler;
48class DirectoryLookup;
49class PreprocessingRecord;
50
51/// Preprocessor - This object engages in a tight little dance with the lexer to
52/// efficiently preprocess tokens.  Lexers know only about tokens within a
53/// single source file, and don't know anything about preprocessor-level issues
54/// like the #include stack, token expansion, etc.
55///
56class Preprocessor {
57  Diagnostic        *Diags;
58  LangOptions        Features;
59  const TargetInfo  &Target;
60  FileManager       &FileMgr;
61  SourceManager     &SourceMgr;
62  ScratchBuffer     *ScratchBuf;
63  HeaderSearch      &HeaderInfo;
64
65  /// \brief External source of macros.
66  ExternalPreprocessorSource *ExternalSource;
67
68  /// PTH - An optional PTHManager object used for getting tokens from
69  ///  a token cache rather than lexing the original source file.
70  llvm::OwningPtr<PTHManager> PTH;
71
72  /// BP - A BumpPtrAllocator object used to quickly allocate and release
73  ///  objects internal to the Preprocessor.
74  llvm::BumpPtrAllocator BP;
75
76  /// Identifiers for builtin macros and other builtins.
77  IdentifierInfo *Ident__LINE__, *Ident__FILE__;   // __LINE__, __FILE__
78  IdentifierInfo *Ident__DATE__, *Ident__TIME__;   // __DATE__, __TIME__
79  IdentifierInfo *Ident__INCLUDE_LEVEL__;          // __INCLUDE_LEVEL__
80  IdentifierInfo *Ident__BASE_FILE__;              // __BASE_FILE__
81  IdentifierInfo *Ident__TIMESTAMP__;              // __TIMESTAMP__
82  IdentifierInfo *Ident__COUNTER__;                // __COUNTER__
83  IdentifierInfo *Ident_Pragma, *Ident__pragma;    // _Pragma, __pragma
84  IdentifierInfo *Ident__VA_ARGS__;                // __VA_ARGS__
85  IdentifierInfo *Ident__has_feature;              // __has_feature
86  IdentifierInfo *Ident__has_builtin;              // __has_builtin
87  IdentifierInfo *Ident__has_attribute;            // __has_attribute
88  IdentifierInfo *Ident__has_include;              // __has_include
89  IdentifierInfo *Ident__has_include_next;         // __has_include_next
90
91  SourceLocation DATELoc, TIMELoc;
92  unsigned CounterValue;  // Next __COUNTER__ value.
93
94  enum {
95    /// MaxAllowedIncludeStackDepth - Maximum allowed depth of #includes.
96    MaxAllowedIncludeStackDepth = 200
97  };
98
99  // State that is set before the preprocessor begins.
100  bool KeepComments : 1;
101  bool KeepMacroComments : 1;
102
103  // State that changes while the preprocessor runs:
104  bool InMacroArgs : 1;            // True if parsing fn macro invocation args.
105
106  /// Whether the preprocessor owns the header search object.
107  bool OwnsHeaderSearch : 1;
108
109  /// DisableMacroExpansion - True if macro expansion is disabled.
110  bool DisableMacroExpansion : 1;
111
112  /// \brief Whether we have already loaded macros from the external source.
113  mutable bool ReadMacrosFromExternalSource : 1;
114
115  /// Identifiers - This is mapping/lookup information for all identifiers in
116  /// the program, including program keywords.
117  mutable IdentifierTable Identifiers;
118
119  /// Selectors - This table contains all the selectors in the program. Unlike
120  /// IdentifierTable above, this table *isn't* populated by the preprocessor.
121  /// It is declared/instantiated here because its role/lifetime is
122  /// conceptually similar to that of the IdentifierTable. In addition, the
123  /// current control flow (in clang::ParseAST()) makes it convenient to put it here.
124  /// FIXME: Make sure the lifetime of Identifiers/Selectors *isn't* tied to
125  /// the lifetime of the preprocessor.
126  SelectorTable Selectors;
127
128  /// BuiltinInfo - Information about builtins.
129  Builtin::Context BuiltinInfo;
130
131  /// PragmaHandlers - This tracks all of the pragma handlers that the client
132  /// has registered with this preprocessor.
133  PragmaNamespace *PragmaHandlers;
134
135  /// \brief Tracks all of the comment handlers that the client registered
136  /// with this preprocessor.
137  std::vector<CommentHandler *> CommentHandlers;
138
139  /// \brief The code-completion handler.
140  CodeCompletionHandler *CodeComplete;
141
142  /// \brief The file that we're performing code-completion for, if any.
143  const FileEntry *CodeCompletionFile;
144
145  /// \brief The number of bytes that we will initially skip when entering the
146  /// main file, which is used when loading a precompiled preamble, along
147  /// with a flag that indicates whether skipping this number of bytes will
148  /// place the lexer at the start of a line.
149  std::pair<unsigned, bool> SkipMainFilePreamble;
150
151  /// CurLexer - This is the current top of the stack that we're lexing from if
152  /// not expanding a macro and we are lexing directly from source code.
153  ///  Only one of CurLexer, CurPTHLexer, or CurTokenLexer will be non-null.
154  llvm::OwningPtr<Lexer> CurLexer;
155
156  /// CurPTHLexer - This is the current top of stack that we're lexing from if
157  ///  not expanding from a macro and we are lexing from a PTH cache.
158  ///  Only one of CurLexer, CurPTHLexer, or CurTokenLexer will be non-null.
159  llvm::OwningPtr<PTHLexer> CurPTHLexer;
160
161  /// CurPPLexer - This is the current top of the stack that we're lexing from
162  ///  if not expanding a macro.  This is an alias for either CurLexer or
163  ///  CurPTHLexer.
164  PreprocessorLexer *CurPPLexer;
165
166  /// CurDirLookup - The DirectoryLookup structure used to find the current
167  /// FileEntry, if CurLexer is non-null and if applicable.  This allows us to
168  /// implement #include_next and find directory-specific properties.
169  const DirectoryLookup *CurDirLookup;
170
171  /// CurTokenLexer - This is the current macro we are expanding, if we are
172  /// expanding a macro.  One of CurLexer and CurTokenLexer must be null.
173  llvm::OwningPtr<TokenLexer> CurTokenLexer;
174
175  /// IncludeMacroStack - This keeps track of the stack of files currently
176  /// #included, and macros currently being expanded from, not counting
177  /// CurLexer/CurTokenLexer.
178  struct IncludeStackInfo {
179    Lexer                 *TheLexer;
180    PTHLexer              *ThePTHLexer;
181    PreprocessorLexer     *ThePPLexer;
182    TokenLexer            *TheTokenLexer;
183    const DirectoryLookup *TheDirLookup;
184
185    IncludeStackInfo(Lexer *L, PTHLexer* P, PreprocessorLexer* PPL,
186                     TokenLexer* TL, const DirectoryLookup *D)
187      : TheLexer(L), ThePTHLexer(P), ThePPLexer(PPL), TheTokenLexer(TL),
188        TheDirLookup(D) {}
189  };
190  std::vector<IncludeStackInfo> IncludeMacroStack;
191
192  /// Callbacks - These are actions invoked when some preprocessor activity is
193  /// encountered (e.g. a file is #included, etc).
194  PPCallbacks *Callbacks;
195
196  /// Macros - For each IdentifierInfo with 'HasMacro' set, we keep a mapping
197  /// to the actual definition of the macro.
198  llvm::DenseMap<IdentifierInfo*, MacroInfo*> Macros;
199
200  /// \brief Macros that we want to warn about because they are not used by the
201  /// end of the translation unit; we store just their SourceLocations instead
202  /// of something like MacroInfo*. The benefit of this is that when we are
203  /// deserializing from PCH, we don't need to deserialize identifiers & macros
204  /// just to report that they are unused; we simply warn using the
205  /// SourceLocations in this set (which will be filled in by the ASTReader).
206  /// We are using SmallPtrSet instead of a vector for faster removal.
207  typedef llvm::SmallPtrSet<SourceLocation, 32> WarnUnusedMacroLocsTy;
208  WarnUnusedMacroLocsTy WarnUnusedMacroLocs;
209
210  /// MacroArgCache - This is a "freelist" of MacroArg objects that can be
211  /// reused for quick allocation.
212  MacroArgs *MacroArgCache;
213  friend class MacroArgs;
214
215  /// PragmaPushMacroInfo - For each IdentifierInfo used in a #pragma
216  /// push_macro directive, we keep a MacroInfo stack used to restore
217  /// the previous macro value.
218  llvm::DenseMap<IdentifierInfo*, std::vector<MacroInfo*> > PragmaPushMacroInfo;
219
220  // Various statistics we track for performance analysis.
221  unsigned NumDirectives, NumIncluded, NumDefined, NumUndefined, NumPragma;
222  unsigned NumIf, NumElse, NumEndif;
223  unsigned NumEnteredSourceFiles, MaxIncludeStackDepth;
224  unsigned NumMacroExpanded, NumFnMacroExpanded, NumBuiltinMacroExpanded;
225  unsigned NumFastMacroExpanded, NumTokenPaste, NumFastTokenPaste;
226  unsigned NumSkipped;
227
228  /// Predefines - This string contains the predefined macros that the
229  /// preprocessor should use, e.g. those supplied on the command line.
230  std::string Predefines;
231
232  /// TokenLexerCache - Cache macro expanders to reduce malloc traffic.
233  enum { TokenLexerCacheSize = 8 };
234  unsigned NumCachedTokenLexers;
235  TokenLexer *TokenLexerCache[TokenLexerCacheSize];
236
237  /// \brief A record of the macro definitions and instantiations that
238  /// occurred during preprocessing.
239  ///
240  /// This is an optional side structure that can be enabled with
241  /// \c createPreprocessingRecord() prior to preprocessing.
242  PreprocessingRecord *Record;
243
244private:  // Cached tokens state.
245  typedef llvm::SmallVector<Token, 1> CachedTokensTy;
246
247  /// CachedTokens - Cached tokens are stored here when we do backtracking or
248  /// lookahead. They are "lexed" by the CachingLex() method.
249  CachedTokensTy CachedTokens;
250
251  /// CachedLexPos - The position of the cached token that CachingLex() should
252  /// "lex" next. If it points beyond the CachedTokens vector, it means that
253  /// a normal Lex() should be invoked.
254  CachedTokensTy::size_type CachedLexPos;
255
256  /// BacktrackPositions - Stack of backtrack positions, allowing nested
257  /// backtracks. The EnableBacktrackAtThisPos() method pushes a position to
258  /// indicate where CachedLexPos should be set when the BackTrack() method is
259  /// invoked (at which point the last position is popped).
260  std::vector<CachedTokensTy::size_type> BacktrackPositions;
261
262  struct MacroInfoChain {
263    MacroInfo MI;
264    MacroInfoChain *Next;
265    MacroInfoChain *Prev;
266  };
267
268  /// MacroInfos are managed as a chain for easy disposal.  This is the head
269  /// of that list.
270  MacroInfoChain *MIChainHead;
271
272  /// MICache - A "freelist" of MacroInfo objects that can be reused for quick
273  /// allocation.
274  MacroInfoChain *MICache;
275
276  MacroInfo *getInfoForMacro(IdentifierInfo *II) const;
277
278public:
279  Preprocessor(Diagnostic &diags, const LangOptions &opts,
280               const TargetInfo &target,
281               SourceManager &SM, HeaderSearch &Headers,
282               IdentifierInfoLookup *IILookup = 0,
283               bool OwnsHeaderSearch = false);
284
285  ~Preprocessor();
286
287  Diagnostic &getDiagnostics() const { return *Diags; }
288  void setDiagnostics(Diagnostic &D) { Diags = &D; }
289
290  const LangOptions &getLangOptions() const { return Features; }
291  const TargetInfo &getTargetInfo() const { return Target; }
292  FileManager &getFileManager() const { return FileMgr; }
293  SourceManager &getSourceManager() const { return SourceMgr; }
294  HeaderSearch &getHeaderSearchInfo() const { return HeaderInfo; }
295
296  IdentifierTable &getIdentifierTable() { return Identifiers; }
297  SelectorTable &getSelectorTable() { return Selectors; }
298  Builtin::Context &getBuiltinInfo() { return BuiltinInfo; }
299  llvm::BumpPtrAllocator &getPreprocessorAllocator() { return BP; }
300
301  void setPTHManager(PTHManager* pm);
302
303  PTHManager *getPTHManager() { return PTH.get(); }
304
305  void setExternalSource(ExternalPreprocessorSource *Source) {
306    ExternalSource = Source;
307  }
308
309  ExternalPreprocessorSource *getExternalSource() const {
310    return ExternalSource;
311  }
312
313  /// SetCommentRetentionState - Control whether or not the preprocessor retains
314  /// comments in output.
315  void SetCommentRetentionState(bool KeepComments, bool KeepMacroComments) {
316    this->KeepComments = KeepComments | KeepMacroComments;
317    this->KeepMacroComments = KeepMacroComments;
318  }
319
320  bool getCommentRetentionState() const { return KeepComments; }
321
322  /// isCurrentLexer - Return true if we are lexing directly from the specified
323  /// lexer.
324  bool isCurrentLexer(const PreprocessorLexer *L) const {
325    return CurPPLexer == L;
326  }
327
328  /// getCurrentLexer - Return the current lexer being lexed from.  Note
329  /// that this ignores any potentially active macro expansions and _Pragma
330  /// expansions going on at the time.
331  PreprocessorLexer *getCurrentLexer() const { return CurPPLexer; }
332
333  /// getCurrentFileLexer - Return the current file lexer being lexed from.
334  /// Note that this ignores any potentially active macro expansions and _Pragma
335  /// expansions going on at the time.
336  PreprocessorLexer *getCurrentFileLexer() const;
337
338  /// getPPCallbacks/addPPCallbacks - Accessors for preprocessor callbacks.
339  /// Note that this class takes ownership of any PPCallbacks object given to
340  /// it.
341  PPCallbacks *getPPCallbacks() const { return Callbacks; }
342  void addPPCallbacks(PPCallbacks *C) {
343    if (Callbacks)
344      C = new PPChainedCallbacks(C, Callbacks);
345    Callbacks = C;
346  }
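
  // Example (illustrative sketch; MyPPCallbacks is a hypothetical PPCallbacks
  // subclass, not part of Clang):
  //   PP.addPPCallbacks(new MyPPCallbacks());  // Preprocessor takes ownership.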
347
348  /// getMacroInfo - Given an identifier, return the MacroInfo it is #defined to
349  /// or null if it isn't #define'd.
350  MacroInfo *getMacroInfo(IdentifierInfo *II) const {
351    if (!II->hasMacroDefinition())
352      return 0;
353
354    return getInfoForMacro(II);
355  }
356
357  /// setMacroInfo - Specify a macro for this identifier.
358  ///
359  void setMacroInfo(IdentifierInfo *II, MacroInfo *MI);
360
361  /// macro_iterator/macro_begin/macro_end - This allows you to walk the current
362  /// state of the macro table.  This visits every currently-defined macro.
363  typedef llvm::DenseMap<IdentifierInfo*,
364                         MacroInfo*>::const_iterator macro_iterator;
365  macro_iterator macro_begin(bool IncludeExternalMacros = true) const;
366  macro_iterator macro_end(bool IncludeExternalMacros = true) const;
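
  // Example (illustrative sketch): visiting every currently-defined macro.
  //   for (Preprocessor::macro_iterator I = PP.macro_begin(),
  //                                     E = PP.macro_end(); I != E; ++I) {
  //     IdentifierInfo *Name = I->first;  // the macro name
  //     MacroInfo *MI = I->second;        // its definition
  //     // ... inspect Name and MI ...
  //   }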
367
368  const std::string &getPredefines() const { return Predefines; }
369  /// setPredefines - Set the predefines for this Preprocessor.  These
370  /// predefines are automatically injected when parsing the main file.
371  void setPredefines(const char *P) { Predefines = P; }
372  void setPredefines(const std::string &P) { Predefines = P; }
373
374  /// getIdentifierInfo - Return information about the specified preprocessor
375  /// identifier token.  The identifier is looked up in (and, if necessary,
376  /// added to) the IdentifierTable, so the returned pointer is never null.
377  /// Taking a StringRef avoids the allocation and copying of memory needed
378  /// to construct an std::string.
379  IdentifierInfo *getIdentifierInfo(llvm::StringRef Name) const {
380    return &Identifiers.get(Name);
381  }
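
  // Example (illustrative sketch): checking whether a name is currently
  // #define'd.
  //   IdentifierInfo *II = PP.getIdentifierInfo("__FILE__");
  //   bool IsMacro = II->hasMacroDefinition();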
382
383  /// AddPragmaHandler - Add the specified pragma handler to the preprocessor.
384  /// If 'Namespace' is non-null, then it is a token required to exist on the
385  /// pragma line before the pragma string starts, e.g. "STDC" or "GCC".
386  void AddPragmaHandler(llvm::StringRef Namespace, PragmaHandler *Handler);
387  void AddPragmaHandler(PragmaHandler *Handler) {
388    AddPragmaHandler(llvm::StringRef(), Handler);
389  }
390
391  /// RemovePragmaHandler - Remove the specified pragma handler from
392  /// the preprocessor. If \arg Namespace is non-null, then it should
393  /// be the namespace that \arg Handler was added to. It is an error
394  /// to remove a handler that has not been registered.
395  void RemovePragmaHandler(llvm::StringRef Namespace, PragmaHandler *Handler);
396  void RemovePragmaHandler(PragmaHandler *Handler) {
397    RemovePragmaHandler(llvm::StringRef(), Handler);
398  }
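
  // Example (illustrative sketch; MyPragmaHandler is a hypothetical
  // PragmaHandler subclass, not part of Clang):
  //   PragmaHandler *H = new MyPragmaHandler();
  //   PP.AddPragmaHandler("clang", H);     // handles "#pragma clang ..."
  //   // ... later ...
  //   PP.RemovePragmaHandler("clang", H);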
399
400  /// \brief Add the specified comment handler to the preprocessor.
401  void AddCommentHandler(CommentHandler *Handler);
402
403  /// \brief Remove the specified comment handler.
404  ///
405  /// It is an error to remove a handler that has not been registered.
406  void RemoveCommentHandler(CommentHandler *Handler);
407
408  /// \brief Set the code completion handler to the given object.
409  void setCodeCompletionHandler(CodeCompletionHandler &Handler) {
410    CodeComplete = &Handler;
411  }
412
413  /// \brief Retrieve the current code-completion handler.
414  CodeCompletionHandler *getCodeCompletionHandler() const {
415    return CodeComplete;
416  }
417
418  /// \brief Clear out the code completion handler.
419  void clearCodeCompletionHandler() {
420    CodeComplete = 0;
421  }
422
423  /// \brief Hook used by the lexer to invoke the "natural language" code
424  /// completion point.
425  void CodeCompleteNaturalLanguage();
426
427  /// \brief Retrieve the preprocessing record, or NULL if there is no
428  /// preprocessing record.
429  PreprocessingRecord *getPreprocessingRecord() const { return Record; }
430
431  /// \brief Create a new preprocessing record, which will keep track of
432  /// all macro expansions, macro definitions, etc.
433  void createPreprocessingRecord();
434
435  /// EnterMainSourceFile - Enter the specified FileID as the main source file,
436  /// which implicitly adds the builtin defines etc.
437  void EnterMainSourceFile();
438
439  /// EndSourceFile - Inform the preprocessor callbacks that processing is
440  /// complete.
441  void EndSourceFile();
442
443  /// EnterSourceFile - Add a source file to the top of the include stack and
444  /// start lexing tokens from it instead of the current buffer.  Emit an error
445  /// and don't enter the file on error.
446  void EnterSourceFile(FileID CurFileID, const DirectoryLookup *Dir,
447                       SourceLocation Loc);
448
449  /// EnterMacro - Add a Macro to the top of the include stack and start lexing
450  /// tokens from it instead of the current buffer.  Args specifies the
451  /// tokens input to a function-like macro.
452  ///
453  /// ILEnd specifies the location of the ')' for a function-like macro or the
454  /// identifier for an object-like macro.
455  void EnterMacro(Token &Identifier, SourceLocation ILEnd, MacroArgs *Args);
456
457  /// EnterTokenStream - Add a "macro" context to the top of the include stack,
458  /// which will cause the lexer to start returning the specified tokens.
459  ///
460  /// If DisableMacroExpansion is true, tokens lexed from the token stream will
461  /// not be subject to further macro expansion.  Otherwise, these tokens will
462  /// be re-macro-expanded when/if expansion is enabled.
463  ///
464  /// If OwnsTokens is false, this method assumes that the specified stream of
465  /// tokens has a permanent owner somewhere, so they do not need to be copied.
466  /// If it is true, it assumes the array of tokens is allocated with new[] and
467  /// must be freed.
468  ///
469  void EnterTokenStream(const Token *Toks, unsigned NumToks,
470                        bool DisableMacroExpansion, bool OwnsTokens);
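
  // Example (illustrative sketch): re-injecting a buffered token sequence
  // without re-expanding macros.  Because OwnsTokens is false, Toks must
  // outlive the entered stream.
  //   llvm::SmallVector<Token, 4> Toks;
  //   // ... fill Toks with previously lexed tokens ...
  //   PP.EnterTokenStream(Toks.data(), Toks.size(),
  //                       /*DisableMacroExpansion=*/true,
  //                       /*OwnsTokens=*/false);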
471
472  /// RemoveTopOfLexerStack - Pop the current lexer/macro exp off the top of the
473  /// lexer stack.  This should only be used in situations where the current
474  /// state of the top-of-stack lexer is known.
475  void RemoveTopOfLexerStack();
476
477  /// EnableBacktrackAtThisPos - From the point that this method is called, and
478  /// until CommitBacktrackedTokens() or Backtrack() is called, the Preprocessor
479  /// keeps track of the lexed tokens so that a subsequent Backtrack() call will
480  /// make the Preprocessor re-lex the same tokens.
481  ///
482  /// Nested backtracks are allowed, meaning that EnableBacktrackAtThisPos can
483  /// be called multiple times and CommitBacktrackedTokens/Backtrack calls will
484  /// be combined with the EnableBacktrackAtThisPos calls in reverse order.
485  ///
486  /// NOTE: *DO NOT* forget to call either CommitBacktrackedTokens or Backtrack
487  /// at some point after EnableBacktrackAtThisPos. If you don't, caching of
488  /// tokens will continue indefinitely.
489  ///
490  void EnableBacktrackAtThisPos();
491
492  /// CommitBacktrackedTokens - Disable the last EnableBacktrackAtThisPos call.
493  void CommitBacktrackedTokens();
494
495  /// Backtrack - Make Preprocessor re-lex the tokens that were lexed since
496  /// EnableBacktrackAtThisPos() was previously called.
497  void Backtrack();
498
499  /// isBacktrackEnabled - True if EnableBacktrackAtThisPos() was called and
500  /// caching of tokens is on.
501  bool isBacktrackEnabled() const { return !BacktrackPositions.empty(); }
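
  // Example (illustrative sketch) of the backtracking API above;
  // LooksLikeWhatWeWant is a hypothetical predicate:
  //   PP.EnableBacktrackAtThisPos();
  //   Token Tok;
  //   PP.Lex(Tok);
  //   if (LooksLikeWhatWeWant(Tok))
  //     PP.CommitBacktrackedTokens();  // keep the tokens we consumed
  //   else
  //     PP.Backtrack();                // re-lex the same tokens on the next Lex()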
502
503  /// Lex - To lex a token from the preprocessor, just pull a token from the
504  /// current lexer or macro object.
505  void Lex(Token &Result) {
506    if (CurLexer)
507      CurLexer->Lex(Result);
508    else if (CurPTHLexer)
509      CurPTHLexer->Lex(Result);
510    else if (CurTokenLexer)
511      CurTokenLexer->Lex(Result);
512    else
513      CachingLex(Result);
514  }
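
  // Example (illustrative sketch): a typical client drains the preprocessor
  // until the end-of-file token is returned.
  //   Token Tok;
  //   do {
  //     PP.Lex(Tok);
  //     // ... process Tok ...
  //   } while (Tok.isNot(tok::eof));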
515
516  /// LexNonComment - Lex a token.  If it's a comment, keep lexing until we get
517  /// something not a comment.  This is useful in -E -C mode where comments
518  /// would foul up preprocessor directive handling.
519  void LexNonComment(Token &Result) {
520    do
521      Lex(Result);
522    while (Result.getKind() == tok::comment);
523  }
524
525  /// LexUnexpandedToken - This is just like Lex, but this disables macro
526  /// expansion of identifier tokens.
527  void LexUnexpandedToken(Token &Result) {
528    // Disable macro expansion.
529    bool OldVal = DisableMacroExpansion;
530    DisableMacroExpansion = true;
531    // Lex the token.
532    Lex(Result);
533
534    // Reenable it.
535    DisableMacroExpansion = OldVal;
536  }
537
538  /// LookAhead - This peeks ahead N tokens and returns that token without
539  /// consuming any tokens.  LookAhead(0) returns the next token that would be
540  /// returned by Lex(), LookAhead(1) returns the token after it, etc.  This
541  /// returns normal tokens after phase 5.  As such, it is equivalent to using
542  /// 'Lex', not 'LexUnexpandedToken'.
543  const Token &LookAhead(unsigned N) {
544    if (CachedLexPos + N < CachedTokens.size())
545      return CachedTokens[CachedLexPos+N];
546    else
547      return PeekAhead(N+1);
548  }
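
  // Example (illustrative sketch): peeking ahead without consuming anything.
  //   const Token &Next  = PP.LookAhead(0);  // token the next Lex() would return
  //   const Token &After = PP.LookAhead(1);  // the token after that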
549
550  /// RevertCachedTokens - When backtracking is enabled and tokens are cached,
551  /// this allows reverting a specific number of tokens.
552  /// Note that the number of tokens being reverted should be up to the last
553  /// backtrack position, not more.
554  void RevertCachedTokens(unsigned N) {
555    assert(isBacktrackEnabled() &&
556           "Should only be called when tokens are cached for backtracking");
557    assert(signed(CachedLexPos) - signed(N) >= signed(BacktrackPositions.back())
558         && "Should revert tokens up to the last backtrack position, not more");
559    assert(signed(CachedLexPos) - signed(N) >= 0 &&
560           "Corrupted backtrack positions ?");
561    CachedLexPos -= N;
562  }
563
564  /// EnterToken - Enters a token in the token stream to be lexed next. If
565  /// BackTrack() is called afterwards, the token will remain at the insertion
566  /// point.
567  void EnterToken(const Token &Tok) {
568    EnterCachingLexMode();
569    CachedTokens.insert(CachedTokens.begin()+CachedLexPos, Tok);
570  }
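
  // Example (illustrative sketch): pushing a just-lexed token back so that the
  // next Lex() returns it again.
  //   Token Tok;
  //   PP.Lex(Tok);
  //   PP.EnterToken(Tok);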
571
572  /// AnnotateCachedTokens - We notify the Preprocessor that if it is caching
573  /// tokens (because backtrack is enabled) it should replace the most recent
574  /// cached tokens with the given annotation token. This function has no effect
575  /// if backtracking is not enabled.
576  ///
577  /// Note that the use of this function is just an optimization, so that the
578  /// cached tokens don't get re-parsed and re-resolved after a backtrack is
579  /// invoked.
580  void AnnotateCachedTokens(const Token &Tok) {
581    assert(Tok.isAnnotation() && "Expected annotation token");
582    if (CachedLexPos != 0 && isBacktrackEnabled())
583      AnnotatePreviousCachedTokens(Tok);
584  }
585
586  /// \brief Replace the last token with an annotation token.
587  ///
588  /// Like AnnotateCachedTokens(), this routine replaces an
589  /// already-parsed (and resolved) token with an annotation
590  /// token. However, this routine only replaces the last token with
591  /// the annotation token; it does not affect any other cached
592  /// tokens. This function has no effect if backtracking is not
593  /// enabled.
594  void ReplaceLastTokenWithAnnotation(const Token &Tok) {
595    assert(Tok.isAnnotation() && "Expected annotation token");
596    if (CachedLexPos != 0 && isBacktrackEnabled())
597      CachedTokens[CachedLexPos-1] = Tok;
598  }
599
600  /// \brief Specify the point at which code-completion will be performed.
601  ///
602  /// \param File the file in which code completion should occur. If
603  /// this file is included multiple times, code-completion will
604  /// perform completion the first time it is included. If NULL, this
605  /// function clears out the code-completion point.
606  ///
607  /// \param Line the line at which code completion should occur
608  /// (1-based).
609  ///
610  /// \param Column the column at which code completion should occur
611  /// (1-based).
612  ///
613  /// \returns true if an error occurred, false otherwise.
614  bool SetCodeCompletionPoint(const FileEntry *File,
615                              unsigned Line, unsigned Column);
616
617  /// \brief Determine if this source location refers into the file
618  /// for which we are performing code completion.
619  bool isCodeCompletionFile(SourceLocation FileLoc) const;
620
621  /// \brief Determine if we are performing code completion.
622  bool isCodeCompletionEnabled() const { return CodeCompletionFile != 0; }
623
624  /// \brief Instruct the preprocessor to skip part of
625  /// the main source file.
626  ///
627  /// \param Bytes The number of bytes in the preamble to skip.
628  ///
629  /// \param StartOfLine Whether skipping these bytes puts the lexer at the
630  /// start of a line.
631  void setSkipMainFilePreamble(unsigned Bytes, bool StartOfLine) {
632    SkipMainFilePreamble.first = Bytes;
633    SkipMainFilePreamble.second = StartOfLine;
634  }
635
636  /// Diag - Forwarding function for diagnostics.  This emits a diagnostic at
637  /// the specified Token's location, translating the token's start
638  /// position in the current buffer into a SourcePosition object for rendering.
639  DiagnosticBuilder Diag(SourceLocation Loc, unsigned DiagID) {
640    return Diags->Report(Loc, DiagID);
641  }
642
643  DiagnosticBuilder Diag(const Token &Tok, unsigned DiagID) {
644    return Diags->Report(Tok.getLocation(), DiagID);
645  }
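
  // Example (illustrative sketch; MyDiagID stands in for a real diagnostic ID):
  //   PP.Diag(Tok, MyDiagID) << PP.getSpelling(Tok);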
646
647  /// getSpelling() - Return the 'spelling' of the Tok token.  The spelling of a
648  /// token is the characters used to represent the token in the source file
649  /// after trigraph expansion and escaped-newline folding.  In particular, this
650  /// wants to get the true, uncanonicalized spelling of things like digraphs,
651  /// UCNs, etc.
652  ///
653  /// \param Invalid If non-NULL, will be set \c true if an error occurs.
654  std::string getSpelling(const Token &Tok, bool *Invalid = 0) const {
655    return Lexer::getSpelling(Tok, SourceMgr, Features, Invalid);
656  }
657
658  /// getSpelling - This method is used to get the spelling of a token into a
659  /// preallocated buffer, instead of as an std::string.  The caller is required
660  /// to allocate enough space for the token; a buffer of Tok.getLength() bytes
661  /// is always sufficient.  The length of the actual result is returned.
662  ///
663  /// Note that this method may do two possible things: it may either fill in
664  /// the buffer specified with characters, or it may *change the input pointer*
665  /// to point to a constant buffer with the data already in it (avoiding a
666  /// copy).  The caller is not allowed to modify the returned buffer pointer
667  /// if an internal buffer is returned.
668  unsigned getSpelling(const Token &Tok, const char *&Buffer,
669                       bool *Invalid = 0) const {
670    return Lexer::getSpelling(Tok, Buffer, SourceMgr, Features, Invalid);
671  }
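
  // Example (illustrative sketch): spelling a token into a stack buffer.  Note
  // that after the call Ptr may point at an internal buffer rather than into
  // Buffer.
  //   llvm::SmallString<64> Buffer;
  //   Buffer.resize(Tok.getLength());
  //   const char *Ptr = &Buffer[0];
  //   unsigned Len = PP.getSpelling(Tok, Ptr);
  //   llvm::StringRef Spelling(Ptr, Len);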
672
673  /// getSpelling - This method is used to get the spelling of a token into a
674  /// SmallVector. Note that the returned StringRef may not point to the
675  /// supplied buffer if a copy can be avoided.
676  llvm::StringRef getSpelling(const Token &Tok,
677                              llvm::SmallVectorImpl<char> &Buffer,
678                              bool *Invalid = 0) const;
679
680  /// getSpellingOfSingleCharacterNumericConstant - Tok is a numeric constant
681  /// with length 1, return the character.
682  char getSpellingOfSingleCharacterNumericConstant(const Token &Tok,
683                                                   bool *Invalid = 0) const {
684    assert(Tok.is(tok::numeric_constant) &&
685           Tok.getLength() == 1 && "Called on unsupported token");
686    assert(!Tok.needsCleaning() && "Token can't need cleaning with length 1");
687
688    // If the token is carrying a literal data pointer, just use it.
689    if (const char *D = Tok.getLiteralData())
690      return *D;
691
692    // Otherwise, fall back on getCharacterData, which is slower, but always
693    // works.
694    return *SourceMgr.getCharacterData(Tok.getLocation(), Invalid);
695  }
696
697  /// CreateString - Plop the specified string into a scratch buffer and set the
698  /// specified token's location and length to it.  If specified, the source
699  /// location provides a location of the instantiation point of the token.
700  void CreateString(const char *Buf, unsigned Len,
701                    Token &Tok, SourceLocation SourceLoc = SourceLocation());
702
703  /// \brief Computes the source location just past the end of the
704  /// token at this source location.
705  ///
706  /// This routine can be used to produce a source location that
707  /// points just past the end of the token referenced by \p Loc, and
708  /// is generally used when a diagnostic needs to point just after a
709  /// token where it expected something different from what it received. If
710  /// the returned source location would not be meaningful (e.g., if
711  /// it points into a macro), this routine returns an invalid
712  /// source location.
713  ///
714  /// \param Offset an offset from the end of the token, where the source
715  /// location should refer to. The default offset (0) produces a source
716  /// location pointing just past the end of the token; an offset of 1 produces
717  /// a source location pointing to the last character in the token, etc.
718  SourceLocation getLocForEndOfToken(SourceLocation Loc, unsigned Offset = 0) {
719    return Lexer::getLocForEndOfToken(Loc, Offset, SourceMgr, Features);
720  }
721
722  /// DumpToken - Print the token to stderr, used for debugging.
723  ///
724  void DumpToken(const Token &Tok, bool DumpFlags = false) const;
725  void DumpLocation(SourceLocation Loc) const;
726  void DumpMacro(const MacroInfo &MI) const;
727
728  /// AdvanceToTokenCharacter - Given a location that specifies the start of a
729  /// token, return a new location that specifies a character within the token.
730  SourceLocation AdvanceToTokenCharacter(SourceLocation TokStart,
731                                         unsigned Char) const {
732    return Lexer::AdvanceToTokenCharacter(TokStart, Char, SourceMgr, Features);
733  }
734
735  /// IncrementPasteCounter - Increment the counters for the number of token
736  /// paste operations performed.  If fast was specified, this is a 'fast paste'
737  /// case we handled.
738  ///
739  void IncrementPasteCounter(bool isFast) {
740    if (isFast)
741      ++NumFastTokenPaste;
742    else
743      ++NumTokenPaste;
744  }
745
746  void PrintStats();
747
748  /// HandleMicrosoftCommentPaste - When the macro expander pastes together a
749  /// comment (/##/) in Microsoft mode, this method handles updating the current
750  /// state, returning the token on the next source line.
751  void HandleMicrosoftCommentPaste(Token &Tok);
752
753  //===--------------------------------------------------------------------===//
754  // Preprocessor callback methods.  These are invoked by a lexer as various
755  // directives and events are found.
756
757  /// LookUpIdentifierInfo - Given a tok::raw_identifier token, look up the
758  /// identifier information for the token and install it into the token,
759  /// updating the token kind accordingly.
760  IdentifierInfo *LookUpIdentifierInfo(Token &Identifier) const;
761
762  /// HandleIdentifier - This callback is invoked when the lexer reads an
763  /// identifier and has filled in the token's IdentifierInfo member.  This
764  /// callback potentially macro expands it or turns it into a named token (like
765  /// 'for').
766  void HandleIdentifier(Token &Identifier);
767
768
769  /// HandleEndOfFile - This callback is invoked when the lexer hits the end of
770  /// the current file.  This either returns the EOF token in Result and returns
771  /// true, or pops a level off the include stack and returns false, at which
772  /// point the client should call Lex() again.
773  bool HandleEndOfFile(Token &Result, bool isEndOfMacro = false);
774
775  /// HandleEndOfTokenLexer - This callback is invoked when the current
776  /// TokenLexer hits the end of its token stream.
777  bool HandleEndOfTokenLexer(Token &Result);
778
779  /// HandleDirective - This callback is invoked when the lexer sees a # token
780  /// at the start of a line.  This consumes the directive, modifies the
781  /// lexer/preprocessor state, and advances the lexer(s) so that the next token
782  /// read is the correct one.
783  void HandleDirective(Token &Result);
784
785  /// CheckEndOfDirective - Ensure that the next token is a tok::eom token.  If
786  /// not, emit a diagnostic and consume up until the eom.  If EnableMacros is
787  /// true, then we consider macros that expand to zero tokens as being ok.
788  void CheckEndOfDirective(const char *Directive, bool EnableMacros = false);
789
790  /// DiscardUntilEndOfDirective - Read and discard all tokens remaining on the
791  /// current line until the tok::eom token is found.
792  void DiscardUntilEndOfDirective();
793
794  /// SawDateOrTime - This returns true if the preprocessor has seen a use of
795  /// __DATE__ or __TIME__ in the file so far.
796  bool SawDateOrTime() const {
797    return DATELoc != SourceLocation() || TIMELoc != SourceLocation();
798  }
799  unsigned getCounterValue() const { return CounterValue; }
800  void setCounterValue(unsigned V) { CounterValue = V; }
801
802  /// AllocateMacroInfo - Allocate a new MacroInfo object with the provided
803  ///  SourceLocation.
804  MacroInfo *AllocateMacroInfo(SourceLocation L);
805
806  /// CloneMacroInfo - Allocate a new MacroInfo object which is a clone of MI.
807  MacroInfo *CloneMacroInfo(const MacroInfo &MI);
808
809  /// GetIncludeFilenameSpelling - Turn the specified lexer token into a fully
810  /// checked and spelled filename, e.g. as an operand of #include. This returns
811  /// true if the input filename was in <>'s or false if it was in ""'s.  The
812  /// caller is expected to provide a buffer that is large enough to hold the
813  /// spelling of the filename, but is also expected to handle the case when
814  /// this method decides to use a different buffer.
815  bool GetIncludeFilenameSpelling(SourceLocation Loc,llvm::StringRef &Filename);
816
817  /// LookupFile - Given a "foo" or <foo> reference, look up the indicated file;
818  /// returns null on failure.  isAngled indicates whether the file reference is
819  /// for system #include's or not (i.e. using <> instead of "").
820  const FileEntry *LookupFile(llvm::StringRef Filename,
821                              bool isAngled, const DirectoryLookup *FromDir,
822                              const DirectoryLookup *&CurDir);
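
  // Example (illustrative sketch): resolving <stdio.h> as an angled include.
  //   const DirectoryLookup *CurDir;
  //   const FileEntry *FE = PP.LookupFile("stdio.h", /*isAngled=*/true,
  //                                       /*FromDir=*/0, CurDir);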
823
824  /// GetCurDirLookup - The DirectoryLookup structure used to find the current
825  /// FileEntry, if CurLexer is non-null and if applicable.  This allows us to
826  /// implement #include_next and find directory-specific properties.
827  const DirectoryLookup *GetCurDirLookup() { return CurDirLookup; }
828
829  /// isInPrimaryFile - Return true if we're in the top-level file, not in a
830  /// #include.
831  bool isInPrimaryFile() const;
832
833  /// ConcatenateIncludeName - Handle cases where the #include name is expanded
834  /// from a macro as multiple tokens, which need to be glued together.  This
835  /// occurs for code like:
836  ///    #define FOO <a/b.h>
837  ///    #include FOO
838  /// because in this case, "<a/b.h>" is returned as 7 tokens, not one.
839  ///
840  /// This code concatenates and consumes tokens up to the '>' token.  It
841  /// returns false if the > was found, otherwise it returns true if it finds
842  /// and consumes the EOM marker.
843  bool ConcatenateIncludeName(llvm::SmallString<128> &FilenameBuffer,
844                              SourceLocation &End);
845
846private:
847
848  void PushIncludeMacroStack() {
849    IncludeMacroStack.push_back(IncludeStackInfo(CurLexer.take(),
850                                                 CurPTHLexer.take(),
851                                                 CurPPLexer,
852                                                 CurTokenLexer.take(),
853                                                 CurDirLookup));
854    CurPPLexer = 0;
855  }
856
857  void PopIncludeMacroStack() {
858    CurLexer.reset(IncludeMacroStack.back().TheLexer);
859    CurPTHLexer.reset(IncludeMacroStack.back().ThePTHLexer);
860    CurPPLexer = IncludeMacroStack.back().ThePPLexer;
861    CurTokenLexer.reset(IncludeMacroStack.back().TheTokenLexer);
862    CurDirLookup  = IncludeMacroStack.back().TheDirLookup;
863    IncludeMacroStack.pop_back();
864  }
865
866  /// AllocateMacroInfo - Allocate a new MacroInfo object.
867  MacroInfo *AllocateMacroInfo();
868
869  /// ReleaseMacroInfo - Release the specified MacroInfo.  This memory will
870  ///  be reused for allocating new MacroInfo objects.
871  void ReleaseMacroInfo(MacroInfo* MI);
872
873  /// ReadMacroName - Lex and validate a macro name, which occurs after a
874  /// #define or #undef.  This emits a diagnostic, sets the token kind to eom,
875  /// and discards the rest of the macro line if the macro name is invalid.
876  void ReadMacroName(Token &MacroNameTok, char isDefineUndef = 0);
877
878  /// ReadMacroDefinitionArgList - The ( starting an argument list of a macro
879  /// definition has just been read.  Lex the rest of the arguments and the
880  /// closing ), updating MI with what we learn.  Return true if an error occurs
881  /// parsing the arg list.
882  bool ReadMacroDefinitionArgList(MacroInfo *MI);
883
884  /// SkipExcludedConditionalBlock - We just read a #if or related directive and
885  /// decided that the subsequent tokens are in the #if'd out portion of the
886  /// file.  Lex the rest of the file, until we see an #endif.  If
887  /// FoundNonSkipPortion is true, then we have already emitted code for part of
888  /// this #if directive, so #else/#elif blocks should never be entered. If
889  /// FoundElse is false, then #else directives are ok; if it is true, then we
890  /// have already seen one, so a #else directive is a duplicate.  When this
891  /// returns, the caller can lex the first valid token.
892  void SkipExcludedConditionalBlock(SourceLocation IfTokenLoc,
893                                    bool FoundNonSkipPortion, bool FoundElse);
894
895  /// PTHSkipExcludedConditionalBlock - A fast PTH version of
896  ///  SkipExcludedConditionalBlock.
897  void PTHSkipExcludedConditionalBlock();
898
899  /// EvaluateDirectiveExpression - Evaluate an integer constant expression that
900  /// may occur after a #if or #elif directive and return it as a bool.  If the
901  /// expression is equivalent to "!defined(X)" return X in IfNDefMacro.
902  bool EvaluateDirectiveExpression(IdentifierInfo *&IfNDefMacro);
903
904  /// RegisterBuiltinPragmas - Install the standard preprocessor pragmas:
905  /// #pragma GCC poison/system_header/dependency and #pragma once.
906  void RegisterBuiltinPragmas();
907
908  /// RegisterBuiltinMacros - Register builtin macros, such as __LINE__ with the
909  /// identifier table.
910  void RegisterBuiltinMacros();
911
912  /// HandleMacroExpandedIdentifier - If an identifier token is read that is to
913  /// be expanded as a macro, handle it and return the next token as 'Tok'.  If
914  /// the macro should not be expanded, return true; otherwise, return false.
915  bool HandleMacroExpandedIdentifier(Token &Tok, MacroInfo *MI);
916
917  /// isNextPPTokenLParen - Determine whether the next preprocessor token to be
918  /// lexed is a '('.  If so, consume the token and return true, if not, this
919  /// method should have no observable side-effect on the lexed tokens.
920  bool isNextPPTokenLParen();
921
922  /// ReadFunctionLikeMacroArgs - After reading "MACRO(", this method is
923  /// invoked to read all of the formal arguments specified for the macro
924  /// invocation.  This returns null on error.
925  MacroArgs *ReadFunctionLikeMacroArgs(Token &MacroName, MacroInfo *MI,
926                                       SourceLocation &InstantiationEnd);
927
928  /// ExpandBuiltinMacro - If an identifier token is read that is to be expanded
929  /// as a builtin macro, handle it and return the next token as 'Tok'.
930  void ExpandBuiltinMacro(Token &Tok);
931
932  /// Handle_Pragma - Read a _Pragma directive, slice it up, process it, then
933  /// return the first token after the directive.  The _Pragma token has just
934  /// been read into 'Tok'.
935  void Handle_Pragma(Token &Tok);
936
937  /// HandleMicrosoft__pragma - Like Handle_Pragma except the pragma text
938  /// is not enclosed within a string literal.
939  void HandleMicrosoft__pragma(Token &Tok);
940
941  void Handle_Pragma(unsigned Introducer, const std::string &StrVal,
942                     SourceLocation PragmaLoc, SourceLocation RParenLoc);
943
944  /// EnterSourceFileWithLexer - Add a lexer to the top of the include stack and
945  /// start lexing tokens from it instead of the current buffer.
946  void EnterSourceFileWithLexer(Lexer *TheLexer, const DirectoryLookup *Dir);
947
948  /// EnterSourceFileWithPTH - Add a lexer to the top of the include stack and
949  /// start getting tokens from it using the PTH cache.
950  void EnterSourceFileWithPTH(PTHLexer *PL, const DirectoryLookup *Dir);
951
952  /// IsFileLexer - Returns true if we are lexing from a file and not a
953  ///  pragma or a macro.
954  static bool IsFileLexer(const Lexer* L, const PreprocessorLexer* P) {
955    return L ? !L->isPragmaLexer() : P != 0;
956  }
957
958  static bool IsFileLexer(const IncludeStackInfo& I) {
959    return IsFileLexer(I.TheLexer, I.ThePPLexer);
960  }
961
962  bool IsFileLexer() const {
963    return IsFileLexer(CurLexer.get(), CurPPLexer);
964  }
965
966  //===--------------------------------------------------------------------===//
967  // Caching stuff.
968  void CachingLex(Token &Result);
969  bool InCachingLexMode() const {
970    // If the Lexer pointers are 0 and IncludeMacroStack is empty, it means
971    // that we are past EOF, not that we are in CachingLex mode.
972    return CurPPLexer == 0 && CurTokenLexer == 0 && CurPTHLexer == 0 &&
973           !IncludeMacroStack.empty();
974  }
975  void EnterCachingLexMode();
976  void ExitCachingLexMode() {
977    if (InCachingLexMode())
978      RemoveTopOfLexerStack();
979  }
980  const Token &PeekAhead(unsigned N);
981  void AnnotatePreviousCachedTokens(const Token &Tok);
982
983  //===--------------------------------------------------------------------===//
984  /// Handle*Directive - implement the various preprocessor directives.  These
985  /// should side-effect the current preprocessor object so that the next call
986  /// to Lex() will return the appropriate token.
987  void HandleLineDirective(Token &Tok);
988  void HandleDigitDirective(Token &Tok);
989  void HandleUserDiagnosticDirective(Token &Tok, bool isWarning);
990  void HandleIdentSCCSDirective(Token &Tok);
991
992  // File inclusion.
993  void HandleIncludeDirective(SourceLocation HashLoc,
994                              Token &Tok,
995                              const DirectoryLookup *LookupFrom = 0,
996                              bool isImport = false);
997  void HandleIncludeNextDirective(SourceLocation HashLoc, Token &Tok);
998  void HandleIncludeMacrosDirective(SourceLocation HashLoc, Token &Tok);
999  void HandleImportDirective(SourceLocation HashLoc, Token &Tok);
1000
1001  // Macro handling.
1002  void HandleDefineDirective(Token &Tok);
1003  void HandleUndefDirective(Token &Tok);
1004
1005  // Conditional Inclusion.
1006  void HandleIfdefDirective(Token &Tok, bool isIfndef,
1007                            bool ReadAnyTokensBeforeDirective);
1008  void HandleIfDirective(Token &Tok, bool ReadAnyTokensBeforeDirective);
1009  void HandleEndifDirective(Token &Tok);
1010  void HandleElseDirective(Token &Tok);
1011  void HandleElifDirective(Token &Tok);
1012
1013  // Pragmas.
1014  void HandlePragmaDirective(unsigned Introducer);
1015public:
1016  void HandlePragmaOnce(Token &OnceTok);
1017  void HandlePragmaMark();
1018  void HandlePragmaPoison(Token &PoisonTok);
1019  void HandlePragmaSystemHeader(Token &SysHeaderTok);
1020  void HandlePragmaDependency(Token &DependencyTok);
1021  void HandlePragmaComment(Token &CommentTok);
1022  void HandlePragmaMessage(Token &MessageTok);
1023  void HandlePragmaPushMacro(Token &Tok);
1024  void HandlePragmaPopMacro(Token &Tok);
1025  IdentifierInfo *ParsePragmaPushOrPopMacro(Token &Tok);
1026
1027  // Return true and store the first token only if any CommentHandler
1028  // has inserted some tokens and getCommentRetentionState() is false.
1029  bool HandleComment(Token &Token, SourceRange Comment);
1030
1031  /// \brief A macro is used, update information about macros that need unused
1032  /// warnings.
1033  void markMacroAsUsed(MacroInfo *MI);
1034};
1035
1036/// \brief Abstract base class that describes a handler that will receive
1037/// source ranges for each of the comments encountered in the source file.
1038class CommentHandler {
1039public:
1040  virtual ~CommentHandler();
1041
1042  // The handler shall return true if it has pushed any tokens
1043  // to be read using e.g. EnterToken or EnterTokenStream.
1044  virtual bool HandleComment(Preprocessor &PP, SourceRange Comment) = 0;
1045};
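
// A minimal sketch of a CommentHandler implementation (the class name and body
// are illustrative, not part of Clang).  It would be registered with
// PP.AddCommentHandler(&H) and removed with PP.RemoveCommentHandler(&H).
//   class DumpingCommentHandler : public CommentHandler {
//   public:
//     virtual bool HandleComment(Preprocessor &PP, SourceRange Comment) {
//       PP.DumpLocation(Comment.getBegin());
//       return false;  // no tokens were pushed
//     }
//   };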
1046
1047}  // end namespace clang
1048
1049#endif
1050