Preprocessor.h revision b657f115c3b4e262e72906a28cbcf3eaccd9460c
//===--- Preprocessor.h - C Language Family Preprocessor --------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
//  This file defines the Preprocessor interface.
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_CLANG_LEX_PREPROCESSOR_H
#define LLVM_CLANG_LEX_PREPROCESSOR_H

#include "clang/Lex/Lexer.h"
#include "clang/Lex/PTHLexer.h"
#include "clang/Lex/PPCallbacks.h"
#include "clang/Lex/TokenLexer.h"
#include "clang/Lex/PTHManager.h"
#include "clang/Basic/Builtins.h"
#include "clang/Basic/Diagnostic.h"
#include "clang/Basic/IdentifierTable.h"
#include "clang/Basic/SourceLocation.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/OwningPtr.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/Support/Allocator.h"
#include <vector>

namespace clang {

class SourceManager;
class FileManager;
class FileEntry;
class HeaderSearch;
class PragmaNamespace;
class PragmaHandler;
class CommentHandler;
class ScratchBuffer;
class TargetInfo;
class PPCallbacks;
class DirectoryLookup;

/// Preprocessor - This object engages in a tight little dance with the lexer to
/// efficiently preprocess tokens.  Lexers know only about tokens within a
/// single source file, and don't know anything about preprocessor-level issues
/// like the #include stack, token expansion, etc.
///
class Preprocessor {
  Diagnostic        *Diags;
  LangOptions        Features;
  TargetInfo        &Target;
  FileManager       &FileMgr;
  SourceManager     &SourceMgr;
  ScratchBuffer     *ScratchBuf;
  HeaderSearch      &HeaderInfo;

  /// PTH - An optional PTHManager object used for getting tokens from
  ///  a token cache rather than lexing the original source file.
  llvm::OwningPtr<PTHManager> PTH;

  /// BP - A BumpPtrAllocator object used to quickly allocate and release
  ///  objects internal to the Preprocessor.
  llvm::BumpPtrAllocator BP;

  /// Identifiers for builtin macros and other builtins.
  IdentifierInfo *Ident__LINE__, *Ident__FILE__;   // __LINE__, __FILE__
  IdentifierInfo *Ident__DATE__, *Ident__TIME__;   // __DATE__, __TIME__
  IdentifierInfo *Ident__INCLUDE_LEVEL__;          // __INCLUDE_LEVEL__
  IdentifierInfo *Ident__BASE_FILE__;              // __BASE_FILE__
  IdentifierInfo *Ident__TIMESTAMP__;              // __TIMESTAMP__
  IdentifierInfo *Ident__COUNTER__;                // __COUNTER__
  IdentifierInfo *Ident_Pragma, *Ident__VA_ARGS__; // _Pragma, __VA_ARGS__
  IdentifierInfo *Ident__has_feature;              // __has_feature
  IdentifierInfo *Ident__has_builtin;              // __has_builtin

  SourceLocation DATELoc, TIMELoc;
  unsigned CounterValue;  // Next __COUNTER__ value.

  enum {
    /// MaxAllowedIncludeStackDepth - Maximum depth of #includes allowed.
    MaxAllowedIncludeStackDepth = 200
  };

  // State that is set before the preprocessor begins.
  bool KeepComments : 1;
  bool KeepMacroComments : 1;

  // State that changes while the preprocessor runs:
  bool DisableMacroExpansion : 1;  // True if macro expansion is disabled.
  bool InMacroArgs : 1;            // True if parsing fn macro invocation args.

  /// Identifiers - This is mapping/lookup information for all identifiers in
  /// the program, including program keywords.
  IdentifierTable Identifiers;

  /// Selectors - This table contains all the selectors in the program. Unlike
  /// IdentifierTable above, this table *isn't* populated by the preprocessor.
  /// It is declared/instantiated here because its role/lifetime is
  /// conceptually similar to that of the IdentifierTable. In addition, the
  /// current control flow (in clang::ParseAST()) makes it convenient to put it
  /// here.
  /// FIXME: Make sure the lifetime of Identifiers/Selectors *isn't* tied to
  /// the lifetime of the preprocessor.
  SelectorTable Selectors;

  /// BuiltinInfo - Information about builtins.
  Builtin::Context BuiltinInfo;

  /// PragmaHandlers - This tracks all of the pragmas that the client registered
  /// with this preprocessor.
  PragmaNamespace *PragmaHandlers;

  /// \brief Tracks all of the comment handlers that the client registered
  /// with this preprocessor.
  std::vector<CommentHandler *> CommentHandlers;

  /// CurLexer - This is the current top of the stack that we're lexing from if
  /// not expanding a macro and we are lexing directly from source code.
  ///  Only one of CurLexer, CurPTHLexer, or CurTokenLexer will be non-null.
  llvm::OwningPtr<Lexer> CurLexer;

  /// CurPTHLexer - This is the current top of stack that we're lexing from if
  ///  not expanding from a macro and we are lexing from a PTH cache.
  ///  Only one of CurLexer, CurPTHLexer, or CurTokenLexer will be non-null.
  llvm::OwningPtr<PTHLexer> CurPTHLexer;

  /// CurPPLexer - This is the current top of the stack that we're lexing from
  ///  if not expanding a macro.  This is an alias for either CurLexer or
  ///  CurPTHLexer.
  PreprocessorLexer* CurPPLexer;

  /// CurDirLookup - The DirectoryLookup structure used to find the current
  /// FileEntry, if CurLexer is non-null and if applicable.  This allows us to
  /// implement #include_next and find directory-specific properties.
  const DirectoryLookup *CurDirLookup;

  /// CurTokenLexer - This is the current macro we are expanding, if we are
  /// expanding a macro.  One of CurLexer and CurTokenLexer must be null.
  llvm::OwningPtr<TokenLexer> CurTokenLexer;

  /// IncludeMacroStack - This keeps track of the stack of files currently
  /// #included, and macros currently being expanded from, not counting
  /// CurLexer/CurTokenLexer.
  struct IncludeStackInfo {
    Lexer                 *TheLexer;
    PTHLexer              *ThePTHLexer;
    PreprocessorLexer     *ThePPLexer;
    TokenLexer            *TheTokenLexer;
    const DirectoryLookup *TheDirLookup;

    IncludeStackInfo(Lexer *L, PTHLexer* P, PreprocessorLexer* PPL,
                     TokenLexer* TL, const DirectoryLookup *D)
      : TheLexer(L), ThePTHLexer(P), ThePPLexer(PPL), TheTokenLexer(TL),
        TheDirLookup(D) {}
  };
  std::vector<IncludeStackInfo> IncludeMacroStack;

  /// Callbacks - These are actions invoked when some preprocessor activity is
  /// encountered (e.g. a file is #included, etc).
  PPCallbacks *Callbacks;

  /// Macros - For each IdentifierInfo with 'HasMacro' set, we keep a mapping
  /// to the actual definition of the macro.
  llvm::DenseMap<IdentifierInfo*, MacroInfo*> Macros;

  /// MICache - A "freelist" of MacroInfo objects that can be reused for quick
  ///  allocation.
  std::vector<MacroInfo*> MICache;

  // Various statistics we track for performance analysis.
  unsigned NumDirectives, NumIncluded, NumDefined, NumUndefined, NumPragma;
  unsigned NumIf, NumElse, NumEndif;
  unsigned NumEnteredSourceFiles, MaxIncludeStackDepth;
  unsigned NumMacroExpanded, NumFnMacroExpanded, NumBuiltinMacroExpanded;
  unsigned NumFastMacroExpanded, NumTokenPaste, NumFastTokenPaste;
  unsigned NumSkipped;

  /// Predefines - This string contains the predefined macros that the
  /// preprocessor should use from the command line, etc.
  std::string Predefines;

  /// TokenLexerCache - Cache macro expanders to reduce malloc traffic.
  enum { TokenLexerCacheSize = 8 };
  unsigned NumCachedTokenLexers;
  TokenLexer *TokenLexerCache[TokenLexerCacheSize];

private:  // Cached tokens state.
  typedef llvm::SmallVector<Token, 1> CachedTokensTy;

  /// CachedTokens - Cached tokens are stored here when we do backtracking or
  /// lookahead. They are "lexed" by the CachingLex() method.
  CachedTokensTy CachedTokens;

  /// CachedLexPos - The position of the cached token that CachingLex() should
  /// "lex" next. If it points beyond the CachedTokens vector, it means that
  /// a normal Lex() should be invoked.
  CachedTokensTy::size_type CachedLexPos;

  /// BacktrackPositions - Stack of backtrack positions, allowing nested
  /// backtracks. The EnableBacktrackAtThisPos() method pushes a position to
  /// indicate where CachedLexPos should be set when the Backtrack() method is
  /// invoked (at which point the last position is popped).
  std::vector<CachedTokensTy::size_type> BacktrackPositions;

public:
  Preprocessor(Diagnostic &diags, const LangOptions &opts, TargetInfo &target,
               SourceManager &SM, HeaderSearch &Headers,
               IdentifierInfoLookup *IILookup = 0);

  ~Preprocessor();

  Diagnostic &getDiagnostics() const { return *Diags; }
  void setDiagnostics(Diagnostic &D) { Diags = &D; }

  const LangOptions &getLangOptions() const { return Features; }
  TargetInfo &getTargetInfo() const { return Target; }
  FileManager &getFileManager() const { return FileMgr; }
  SourceManager &getSourceManager() const { return SourceMgr; }
  HeaderSearch &getHeaderSearchInfo() const { return HeaderInfo; }

  IdentifierTable &getIdentifierTable() { return Identifiers; }
  SelectorTable &getSelectorTable() { return Selectors; }
  Builtin::Context &getBuiltinInfo() { return BuiltinInfo; }
  llvm::BumpPtrAllocator &getPreprocessorAllocator() { return BP; }

  void setPTHManager(PTHManager* pm);

  PTHManager *getPTHManager() { return PTH.get(); }

  /// SetCommentRetentionState - Control whether or not the preprocessor retains
  /// comments in output.
  void SetCommentRetentionState(bool KeepComments, bool KeepMacroComments) {
    this->KeepComments = KeepComments | KeepMacroComments;
    this->KeepMacroComments = KeepMacroComments;
  }

  bool getCommentRetentionState() const { return KeepComments; }

  /// isCurrentLexer - Return true if we are lexing directly from the specified
  /// lexer.
  bool isCurrentLexer(const PreprocessorLexer *L) const {
    return CurPPLexer == L;
  }

  /// getCurrentFileLexer - Return the current file lexer being lexed from.
  /// Note that this ignores any potentially active macro expansions and
  /// _Pragma expansions going on at the time.
  PreprocessorLexer *getCurrentFileLexer() const;

  /// getPPCallbacks/setPPCallbacks - Accessors for preprocessor callbacks.
  /// Note that this class takes ownership of any PPCallbacks object given to
  /// it.
  PPCallbacks *getPPCallbacks() const { return Callbacks; }
  void setPPCallbacks(PPCallbacks *C) {
    if (Callbacks)
      C = new PPChainedCallbacks(C, Callbacks);
    Callbacks = C;
  }

  /// getMacroInfo - Given an identifier, return the MacroInfo it is #defined
  /// to, or null if it isn't #define'd.
  MacroInfo *getMacroInfo(IdentifierInfo *II) const {
    return II->hasMacroDefinition() ? Macros.find(II)->second : 0;
  }

  /// setMacroInfo - Specify a macro for this identifier.
  ///
  void setMacroInfo(IdentifierInfo *II, MacroInfo *MI);

  /// macro_iterator/macro_begin/macro_end - This allows you to walk the current
  /// state of the macro table.  This visits every currently-defined macro.
  typedef llvm::DenseMap<IdentifierInfo*,
                         MacroInfo*>::const_iterator macro_iterator;
  macro_iterator macro_begin() const { return Macros.begin(); }
  macro_iterator macro_end() const { return Macros.end(); }
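
  // For reference, a minimal sketch of walking the macro table with the
  // iterators above (assuming 'PP' is a fully-initialized Preprocessor and
  // llvm::errs() is available for output):
  //
  //   for (Preprocessor::macro_iterator I = PP.macro_begin(),
  //                                     E = PP.macro_end(); I != E; ++I) {
  //     const IdentifierInfo *Name = I->first;   // the macro's name
  //     const MacroInfo *MI = I->second;         // its current definition
  //     llvm::errs() << Name->getName() << "\n";
  //   }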

  const std::string &getPredefines() const { return Predefines; }
  /// setPredefines - Set the predefines for this Preprocessor.  These
  /// predefines are automatically injected when parsing the main file.
  void setPredefines(const char *P) { Predefines = P; }
  void setPredefines(const std::string &P) { Predefines = P; }

  /// getIdentifierInfo - Return information about the specified preprocessor
  /// identifier token.  The version of this method that takes two character
  /// pointers is preferred unless the identifier is already available as a
  /// string (this avoids allocation and copying of memory to construct an
  /// std::string).
  IdentifierInfo *getIdentifierInfo(const char *NameStart,
                                    const char *NameEnd) {
    return &Identifiers.get(NameStart, NameEnd);
  }
  IdentifierInfo *getIdentifierInfo(const char *NameStr) {
    return getIdentifierInfo(NameStr, NameStr+strlen(NameStr));
  }

299
300  /// AddPragmaHandler - Add the specified pragma handler to the preprocessor.
301  /// If 'Namespace' is non-null, then it is a token required to exist on the
302  /// pragma line before the pragma string starts, e.g. "STDC" or "GCC".
303  void AddPragmaHandler(const char *Namespace, PragmaHandler *Handler);
304
305  /// RemovePragmaHandler - Remove the specific pragma handler from
306  /// the preprocessor. If \arg Namespace is non-null, then it should
307  /// be the namespace that \arg Handler was added to. It is an error
308  /// to remove a handler that has not been registered.
309  void RemovePragmaHandler(const char *Namespace, PragmaHandler *Handler);
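
  // A sketch of how a client typically uses this pair of methods. PragmaHandler
  // itself is only forward-declared in this header, so the subclass named here
  // is a hypothetical illustration, not part of this API:
  //
  //   PragmaHandler *H = new MyPragmaHandler(/*...*/);  // hypothetical subclass
  //   PP.AddPragmaHandler("clang", H);   // handles "#pragma clang ..." lines
  //   // ... use the preprocessor ...
  //   PP.RemovePragmaHandler("clang", H);  // namespace must match registration
  //   delete H;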

  /// \brief Add the specified comment handler to the preprocessor.
  void AddCommentHandler(CommentHandler *Handler);

  /// \brief Remove the specified comment handler.
  ///
  /// It is an error to remove a handler that has not been registered.
  void RemoveCommentHandler(CommentHandler *Handler);

  /// EnterMainSourceFile - Enter the specified FileID as the main source file,
  /// which implicitly adds the builtin defines etc.
  void EnterMainSourceFile();

  /// EnterSourceFile - Add a source file to the top of the include stack and
  /// start lexing tokens from it instead of the current buffer.
  void EnterSourceFile(FileID CurFileID, const DirectoryLookup *Dir);

  /// EnterMacro - Add a Macro to the top of the include stack and start lexing
  /// tokens from it instead of the current buffer.  Args specifies the
  /// tokens input to a function-like macro.
  ///
  /// ILEnd specifies the location of the ')' for a function-like macro or the
  /// identifier for an object-like macro.
  void EnterMacro(Token &Identifier, SourceLocation ILEnd, MacroArgs *Args);

  /// EnterTokenStream - Add a "macro" context to the top of the include stack,
  /// which will cause the lexer to start returning the specified tokens.
  ///
  /// If DisableMacroExpansion is true, tokens lexed from the token stream will
  /// not be subject to further macro expansion.  Otherwise, these tokens will
  /// be re-macro-expanded when/if expansion is enabled.
  ///
  /// If OwnsTokens is false, this method assumes that the specified stream of
  /// tokens has a permanent owner somewhere, so they do not need to be copied.
  /// If it is true, it assumes the array of tokens is allocated with new[] and
  /// must be freed.
  ///
  void EnterTokenStream(const Token *Toks, unsigned NumToks,
                        bool DisableMacroExpansion, bool OwnsTokens);
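
  // A minimal sketch of pushing a previously recorded token sequence back into
  // the preprocessor. The 'Toks' buffer here is a hypothetical container the
  // caller filled in earlier (e.g. by recording tokens returned from Lex()):
  //
  //   llvm::SmallVector<Token, 4> Toks;
  //   // ... fill Toks with previously lexed tokens ...
  //   // Tokens stay owned by 'Toks' (OwnsTokens = false) and will not be
  //   // macro expanded again (DisableMacroExpansion = true).
  //   PP.EnterTokenStream(&Toks[0], Toks.size(),
  //                       /*DisableMacroExpansion=*/true, /*OwnsTokens=*/false);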

  /// RemoveTopOfLexerStack - Pop the current lexer/macro expansion off the top
  /// of the lexer stack.  This should only be used in situations where the
  /// current state of the top-of-stack lexer is known.
  void RemoveTopOfLexerStack();

  /// EnableBacktrackAtThisPos - From the point that this method is called, and
  /// until CommitBacktrackedTokens() or Backtrack() is called, the Preprocessor
  /// keeps track of the lexed tokens so that a subsequent Backtrack() call will
  /// make the Preprocessor re-lex the same tokens.
  ///
  /// Nested backtracks are allowed, meaning that EnableBacktrackAtThisPos can
  /// be called multiple times and CommitBacktrackedTokens/Backtrack calls will
  /// be combined with the EnableBacktrackAtThisPos calls in reverse order.
  ///
  /// NOTE: *DO NOT* forget to call either CommitBacktrackedTokens or Backtrack
  /// at some point after EnableBacktrackAtThisPos. If you don't, caching of
  /// tokens will continue indefinitely.
  ///
  void EnableBacktrackAtThisPos();

  /// CommitBacktrackedTokens - Disable the last EnableBacktrackAtThisPos call.
  void CommitBacktrackedTokens();

  /// Backtrack - Make Preprocessor re-lex the tokens that were lexed since
  /// EnableBacktrackAtThisPos() was previously called.
  void Backtrack();

  /// isBacktrackEnabled - True if EnableBacktrackAtThisPos() was called and
  /// caching of tokens is on.
  bool isBacktrackEnabled() const { return !BacktrackPositions.empty(); }
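
  // A rough usage sketch for the backtracking API above, in the spirit of a
  // parser that speculatively consumes tokens (assuming 'PP' is a
  // fully-initialized Preprocessor; the predicate is hypothetical):
  //
  //   PP.EnableBacktrackAtThisPos();       // start caching lexed tokens
  //   Token Tok;
  //   PP.Lex(Tok);
  //   if (LooksLikeWhatWeWanted(Tok))      // hypothetical decision
  //     PP.CommitBacktrackedTokens();      // keep the consumed tokens
  //   else
  //     PP.Backtrack();                    // re-lex from the saved position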

  /// Lex - To lex a token from the preprocessor, just pull a token from the
  /// current lexer or macro object.
  void Lex(Token &Result) {
    if (CurLexer)
      CurLexer->Lex(Result);
    else if (CurPTHLexer)
      CurPTHLexer->Lex(Result);
    else if (CurTokenLexer)
      CurTokenLexer->Lex(Result);
    else
      CachingLex(Result);
  }
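
  // Sketch of the typical top-level loop a client drives with Lex(), assuming
  // the main file has already been entered with EnterMainSourceFile():
  //
  //   Token Tok;
  //   do {
  //     PP.Lex(Tok);        // returns a tok::eof token once the main file ends
  //     // ... consume Tok ...
  //   } while (!Tok.is(tok::eof));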

  /// LexNonComment - Lex a token.  If it's a comment, keep lexing until we get
  /// something not a comment.  This is useful in -E -C mode where comments
  /// would foul up preprocessor directive handling.
  void LexNonComment(Token &Result) {
    do
      Lex(Result);
    while (Result.getKind() == tok::comment);
  }

  /// LexUnexpandedToken - This is just like Lex, but this disables macro
  /// expansion of identifier tokens.
  void LexUnexpandedToken(Token &Result) {
    // Disable macro expansion.
    bool OldVal = DisableMacroExpansion;
    DisableMacroExpansion = true;
    // Lex the token.
    Lex(Result);

    // Reenable it.
    DisableMacroExpansion = OldVal;
  }

  /// LookAhead - This peeks ahead N tokens and returns that token without
  /// consuming any tokens.  LookAhead(0) returns the next token that would be
  /// returned by Lex(), LookAhead(1) returns the token after it, etc.  This
  /// returns normal tokens after phase 5.  As such, it is equivalent to using
  /// 'Lex', not 'LexUnexpandedToken'.
  const Token &LookAhead(unsigned N) {
    if (CachedLexPos + N < CachedTokens.size())
      return CachedTokens[CachedLexPos+N];
    else
      return PeekAhead(N+1);
  }
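
  // For illustration, a sketch of peeking at upcoming tokens without consuming
  // them (the decision logic shown is hypothetical):
  //
  //   const Token &Next  = PP.LookAhead(0);   // token Lex() would return next
  //   const Token &After = PP.LookAhead(1);   // the one after that
  //   if (Next.is(tok::identifier) && After.is(tok::l_paren)) {
  //     // ... treat this as a call-like construct ...
  //   }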

  /// RevertCachedTokens - When backtracking is enabled and tokens are cached,
  /// this allows one to revert a specific number of tokens.
  /// Note that the number of tokens being reverted should be up to the last
  /// backtrack position, not more.
  void RevertCachedTokens(unsigned N) {
    assert(isBacktrackEnabled() &&
           "Should only be called when tokens are cached for backtracking");
    assert(signed(CachedLexPos) - signed(N) >= signed(BacktrackPositions.back())
         && "Should revert tokens up to the last backtrack position, not more");
    assert(signed(CachedLexPos) - signed(N) >= 0 &&
           "Corrupted backtrack positions ?");
    CachedLexPos -= N;
  }

  /// EnterToken - Enters a token into the token stream to be lexed next. If
  /// Backtrack() is called afterwards, the token will remain at the insertion
  /// point.
  void EnterToken(const Token &Tok) {
    EnterCachingLexMode();
    CachedTokens.insert(CachedTokens.begin()+CachedLexPos, Tok);
  }

  /// AnnotateCachedTokens - We notify the Preprocessor that if it is caching
  /// tokens (because backtrack is enabled) it should replace the most recent
  /// cached tokens with the given annotation token. This function has no effect
  /// if backtracking is not enabled.
  ///
  /// Note that this function is just an optimization, so that the cached tokens
  /// don't get re-parsed and re-resolved after a backtrack is invoked.
  void AnnotateCachedTokens(const Token &Tok) {
    assert(Tok.isAnnotation() && "Expected annotation token");
    if (CachedLexPos != 0 && isBacktrackEnabled())
      AnnotatePreviousCachedTokens(Tok);
  }

  /// \brief Replace the last token with an annotation token.
  ///
  /// Like AnnotateCachedTokens(), this routine replaces an
  /// already-parsed (and resolved) token with an annotation
  /// token. However, this routine only replaces the last token with
  /// the annotation token; it does not affect any other cached
  /// tokens. This function has no effect if backtracking is not
  /// enabled.
  void ReplaceLastTokenWithAnnotation(const Token &Tok) {
    assert(Tok.isAnnotation() && "Expected annotation token");
    if (CachedLexPos != 0 && isBacktrackEnabled())
      CachedTokens[CachedLexPos-1] = Tok;
  }

  /// Diag - Forwarding function for diagnostics.  This emits a diagnostic at
  /// the specified Token's location, translating the token's start
  /// position in the current buffer into a FullSourceLoc object for rendering.
  DiagnosticBuilder Diag(SourceLocation Loc, unsigned DiagID) {
    return Diags->Report(FullSourceLoc(Loc, getSourceManager()), DiagID);
  }

  DiagnosticBuilder Diag(const Token &Tok, unsigned DiagID) {
    return Diags->Report(FullSourceLoc(Tok.getLocation(), getSourceManager()),
                         DiagID);
  }
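
  // Sketch of reporting a diagnostic at a token, as callers throughout the
  // lexer and parser do; 'diag::some_diag_id' is a hypothetical placeholder
  // standing in for a real diagnostic ID:
  //
  //   PP.Diag(Tok, diag::some_diag_id);
  //   PP.Diag(Tok.getLocation(), diag::some_diag_id);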

  /// getSpelling() - Return the 'spelling' of the Tok token.  The spelling of
  /// a token is the characters used to represent the token in the source file
  /// after trigraph expansion and escaped-newline folding.  In particular, this
  /// wants to get the true, uncanonicalized, spelling of things like digraphs,
  /// UCNs, etc.
  std::string getSpelling(const Token &Tok) const;

  /// getSpelling - This method is used to get the spelling of a token into a
  /// preallocated buffer, instead of as an std::string.  The caller is required
  /// to allocate enough space for the token, which is guaranteed to be at least
  /// Tok.getLength() bytes long.  The length of the actual result is returned.
  ///
  /// Note that this method may do two possible things: it may either fill in
  /// the buffer specified with characters, or it may *change the input pointer*
  /// to point to a constant buffer with the data already in it (avoiding a
  /// copy).  The caller is not allowed to modify the returned buffer pointer
  /// if an internal buffer is returned.
  unsigned getSpelling(const Token &Tok, const char *&Buffer) const;
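
  // A minimal sketch of the zero-copy idiom the buffer-taking overload above
  // supports (the scratch buffer choice is an assumption for the example):
  //
  //   std::vector<char> Scratch(Tok.getLength());
  //   const char *Spelling = &Scratch[0];
  //   unsigned Len = PP.getSpelling(Tok, Spelling);
  //   // 'Spelling' may now point into 'Scratch' or into a constant buffer;
  //   // either way, Spelling[0..Len) holds the token's characters.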

  /// getSpellingOfSingleCharacterNumericConstant - Tok is a numeric constant
  /// with length 1, return the character.
  char getSpellingOfSingleCharacterNumericConstant(const Token &Tok) const {
    assert(Tok.is(tok::numeric_constant) &&
           Tok.getLength() == 1 && "Called on unsupported token");
    assert(!Tok.needsCleaning() && "Token can't need cleaning with length 1");

    // If the token is carrying a literal data pointer, just use it.
    if (const char *D = Tok.getLiteralData())
      return *D;

    // Otherwise, fall back on getCharacterData, which is slower, but always
    // works.
    return *SourceMgr.getCharacterData(Tok.getLocation());
  }

  /// CreateString - Plop the specified string into a scratch buffer and set the
  /// specified token's location and length to it.  If specified, the source
  /// location provides a location of the instantiation point of the token.
  void CreateString(const char *Buf, unsigned Len,
                    Token &Tok, SourceLocation SourceLoc = SourceLocation());

  /// \brief Computes the source location just past the end of the
  /// token at this source location.
  ///
  /// This routine can be used to produce a source location that
  /// points just past the end of the token referenced by \p Loc, and
  /// is generally used when a diagnostic needs to point just after a
  /// token where it expected something different than it received. If
  /// the returned source location would not be meaningful (e.g., if
  /// it points into a macro), this routine returns an invalid
  /// source location.
  SourceLocation getLocForEndOfToken(SourceLocation Loc);

  /// DumpToken - Print the token to stderr, used for debugging.
  ///
  void DumpToken(const Token &Tok, bool DumpFlags = false) const;
  void DumpLocation(SourceLocation Loc) const;
  void DumpMacro(const MacroInfo &MI) const;

  /// AdvanceToTokenCharacter - Given a location that specifies the start of a
  /// token, return a new location that specifies a character within the token.
  SourceLocation AdvanceToTokenCharacter(SourceLocation TokStart, unsigned Char);

  /// IncrementPasteCounter - Increment the counters for the number of token
  /// paste operations performed.  If fast was specified, this is a 'fast paste'
  /// case we handled.
  ///
  void IncrementPasteCounter(bool isFast) {
    if (isFast)
      ++NumFastTokenPaste;
    else
      ++NumTokenPaste;
  }

  void PrintStats();

  /// HandleMicrosoftCommentPaste - When the macro expander pastes together a
  /// comment (/##/) in Microsoft mode, this method handles updating the current
  /// state, returning the token on the next source line.
  void HandleMicrosoftCommentPaste(Token &Tok);

  //===--------------------------------------------------------------------===//
  // Preprocessor callback methods.  These are invoked by a lexer as various
  // directives and events are found.

  /// LookUpIdentifierInfo - Given a tok::identifier token, look up the
  /// identifier information for the token and install it into the token.
  IdentifierInfo *LookUpIdentifierInfo(Token &Identifier,
                                       const char *BufPtr = 0);

  /// HandleIdentifier - This callback is invoked when the lexer reads an
  /// identifier and has filled in the token's IdentifierInfo member.  This
  /// callback potentially macro expands it or turns it into a named token (like
  /// 'for').
  void HandleIdentifier(Token &Identifier);

  /// HandleEndOfFile - This callback is invoked when the lexer hits the end of
  /// the current file.  This either returns the EOF token and returns true, or
  /// pops a level off the include stack and returns false, at which point the
  /// client should call lex again.
  bool HandleEndOfFile(Token &Result, bool isEndOfMacro = false);

  /// HandleEndOfTokenLexer - This callback is invoked when the current
  /// TokenLexer hits the end of its token stream.
  bool HandleEndOfTokenLexer(Token &Result);

  /// HandleDirective - This callback is invoked when the lexer sees a # token
  /// at the start of a line.  This consumes the directive, modifies the
  /// lexer/preprocessor state, and advances the lexer(s) so that the next token
  /// read is the correct one.
  void HandleDirective(Token &Result);

  /// CheckEndOfDirective - Ensure that the next token is a tok::eom token.  If
  /// not, emit a diagnostic and consume up until the eom.  If EnableMacros is
  /// true, then we consider macros that expand to zero tokens as being ok.
  void CheckEndOfDirective(const char *Directive, bool EnableMacros = false);

  /// DiscardUntilEndOfDirective - Read and discard all tokens remaining on the
  /// current line until the tok::eom token is found.
  void DiscardUntilEndOfDirective();

  /// SawDateOrTime - This returns true if the preprocessor has seen a use of
  /// __DATE__ or __TIME__ in the file so far.
  bool SawDateOrTime() const {
    return DATELoc != SourceLocation() || TIMELoc != SourceLocation();
  }
  unsigned getCounterValue() const { return CounterValue; }
  void setCounterValue(unsigned V) { CounterValue = V; }

  /// AllocateMacroInfo - Allocate a new MacroInfo object with the provided
  ///  SourceLocation.
  MacroInfo* AllocateMacroInfo(SourceLocation L);

private:

  void PushIncludeMacroStack() {
    IncludeMacroStack.push_back(IncludeStackInfo(CurLexer.take(),
                                                 CurPTHLexer.take(),
                                                 CurPPLexer,
                                                 CurTokenLexer.take(),
                                                 CurDirLookup));
    CurPPLexer = 0;
  }

  void PopIncludeMacroStack() {
    CurLexer.reset(IncludeMacroStack.back().TheLexer);
    CurPTHLexer.reset(IncludeMacroStack.back().ThePTHLexer);
    CurPPLexer = IncludeMacroStack.back().ThePPLexer;
    CurTokenLexer.reset(IncludeMacroStack.back().TheTokenLexer);
    CurDirLookup  = IncludeMacroStack.back().TheDirLookup;
    IncludeMacroStack.pop_back();
  }

  /// ReleaseMacroInfo - Release the specified MacroInfo.  This memory will
  ///  be reused for allocating new MacroInfo objects.
  void ReleaseMacroInfo(MacroInfo* MI);

  /// isInPrimaryFile - Return true if we're in the top-level file, not in a
  /// #include.
  bool isInPrimaryFile() const;

  /// ReadMacroName - Lex and validate a macro name, which occurs after a
  /// #define or #undef.  This emits a diagnostic, sets the token kind to eom,
  /// and discards the rest of the macro line if the macro name is invalid.
  void ReadMacroName(Token &MacroNameTok, char isDefineUndef = 0);

  /// ReadMacroDefinitionArgList - The ( starting an argument list of a macro
  /// definition has just been read.  Lex the rest of the arguments and the
  /// closing ), updating MI with what we learn.  Return true if an error occurs
  /// parsing the arg list.
  bool ReadMacroDefinitionArgList(MacroInfo *MI);

  /// SkipExcludedConditionalBlock - We just read a #if or related directive and
  /// decided that the subsequent tokens are in the #if'd out portion of the
  /// file.  Lex the rest of the file, until we see an #endif.  If
  /// FoundNonSkipPortion is true, then we have already emitted code for part of
  /// this #if directive, so #else/#elif blocks should never be entered. If
  /// FoundElse is false, then #else directives are ok; if it is true, we have
  /// already seen one, so an #else directive is a duplicate.  When this
  /// returns, the caller can lex the first valid token.
  void SkipExcludedConditionalBlock(SourceLocation IfTokenLoc,
                                    bool FoundNonSkipPortion, bool FoundElse);

  /// PTHSkipExcludedConditionalBlock - A fast PTH version of
  ///  SkipExcludedConditionalBlock.
  void PTHSkipExcludedConditionalBlock();

  /// EvaluateDirectiveExpression - Evaluate an integer constant expression that
  /// may occur after a #if or #elif directive and return it as a bool.  If the
  /// expression is equivalent to "!defined(X)" return X in IfNDefMacro.
  bool EvaluateDirectiveExpression(IdentifierInfo *&IfNDefMacro);

  /// RegisterBuiltinPragmas - Install the standard preprocessor pragmas:
  /// #pragma GCC poison/system_header/dependency and #pragma once.
  void RegisterBuiltinPragmas();

  /// RegisterBuiltinMacros - Register builtin macros, such as __LINE__ with the
  /// identifier table.
  void RegisterBuiltinMacros();

  /// HandleMacroExpandedIdentifier - If an identifier token is read that is to
  /// be expanded as a macro, handle it and return the next token as 'Tok'.  If
  /// the macro should not be expanded return true, otherwise return false.
  bool HandleMacroExpandedIdentifier(Token &Tok, MacroInfo *MI);

  /// isNextPPTokenLParen - Determine whether the next preprocessor token to be
  /// lexed is a '('.  If so, consume the token and return true, if not, this
  /// method should have no observable side-effect on the lexed tokens.
  bool isNextPPTokenLParen();

  /// ReadFunctionLikeMacroArgs - After reading "MACRO(", this method is
  /// invoked to read all of the formal arguments specified for the macro
  /// invocation.  This returns null on error.
  MacroArgs *ReadFunctionLikeMacroArgs(Token &MacroName, MacroInfo *MI,
                                       SourceLocation &InstantiationEnd);

  /// ExpandBuiltinMacro - If an identifier token is read that is to be expanded
  /// as a builtin macro, handle it and return the next token as 'Tok'.
  void ExpandBuiltinMacro(Token &Tok);

  /// Handle_Pragma - Read a _Pragma directive, slice it up, process it, then
  /// return the first token after the directive.  The _Pragma token has just
  /// been read into 'Tok'.
  void Handle_Pragma(Token &Tok);

  /// EnterSourceFileWithLexer - Add a lexer to the top of the include stack and
  /// start lexing tokens from it instead of the current buffer.
  void EnterSourceFileWithLexer(Lexer *TheLexer, const DirectoryLookup *Dir);

  /// EnterSourceFileWithPTH - Add a lexer to the top of the include stack and
  /// start getting tokens from it using the PTH cache.
  void EnterSourceFileWithPTH(PTHLexer *PL, const DirectoryLookup *Dir);

  /// GetIncludeFilenameSpelling - Turn the specified lexer token into a fully
  /// checked and spelled filename, e.g. as an operand of #include. This returns
  /// true if the input filename was in <>'s or false if it was in ""'s.  The
  /// caller is expected to provide a buffer that is large enough to hold the
  /// spelling of the filename, but is also expected to handle the case when
  /// this method decides to use a different buffer.
  bool GetIncludeFilenameSpelling(SourceLocation Loc,
                                  const char *&BufStart, const char *&BufEnd);

  /// LookupFile - Given a "foo" or <foo> reference, look up the indicated file,
  /// return null on failure.  isAngled indicates whether the file reference is
  /// for system #include's or not (i.e. using <> instead of "").
  const FileEntry *LookupFile(const char *FilenameStart,const char *FilenameEnd,
                              bool isAngled, const DirectoryLookup *FromDir,
                              const DirectoryLookup *&CurDir);

  /// IsFileLexer - Returns true if we are lexing from a file and not a
  ///  pragma or a macro.
  static bool IsFileLexer(const Lexer* L, const PreprocessorLexer* P) {
    return L ? !L->isPragmaLexer() : P != 0;
  }

  static bool IsFileLexer(const IncludeStackInfo& I) {
    return IsFileLexer(I.TheLexer, I.ThePPLexer);
  }

  bool IsFileLexer() const {
    return IsFileLexer(CurLexer.get(), CurPPLexer);
  }

  //===--------------------------------------------------------------------===//
  // Caching stuff.
  void CachingLex(Token &Result);
  bool InCachingLexMode() const { return CurPPLexer == 0 && CurTokenLexer == 0;}
  void EnterCachingLexMode();
  void ExitCachingLexMode() {
    if (InCachingLexMode())
      RemoveTopOfLexerStack();
  }
  const Token &PeekAhead(unsigned N);
  void AnnotatePreviousCachedTokens(const Token &Tok);

  //===--------------------------------------------------------------------===//
  /// Handle*Directive - implement the various preprocessor directives.  These
  /// should side-effect the current preprocessor object so that the next call
  /// to Lex() will return the appropriate token next.
  void HandleLineDirective(Token &Tok);
  void HandleDigitDirective(Token &Tok);
  void HandleUserDiagnosticDirective(Token &Tok, bool isWarning);
  void HandleIdentSCCSDirective(Token &Tok);

  // File inclusion.
  void HandleIncludeDirective(Token &Tok,
                              const DirectoryLookup *LookupFrom = 0,
                              bool isImport = false);
  void HandleIncludeNextDirective(Token &Tok);
  void HandleIncludeMacrosDirective(Token &Tok);
  void HandleImportDirective(Token &Tok);

  // Macro handling.
  void HandleDefineDirective(Token &Tok);
  void HandleUndefDirective(Token &Tok);
  // HandleAssertDirective(Token &Tok);
  // HandleUnassertDirective(Token &Tok);

  // Conditional Inclusion.
  void HandleIfdefDirective(Token &Tok, bool isIfndef,
                            bool ReadAnyTokensBeforeDirective);
  void HandleIfDirective(Token &Tok, bool ReadAnyTokensBeforeDirective);
  void HandleEndifDirective(Token &Tok);
  void HandleElseDirective(Token &Tok);
  void HandleElifDirective(Token &Tok);

  // Pragmas.
  void HandlePragmaDirective();
public:
  void HandlePragmaOnce(Token &OnceTok);
  void HandlePragmaMark();
  void HandlePragmaPoison(Token &PoisonTok);
  void HandlePragmaSystemHeader(Token &SysHeaderTok);
  void HandlePragmaDependency(Token &DependencyTok);
  void HandlePragmaComment(Token &CommentTok);
  void HandleComment(SourceRange Comment);
};

/// PreprocessorFactory - A generic factory interface for lazily creating
///  Preprocessor objects on-demand when they are needed.
class PreprocessorFactory {
public:
  virtual ~PreprocessorFactory();
  virtual Preprocessor* CreatePreprocessor() = 0;
};
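
// A sketch of what a concrete factory might look like; the class name and the
// way it obtains its Preprocessor are hypothetical, illustrating only the
// interface above:
//
//   class ExistingPPFactory : public PreprocessorFactory {
//     Preprocessor &PP;
//   public:
//     explicit ExistingPPFactory(Preprocessor &pp) : PP(pp) {}
//     virtual Preprocessor *CreatePreprocessor() { return &PP; }
//   };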

/// \brief Abstract base class that describes a handler that will receive
/// source ranges for each of the comments encountered in the source file.
class CommentHandler {
public:
  virtual ~CommentHandler();

  virtual void HandleComment(Preprocessor &PP, SourceRange Comment) = 0;
};
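
// For illustration, a minimal handler that could be registered with
// Preprocessor::AddCommentHandler(); the class and its body are a hypothetical
// sketch:
//
//   class RecordingCommentHandler : public CommentHandler {
//   public:
//     virtual void HandleComment(Preprocessor &PP, SourceRange Comment) {
//       // ... record Comment.getBegin()/Comment.getEnd() somewhere ...
//     }
//   };
//
//   RecordingCommentHandler *CH = new RecordingCommentHandler();
//   PP.AddCommentHandler(CH);
//   // ... preprocess ...
//   PP.RemoveCommentHandler(CH);
//   delete CH;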

}  // end namespace clang

#endif