Preprocessor.h revision 32fca722dd974b8202d0fb9c71b6c185c0767da6
//===--- Preprocessor.h - C Language Family Preprocessor --------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
//  This file defines the Preprocessor interface.
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_CLANG_LEX_PREPROCESSOR_H
#define LLVM_CLANG_LEX_PREPROCESSOR_H

#include "clang/Lex/Lexer.h"
#include "clang/Lex/PTHLexer.h"
#include "clang/Lex/PPCallbacks.h"
#include "clang/Lex/TokenLexer.h"
#include "clang/Lex/PTHManager.h"
#include "clang/Basic/Diagnostic.h"
#include "clang/Basic/IdentifierTable.h"
#include "clang/Basic/SourceLocation.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/OwningPtr.h"
#include "llvm/Support/Allocator.h"
#include <cassert>
#include <cstring>
#include <string>
#include <vector>

namespace clang {

class SourceManager;
class FileManager;
class FileEntry;
class HeaderSearch;
class PragmaNamespace;
class PragmaHandler;
class ScratchBuffer;
class TargetInfo;
class PPCallbacks;
class DirectoryLookup;

/// Preprocessor - This object engages in a tight little dance with the lexer to
/// efficiently preprocess tokens.  Lexers know only about tokens within a
/// single source file, and don't know anything about preprocessor-level issues
/// like the #include stack, token expansion, etc.
///
class Preprocessor {
  Diagnostic        &Diags;
  const LangOptions &Features;
  TargetInfo        &Target;
  FileManager       &FileMgr;
  SourceManager     &SourceMgr;
  ScratchBuffer     *ScratchBuf;
  HeaderSearch      &HeaderInfo;

  /// PTH - An optional PTHManager object used for getting tokens from
  ///  a token cache rather than lexing the original source file.
  llvm::OwningPtr<PTHManager> PTH;

  /// BP - A BumpPtrAllocator object used to quickly allocate and release
  ///  objects internal to the Preprocessor.
  llvm::BumpPtrAllocator BP;

  /// Identifiers for builtin macros and other builtins.
  IdentifierInfo *Ident__LINE__, *Ident__FILE__;   // __LINE__, __FILE__
  IdentifierInfo *Ident__DATE__, *Ident__TIME__;   // __DATE__, __TIME__
  IdentifierInfo *Ident__INCLUDE_LEVEL__;          // __INCLUDE_LEVEL__
  IdentifierInfo *Ident__BASE_FILE__;              // __BASE_FILE__
  IdentifierInfo *Ident__TIMESTAMP__;              // __TIMESTAMP__
  IdentifierInfo *Ident_Pragma, *Ident__VA_ARGS__; // _Pragma, __VA_ARGS__

  SourceLocation DATELoc, TIMELoc;

  enum {
    /// MaxAllowedIncludeStackDepth - Maximum allowed depth of #includes.
    MaxAllowedIncludeStackDepth = 200
  };

  // State that is set before the preprocessor begins.
  bool KeepComments : 1;
  bool KeepMacroComments : 1;

  // State that changes while the preprocessor runs:
  bool DisableMacroExpansion : 1;  // True if macro expansion is disabled.
  bool InMacroArgs : 1;            // True if parsing fn macro invocation args.

  /// Identifiers - This is mapping/lookup information for all identifiers in
  /// the program, including program keywords.
  IdentifierTable Identifiers;

  /// Selectors - This table contains all the selectors in the program. Unlike
  /// IdentifierTable above, this table *isn't* populated by the preprocessor.
  /// It is declared/instantiated here because its role/lifetime is
  /// conceptually similar to the IdentifierTable. In addition, the current
  /// control flow (in clang::ParseAST()) makes it convenient to put it here.
  /// FIXME: Make sure the lifetime of Identifiers/Selectors *isn't* tied to
  /// the lifetime of the preprocessor.
  SelectorTable Selectors;

  /// PragmaHandlers - This tracks all of the pragmas that the client registered
  /// with this preprocessor.
  PragmaNamespace *PragmaHandlers;

  /// CurLexer - This is the current top of the stack that we're lexing from if
  /// not expanding a macro and we are lexing directly from source code.
  ///  Only one of CurLexer, CurPTHLexer, or CurTokenLexer will be non-null.
  llvm::OwningPtr<Lexer> CurLexer;

  /// CurPTHLexer - This is the current top of stack that we're lexing from if
  ///  not expanding from a macro and we are lexing from a PTH cache.
  ///  Only one of CurLexer, CurPTHLexer, or CurTokenLexer will be non-null.
  llvm::OwningPtr<PTHLexer> CurPTHLexer;

  /// CurPPLexer - This is the current top of the stack that we're lexing from
  ///  if not expanding a macro.  This is an alias for either CurLexer or
  ///  CurPTHLexer.
  PreprocessorLexer* CurPPLexer;

  /// CurDirLookup - The DirectoryLookup structure used to find the current
  /// FileEntry, if CurLexer is non-null and if applicable.  This allows us to
  /// implement #include_next and find directory-specific properties.
  const DirectoryLookup *CurDirLookup;

  /// CurTokenLexer - This is the current macro we are expanding, if we are
  /// expanding a macro.  One of CurLexer and CurTokenLexer must be null.
  llvm::OwningPtr<TokenLexer> CurTokenLexer;

  /// IncludeMacroStack - This keeps track of the stack of files currently
  /// #included, and macros currently being expanded from, not counting
  /// CurLexer/CurTokenLexer.
  struct IncludeStackInfo {
    Lexer                 *TheLexer;
    PTHLexer              *ThePTHLexer;
    PreprocessorLexer     *ThePPLexer;
    TokenLexer            *TheTokenLexer;
    const DirectoryLookup *TheDirLookup;

    IncludeStackInfo(Lexer *L, PTHLexer* P, PreprocessorLexer* PPL,
                     TokenLexer* TL, const DirectoryLookup *D)
      : TheLexer(L), ThePTHLexer(P), ThePPLexer(PPL), TheTokenLexer(TL),
        TheDirLookup(D) {}
  };
  std::vector<IncludeStackInfo> IncludeMacroStack;

  /// Callbacks - These are actions invoked when some preprocessor activity is
  /// encountered (e.g. a file is #included, etc).
  PPCallbacks *Callbacks;

  /// Macros - For each IdentifierInfo with 'HasMacro' set, we keep a mapping
  /// to the actual definition of the macro.
  llvm::DenseMap<IdentifierInfo*, MacroInfo*> Macros;

  /// MICache - A "freelist" of MacroInfo objects that can be reused for quick
  ///  allocation.
  std::vector<MacroInfo*> MICache;

  // Various statistics we track for performance analysis.
  unsigned NumDirectives, NumIncluded, NumDefined, NumUndefined, NumPragma;
  unsigned NumIf, NumElse, NumEndif;
  unsigned NumEnteredSourceFiles, MaxIncludeStackDepth;
  unsigned NumMacroExpanded, NumFnMacroExpanded, NumBuiltinMacroExpanded;
  unsigned NumFastMacroExpanded, NumTokenPaste, NumFastTokenPaste;
  unsigned NumSkipped;

  /// Predefines - This string is the predefined macros that the preprocessor
  /// should use from the command line etc.
  std::string Predefines;

  /// TokenLexerCache - Cache macro expanders to reduce malloc traffic.
  enum { TokenLexerCacheSize = 8 };
  unsigned NumCachedTokenLexers;
  TokenLexer *TokenLexerCache[TokenLexerCacheSize];

private:  // Cached tokens state.
  typedef std::vector<Token> CachedTokensTy;

  /// CachedTokens - Cached tokens are stored here when we do backtracking or
  /// lookahead. They are "lexed" by the CachingLex() method.
  CachedTokensTy CachedTokens;

  /// CachedLexPos - The position of the cached token that CachingLex() should
  /// "lex" next. If it points beyond the CachedTokens vector, it means that
  /// a normal Lex() should be invoked.
  CachedTokensTy::size_type CachedLexPos;

  /// BacktrackPositions - Stack of backtrack positions, allowing nested
  /// backtracks. The EnableBacktrackAtThisPos() method pushes a position to
  /// indicate where CachedLexPos should be set when the BackTrack() method is
  /// invoked (at which point the last position is popped).
  std::vector<CachedTokensTy::size_type> BacktrackPositions;

public:
  Preprocessor(Diagnostic &diags, const LangOptions &opts, TargetInfo &target,
               SourceManager &SM, HeaderSearch &Headers,
               IdentifierInfoLookup* IILookup = 0);

  ~Preprocessor();

  Diagnostic &getDiagnostics() const { return Diags; }
  const LangOptions &getLangOptions() const { return Features; }
  TargetInfo &getTargetInfo() const { return Target; }
  FileManager &getFileManager() const { return FileMgr; }
  SourceManager &getSourceManager() const { return SourceMgr; }
  HeaderSearch &getHeaderSearchInfo() const { return HeaderInfo; }

  IdentifierTable &getIdentifierTable() { return Identifiers; }
  SelectorTable &getSelectorTable() { return Selectors; }
  llvm::BumpPtrAllocator &getPreprocessorAllocator() { return BP; }


  void setPTHManager(PTHManager* pm);

  /// SetCommentRetentionState - Control whether or not the preprocessor retains
  /// comments in output.
  void SetCommentRetentionState(bool KeepComments, bool KeepMacroComments) {
    this->KeepComments = KeepComments | KeepMacroComments;
    this->KeepMacroComments = KeepMacroComments;
  }

  bool getCommentRetentionState() const { return KeepComments; }

  /// isCurrentLexer - Return true if we are lexing directly from the specified
  /// lexer.
  bool isCurrentLexer(const PreprocessorLexer *L) const {
    return CurPPLexer == L;
  }

  /// getCurrentFileLexer - Return the current file lexer being lexed from.
  /// Note that this ignores any potentially active macro expansions and
  /// _Pragma expansions going on at the time.
  PreprocessorLexer *getCurrentFileLexer() const;

  /// getPPCallbacks/setPPCallbacks - Accessors for preprocessor callbacks.
  /// Note that this class takes ownership of any PPCallbacks object given to
  /// it.
  PPCallbacks *getPPCallbacks() const { return Callbacks; }
  void setPPCallbacks(PPCallbacks *C) {
    delete Callbacks;
    Callbacks = C;
  }

  /// getMacroInfo - Given an identifier, return the MacroInfo it is #defined to
  /// or null if it isn't #define'd.
  MacroInfo *getMacroInfo(IdentifierInfo *II) const {
    return II->hasMacroDefinition() ? Macros.find(II)->second : 0;
  }
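
  // Example usage (illustrative sketch, not part of this interface): querying
  // whether an identifier is currently #define'd.  'PP' is assumed to be a
  // configured Preprocessor and 'Name' a hypothetical null-terminated string.
  //
  //   IdentifierInfo *II = PP.getIdentifierInfo(Name);
  //   if (MacroInfo *MI = PP.getMacroInfo(II)) {
  //     // 'II' names a macro; MI describes its current definition.
  //   }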

  /// setMacroInfo - Specify a macro for this identifier.
  ///
  void setMacroInfo(IdentifierInfo *II, MacroInfo *MI);

  /// macro_iterator/macro_begin/macro_end - This allows you to walk the current
  /// state of the macro table.  This visits every currently-defined macro.
  typedef llvm::DenseMap<IdentifierInfo*,
                         MacroInfo*>::const_iterator macro_iterator;
  macro_iterator macro_begin() const { return Macros.begin(); }
  macro_iterator macro_end() const { return Macros.end(); }
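
  // Example usage (illustrative sketch): walking every currently-defined
  // macro.  'PP' is assumed to be a configured Preprocessor.
  //
  //   for (Preprocessor::macro_iterator I = PP.macro_begin(),
  //                                     E = PP.macro_end(); I != E; ++I) {
  //     IdentifierInfo *Name = I->first;   // the macro's name
  //     MacroInfo *MI = I->second;         // its current definition
  //   }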

  const std::string &getPredefines() const { return Predefines; }

  /// setPredefines - Set the predefines for this Preprocessor.  These
  /// predefines are automatically injected when parsing the main file.
  void setPredefines(const char *P) { Predefines = P; }
  void setPredefines(const std::string &P) { Predefines = P; }
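
  // Example usage (illustrative sketch): injecting command-line style
  // definitions before the main file is entered.  The macro name
  // 'MYPROJECT_DEBUG' is a made-up placeholder.
  //
  //   PP.setPredefines("#define MYPROJECT_DEBUG 1\n");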

  /// getIdentifierInfo - Return information about the specified preprocessor
  /// identifier token.  The version of this method that takes two character
  /// pointers is preferred unless the identifier is already available as a
  /// string (this avoids allocation and copying of memory to construct an
  /// std::string).
  IdentifierInfo *getIdentifierInfo(const char *NameStart,
                                    const char *NameEnd) {
    return &Identifiers.get(NameStart, NameEnd);
  }
  IdentifierInfo *getIdentifierInfo(const char *NameStr) {
    return getIdentifierInfo(NameStr, NameStr+strlen(NameStr));
  }
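
  // Example usage (illustrative sketch): both overloads name the same entry in
  // the identifier table, so the returned pointers compare equal.
  //
  //   const char Spelling[] = "size_t";
  //   IdentifierInfo *A = PP.getIdentifierInfo(Spelling, Spelling + 6);
  //   IdentifierInfo *B = PP.getIdentifierInfo("size_t");
  //   assert(A == B && "identifiers are uniqued");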

  /// AddPragmaHandler - Add the specified pragma handler to the preprocessor.
  /// If 'Namespace' is non-null, then it is a token required to exist on the
  /// pragma line before the pragma string starts, e.g. "STDC" or "GCC".
  void AddPragmaHandler(const char *Namespace, PragmaHandler *Handler);

  /// RemovePragmaHandler - Remove the specific pragma handler from
  /// the preprocessor. If \arg Namespace is non-null, then it should
  /// be the namespace that \arg Handler was added to. It is an error
  /// to remove a handler that has not been registered.
  void RemovePragmaHandler(const char *Namespace, PragmaHandler *Handler);
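
  // Example usage (illustrative sketch): registering and later removing a
  // handler for '#pragma GCC my_pragma'.  'MyPragmaHandler' is a hypothetical
  // client-defined PragmaHandler subclass; the PragmaHandler interface itself
  // is declared in clang/Lex/Pragma.h.
  //
  //   MyPragmaHandler *H = new MyPragmaHandler(/*...*/);
  //   PP.AddPragmaHandler("GCC", H);
  //   // ... preprocess ...
  //   PP.RemovePragmaHandler("GCC", H);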

  /// EnterMainSourceFile - Enter the specified FileID as the main source file,
  /// which implicitly adds the builtin defines etc.
  void EnterMainSourceFile();
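
  // Example usage (illustrative sketch): the typical top-level driver loop.
  // It is assumed that the SourceManager already has a main file set up and
  // that 'PP' is fully configured.
  //
  //   PP.EnterMainSourceFile();
  //   Token Tok;
  //   do {
  //     PP.Lex(Tok);                     // one fully preprocessed token
  //   } while (Tok.isNot(tok::eof));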

  /// EnterSourceFile - Add a source file to the top of the include stack and
  /// start lexing tokens from it instead of the current buffer.
  void EnterSourceFile(FileID CurFileID, const DirectoryLookup *Dir);

  /// EnterMacro - Add a Macro to the top of the include stack and start lexing
  /// tokens from it instead of the current buffer.  Args specifies the
  /// tokens input to a function-like macro.
  ///
  /// ILEnd specifies the location of the ')' for a function-like macro or the
  /// identifier for an object-like macro.
  void EnterMacro(Token &Identifier, SourceLocation ILEnd, MacroArgs *Args);

  /// EnterTokenStream - Add a "macro" context to the top of the include stack,
  /// which will cause the lexer to start returning the specified tokens.
  ///
  /// If DisableMacroExpansion is true, tokens lexed from the token stream will
  /// not be subject to further macro expansion.  Otherwise, these tokens will
  /// be re-macro-expanded when/if expansion is enabled.
  ///
  /// If OwnsTokens is false, this method assumes that the specified stream of
  /// tokens has a permanent owner somewhere, so they do not need to be copied.
  /// If it is true, it assumes the array of tokens is allocated with new[] and
  /// must be freed.
  ///
  void EnterTokenStream(const Token *Toks, unsigned NumToks,
                        bool DisableMacroExpansion, bool OwnsTokens);
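
  // Example usage (illustrative sketch): replaying a previously collected
  // token sequence.  'Saved' is a hypothetical std::vector<Token> owned by the
  // caller, so OwnsTokens is false and expansion of the replayed tokens stays
  // enabled.
  //
  //   PP.EnterTokenStream(&Saved[0], Saved.size(),
  //                       /*DisableMacroExpansion=*/false,
  //                       /*OwnsTokens=*/false);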

  /// RemoveTopOfLexerStack - Pop the current lexer/macro exp off the top of the
  /// lexer stack.  This should only be used in situations where the current
  /// state of the top-of-stack lexer is known.
  void RemoveTopOfLexerStack();

  /// EnableBacktrackAtThisPos - From the point that this method is called, and
  /// until CommitBacktrackedTokens() or Backtrack() is called, the Preprocessor
  /// keeps track of the lexed tokens so that a subsequent Backtrack() call will
  /// make the Preprocessor re-lex the same tokens.
  ///
  /// Nested backtracks are allowed, meaning that EnableBacktrackAtThisPos can
  /// be called multiple times and CommitBacktrackedTokens/Backtrack calls will
  /// be combined with the EnableBacktrackAtThisPos calls in reverse order.
  ///
  /// NOTE: *DO NOT* forget to call either CommitBacktrackedTokens or Backtrack
  /// at some point after EnableBacktrackAtThisPos. If you don't, caching of
  /// tokens will continue indefinitely.
  ///
  void EnableBacktrackAtThisPos();

  /// CommitBacktrackedTokens - Disable the last EnableBacktrackAtThisPos call.
  void CommitBacktrackedTokens();

  /// Backtrack - Make the Preprocessor re-lex the tokens that were lexed since
  /// EnableBacktrackAtThisPos() was previously called.
  void Backtrack();

  /// isBacktrackEnabled - True if EnableBacktrackAtThisPos() was called and
  /// caching of tokens is on.
  bool isBacktrackEnabled() const { return !BacktrackPositions.empty(); }
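
  // Example usage (illustrative sketch): tentatively lexing a few tokens and
  // then deciding whether to keep or replay them.  'LooksLikeCast' stands in
  // for whatever client-specific test is being made.
  //
  //   PP.EnableBacktrackAtThisPos();
  //   Token T1, T2;
  //   PP.Lex(T1);
  //   PP.Lex(T2);
  //   if (LooksLikeCast(T1, T2))
  //     PP.CommitBacktrackedTokens();   // keep the tokens, stop caching
  //   else
  //     PP.Backtrack();                 // the same tokens will be re-lexed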

  /// Lex - To lex a token from the preprocessor, just pull a token from the
  /// current lexer or macro object.
  void Lex(Token &Result) {
    if (CurLexer)
      CurLexer->Lex(Result);
    else if (CurPTHLexer)
      CurPTHLexer->Lex(Result);
    else if (CurTokenLexer)
      CurTokenLexer->Lex(Result);
    else
      CachingLex(Result);
  }

  /// LexNonComment - Lex a token.  If it's a comment, keep lexing until we get
  /// something not a comment.  This is useful in -E -C mode where comments
  /// would foul up preprocessor directive handling.
  void LexNonComment(Token &Result) {
    do
      Lex(Result);
    while (Result.getKind() == tok::comment);
  }

  /// LexUnexpandedToken - This is just like Lex, but this disables macro
  /// expansion of identifier tokens.
  void LexUnexpandedToken(Token &Result) {
    // Disable macro expansion.
    bool OldVal = DisableMacroExpansion;
    DisableMacroExpansion = true;
    // Lex the token.
    Lex(Result);

    // Reenable it.
    DisableMacroExpansion = OldVal;
  }

  /// LookAhead - This peeks ahead N tokens and returns that token without
  /// consuming any tokens.  LookAhead(0) returns the next token that would be
  /// returned by Lex(), LookAhead(1) returns the token after it, etc.  This
  /// returns normal tokens after phase 5.  As such, it is equivalent to using
  /// 'Lex', not 'LexUnexpandedToken'.
  const Token &LookAhead(unsigned N) {
    if (CachedLexPos + N < CachedTokens.size())
      return CachedTokens[CachedLexPos+N];
    else
      return PeekAhead(N+1);
  }
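
  // Example usage (illustrative sketch): peeking without consuming.  The
  // tokens are copied because a later LookAhead call may grow the internal
  // cache; after these calls the next PP.Lex() still returns 'Next'.
  //
  //   Token Next  = PP.LookAhead(0);   // token Lex() would return next
  //   Token After = PP.LookAhead(1);   // the one after that
  //   if (Next.is(tok::l_paren) && After.is(tok::star)) {
  //     // looks like the start of '(*' ...
  //   }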

  /// RevertCachedTokens - When backtracking is enabled and tokens are cached,
  /// this allows reverting a specific number of tokens.
  /// Note that the number of tokens being reverted should be up to the last
  /// backtrack position, not more.
  void RevertCachedTokens(unsigned N) {
    assert(isBacktrackEnabled() &&
           "Should only be called when tokens are cached for backtracking");
    assert(signed(CachedLexPos) - signed(N) >= signed(BacktrackPositions.back())
         && "Should revert tokens up to the last backtrack position, not more");
    assert(signed(CachedLexPos) - signed(N) >= 0 &&
           "Corrupted backtrack positions ?");
    CachedLexPos -= N;
  }

  /// EnterToken - Enters a token in the token stream to be lexed next. If
  /// BackTrack() is called afterwards, the token will remain at the insertion
  /// point.
  void EnterToken(const Token &Tok) {
    EnterCachingLexMode();
    CachedTokens.insert(CachedTokens.begin()+CachedLexPos, Tok);
  }

  /// AnnotateCachedTokens - We notify the Preprocessor that if it is caching
  /// tokens (because backtrack is enabled) it should replace the most recent
  /// cached tokens with the given annotation token. This function has no effect
  /// if backtracking is not enabled.
  ///
  /// Note that the use of this function is just for optimization, so that the
  /// cached tokens don't get re-parsed and re-resolved after a backtrack is
  /// invoked.
  void AnnotateCachedTokens(const Token &Tok) {
    assert(Tok.isAnnotation() && "Expected annotation token");
    if (CachedLexPos != 0 && isBacktrackEnabled())
      AnnotatePreviousCachedTokens(Tok);
  }

  /// \brief Replace the last token with an annotation token.
  ///
  /// Like AnnotateCachedTokens(), this routine replaces an
  /// already-parsed (and resolved) token with an annotation
  /// token. However, this routine only replaces the last token with
  /// the annotation token; it does not affect any other cached
  /// tokens. This function has no effect if backtracking is not
  /// enabled.
  void ReplaceLastTokenWithAnnotation(const Token &Tok) {
    assert(Tok.isAnnotation() && "Expected annotation token");
    if (CachedLexPos != 0 && isBacktrackEnabled())
      CachedTokens[CachedLexPos-1] = Tok;
  }

  /// Diag - Forwarding function for diagnostics.  This emits a diagnostic at
  /// the specified Token's location, translating the token's start
  /// position in the current buffer into a FullSourceLoc for rendering.
  DiagnosticBuilder Diag(SourceLocation Loc, unsigned DiagID) {
    return Diags.Report(FullSourceLoc(Loc, getSourceManager()), DiagID);
  }

  DiagnosticBuilder Diag(const Token &Tok, unsigned DiagID) {
    return Diags.Report(FullSourceLoc(Tok.getLocation(), getSourceManager()),
                        DiagID);
  }
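
  // Example usage (illustrative sketch): reporting a problem at a token's
  // location.  'diag::err_expected_rparen' is used here only as a
  // representative diagnostic ID; any ID known to the Diagnostic object works
  // the same way, and extra arguments can be streamed into the returned
  // DiagnosticBuilder.
  //
  //   if (Tok.isNot(tok::r_paren))
  //     PP.Diag(Tok, diag::err_expected_rparen);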

  /// getSpelling() - Return the 'spelling' of the Tok token.  The spelling of a
  /// token is the characters used to represent the token in the source file
  /// after trigraph expansion and escaped-newline folding.  In particular, this
  /// wants to get the true, uncanonicalized, spelling of things like digraphs,
  /// UCNs, etc.
  std::string getSpelling(const Token &Tok) const;

  /// getSpelling - This method is used to get the spelling of a token into a
  /// preallocated buffer, instead of as an std::string.  The caller is required
  /// to allocate enough space for the token, which is guaranteed to be at least
  /// Tok.getLength() bytes long.  The length of the actual result is returned.
  ///
  /// Note that this method may do two possible things: it may either fill in
  /// the buffer specified with characters, or it may *change the input pointer*
  /// to point to a constant buffer with the data already in it (avoiding a
  /// copy).  The caller is not allowed to modify the returned buffer pointer
  /// if an internal buffer is returned.
  unsigned getSpelling(const Token &Tok, const char *&Buffer) const;
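
  // Example usage (illustrative sketch): the buffer-based overload.  Note that
  // 'Ptr' may be redirected to an internal buffer instead of filling 'Buf',
  // so only the range [Ptr, Ptr+Len) should be read afterwards.
  //
  //   std::vector<char> Buf(Tok.getLength());
  //   const char *Ptr = &Buf[0];
  //   unsigned Len = PP.getSpelling(Tok, Ptr);
  //   std::string Spelling(Ptr, Ptr + Len);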

  /// getSpellingOfSingleCharacterNumericConstant - Tok is a numeric constant
  /// with length 1, return the character.
  char getSpellingOfSingleCharacterNumericConstant(const Token &Tok) const {
    assert(Tok.is(tok::numeric_constant) &&
           Tok.getLength() == 1 && "Called on unsupported token");
    assert(!Tok.needsCleaning() && "Token can't need cleaning with length 1");

    // If the token is carrying a literal data pointer, just use it.
    if (const char *D = Tok.getLiteralData())
      return *D;

    // Otherwise, fall back on getCharacterData, which is slower, but always
    // works.
    return *SourceMgr.getCharacterData(Tok.getLocation());
  }

  /// CreateString - Plop the specified string into a scratch buffer and set the
  /// specified token's location and length to it.  If specified, the source
  /// location provides a location of the instantiation point of the token.
  void CreateString(const char *Buf, unsigned Len,
                    Token &Tok, SourceLocation SourceLoc = SourceLocation());

  /// \brief Computes the source location just past the end of the
  /// token at this source location.
  ///
  /// This routine can be used to produce a source location that
  /// points just past the end of the token referenced by \p Loc, and
  /// is generally used when a diagnostic needs to point just after a
  /// token where it expected something different from what it received. If
  /// the returned source location would not be meaningful (e.g., if
  /// it points into a macro), this routine returns an invalid
  /// source location.
  SourceLocation getLocForEndOfToken(SourceLocation Loc);

  /// DumpToken - Print the token to stderr, used for debugging.
  ///
  void DumpToken(const Token &Tok, bool DumpFlags = false) const;
  void DumpLocation(SourceLocation Loc) const;
  void DumpMacro(const MacroInfo &MI) const;

  /// AdvanceToTokenCharacter - Given a location that specifies the start of a
  /// token, return a new location that specifies a character within the token.
  SourceLocation AdvanceToTokenCharacter(SourceLocation TokStart, unsigned Char);

  /// IncrementPasteCounter - Increment the counters for the number of token
  /// paste operations performed.  If fast was specified, this is a 'fast paste'
  /// case we handled.
  ///
  void IncrementPasteCounter(bool isFast) {
    if (isFast)
      ++NumFastTokenPaste;
    else
      ++NumTokenPaste;
  }

  void PrintStats();

  /// HandleMicrosoftCommentPaste - When the macro expander pastes together a
  /// comment (/##/) in Microsoft mode, this method handles updating the current
  /// state, returning the token on the next source line.
  void HandleMicrosoftCommentPaste(Token &Tok);

  //===--------------------------------------------------------------------===//
  // Preprocessor callback methods.  These are invoked by a lexer as various
  // directives and events are found.

  /// LookUpIdentifierInfo - Given a tok::identifier token, look up the
  /// identifier information for the token and install it into the token.
  IdentifierInfo *LookUpIdentifierInfo(Token &Identifier,
                                       const char *BufPtr = 0);

  /// HandleIdentifier - This callback is invoked when the lexer reads an
  /// identifier and has filled in the token's IdentifierInfo member.  This
  /// callback potentially macro expands it or turns it into a named token (like
  /// 'for').
  void HandleIdentifier(Token &Identifier);


  /// HandleEndOfFile - This callback is invoked when the lexer hits the end of
  /// the current file.  This either returns the EOF token and returns true, or
  /// pops a level off the include stack and returns false, at which point the
  /// client should call lex again.
  bool HandleEndOfFile(Token &Result, bool isEndOfMacro = false);

  /// HandleEndOfTokenLexer - This callback is invoked when the current
  /// TokenLexer hits the end of its token stream.
  bool HandleEndOfTokenLexer(Token &Result);

  /// HandleDirective - This callback is invoked when the lexer sees a # token
  /// at the start of a line.  This consumes the directive, modifies the
  /// lexer/preprocessor state, and advances the lexer(s) so that the next token
  /// read is the correct one.
  void HandleDirective(Token &Result);

  /// CheckEndOfDirective - Ensure that the next token is a tok::eom token.  If
  /// not, emit a diagnostic and consume up until the eom.
  void CheckEndOfDirective(const char *Directive);

  /// DiscardUntilEndOfDirective - Read and discard all tokens remaining on the
  /// current line until the tok::eom token is found.
  void DiscardUntilEndOfDirective();

private:

  void PushIncludeMacroStack() {
    IncludeMacroStack.push_back(IncludeStackInfo(CurLexer.take(),
                                                 CurPTHLexer.take(),
                                                 CurPPLexer,
                                                 CurTokenLexer.take(),
                                                 CurDirLookup));
    CurPPLexer = 0;
  }

  void PopIncludeMacroStack() {
    CurLexer.reset(IncludeMacroStack.back().TheLexer);
    CurPTHLexer.reset(IncludeMacroStack.back().ThePTHLexer);
    CurPPLexer = IncludeMacroStack.back().ThePPLexer;
    CurTokenLexer.reset(IncludeMacroStack.back().TheTokenLexer);
    CurDirLookup  = IncludeMacroStack.back().TheDirLookup;
    IncludeMacroStack.pop_back();
  }

  /// AllocateMacroInfo - Allocate a new MacroInfo object with the provided
  ///  SourceLocation.
  MacroInfo* AllocateMacroInfo(SourceLocation L);

  /// ReleaseMacroInfo - Release the specified MacroInfo.  This memory will
  ///  be reused for allocating new MacroInfo objects.
  void ReleaseMacroInfo(MacroInfo* MI);

  /// isInPrimaryFile - Return true if we're in the top-level file, not in a
  /// #include.
  bool isInPrimaryFile() const;

  /// ReadMacroName - Lex and validate a macro name, which occurs after a
  /// #define or #undef.  This emits a diagnostic, sets the token kind to eom,
  /// and discards the rest of the macro line if the macro name is invalid.
  void ReadMacroName(Token &MacroNameTok, char isDefineUndef = 0);

  /// ReadMacroDefinitionArgList - The ( starting an argument list of a macro
  /// definition has just been read.  Lex the rest of the arguments and the
  /// closing ), updating MI with what we learn.  Return true if an error occurs
  /// parsing the arg list.
  bool ReadMacroDefinitionArgList(MacroInfo *MI);

  /// SkipExcludedConditionalBlock - We just read a #if or related directive and
  /// decided that the subsequent tokens are in the #if'd out portion of the
  /// file.  Lex the rest of the file, until we see an #endif.  If
  /// FoundNonSkipPortion is true, then we have already emitted code for part of
  /// this #if directive, so #else/#elif blocks should never be entered. If
  /// FoundElse is false, then #else directives are ok; if not, then we have
  /// already seen one, so an #else directive is a duplicate.  When this returns,
  /// the caller can lex the first valid token.
  void SkipExcludedConditionalBlock(SourceLocation IfTokenLoc,
                                    bool FoundNonSkipPortion, bool FoundElse);

  /// PTHSkipExcludedConditionalBlock - A fast PTH version of
  ///  SkipExcludedConditionalBlock.
  void PTHSkipExcludedConditionalBlock();

  /// EvaluateDirectiveExpression - Evaluate an integer constant expression that
  /// may occur after a #if or #elif directive and return it as a bool.  If the
  /// expression is equivalent to "!defined(X)" return X in IfNDefMacro.
  bool EvaluateDirectiveExpression(IdentifierInfo *&IfNDefMacro);

  /// RegisterBuiltinPragmas - Install the standard preprocessor pragmas:
  /// #pragma GCC poison/system_header/dependency and #pragma once.
  void RegisterBuiltinPragmas();

  /// RegisterBuiltinMacros - Register builtin macros, such as __LINE__ with the
  /// identifier table.
  void RegisterBuiltinMacros();
  IdentifierInfo *RegisterBuiltinMacro(const char *Name);

  /// HandleMacroExpandedIdentifier - If an identifier token is read that is to
  /// be expanded as a macro, handle it and return the next token as 'Tok'.  If
  /// the macro should not be expanded return true, otherwise return false.
  bool HandleMacroExpandedIdentifier(Token &Tok, MacroInfo *MI);

  /// isNextPPTokenLParen - Determine whether the next preprocessor token to be
  /// lexed is a '('.  If so, consume the token and return true; if not, this
  /// method should have no observable side-effect on the lexed tokens.
  bool isNextPPTokenLParen();

  /// ReadFunctionLikeMacroArgs - After reading "MACRO(", this method is
  /// invoked to read all of the formal arguments specified for the macro
  /// invocation.  This returns null on error.
  MacroArgs *ReadFunctionLikeMacroArgs(Token &MacroName, MacroInfo *MI,
                                       SourceLocation &InstantiationEnd);

  /// ExpandBuiltinMacro - If an identifier token is read that is to be expanded
  /// as a builtin macro, handle it and return the next token as 'Tok'.
  void ExpandBuiltinMacro(Token &Tok);

  /// Handle_Pragma - Read a _Pragma directive, slice it up, process it, then
  /// return the first token after the directive.  The _Pragma token has just
  /// been read into 'Tok'.
  void Handle_Pragma(Token &Tok);

  /// EnterSourceFileWithLexer - Add a lexer to the top of the include stack and
  /// start lexing tokens from it instead of the current buffer.
  void EnterSourceFileWithLexer(Lexer *TheLexer, const DirectoryLookup *Dir);

  /// EnterSourceFileWithPTH - Add a lexer to the top of the include stack and
  /// start getting tokens from it using the PTH cache.
  void EnterSourceFileWithPTH(PTHLexer *PL, const DirectoryLookup *Dir);

  /// GetIncludeFilenameSpelling - Turn the specified lexer token into a fully
  /// checked and spelled filename, e.g. as an operand of #include. This returns
  /// true if the input filename was in <>'s or false if it was in ""'s.  The
  /// caller is expected to provide a buffer that is large enough to hold the
  /// spelling of the filename, but is also expected to handle the case when
  /// this method decides to use a different buffer.
  bool GetIncludeFilenameSpelling(SourceLocation Loc,
                                  const char *&BufStart, const char *&BufEnd);

  /// LookupFile - Given a "foo" or <foo> reference, look up the indicated file,
  /// return null on failure.  isAngled indicates whether the file reference is
  /// for system #include's or not (i.e. using <> instead of "").
  const FileEntry *LookupFile(const char *FilenameStart,const char *FilenameEnd,
                              bool isAngled, const DirectoryLookup *FromDir,
                              const DirectoryLookup *&CurDir);

  /// IsFileLexer - Returns true if we are lexing from a file and not a
  ///  pragma or a macro.
  static bool IsFileLexer(const Lexer* L, const PreprocessorLexer* P) {
    return L ? !L->isPragmaLexer() : P != 0;
  }

  static bool IsFileLexer(const IncludeStackInfo& I) {
    return IsFileLexer(I.TheLexer, I.ThePPLexer);
  }

  bool IsFileLexer() const {
    return IsFileLexer(CurLexer.get(), CurPPLexer);
  }

  //===--------------------------------------------------------------------===//
  // Caching stuff.
  void CachingLex(Token &Result);
  bool InCachingLexMode() const { return CurPPLexer == 0 && CurTokenLexer == 0;}
  void EnterCachingLexMode();
  void ExitCachingLexMode() {
    if (InCachingLexMode())
      RemoveTopOfLexerStack();
  }
  const Token &PeekAhead(unsigned N);
  void AnnotatePreviousCachedTokens(const Token &Tok);

  //===--------------------------------------------------------------------===//
  /// Handle*Directive - implement the various preprocessor directives.  These
  /// should side-effect the current preprocessor object so that the next call
  /// to Lex() will return the appropriate token next.
  void HandleLineDirective(Token &Tok);
  void HandleDigitDirective(Token &Tok);
  void HandleUserDiagnosticDirective(Token &Tok, bool isWarning);
  void HandleIdentSCCSDirective(Token &Tok);

  // File inclusion.
  void HandleIncludeDirective(Token &Tok,
                              const DirectoryLookup *LookupFrom = 0,
                              bool isImport = false);
  void HandleIncludeNextDirective(Token &Tok);
  void HandleImportDirective(Token &Tok);

  // Macro handling.
  void HandleDefineDirective(Token &Tok);
  void HandleUndefDirective(Token &Tok);
  // HandleAssertDirective(Token &Tok);
  // HandleUnassertDirective(Token &Tok);

  // Conditional Inclusion.
  void HandleIfdefDirective(Token &Tok, bool isIfndef,
                            bool ReadAnyTokensBeforeDirective);
  void HandleIfDirective(Token &Tok, bool ReadAnyTokensBeforeDirective);
  void HandleEndifDirective(Token &Tok);
  void HandleElseDirective(Token &Tok);
  void HandleElifDirective(Token &Tok);

  // Pragmas.
  void HandlePragmaDirective();
public:
  void HandlePragmaOnce(Token &OnceTok);
  void HandlePragmaMark();
  void HandlePragmaPoison(Token &PoisonTok);
  void HandlePragmaSystemHeader(Token &SysHeaderTok);
  void HandlePragmaDependency(Token &DependencyTok);
  void HandlePragmaComment(Token &CommentTok);
};

/// PreprocessorFactory - A generic factory interface for lazily creating
///  Preprocessor objects on-demand when they are needed.
class PreprocessorFactory {
public:
  virtual ~PreprocessorFactory();
  virtual Preprocessor* CreatePreprocessor() = 0;
};

}  // end namespace clang

#endif