Preprocessor.h revision ed5c38682c056c147c8a4abb748b4f285de206dd
//===--- Preprocessor.h - C Language Family Preprocessor --------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
//  This file defines the Preprocessor interface.
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_CLANG_LEX_PREPROCESSOR_H
#define LLVM_CLANG_LEX_PREPROCESSOR_H

#include "clang/Lex/Lexer.h"
#include "clang/Lex/PPCallbacks.h"
#include "clang/Lex/TokenLexer.h"
#include "clang/Basic/IdentifierTable.h"
#include "clang/Basic/SourceLocation.h"
#include "llvm/ADT/DenseMap.h"
#include <cstring>
#include <string>
#include <vector>

namespace clang {

class SourceManager;
class FileManager;
class FileEntry;
class HeaderSearch;
class PragmaNamespace;
class PragmaHandler;
class ScratchBuffer;
class TargetInfo;
class PPCallbacks;
class DirectoryLookup;

/// Preprocessor - This object engages in a tight little dance with the lexer to
/// efficiently preprocess tokens.  Lexers know only about tokens within a
/// single source file, and don't know anything about preprocessor-level issues
/// like the #include stack, token expansion, etc.
///
class Preprocessor {
  Diagnostic        &Diags;
  const LangOptions &Features;
  TargetInfo        &Target;
  FileManager       &FileMgr;
  SourceManager     &SourceMgr;
  ScratchBuffer     *ScratchBuf;
  HeaderSearch      &HeaderInfo;

  /// Identifiers for builtin macros and other builtins.
  IdentifierInfo *Ident__LINE__, *Ident__FILE__;   // __LINE__, __FILE__
  IdentifierInfo *Ident__DATE__, *Ident__TIME__;   // __DATE__, __TIME__
  IdentifierInfo *Ident__INCLUDE_LEVEL__;          // __INCLUDE_LEVEL__
  IdentifierInfo *Ident__BASE_FILE__;              // __BASE_FILE__
  IdentifierInfo *Ident__TIMESTAMP__;              // __TIMESTAMP__
  IdentifierInfo *Ident_Pragma, *Ident__VA_ARGS__; // _Pragma, __VA_ARGS__

  SourceLocation DATELoc, TIMELoc;

  enum {
    /// MaxAllowedIncludeStackDepth - Maximum depth of #includes.
    MaxAllowedIncludeStackDepth = 200
  };

  // State that is set before the preprocessor begins.
  bool KeepComments : 1;
  bool KeepMacroComments : 1;

  // State that changes while the preprocessor runs:
  bool DisableMacroExpansion : 1;  // True if macro expansion is disabled.
  bool InMacroArgs : 1;            // True if parsing fn macro invocation args.

  /// CacheTokens - True when the lexed tokens are cached for backtracking.
  bool CacheTokens : 1;

  /// Identifiers - This is mapping/lookup information for all identifiers in
  /// the program, including program keywords.
  IdentifierTable Identifiers;

  /// Selectors - This table contains all the selectors in the program. Unlike
  /// IdentifierTable above, this table *isn't* populated by the preprocessor.
  /// It is declared/instantiated here because its role/lifetime is
  /// conceptually similar to that of the IdentifierTable. In addition, the
  /// current control flow (in clang::ParseAST()) makes it convenient to put here.
  /// FIXME: Make sure the lifetime of Identifiers/Selectors *isn't* tied to
  /// the lifetime of the preprocessor.
  SelectorTable Selectors;

  /// PragmaHandlers - This tracks all of the pragmas that the client registered
  /// with this preprocessor.
  PragmaNamespace *PragmaHandlers;

  /// CurLexer - This is the current top of the stack that we're lexing from if
  /// not expanding a macro.  One of CurLexer and CurTokenLexer must be null.
  Lexer *CurLexer;

  /// CurDirLookup - The DirectoryLookup structure used to find the current
  /// FileEntry, if CurLexer is non-null and if applicable.  This allows us to
  /// implement #include_next and find directory-specific properties.
  const DirectoryLookup *CurDirLookup;

  /// CurTokenLexer - This is the current macro we are expanding, if we are
  /// expanding a macro.  One of CurLexer and CurTokenLexer must be null.
  TokenLexer *CurTokenLexer;

  /// IncludeMacroStack - This keeps track of the stack of files currently
  /// #included, and macros currently being expanded from, not counting
  /// CurLexer/CurTokenLexer.
  struct IncludeStackInfo {
    Lexer *TheLexer;
    const DirectoryLookup *TheDirLookup;
    TokenLexer *TheTokenLexer;
    IncludeStackInfo(Lexer *L, const DirectoryLookup *D, TokenLexer *TL)
      : TheLexer(L), TheDirLookup(D), TheTokenLexer(TL) {
    }
  };
  std::vector<IncludeStackInfo> IncludeMacroStack;

  /// Callbacks - These are actions invoked when some preprocessor activity is
  /// encountered (e.g. a file is #included, etc).
  PPCallbacks *Callbacks;

  /// Macros - For each IdentifierInfo with 'HasMacro' set, we keep a mapping
  /// to the actual definition of the macro.
  llvm::DenseMap<IdentifierInfo*, MacroInfo*> Macros;

  // Various statistics we track for performance analysis.
  unsigned NumDirectives, NumIncluded, NumDefined, NumUndefined, NumPragma;
  unsigned NumIf, NumElse, NumEndif;
  unsigned NumEnteredSourceFiles, MaxIncludeStackDepth;
  unsigned NumMacroExpanded, NumFnMacroExpanded, NumBuiltinMacroExpanded;
  unsigned NumFastMacroExpanded, NumTokenPaste, NumFastTokenPaste;
  unsigned NumSkipped;

  /// Predefines - This string contains the predefined macros that the
  /// preprocessor should use from the command line, etc.
  std::string Predefines;

  /// TokenLexerCache - Cache macro expanders to reduce malloc traffic.
  enum { TokenLexerCacheSize = 8 };
  unsigned NumCachedTokenLexers;
  TokenLexer *TokenLexerCache[TokenLexerCacheSize];

private:  // Cached tokens state.
  typedef std::vector<Token> CachedTokensTy;

  /// CachedTokens - Cached tokens are stored here when we do backtracking or
  /// lookahead. They are "lexed" by the CachingLex() method.
  CachedTokensTy CachedTokens;

  /// CachedLexPos - The position of the cached token that CachingLex() should
  /// "lex" next. If it points beyond the CachedTokens vector, it means that
  /// a normal Lex() should be invoked.
  CachedTokensTy::size_type CachedLexPos;

  /// BacktrackPositions - Stack of backtrack positions, allowing nested
  /// backtracks. The EnableBacktrackAtThisPos() method pushes a position to
  /// indicate where CachedLexPos should be set when the Backtrack() method is
  /// invoked (at which point the last position is popped).
  std::vector<CachedTokensTy::size_type> BacktrackPositions;

public:
  Preprocessor(Diagnostic &diags, const LangOptions &opts, TargetInfo &target,
               SourceManager &SM, HeaderSearch &Headers);
  ~Preprocessor();
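
  // A minimal construction sketch; it assumes the driver has already built the
  // Diagnostic, LangOptions, TargetInfo, SourceManager and HeaderSearch objects
  // named below.
  //
  //   Preprocessor PP(Diags, LangOpts, Target, SourceMgr, HeaderInfo);
  //   PP.setPredefines("#define MY_PREDEFINE 1\n");  // hypothetical predefine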

  Diagnostic &getDiagnostics() const { return Diags; }
  const LangOptions &getLangOptions() const { return Features; }
  TargetInfo &getTargetInfo() const { return Target; }
  FileManager &getFileManager() const { return FileMgr; }
  SourceManager &getSourceManager() const { return SourceMgr; }
  HeaderSearch &getHeaderSearchInfo() const { return HeaderInfo; }

  IdentifierTable &getIdentifierTable() { return Identifiers; }
  SelectorTable &getSelectorTable() { return Selectors; }

  inline FullSourceLoc getFullLoc(SourceLocation Loc) const {
    return FullSourceLoc(Loc, getSourceManager());
  }

  /// SetCommentRetentionState - Control whether or not the preprocessor retains
  /// comments in output.
  void SetCommentRetentionState(bool KeepComments, bool KeepMacroComments) {
    this->KeepComments = KeepComments | KeepMacroComments;
    this->KeepMacroComments = KeepMacroComments;
  }

  bool getCommentRetentionState() const { return KeepComments; }

  /// isCurrentLexer - Return true if we are lexing directly from the specified
  /// lexer.
  bool isCurrentLexer(const Lexer *L) const {
    return CurLexer == L;
  }

  /// getCurrentFileLexer - Return the current file lexer being lexed from.  Note
  /// that this ignores any potentially active macro expansions and _Pragma
  /// expansions going on at the time.
  Lexer *getCurrentFileLexer() const;

  /// getPPCallbacks/setPPCallbacks - Accessors for preprocessor callbacks.
  /// Note that this class takes ownership of any PPCallbacks object given to
  /// it.
  PPCallbacks *getPPCallbacks() const { return Callbacks; }
  void setPPCallbacks(PPCallbacks *C) {
    delete Callbacks;
    Callbacks = C;
  }
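
  // Usage sketch: installing a callbacks object.  MyPPCallbacks stands in for
  // a client-defined PPCallbacks subclass (see PPCallbacks.h for the hooks it
  // can override); the preprocessor takes ownership and will delete it.
  //
  //   PP.setPPCallbacks(new MyPPCallbacks());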

  /// getMacroInfo - Given an identifier, return the MacroInfo it is #defined to
  /// or null if it isn't #define'd.
  MacroInfo *getMacroInfo(IdentifierInfo *II) const {
    return II->hasMacroDefinition() ? Macros.find(II)->second : 0;
  }

  /// setMacroInfo - Specify a macro for this identifier.
  ///
  void setMacroInfo(IdentifierInfo *II, MacroInfo *MI);

  const std::string &getPredefines() const { return Predefines; }
  /// setPredefines - Set the predefines for this Preprocessor.  These
  /// predefines are automatically injected when parsing the main file.
  void setPredefines(const char *P) { Predefines = P; }
  void setPredefines(const std::string &P) { Predefines = P; }

  /// getIdentifierInfo - Return information about the specified preprocessor
  /// identifier token.  The version of this method that takes two character
  /// pointers is preferred unless the identifier is already available as a
  /// string (this avoids allocation and copying of memory to construct an
  /// std::string).
  IdentifierInfo *getIdentifierInfo(const char *NameStart,
                                    const char *NameEnd) {
    return &Identifiers.get(NameStart, NameEnd);
  }
  IdentifierInfo *getIdentifierInfo(const char *NameStr) {
    return getIdentifierInfo(NameStr, NameStr+strlen(NameStr));
  }
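
  // Sketch: checking whether a name is currently #define'd, using the
  // identifier table together with getMacroInfo() above ("NDEBUG" is only an
  // example name):
  //
  //   IdentifierInfo *II = PP.getIdentifierInfo("NDEBUG");
  //   if (MacroInfo *MI = PP.getMacroInfo(II)) {
  //     // the macro is defined; MI describes its definition
  //   }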

  /// AddPragmaHandler - Add the specified pragma handler to the preprocessor.
  /// If 'Namespace' is non-null, then it is a token required to exist on the
  /// pragma line before the pragma string starts, e.g. "STDC" or "GCC".
  void AddPragmaHandler(const char *Namespace, PragmaHandler *Handler);
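
  // Sketch: registering a handler for "#pragma GCC my_pragma".  MyPragmaHandler
  // stands in for a client-defined PragmaHandler subclass (see Pragma.h); the
  // constructor argument shown is illustrative only.
  //
  //   IdentifierInfo *Name = PP.getIdentifierInfo("my_pragma");
  //   PP.AddPragmaHandler("GCC", new MyPragmaHandler(Name));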

  /// EnterMainSourceFile - Enter the main source file of the translation unit,
  /// which implicitly adds the builtin defines etc.
  void EnterMainSourceFile();

  /// EnterSourceFile - Add a source file to the top of the include stack and
  /// start lexing tokens from it instead of the current buffer.
  void EnterSourceFile(unsigned CurFileID, const DirectoryLookup *Dir);

  /// EnterMacro - Add a Macro to the top of the include stack and start lexing
  /// tokens from it instead of the current buffer.  Args specifies the
  /// tokens input to a function-like macro.
  void EnterMacro(Token &Identifier, MacroArgs *Args);

  /// EnterTokenStream - Add a "macro" context to the top of the include stack,
  /// which will cause the lexer to start returning the specified tokens.
  ///
  /// If DisableMacroExpansion is true, tokens lexed from the token stream will
  /// not be subject to further macro expansion.  Otherwise, these tokens will
  /// be re-macro-expanded when/if expansion is enabled.
  ///
  /// If OwnsTokens is false, this method assumes that the specified stream of
  /// tokens has a permanent owner somewhere, so they do not need to be copied.
  /// If it is true, it assumes the array of tokens is allocated with new[] and
  /// must be freed.
  ///
  void EnterTokenStream(const Token *Toks, unsigned NumToks,
                        bool DisableMacroExpansion, bool OwnsTokens);
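
  // Sketch: replaying a previously recorded token sequence.  Because OwnsTokens
  // is true here, the array must come from new[] and the preprocessor will free
  // it; NumToks is however many tokens the caller recorded.
  //
  //   Token *Toks = new Token[NumToks];
  //   // ... copy the recorded tokens into Toks ...
  //   PP.EnterTokenStream(Toks, NumToks, /*DisableMacroExpansion=*/true,
  //                       /*OwnsTokens=*/true);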

  /// RemoveTopOfLexerStack - Pop the current lexer/macro exp off the top of the
  /// lexer stack.  This should only be used in situations where the current
  /// state of the top-of-stack lexer is known.
  void RemoveTopOfLexerStack();

  /// EnableBacktrackAtThisPos - From the point that this method is called, and
  /// until CommitBacktrackedTokens() or Backtrack() is called, the Preprocessor
  /// keeps track of the lexed tokens so that a subsequent Backtrack() call will
  /// make the Preprocessor re-lex the same tokens.
  ///
  /// Nested backtracks are allowed, meaning that EnableBacktrackAtThisPos can
  /// be called multiple times and CommitBacktrackedTokens/Backtrack calls will
  /// be combined with the EnableBacktrackAtThisPos calls in reverse order.
  ///
  /// NOTE: *DO NOT* forget to call either CommitBacktrackedTokens or Backtrack
  /// at some point after EnableBacktrackAtThisPos. If you don't, caching of
  /// tokens will continue indefinitely.
  ///
  void EnableBacktrackAtThisPos();

  /// CommitBacktrackedTokens - Disable the last EnableBacktrackAtThisPos call.
  void CommitBacktrackedTokens();

  /// Backtrack - Make Preprocessor re-lex the tokens that were lexed since
  /// EnableBacktrackAtThisPos() was previously called.
  void Backtrack();

  /// isBacktrackEnabled - True if EnableBacktrackAtThisPos() was called and
  /// caching of tokens is on.
  bool isBacktrackEnabled() const { return CacheTokens; }
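
  // Sketch of tentative lexing built on the backtracking API above:
  //
  //   PP.EnableBacktrackAtThisPos();
  //   Token Tok;
  //   PP.Lex(Tok);
  //   if (/* Tok is what we hoped for */)
  //     PP.CommitBacktrackedTokens();  // keep the tokens already consumed
  //   else
  //     PP.Backtrack();                // the same tokens will be re-lexed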

  /// Lex - To lex a token from the preprocessor, just pull a token from the
  /// current lexer or macro object.
  void Lex(Token &Result) {
    if (CurLexer)
      CurLexer->Lex(Result);
    else if (CurTokenLexer)
      CurTokenLexer->Lex(Result);
    else
      CachingLex(Result);
  }
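
  // Sketch of the typical top-level loop a client runs after calling
  // EnterMainSourceFile():
  //
  //   Token Tok;
  //   do {
  //     PP.Lex(Tok);
  //     // ... hand Tok to the parser, printer, etc. ...
  //   } while (Tok.getKind() != tok::eof);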

  /// LexNonComment - Lex a token.  If it's a comment, keep lexing until we get
  /// something not a comment.  This is useful in -E -C mode where comments
  /// would foul up preprocessor directive handling.
  void LexNonComment(Token &Result) {
    do
      Lex(Result);
    while (Result.getKind() == tok::comment);
  }

  /// LexUnexpandedToken - This is just like Lex, but this disables macro
  /// expansion of identifier tokens.
  void LexUnexpandedToken(Token &Result) {
    // Disable macro expansion.
    bool OldVal = DisableMacroExpansion;
    DisableMacroExpansion = true;
    // Lex the token.
    Lex(Result);

    // Reenable it.
    DisableMacroExpansion = OldVal;
  }

  /// LookAhead - This peeks ahead N tokens and returns that token without
  /// consuming any tokens.  LookAhead(0) returns the next token that would be
  /// returned by Lex(), LookAhead(1) returns the token after it, etc.  This
  /// returns normal tokens after phase 5.  As such, it is equivalent to using
  /// 'Lex', not 'LexUnexpandedToken'.
  const Token &LookAhead(unsigned N) {
    if (CachedLexPos + N < CachedTokens.size())
      return CachedTokens[CachedLexPos+N];
    else
      return PeekAhead(N+1);
  }
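
  // Sketch: peeking at an upcoming token without consuming anything.
  //
  //   if (PP.LookAhead(0).getKind() == tok::l_paren) {
  //     // the next Lex() call will return a '('
  //   }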

  /// Diag - Forwarding function for diagnostics.  This emits a diagnostic at
  /// the specified source location (or the given Token's location), translating
  /// the position in the current buffer into a form suitable for rendering.
  void Diag(SourceLocation Loc, unsigned DiagID);
  void Diag(SourceLocation Loc, unsigned DiagID, const std::string &Msg);
  void Diag(SourceLocation Loc, unsigned DiagID, const std::string &Msg,
            const SourceRange &R1, const SourceRange &R2);
  void Diag(SourceLocation Loc, unsigned DiagID, const SourceRange &R);
  void Diag(SourceLocation Loc, unsigned DiagID, const SourceRange &R1,
            const SourceRange &R2);
  void Diag(const Token &Tok, unsigned DiagID) {
    Diag(Tok.getLocation(), DiagID);
  }
  void Diag(const Token &Tok, unsigned DiagID, const std::string &Msg) {
    Diag(Tok.getLocation(), DiagID, Msg);
  }
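
  // Sketch: reporting a problem at a token's location.  'diag::some_diag_id'
  // is a placeholder, not a real diagnostic ID; a real caller would use one of
  // the IDs the Diagnostic object knows about.
  //
  //   PP.Diag(Tok, diag::some_diag_id);
  //   PP.Diag(Tok.getLocation(), diag::some_diag_id, "extra message text");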

  /// getSpelling() - Return the 'spelling' of the Tok token.  The spelling of a
  /// token is the characters used to represent the token in the source file
  /// after trigraph expansion and escaped-newline folding.  In particular, this
  /// wants to get the true, uncanonicalized, spelling of things like digraphs,
  /// UCNs, etc.
  std::string getSpelling(const Token &Tok) const;

  /// getSpelling - This method is used to get the spelling of a token into a
  /// preallocated buffer, instead of as an std::string.  The caller is required
  /// to allocate enough space for the token, which is guaranteed to be at least
  /// Tok.getLength() bytes long.  The length of the actual result is returned.
  ///
  /// Note that this method may do two possible things: it may either fill in
  /// the buffer specified with characters, or it may *change the input pointer*
  /// to point to a constant buffer with the data already in it (avoiding a
  /// copy).  The caller is not allowed to modify the returned buffer pointer
  /// if an internal buffer is returned.
  unsigned getSpelling(const Token &Tok, const char *&Buffer) const;
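
  // Sketch of the buffer form of getSpelling().  Note that Ptr may be redirected
  // to an internal buffer instead of TmpBuf being filled in.
  //
  //   std::vector<char> TmpBuf(Tok.getLength());
  //   const char *Ptr = &TmpBuf[0];
  //   unsigned Len = PP.getSpelling(Tok, Ptr);
  //   // the spelling is the Len characters starting at Ptr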


  /// CreateString - Plop the specified string into a scratch buffer and return
  /// a location for it.  If specified, the source location provides a source
  /// location for the token.
  SourceLocation CreateString(const char *Buf, unsigned Len,
                              SourceLocation SourceLoc = SourceLocation());

  /// DumpToken - Print the token to stderr, used for debugging.
  ///
  void DumpToken(const Token &Tok, bool DumpFlags = false) const;
  void DumpLocation(SourceLocation Loc) const;
  void DumpMacro(const MacroInfo &MI) const;

  /// AdvanceToTokenCharacter - Given a location that specifies the start of a
  /// token, return a new location that specifies a character within the token.
  SourceLocation AdvanceToTokenCharacter(SourceLocation TokStart,unsigned Char);

  /// IncrementPasteCounter - Increment the counters for the number of token
  /// paste operations performed.  If fast was specified, this is a 'fast paste'
  /// case we handled.
  ///
  void IncrementPasteCounter(bool isFast) {
    if (isFast)
      ++NumFastTokenPaste;
    else
      ++NumTokenPaste;
  }

  void PrintStats();

  /// HandleMicrosoftCommentPaste - When the macro expander pastes together a
  /// comment (/##/) in Microsoft mode, this method handles updating the current
  /// state, returning the token on the next source line.
  void HandleMicrosoftCommentPaste(Token &Tok);

  //===--------------------------------------------------------------------===//
  // Preprocessor callback methods.  These are invoked by a lexer as various
  // directives and events are found.

  /// LookUpIdentifierInfo - Given a tok::identifier token, look up the
  /// identifier information for the token and install it into the token.
  IdentifierInfo *LookUpIdentifierInfo(Token &Identifier,
                                       const char *BufPtr = 0);

  /// HandleIdentifier - This callback is invoked when the lexer reads an
  /// identifier and has filled in the token's IdentifierInfo member.  This
  /// callback potentially macro expands it or turns it into a named token (like
  /// 'for').
  void HandleIdentifier(Token &Identifier);


  /// HandleEndOfFile - This callback is invoked when the lexer hits the end of
  /// the current file.  This either returns the EOF token (returning true), or
  /// pops a level off the include stack (returning false), at which point the
  /// client should call Lex() again.
  bool HandleEndOfFile(Token &Result, bool isEndOfMacro = false);

  /// HandleEndOfTokenLexer - This callback is invoked when the current
  /// TokenLexer hits the end of its token stream.
  bool HandleEndOfTokenLexer(Token &Result);

  /// HandleDirective - This callback is invoked when the lexer sees a # token
  /// at the start of a line.  This consumes the directive, modifies the
  /// lexer/preprocessor state, and advances the lexer(s) so that the next token
  /// read is the correct one.
  void HandleDirective(Token &Result);

  /// CheckEndOfDirective - Ensure that the next token is a tok::eom token.  If
  /// not, emit a diagnostic and consume up until the eom.
  void CheckEndOfDirective(const char *Directive);
private:
  /// isInPrimaryFile - Return true if we're in the top-level file, not in a
  /// #include.
  bool isInPrimaryFile() const;

  /// isSystemHeader - Return true if F is a system header.
  bool isSystemHeader(const FileEntry* F) const;

  /// DiscardUntilEndOfDirective - Read and discard all tokens remaining on the
  /// current line until the tok::eom token is found.
  void DiscardUntilEndOfDirective();

  /// ReadMacroName - Lex and validate a macro name, which occurs after a
  /// #define or #undef.  This emits a diagnostic, sets the token kind to eom,
  /// and discards the rest of the macro line if the macro name is invalid.
  void ReadMacroName(Token &MacroNameTok, char isDefineUndef = 0);

  /// ReadMacroDefinitionArgList - The ( starting an argument list of a macro
  /// definition has just been read.  Lex the rest of the arguments and the
  /// closing ), updating MI with what we learn.  Return true if an error occurs
  /// parsing the arg list.
  bool ReadMacroDefinitionArgList(MacroInfo *MI);

  /// SkipExcludedConditionalBlock - We just read a #if or related directive and
  /// decided that the subsequent tokens are in the #if'd out portion of the
  /// file.  Lex the rest of the file, until we see an #endif.  If
  /// FoundNonSkipPortion is true, then we have already emitted code for part of
  /// this #if directive, so #else/#elif blocks should never be entered. If
  /// FoundElse is false, then #else directives are ok; if it is true, we have
  /// already seen one, so a #else directive is a duplicate.  When this returns,
  /// the caller can lex the first valid token.
  void SkipExcludedConditionalBlock(SourceLocation IfTokenLoc,
                                    bool FoundNonSkipPortion, bool FoundElse);

  /// EvaluateDirectiveExpression - Evaluate an integer constant expression that
  /// may occur after a #if or #elif directive and return it as a bool.  If the
  /// expression is equivalent to "!defined(X)" return X in IfNDefMacro.
  bool EvaluateDirectiveExpression(IdentifierInfo *&IfNDefMacro);

  /// RegisterBuiltinPragmas - Install the standard preprocessor pragmas:
  /// #pragma GCC poison/system_header/dependency and #pragma once.
  void RegisterBuiltinPragmas();

  /// RegisterBuiltinMacros - Register builtin macros, such as __LINE__ with the
  /// identifier table.
  void RegisterBuiltinMacros();
  IdentifierInfo *RegisterBuiltinMacro(const char *Name);

  /// HandleMacroExpandedIdentifier - If an identifier token is read that is to
  /// be expanded as a macro, handle it and return the next token as 'Tok'.  If
  /// the macro should not be expanded return true, otherwise return false.
  bool HandleMacroExpandedIdentifier(Token &Tok, MacroInfo *MI);

  /// isNextPPTokenLParen - Determine whether the next preprocessor token to be
  /// lexed is a '('.  If so, consume the token and return true; if not, this
  /// method should have no observable side-effect on the lexed tokens.
  bool isNextPPTokenLParen();

  /// ReadFunctionLikeMacroArgs - After reading "MACRO(", this method is
  /// invoked to read all of the formal arguments specified for the macro
  /// invocation.  This returns null on error.
  MacroArgs *ReadFunctionLikeMacroArgs(Token &MacroName, MacroInfo *MI);

  /// ExpandBuiltinMacro - If an identifier token is read that is to be expanded
  /// as a builtin macro, handle it and return the next token as 'Tok'.
  void ExpandBuiltinMacro(Token &Tok);

  /// Handle_Pragma - Read a _Pragma directive, slice it up, process it, then
  /// return the first token after the directive.  The _Pragma token has just
  /// been read into 'Tok'.
  void Handle_Pragma(Token &Tok);


  /// EnterSourceFileWithLexer - Add a lexer to the top of the include stack and
  /// start lexing tokens from it instead of the current buffer.
  void EnterSourceFileWithLexer(Lexer *TheLexer, const DirectoryLookup *Dir);

  /// GetIncludeFilenameSpelling - Turn the specified lexer token into a fully
  /// checked and spelled filename, e.g. as an operand of #include. This returns
  /// true if the input filename was in <>'s or false if it was in ""'s.  The
  /// caller is expected to provide a buffer that is large enough to hold the
  /// spelling of the filename, but is also expected to handle the case when
  /// this method decides to use a different buffer.
  bool GetIncludeFilenameSpelling(SourceLocation Loc,
                                  const char *&BufStart, const char *&BufEnd);

  /// LookupFile - Given a "foo" or <foo> reference, look up the indicated file,
  /// return null on failure.  isAngled indicates whether the file reference is
  /// for system #include's or not (i.e. using <> instead of "").
  const FileEntry *LookupFile(const char *FilenameStart,const char *FilenameEnd,
                              bool isAngled, const DirectoryLookup *FromDir,
                              const DirectoryLookup *&CurDir);

  //===--------------------------------------------------------------------===//
  // Caching stuff.
  void CachingLex(Token &Result);
  bool InCachingLexMode() const { return CurLexer == 0 && CurTokenLexer == 0; }
  void EnterCachingLexMode();
  void ExitCachingLexMode() {
    if (InCachingLexMode())
      RemoveTopOfLexerStack();
  }
  const Token &PeekAhead(unsigned N);

  //===--------------------------------------------------------------------===//
  /// Handle*Directive - implement the various preprocessor directives.  These
  /// should side-effect the current preprocessor object so that the next call
  /// to Lex() returns the appropriate token.

  void HandleUserDiagnosticDirective(Token &Tok, bool isWarning);
  void HandleIdentSCCSDirective(Token &Tok);

  // File inclusion.
  void HandleIncludeDirective(Token &Tok,
                              const DirectoryLookup *LookupFrom = 0,
                              bool isImport = false);
  void HandleIncludeNextDirective(Token &Tok);
  void HandleImportDirective(Token &Tok);

  // Macro handling.
  void HandleDefineDirective(Token &Tok);
  void HandleUndefDirective(Token &Tok);
  // HandleAssertDirective(Token &Tok);
  // HandleUnassertDirective(Token &Tok);

  // Conditional Inclusion.
  void HandleIfdefDirective(Token &Tok, bool isIfndef,
                            bool ReadAnyTokensBeforeDirective);
  void HandleIfDirective(Token &Tok, bool ReadAnyTokensBeforeDirective);
  void HandleEndifDirective(Token &Tok);
  void HandleElseDirective(Token &Tok);
  void HandleElifDirective(Token &Tok);

  // Pragmas.
  void HandlePragmaDirective();
public:
  void HandlePragmaOnce(Token &OnceTok);
  void HandlePragmaMark();
  void HandlePragmaPoison(Token &PoisonTok);
  void HandlePragmaSystemHeader(Token &SysHeaderTok);
  void HandlePragmaDependency(Token &DependencyTok);
};

/// PreprocessorFactory - A generic factory interface for lazily creating
///  Preprocessor objects on demand.
class PreprocessorFactory {
public:
  virtual ~PreprocessorFactory();
  virtual Preprocessor* CreatePreprocessor() = 0;
};
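
// Sketch of a client-side factory.  The members used to build the Preprocessor
// are illustrative only; a real factory would hold whatever state its
// Preprocessor needs.
//
//   class MyPPFactory : public PreprocessorFactory {
//     // ... references to Diagnostic, LangOptions, TargetInfo, etc. ...
//     virtual Preprocessor *CreatePreprocessor() {
//       return new Preprocessor(Diags, LangOpts, Target, SourceMgr, HeaderInfo);
//     }
//   };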

}  // end namespace clang

#endif
