Preprocessor.h revision 9b36c3f0de0105e903130bbda3c4aea7d792c0af
1//===--- Preprocessor.h - C Language Family Preprocessor --------*- C++ -*-===//
2//
3//                     The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10//  This file defines the Preprocessor interface.
11//
12//===----------------------------------------------------------------------===//
13
14#ifndef LLVM_CLANG_LEX_PREPROCESSOR_H
15#define LLVM_CLANG_LEX_PREPROCESSOR_H
16
17#include "clang/Lex/Lexer.h"
18#include "clang/Lex/PTHLexer.h"
19#include "clang/Lex/PPCallbacks.h"
20#include "clang/Lex/TokenLexer.h"
21#include "clang/Lex/PTHManager.h"
22#include "clang/Basic/Builtins.h"
23#include "clang/Basic/Diagnostic.h"
24#include "clang/Basic/IdentifierTable.h"
25#include "clang/Basic/SourceLocation.h"
26#include "llvm/ADT/DenseMap.h"
27#include "llvm/ADT/OwningPtr.h"
28#include "llvm/ADT/SmallVector.h"
29#include "llvm/Support/Allocator.h"
30#include <vector>
31
32namespace clang {
33
34class SourceManager;
35class ExternalPreprocessorSource;
36class FileManager;
37class FileEntry;
38class HeaderSearch;
39class PragmaNamespace;
40class PragmaHandler;
41class CommentHandler;
42class ScratchBuffer;
43class TargetInfo;
44class PPCallbacks;
45class DirectoryLookup;
46class PreprocessingRecord;
47
48/// Preprocessor - This object engages in a tight little dance with the lexer to
49/// efficiently preprocess tokens.  Lexers know only about tokens within a
50/// single source file, and don't know anything about preprocessor-level issues
51/// like the #include stack, token expansion, etc.
52///
53class Preprocessor {
54  Diagnostic        *Diags;
55  LangOptions        Features;
56  const TargetInfo  &Target;
57  FileManager       &FileMgr;
58  SourceManager     &SourceMgr;
59  ScratchBuffer     *ScratchBuf;
60  HeaderSearch      &HeaderInfo;
61
62  /// \brief External source of macros.
63  ExternalPreprocessorSource *ExternalSource;
64
65  /// PTH - An optional PTHManager object used for getting tokens from
66  ///  a token cache rather than lexing the original source file.
67  llvm::OwningPtr<PTHManager> PTH;
68
69  /// BP - A BumpPtrAllocator object used to quickly allocate and release
70  ///  objects internal to the Preprocessor.
71  llvm::BumpPtrAllocator BP;
72
73  /// Identifiers for builtin macros and other builtins.
74  IdentifierInfo *Ident__LINE__, *Ident__FILE__;   // __LINE__, __FILE__
75  IdentifierInfo *Ident__DATE__, *Ident__TIME__;   // __DATE__, __TIME__
76  IdentifierInfo *Ident__INCLUDE_LEVEL__;          // __INCLUDE_LEVEL__
77  IdentifierInfo *Ident__BASE_FILE__;              // __BASE_FILE__
78  IdentifierInfo *Ident__TIMESTAMP__;              // __TIMESTAMP__
79  IdentifierInfo *Ident__COUNTER__;                // __COUNTER__
80  IdentifierInfo *Ident_Pragma, *Ident__VA_ARGS__; // _Pragma, __VA_ARGS__
81  IdentifierInfo *Ident__has_feature;              // __has_feature
82  IdentifierInfo *Ident__has_builtin;              // __has_builtin
83  IdentifierInfo *Ident__has_include;              // __has_include
84  IdentifierInfo *Ident__has_include_next;         // __has_include_next
85
86  SourceLocation DATELoc, TIMELoc;
87  unsigned CounterValue;  // Next __COUNTER__ value.
88
89  enum {
90    /// MaxAllowedIncludeStackDepth - Maximum allowed depth of #includes.
91    MaxAllowedIncludeStackDepth = 200
92  };
93
94  // State that is set before the preprocessor begins.
95  bool KeepComments : 1;
96  bool KeepMacroComments : 1;
97
98  // State that changes while the preprocessor runs:
99  bool InMacroArgs : 1;            // True if parsing fn macro invocation args.
100
101  /// Whether the preprocessor owns the header search object.
102  bool OwnsHeaderSearch : 1;
103
104  /// DisableMacroExpansion - True if macro expansion is disabled.
105  bool DisableMacroExpansion : 1;
106
107  /// \brief Whether we have already loaded macros from the external source.
108  mutable bool ReadMacrosFromExternalSource : 1;
109
110  /// Identifiers - This is mapping/lookup information for all identifiers in
111  /// the program, including program keywords.
112  mutable IdentifierTable Identifiers;
113
114  /// Selectors - This table contains all the selectors in the program. Unlike
115  /// IdentifierTable above, this table *isn't* populated by the preprocessor.
116  /// It is declared/instantiated here because its role/lifetime is conceptually
117  /// similar to that of the IdentifierTable. In addition, the current control
118  /// flow (in clang::ParseAST()) makes it convenient to put it here.
119  /// FIXME: Make sure the lifetime of Identifiers/Selectors *isn't* tied to
120  /// the lifetime of the preprocessor.
121  SelectorTable Selectors;
122
123  /// BuiltinInfo - Information about builtins.
124  Builtin::Context BuiltinInfo;
125
126  /// PragmaHandlers - This tracks all of the pragmas that the client registered
127  /// with this preprocessor.
128  PragmaNamespace *PragmaHandlers;
129
130  /// \brief Tracks all of the comment handlers that the client registered
131  /// with this preprocessor.
132  std::vector<CommentHandler *> CommentHandlers;
133
134  /// \brief The file that we're performing code-completion for, if any.
135  const FileEntry *CodeCompletionFile;
136
137  /// CurLexer - This is the current top of the stack that we're lexing from if
138  /// not expanding a macro and we are lexing directly from source code.
139  ///  Only one of CurLexer, CurPTHLexer, or CurTokenLexer will be non-null.
140  llvm::OwningPtr<Lexer> CurLexer;
141
142  /// CurPTHLexer - This is the current top of the stack that we're lexing from
143  ///  if not expanding a macro and we are lexing from a PTH cache.
144  ///  Only one of CurLexer, CurPTHLexer, or CurTokenLexer will be non-null.
145  llvm::OwningPtr<PTHLexer> CurPTHLexer;
146
147  /// CurPPLexer - This is the current top of the stack that we're lexing from
148  ///  if not expanding a macro.  This is an alias for either CurLexer or
149  ///  CurPTHLexer.
150  PreprocessorLexer *CurPPLexer;
151
152  /// CurDirLookup - The DirectoryLookup structure used to find the current
153  /// FileEntry, if CurLexer is non-null and if applicable.  This allows us to
154  /// implement #include_next and find directory-specific properties.
155  const DirectoryLookup *CurDirLookup;
156
157  /// CurTokenLexer - This is the current macro we are expanding, if we are
158  /// expanding a macro.  One of CurLexer and CurTokenLexer must be null.
159  llvm::OwningPtr<TokenLexer> CurTokenLexer;
160
161  /// IncludeMacroStack - This keeps track of the stack of files currently
162  /// #included, and macros currently being expanded from, not counting
163  /// CurLexer/CurTokenLexer.
164  struct IncludeStackInfo {
165    Lexer                 *TheLexer;
166    PTHLexer              *ThePTHLexer;
167    PreprocessorLexer     *ThePPLexer;
168    TokenLexer            *TheTokenLexer;
169    const DirectoryLookup *TheDirLookup;
170
171    IncludeStackInfo(Lexer *L, PTHLexer* P, PreprocessorLexer* PPL,
172                     TokenLexer* TL, const DirectoryLookup *D)
173      : TheLexer(L), ThePTHLexer(P), ThePPLexer(PPL), TheTokenLexer(TL),
174        TheDirLookup(D) {}
175  };
176  std::vector<IncludeStackInfo> IncludeMacroStack;
177
178  /// Callbacks - These are actions invoked when some preprocessor activity is
179  /// encountered (e.g. a file is #included, etc).
180  PPCallbacks *Callbacks;
181
182  /// Macros - For each IdentifierInfo with 'HasMacro' set, we keep a mapping
183  /// to the actual definition of the macro.
184  llvm::DenseMap<IdentifierInfo*, MacroInfo*> Macros;
185
186  /// MICache - A "freelist" of MacroInfo objects that can be reused for quick
187  /// allocation.
188  /// FIXME: why not use a singly linked list?
189  std::vector<MacroInfo*> MICache;
190
191  /// MacroArgCache - This is a "freelist" of MacroArg objects that can be
192  /// reused for quick allocation.
193  MacroArgs *MacroArgCache;
194  friend class MacroArgs;
195
196  // Various statistics we track for performance analysis.
197  unsigned NumDirectives, NumIncluded, NumDefined, NumUndefined, NumPragma;
198  unsigned NumIf, NumElse, NumEndif;
199  unsigned NumEnteredSourceFiles, MaxIncludeStackDepth;
200  unsigned NumMacroExpanded, NumFnMacroExpanded, NumBuiltinMacroExpanded;
201  unsigned NumFastMacroExpanded, NumTokenPaste, NumFastTokenPaste;
202  unsigned NumSkipped;
203
204  /// Predefines - This string contains the predefined macros that the
205  /// preprocessor should use, e.g. those specified on the command line.
206  std::string Predefines;
207
208  /// TokenLexerCache - Cache macro expanders to reduce malloc traffic.
209  enum { TokenLexerCacheSize = 8 };
210  unsigned NumCachedTokenLexers;
211  TokenLexer *TokenLexerCache[TokenLexerCacheSize];
212
213  /// \brief A record of the macro definitions and instantiations that
214  /// occurred during preprocessing.
215  ///
216  /// This is an optional side structure that can be enabled with
217  /// \c createPreprocessingRecord() prior to preprocessing.
218  PreprocessingRecord *Record;
219
220private:  // Cached tokens state.
221  typedef llvm::SmallVector<Token, 1> CachedTokensTy;
222
223  /// CachedTokens - Cached tokens are stored here when we do backtracking or
224  /// lookahead. They are "lexed" by the CachingLex() method.
225  CachedTokensTy CachedTokens;
226
227  /// CachedLexPos - The position of the cached token that CachingLex() should
228  /// "lex" next. If it points beyond the CachedTokens vector, it means that
229  /// a normal Lex() should be invoked.
230  CachedTokensTy::size_type CachedLexPos;
231
232  /// BacktrackPositions - Stack of backtrack positions, allowing nested
233  /// backtracks. The EnableBacktrackAtThisPos() method pushes a position to
234  /// indicate where CachedLexPos should be set when the BackTrack() method is
235  /// invoked (at which point the last position is popped).
236  std::vector<CachedTokensTy::size_type> BacktrackPositions;
237
238public:
239  Preprocessor(Diagnostic &diags, const LangOptions &opts,
240               const TargetInfo &target,
241               SourceManager &SM, HeaderSearch &Headers,
242               IdentifierInfoLookup *IILookup = 0,
243               bool OwnsHeaderSearch = false);
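
  /// A minimal construction sketch.  It assumes the supporting objects
  /// (Diags, LangOpts, Target, SM, HeaderInfo) were created elsewhere; the
  /// names are illustrative only:
  /// \code
  ///   // Diagnostic &Diags, LangOptions &LangOpts, TargetInfo &Target,
  ///   // SourceManager &SM, HeaderSearch &HeaderInfo are assumed to exist.
  ///   Preprocessor PP(Diags, LangOpts, Target, SM, HeaderInfo);
  ///   PP.setPredefines("#define MY_PROJECT 1\n");
  /// \endcode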
244
245  ~Preprocessor();
246
247  Diagnostic &getDiagnostics() const { return *Diags; }
248  void setDiagnostics(Diagnostic &D) { Diags = &D; }
249
250  const LangOptions &getLangOptions() const { return Features; }
251  const TargetInfo &getTargetInfo() const { return Target; }
252  FileManager &getFileManager() const { return FileMgr; }
253  SourceManager &getSourceManager() const { return SourceMgr; }
254  HeaderSearch &getHeaderSearchInfo() const { return HeaderInfo; }
255
256  IdentifierTable &getIdentifierTable() { return Identifiers; }
257  SelectorTable &getSelectorTable() { return Selectors; }
258  Builtin::Context &getBuiltinInfo() { return BuiltinInfo; }
259  llvm::BumpPtrAllocator &getPreprocessorAllocator() { return BP; }
260
261  void setPTHManager(PTHManager* pm);
262
263  PTHManager *getPTHManager() { return PTH.get(); }
264
265  void setExternalSource(ExternalPreprocessorSource *Source) {
266    ExternalSource = Source;
267  }
268
269  ExternalPreprocessorSource *getExternalSource() const {
270    return ExternalSource;
271  }
272
273  /// SetCommentRetentionState - Control whether or not the preprocessor retains
274  /// comments in output.
275  void SetCommentRetentionState(bool KeepComments, bool KeepMacroComments) {
276    this->KeepComments = KeepComments | KeepMacroComments;
277    this->KeepMacroComments = KeepMacroComments;
278  }
279
280  bool getCommentRetentionState() const { return KeepComments; }
281
282  /// isCurrentLexer - Return true if we are lexing directly from the specified
283  /// lexer.
284  bool isCurrentLexer(const PreprocessorLexer *L) const {
285    return CurPPLexer == L;
286  }
287
288  /// getCurrentLexer - Return the current lexer being lexed from.  Note
289  /// that this ignores any potentially active macro expansions and _Pragma
290  /// expansions going on at the time.
291  PreprocessorLexer *getCurrentLexer() const { return CurPPLexer; }
292
293  /// getCurrentFileLexer - Return the current file lexer being lexed from.
294  /// Note that this ignores any potentially active macro expansions and _Pragma
295  /// expansions going on at the time.
296  PreprocessorLexer *getCurrentFileLexer() const;
297
298  /// getPPCallbacks/addPPCallbacks - Accessors for preprocessor callbacks.
299  /// Note that this class takes ownership of any PPCallbacks object given to
300  /// it.
301  PPCallbacks *getPPCallbacks() const { return Callbacks; }
302  void addPPCallbacks(PPCallbacks *C) {
303    if (Callbacks)
304      C = new PPChainedCallbacks(C, Callbacks);
305    Callbacks = C;
306  }
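
  /// A registration sketch, assuming 'PP' is a live Preprocessor.  The
  /// FileChanged hook and its parameter list are assumptions about
  /// PPCallbacks.h at this revision; the Preprocessor takes ownership of the
  /// heap-allocated object, and a second addPPCallbacks call chains the new
  /// callbacks in front of the existing ones:
  /// \code
  ///   class IncludeLogger : public PPCallbacks {
  ///   public:
  ///     virtual void FileChanged(SourceLocation Loc, FileChangeReason Reason,
  ///                              SrcMgr::CharacteristicKind FileType) {
  ///       // React to the preprocessor entering or leaving a file.
  ///     }
  ///   };
  ///   PP.addPPCallbacks(new IncludeLogger());  // PP now owns the object.
  /// \endcode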
307
308  /// getMacroInfo - Given an identifier, return the MacroInfo it is #defined to
309  /// or null if it isn't #define'd.
310  MacroInfo *getMacroInfo(IdentifierInfo *II) const {
311    return II->hasMacroDefinition() ? Macros.find(II)->second : 0;
312  }
313
314  /// setMacroInfo - Specify a macro for this identifier.
315  ///
316  void setMacroInfo(IdentifierInfo *II, MacroInfo *MI);
317
318  /// macro_iterator/macro_begin/macro_end - This allows you to walk the current
319  /// state of the macro table.  This visits every currently-defined macro.
320  typedef llvm::DenseMap<IdentifierInfo*,
321                         MacroInfo*>::const_iterator macro_iterator;
322  macro_iterator macro_begin(bool IncludeExternalMacros = true) const;
323  macro_iterator macro_end(bool IncludeExternalMacros = true) const;
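
  /// A macro-table sketch, assuming 'PP' is a live Preprocessor; the macro
  /// name is illustrative only:
  /// \code
  ///   IdentifierInfo *II = PP.getIdentifierInfo("MY_MACRO");
  ///   if (MacroInfo *MI = PP.getMacroInfo(II)) {
  ///     // MY_MACRO is currently #defined; MI describes its definition.
  ///   }
  ///   for (Preprocessor::macro_iterator I = PP.macro_begin(),
  ///                                     E = PP.macro_end(); I != E; ++I) {
  ///     IdentifierInfo *Name = I->first;  // The macro's name.
  ///     MacroInfo *Info = I->second;      // Its current definition.
  ///   }
  /// \endcode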
324
325  const std::string &getPredefines() const { return Predefines; }
326  /// setPredefines - Set the predefines for this Preprocessor.  These
327  /// predefines are automatically injected when parsing the main file.
328  void setPredefines(const char *P) { Predefines = P; }
329  void setPredefines(const std::string &P) { Predefines = P; }
330
331  /// getIdentifierInfo - Return information about the specified preprocessor
332  /// identifier token.  Prefer passing a StringRef that refers to character
333  /// data which is already available (for example, a token's spelling buffer)
334  /// rather than constructing a temporary std::string, which would require an
335  /// extra allocation and copy of the identifier text.
336  IdentifierInfo *getIdentifierInfo(llvm::StringRef Name) const {
337    return &Identifiers.get(Name);
338  }
339
340  /// AddPragmaHandler - Add the specified pragma handler to the preprocessor.
341  /// If 'Namespace' is non-empty, then it is a token required to exist on the
342  /// pragma line before the pragma string starts, e.g. "STDC" or "GCC".
343  void AddPragmaHandler(llvm::StringRef Namespace, PragmaHandler *Handler);
344  void AddPragmaHandler(PragmaHandler *Handler) {
345    AddPragmaHandler(llvm::StringRef(), Handler);
346  }
347
348  /// RemovePragmaHandler - Remove the specific pragma handler from
349  /// the preprocessor. If \arg Namespace is non-empty, then it should
350  /// be the namespace that \arg Handler was added to. It is an error
351  /// to remove a handler that has not been registered.
352  void RemovePragmaHandler(llvm::StringRef Namespace, PragmaHandler *Handler);
353  void RemovePragmaHandler(PragmaHandler *Handler) {
354    RemovePragmaHandler(llvm::StringRef(), Handler);
355  }
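
  /// A pragma-handler sketch, assuming 'PP' is a live Preprocessor.  The
  /// PragmaHandler constructor taking a name and the HandlePragma signature
  /// below are assumptions about Pragma.h at this revision:
  /// \code
  ///   class MyPragmaHandler : public PragmaHandler {
  ///   public:
  ///     MyPragmaHandler() : PragmaHandler("my_pragma") {}
  ///     virtual void HandlePragma(Preprocessor &PP, Token &FirstToken) {
  ///       // Lex and act on the tokens of "#pragma clang my_pragma ...".
  ///     }
  ///   };
  ///   MyPragmaHandler Handler;
  ///   PP.AddPragmaHandler("clang", &Handler);
  ///   // ... preprocess ...
  ///   PP.RemovePragmaHandler("clang", &Handler);  // Same namespace required.
  /// \endcode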
356
357  /// \brief Add the specified comment handler to the preprocessor.
358  void AddCommentHandler(CommentHandler *Handler);
359
360  /// \brief Remove the specified comment handler.
361  ///
362  /// It is an error to remove a handler that has not been registered.
363  void RemoveCommentHandler(CommentHandler *Handler);
364
365  /// \brief Retrieve the preprocessing record, or NULL if there is no
366  /// preprocessing record.
367  PreprocessingRecord *getPreprocessingRecord() const { return Record; }
368
369  /// \brief Create a new preprocessing record, which will keep track of
370  /// all macro expansions, macro definitions, etc.
371  void createPreprocessingRecord();
372
373  /// EnterMainSourceFile - Enter the specified FileID as the main source file,
374  /// which implicitly adds the builtin defines etc.
375  void EnterMainSourceFile();
376
377  /// EndSourceFile - Inform the preprocessor callbacks that processing is
378  /// complete.
379  void EndSourceFile();
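
  /// The canonical driving loop, sketched under the assumption that the
  /// SourceManager's main file has already been set up by the caller:
  /// \code
  ///   PP.EnterMainSourceFile();
  ///   Token Tok;
  ///   do {
  ///     PP.Lex(Tok);
  ///     // ... consume Tok ...
  ///   } while (Tok.isNot(tok::eof));
  ///   PP.EndSourceFile();
  /// \endcode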
380
381  /// EnterSourceFile - Add a source file to the top of the include stack and
382  /// start lexing tokens from it instead of the current buffer.  Emit an error
383  /// and don't enter the file on error.
384  void EnterSourceFile(FileID CurFileID, const DirectoryLookup *Dir,
385                       SourceLocation Loc);
386
387  /// EnterMacro - Add a Macro to the top of the include stack and start lexing
388  /// tokens from it instead of the current buffer.  Args specifies the
389  /// tokens input to a function-like macro.
390  ///
391  /// ILEnd specifies the location of the ')' for a function-like macro or the
392  /// identifier for an object-like macro.
393  void EnterMacro(Token &Identifier, SourceLocation ILEnd, MacroArgs *Args);
394
395  /// EnterTokenStream - Add a "macro" context to the top of the include stack,
396  /// which will cause the lexer to start returning the specified tokens.
397  ///
398  /// If DisableMacroExpansion is true, tokens lexed from the token stream will
399  /// not be subject to further macro expansion.  Otherwise, these tokens will
400  /// be re-macro-expanded when/if expansion is enabled.
401  ///
402  /// If OwnsTokens is false, this method assumes that the specified stream of
403  /// tokens has a permanent owner somewhere, so they do not need to be copied.
404  /// If it is true, it assumes the array of tokens is allocated with new[] and
405  /// must be freed.
406  ///
407  void EnterTokenStream(const Token *Toks, unsigned NumToks,
408                        bool DisableMacroExpansion, bool OwnsTokens);
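
  /// A sketch of re-injecting previously lexed (or hand-built) tokens.  The
  /// caller keeps the backing storage alive for the lifetime of the stream,
  /// which is why OwnsTokens is passed as false here:
  /// \code
  ///   llvm::SmallVector<Token, 4> Toks;
  ///   // ... fill Toks, e.g. with tokens previously returned by Lex() ...
  ///   if (!Toks.empty())
  ///     PP.EnterTokenStream(&Toks[0], Toks.size(),
  ///                         /*DisableMacroExpansion=*/true,
  ///                         /*OwnsTokens=*/false);
  /// \endcode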
409
410  /// RemoveTopOfLexerStack - Pop the current lexer/macro exp off the top of the
411  /// lexer stack.  This should only be used in situations where the current
412  /// state of the top-of-stack lexer is known.
413  void RemoveTopOfLexerStack();
414
415  /// EnableBacktrackAtThisPos - From the point that this method is called, and
416  /// until CommitBacktrackedTokens() or Backtrack() is called, the Preprocessor
417  /// keeps track of the lexed tokens so that a subsequent Backtrack() call will
418  /// make the Preprocessor re-lex the same tokens.
419  ///
420  /// Nested backtracks are allowed, meaning that EnableBacktrackAtThisPos can
421  /// be called multiple times and CommitBacktrackedTokens/Backtrack calls will
422  /// be combined with the EnableBacktrackAtThisPos calls in reverse order.
423  ///
424  /// NOTE: *DO NOT* forget to call either CommitBacktrackedTokens or Backtrack
425  /// at some point after EnableBacktrackAtThisPos. If you don't, caching of
426  /// tokens will continue indefinitely.
427  ///
428  void EnableBacktrackAtThisPos();
429
430  /// CommitBacktrackedTokens - Disable the last EnableBacktrackAtThisPos call.
431  void CommitBacktrackedTokens();
432
433  /// Backtrack - Make Preprocessor re-lex the tokens that were lexed since
434  /// EnableBacktrackAtThisPos() was previously called.
435  void Backtrack();
436
437  /// isBacktrackEnabled - True if EnableBacktrackAtThisPos() was called and
438  /// caching of tokens is on.
439  bool isBacktrackEnabled() const { return !BacktrackPositions.empty(); }
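
  /// A backtracking sketch: every EnableBacktrackAtThisPos() must eventually
  /// be paired with either CommitBacktrackedTokens() or Backtrack().  The
  /// predicate below is hypothetical:
  /// \code
  ///   PP.EnableBacktrackAtThisPos();
  ///   Token Tok;
  ///   PP.Lex(Tok);
  ///   if (LooksLikeWhatWeWant(Tok))    // Hypothetical caller-side check.
  ///     PP.CommitBacktrackedTokens();  // Keep the tokens we consumed.
  ///   else
  ///     PP.Backtrack();                // The same tokens will be re-lexed.
  /// \endcode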
440
441  /// Lex - To lex a token from the preprocessor, just pull a token from the
442  /// current lexer or macro object.
443  void Lex(Token &Result) {
444    if (CurLexer)
445      CurLexer->Lex(Result);
446    else if (CurPTHLexer)
447      CurPTHLexer->Lex(Result);
448    else if (CurTokenLexer)
449      CurTokenLexer->Lex(Result);
450    else
451      CachingLex(Result);
452  }
453
454  /// LexNonComment - Lex a token.  If it's a comment, keep lexing until we get
455  /// something not a comment.  This is useful in -E -C mode where comments
456  /// would foul up preprocessor directive handling.
457  void LexNonComment(Token &Result) {
458    do
459      Lex(Result);
460    while (Result.getKind() == tok::comment);
461  }
462
463  /// LexUnexpandedToken - This is just like Lex, but this disables macro
464  /// expansion of identifier tokens.
465  void LexUnexpandedToken(Token &Result) {
466    // Disable macro expansion.
467    bool OldVal = DisableMacroExpansion;
468    DisableMacroExpansion = true;
469    // Lex the token.
470    Lex(Result);
471
472    // Reenable it.
473    DisableMacroExpansion = OldVal;
474  }
475
476  /// LookAhead - This peeks ahead N tokens and returns that token without
477  /// consuming any tokens.  LookAhead(0) returns the next token that would be
478  /// returned by Lex(), LookAhead(1) returns the token after it, etc.  This
479  /// returns normal tokens after phase 5.  As such, it is equivalent to using
480  /// 'Lex', not 'LexUnexpandedToken'.
481  const Token &LookAhead(unsigned N) {
482    if (CachedLexPos + N < CachedTokens.size())
483      return CachedTokens[CachedLexPos+N];
484    else
485      return PeekAhead(N+1);
486  }
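
  /// A peeking sketch, e.g. to choose a parsing strategy before consuming
  /// anything; no tokens are consumed by LookAhead():
  /// \code
  ///   const Token &Next  = PP.LookAhead(0);  // What Lex() would return next.
  ///   const Token &After = PP.LookAhead(1);  // The token after that.
  ///   if (Next.is(tok::l_paren) && After.is(tok::identifier)) {
  ///     // ... nothing has been consumed yet ...
  ///   }
  /// \endcode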
487
488  /// RevertCachedTokens - When backtracking is enabled and tokens are cached,
489  /// this allows reverting a specific number of tokens.
490  /// Note that the number of tokens being reverted should be up to the last
491  /// backtrack position, not more.
492  void RevertCachedTokens(unsigned N) {
493    assert(isBacktrackEnabled() &&
494           "Should only be called when tokens are cached for backtracking");
495    assert(signed(CachedLexPos) - signed(N) >= signed(BacktrackPositions.back())
496         && "Should revert tokens up to the last backtrack position, not more");
497    assert(signed(CachedLexPos) - signed(N) >= 0 &&
498           "Corrupted backtrack positions ?");
499    CachedLexPos -= N;
500  }
501
502  /// EnterToken - Enters a token in the token stream to be lexed next. If
503  /// BackTrack() is called afterwards, the token will remain at the insertion
504  /// point.
505  void EnterToken(const Token &Tok) {
506    EnterCachingLexMode();
507    CachedTokens.insert(CachedTokens.begin()+CachedLexPos, Tok);
508  }
509
510  /// AnnotateCachedTokens - We notify the Preprocessor that if it is caching
511  /// tokens (because backtrack is enabled) it should replace the most recent
512  /// cached tokens with the given annotation token. This function has no effect
513  /// if backtracking is not enabled.
514  ///
515  /// Note that the use of this function is purely an optimization, so that the
516  /// cached tokens don't get re-parsed and re-resolved after a backtrack is
517  /// invoked.
518  void AnnotateCachedTokens(const Token &Tok) {
519    assert(Tok.isAnnotation() && "Expected annotation token");
520    if (CachedLexPos != 0 && isBacktrackEnabled())
521      AnnotatePreviousCachedTokens(Tok);
522  }
523
524  /// \brief Replace the last token with an annotation token.
525  ///
526  /// Like AnnotateCachedTokens(), this routine replaces an
527  /// already-parsed (and resolved) token with an annotation
528  /// token. However, this routine only replaces the last token with
529  /// the annotation token; it does not affect any other cached
530  /// tokens. This function has no effect if backtracking is not
531  /// enabled.
532  void ReplaceLastTokenWithAnnotation(const Token &Tok) {
533    assert(Tok.isAnnotation() && "Expected annotation token");
534    if (CachedLexPos != 0 && isBacktrackEnabled())
535      CachedTokens[CachedLexPos-1] = Tok;
536  }
537
538  /// \brief Specify the point at which code-completion will be performed.
539  ///
540  /// \param File the file in which code completion should occur. If
541  /// this file is included multiple times, code-completion will
542  /// perform completion the first time it is included. If NULL, this
543  /// function clears out the code-completion point.
544  ///
545  /// \param Line the line at which code completion should occur
546  /// (1-based).
547  ///
548  /// \param Column the column at which code completion should occur
549  /// (1-based).
550  ///
551  /// \returns true if an error occurred, false otherwise.
552  bool SetCodeCompletionPoint(const FileEntry *File,
553                              unsigned Line, unsigned Column);
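
  /// A sketch of enabling code completion.  The file name is illustrative and
  /// FileManager::getFile is assumed to accept it directly at this revision:
  /// \code
  ///   const FileEntry *File = PP.getFileManager().getFile("input.c");
  ///   if (File)
  ///     PP.SetCodeCompletionPoint(File, /*Line=*/10, /*Column=*/4);
  /// \endcode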
554
555  /// \brief Determine if this source location refers into the file
556  /// for which we are performing code completion.
557  bool isCodeCompletionFile(SourceLocation FileLoc) const;
558
559  /// Diag - Forwarding function for diagnostics.  This emits a diagnostic at
560  /// the specified source location or Token's location, translating the start
561  /// position in the current buffer into a FullSourceLoc for rendering.
562  DiagnosticBuilder Diag(SourceLocation Loc, unsigned DiagID) {
563    return Diags->Report(FullSourceLoc(Loc, getSourceManager()), DiagID);
564  }
565
566  DiagnosticBuilder Diag(const Token &Tok, unsigned DiagID) {
567    return Diags->Report(FullSourceLoc(Tok.getLocation(), getSourceManager()),
568                         DiagID);
569  }
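
  /// A sketch of reporting a diagnostic at a token using a custom diagnostic
  /// ID; Diagnostic::getCustomDiagID is assumed to be available with this
  /// shape at this revision:
  /// \code
  ///   unsigned DiagID = PP.getDiagnostics().getCustomDiagID(
  ///       Diagnostic::Warning, "unexpected token here");
  ///   PP.Diag(Tok, DiagID);
  /// \endcode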
570
571  /// getSpelling() - Return the 'spelling' of the Tok token.  The spelling of a
572  /// token is the characters used to represent the token in the source file
573  /// after trigraph expansion and escaped-newline folding.  In particular, this
574  /// wants to get the true, uncanonicalized, spelling of things like digraphs,
575  /// UCNs, etc.
576  ///
577  /// \param Invalid If non-NULL, will be set \c true if an error occurs.
578  std::string getSpelling(const Token &Tok, bool *Invalid = 0) const;
579
580  /// getSpelling() - Return the 'spelling' of the Tok token.  The spelling of a
581  /// token is the characters used to represent the token in the source file
582  /// after trigraph expansion and escaped-newline folding.  In particular, this
583  /// wants to get the true, uncanonicalized, spelling of things like digraphs,
584  /// UCNs, etc.
585  static std::string getSpelling(const Token &Tok,
586                                 const SourceManager &SourceMgr,
587                                 const LangOptions &Features,
588                                 bool *Invalid = 0);
589
590  /// getSpelling - This method is used to get the spelling of a token into a
591  /// preallocated buffer, instead of as an std::string.  The caller is required
592  /// to allocate enough space for the token, which is guaranteed to be at least
593  /// Tok.getLength() bytes long.  The length of the actual result is returned.
594  ///
595  /// Note that this method may do two possible things: it may either fill in
596  /// the buffer specified with characters, or it may *change the input pointer*
597  /// to point to a constant buffer with the data already in it (avoiding a
598  /// copy).  The caller is not allowed to modify the returned buffer pointer
599  /// if an internal buffer is returned.
600  unsigned getSpelling(const Token &Tok, const char *&Buffer,
601                       bool *Invalid = 0) const;
602
603  /// getSpelling - This method is used to get the spelling of a token into a
604  /// SmallVector. Note that the returned StringRef may not point to the
605  /// supplied buffer if a copy can be avoided.
606  llvm::StringRef getSpelling(const Token &Tok,
607                              llvm::SmallVectorImpl<char> &Buffer,
608                              bool *Invalid = 0) const;
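
  /// A spelling-retrieval sketch, assuming 'Tok' was previously obtained from
  /// Lex().  The SmallString is only written to when the spelling cannot be
  /// referenced directly from an existing buffer:
  /// \code
  ///   llvm::SmallString<64> Buffer;
  ///   bool Invalid = false;
  ///   llvm::StringRef Spelling = PP.getSpelling(Tok, Buffer, &Invalid);
  ///   if (!Invalid) {
  ///     // Spelling points either into Buffer or into the original source.
  ///   }
  /// \endcode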
609
610  /// getSpellingOfSingleCharacterNumericConstant - Tok is a numeric constant
611  /// with length 1, return the character.
612  char getSpellingOfSingleCharacterNumericConstant(const Token &Tok,
613                                                   bool *Invalid = 0) const {
614    assert(Tok.is(tok::numeric_constant) &&
615           Tok.getLength() == 1 && "Called on unsupported token");
616    assert(!Tok.needsCleaning() && "Token can't need cleaning with length 1");
617
618    // If the token is carrying a literal data pointer, just use it.
619    if (const char *D = Tok.getLiteralData())
620      return *D;
621
622    // Otherwise, fall back on getCharacterData, which is slower, but always
623    // works.
624    return *SourceMgr.getCharacterData(Tok.getLocation(), Invalid);
625  }
626
627  /// CreateString - Plop the specified string into a scratch buffer and set the
628  /// specified token's location and length to it.  If specified, the source
629  /// location provides a location of the instantiation point of the token.
630  void CreateString(const char *Buf, unsigned Len,
631                    Token &Tok, SourceLocation SourceLoc = SourceLocation());
632
633  /// \brief Computes the source location just past the end of the
634  /// token at this source location.
635  ///
636  /// This routine can be used to produce a source location that
637  /// points just past the end of the token referenced by \p Loc, and
638  /// is generally used when a diagnostic needs to point just after a
639  /// token where it expected something different from what it received. If
640  /// the returned source location would not be meaningful (e.g., if
641  /// it points into a macro), this routine returns an invalid
642  /// source location.
643  ///
644  /// \param Offset an offset from the end of the token, where the source
645  /// location should refer to. The default offset (0) produces a source
646  /// location pointing just past the end of the token; an offset of 1 produces
647  /// a source location pointing to the last character in the token, etc.
648  SourceLocation getLocForEndOfToken(SourceLocation Loc, unsigned Offset = 0);
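
  /// A sketch of pointing a diagnostic just past a token (e.g. at a missing
  /// semicolon); the diagnostic ID is assumed to be defined elsewhere:
  /// \code
  ///   SourceLocation End = PP.getLocForEndOfToken(Tok.getLocation());
  ///   if (End.isValid())
  ///     PP.Diag(End, DiagID);  // Points one character past the end of 'Tok'.
  /// \endcode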
649
650  /// DumpToken - Print the token to stderr, used for debugging.
651  ///
652  void DumpToken(const Token &Tok, bool DumpFlags = false) const;
653  void DumpLocation(SourceLocation Loc) const;
654  void DumpMacro(const MacroInfo &MI) const;
655
656  /// AdvanceToTokenCharacter - Given a location that specifies the start of a
657  /// token, return a new location that specifies a character within the token.
658  SourceLocation AdvanceToTokenCharacter(SourceLocation TokStart,unsigned Char);
659
660  /// IncrementPasteCounter - Increment the counters for the number of token
661  /// paste operations performed.  If fast was specified, this is a 'fast paste'
662  /// case we handled.
663  ///
664  void IncrementPasteCounter(bool isFast) {
665    if (isFast)
666      ++NumFastTokenPaste;
667    else
668      ++NumTokenPaste;
669  }
670
671  void PrintStats();
672
673  /// HandleMicrosoftCommentPaste - When the macro expander pastes together a
674  /// comment (/##/) in microsoft mode, this method handles updating the current
675  /// state, returning the token on the next source line.
676  void HandleMicrosoftCommentPaste(Token &Tok);
677
678  //===--------------------------------------------------------------------===//
679  // Preprocessor callback methods.  These are invoked by a lexer as various
680  // directives and events are found.
681
682  /// LookUpIdentifierInfo - Given a tok::identifier token, look up the
683  /// identifier information for the token and install it into the token.
684  IdentifierInfo *LookUpIdentifierInfo(Token &Identifier,
685                                       const char *BufPtr = 0) const;
686
687  /// HandleIdentifier - This callback is invoked when the lexer reads an
688  /// identifier and has filled in the token's IdentifierInfo member.  This
689  /// callback potentially macro expands it or turns it into a named token (like
690  /// 'for').
691  void HandleIdentifier(Token &Identifier);
692
693
694  /// HandleEndOfFile - This callback is invoked when the lexer hits the end of
695  /// the current file.  This either returns the EOF token (and returns true), or
696  /// pops a level off the include stack (and returns false), at which point the
697  /// client should call Lex() again.
698  bool HandleEndOfFile(Token &Result, bool isEndOfMacro = false);
699
700  /// HandleEndOfTokenLexer - This callback is invoked when the current
701  /// TokenLexer hits the end of its token stream.
702  bool HandleEndOfTokenLexer(Token &Result);
703
704  /// HandleDirective - This callback is invoked when the lexer sees a # token
705  /// at the start of a line.  This consumes the directive, modifies the
706  /// lexer/preprocessor state, and advances the lexer(s) so that the next token
707  /// read is the correct one.
708  void HandleDirective(Token &Result);
709
710  /// CheckEndOfDirective - Ensure that the next token is a tok::eom token.  If
711  /// not, emit a diagnostic and consume up until the eom.  If EnableMacros is
712  /// true, then we consider macros that expand to zero tokens as being ok.
713  void CheckEndOfDirective(const char *Directive, bool EnableMacros = false);
714
715  /// DiscardUntilEndOfDirective - Read and discard all tokens remaining on the
716  /// current line until the tok::eom token is found.
717  void DiscardUntilEndOfDirective();
718
719  /// SawDateOrTime - This returns true if the preprocessor has seen a use of
720  /// __DATE__ or __TIME__ in the file so far.
721  bool SawDateOrTime() const {
722    return DATELoc != SourceLocation() || TIMELoc != SourceLocation();
723  }
724  unsigned getCounterValue() const { return CounterValue; }
725  void setCounterValue(unsigned V) { CounterValue = V; }
726
727  /// AllocateMacroInfo - Allocate a new MacroInfo object with the provided
728  ///  SourceLocation.
729  MacroInfo* AllocateMacroInfo(SourceLocation L);
730
731  /// GetIncludeFilenameSpelling - Turn the specified lexer token into a fully
732  /// checked and spelled filename, e.g. as an operand of #include. This returns
733  /// true if the input filename was in <>'s or false if it was in ""'s.  The
734  /// caller is expected to provide a buffer that is large enough to hold the
735  /// spelling of the filename, but is also expected to handle the case when
736  /// this method decides to use a different buffer.
737  bool GetIncludeFilenameSpelling(SourceLocation Loc,llvm::StringRef &Filename);
738
739  /// LookupFile - Given a "foo" or <foo> reference, look up the indicated file,
740  /// returning null on failure.  isAngled indicates whether the file reference is
741  /// for system #include's or not (i.e. using <> instead of "").
742  const FileEntry *LookupFile(llvm::StringRef Filename,
743                              bool isAngled, const DirectoryLookup *FromDir,
744                              const DirectoryLookup *&CurDir);
745
746  /// GetCurDirLookup - The DirectoryLookup structure used to find the current
747  /// FileEntry, if CurLexer is non-null and if applicable.  This allows us to
748  /// implement #include_next and find directory-specific properties.
749  const DirectoryLookup *GetCurDirLookup() { return CurDirLookup; }
750
751  /// isInPrimaryFile - Return true if we're in the top-level file, not in a
752  /// #include.
753  bool isInPrimaryFile() const;
754
755  /// ConcatenateIncludeName - Handle cases where the #include name is expanded
756  /// from a macro as multiple tokens, which need to be glued together.  This
757  /// occurs for code like:
758  ///    #define FOO <a/b.h>
759  ///    #include FOO
760  /// because in this case, "<a/b.h>" is returned as 7 tokens, not one.
761  ///
762  /// This code concatenates and consumes tokens up to the '>' token.  It
763  /// returns false if the > was found, otherwise it returns true if it finds
764  /// and consumes the EOM marker.
765  bool ConcatenateIncludeName(llvm::SmallString<128> &FilenameBuffer);
766
767private:
768
769  void PushIncludeMacroStack() {
770    IncludeMacroStack.push_back(IncludeStackInfo(CurLexer.take(),
771                                                 CurPTHLexer.take(),
772                                                 CurPPLexer,
773                                                 CurTokenLexer.take(),
774                                                 CurDirLookup));
775    CurPPLexer = 0;
776  }
777
778  void PopIncludeMacroStack() {
779    CurLexer.reset(IncludeMacroStack.back().TheLexer);
780    CurPTHLexer.reset(IncludeMacroStack.back().ThePTHLexer);
781    CurPPLexer = IncludeMacroStack.back().ThePPLexer;
782    CurTokenLexer.reset(IncludeMacroStack.back().TheTokenLexer);
783    CurDirLookup  = IncludeMacroStack.back().TheDirLookup;
784    IncludeMacroStack.pop_back();
785  }
786
787  /// ReleaseMacroInfo - Release the specified MacroInfo.  This memory will
788  ///  be reused for allocating new MacroInfo objects.
789  void ReleaseMacroInfo(MacroInfo* MI);
790
791  /// ReadMacroName - Lex and validate a macro name, which occurs after a
792  /// #define or #undef.  This emits a diagnostic, sets the token kind to eom,
793  /// and discards the rest of the macro line if the macro name is invalid.
794  void ReadMacroName(Token &MacroNameTok, char isDefineUndef = 0);
795
796  /// ReadMacroDefinitionArgList - The ( starting an argument list of a macro
797  /// definition has just been read.  Lex the rest of the arguments and the
798  /// closing ), updating MI with what we learn.  Return true if an error occurs
799  /// parsing the arg list.
800  bool ReadMacroDefinitionArgList(MacroInfo *MI);
801
802  /// SkipExcludedConditionalBlock - We just read a #if or related directive and
803  /// decided that the subsequent tokens are in the #if'd out portion of the
804  /// file.  Lex the rest of the file, until we see an #endif.  If
805  /// FoundNonSkipPortion is true, then we have already emitted code for part of
806  /// this #if directive, so #else/#elif blocks should never be entered. If
807  /// FoundElse is false, then #else directives are ok; otherwise, we have
808  /// already seen one, so a #else directive is a duplicate.  When this returns,
809  /// the caller can lex the first valid token.
810  void SkipExcludedConditionalBlock(SourceLocation IfTokenLoc,
811                                    bool FoundNonSkipPortion, bool FoundElse);
812
813  /// PTHSkipExcludedConditionalBlock - A fast PTH version of
814  ///  SkipExcludedConditionalBlock.
815  void PTHSkipExcludedConditionalBlock();
816
817  /// EvaluateDirectiveExpression - Evaluate an integer constant expression that
818  /// may occur after a #if or #elif directive and return it as a bool.  If the
819  /// expression is equivalent to "!defined(X)" return X in IfNDefMacro.
820  bool EvaluateDirectiveExpression(IdentifierInfo *&IfNDefMacro);
821
822  /// RegisterBuiltinPragmas - Install the standard preprocessor pragmas:
823  /// #pragma GCC poison/system_header/dependency and #pragma once.
824  void RegisterBuiltinPragmas();
825
826  /// RegisterBuiltinMacros - Register builtin macros, such as __LINE__ with the
827  /// identifier table.
828  void RegisterBuiltinMacros();
829
830  /// HandleMacroExpandedIdentifier - If an identifier token is read that is to
831  /// be expanded as a macro, handle it and return the next token as 'Tok'.  If
832  /// the macro should not be expanded return true, otherwise return false.
833  bool HandleMacroExpandedIdentifier(Token &Tok, MacroInfo *MI);
834
835  /// isNextPPTokenLParen - Determine whether the next preprocessor token to be
836  /// lexed is a '('.  If so, consume the token and return true, if not, this
837  /// method should have no observable side-effect on the lexed tokens.
838  bool isNextPPTokenLParen();
839
840  /// ReadFunctionLikeMacroArgs - After reading "MACRO(", this method is
841  /// invoked to read all of the formal arguments specified for the macro
842  /// invocation.  This returns null on error.
843  MacroArgs *ReadFunctionLikeMacroArgs(Token &MacroName, MacroInfo *MI,
844                                       SourceLocation &InstantiationEnd);
845
846  /// ExpandBuiltinMacro - If an identifier token is read that is to be expanded
847  /// as a builtin macro, handle it and return the next token as 'Tok'.
848  void ExpandBuiltinMacro(Token &Tok);
849
850  /// Handle_Pragma - Read a _Pragma directive, slice it up, process it, then
851  /// return the first token after the directive.  The _Pragma token has just
852  /// been read into 'Tok'.
853  void Handle_Pragma(Token &Tok);
854
855  /// EnterSourceFileWithLexer - Add a lexer to the top of the include stack and
856  /// start lexing tokens from it instead of the current buffer.
857  void EnterSourceFileWithLexer(Lexer *TheLexer, const DirectoryLookup *Dir);
858
859  /// EnterSourceFileWithPTH - Add a lexer to the top of the include stack and
860  /// start getting tokens from it using the PTH cache.
861  void EnterSourceFileWithPTH(PTHLexer *PL, const DirectoryLookup *Dir);
862
863  /// IsFileLexer - Returns true if we are lexing from a file and not a
864  ///  pragma or a macro.
865  static bool IsFileLexer(const Lexer* L, const PreprocessorLexer* P) {
866    return L ? !L->isPragmaLexer() : P != 0;
867  }
868
869  static bool IsFileLexer(const IncludeStackInfo& I) {
870    return IsFileLexer(I.TheLexer, I.ThePPLexer);
871  }
872
873  bool IsFileLexer() const {
874    return IsFileLexer(CurLexer.get(), CurPPLexer);
875  }
876
877  //===--------------------------------------------------------------------===//
878  // Caching stuff.
879  void CachingLex(Token &Result);
880  bool InCachingLexMode() const {
881    // If the Lexer pointers are 0 and IncludeMacroStack is empty, it means
882    // that we are past EOF, not that we are in CachingLex mode.
883    return CurPPLexer == 0 && CurTokenLexer == 0 && !IncludeMacroStack.empty();
884  }
885  void EnterCachingLexMode();
886  void ExitCachingLexMode() {
887    if (InCachingLexMode())
888      RemoveTopOfLexerStack();
889  }
890  const Token &PeekAhead(unsigned N);
891  void AnnotatePreviousCachedTokens(const Token &Tok);
892
893  //===--------------------------------------------------------------------===//
894  /// Handle*Directive - implement the various preprocessor directives.  These
895  /// should side-effect the current preprocessor object so that the next call
896  /// to Lex() will return the appropriate token next.
897  void HandleLineDirective(Token &Tok);
898  void HandleDigitDirective(Token &Tok);
899  void HandleUserDiagnosticDirective(Token &Tok, bool isWarning);
900  void HandleIdentSCCSDirective(Token &Tok);
901
902  // File inclusion.
903  void HandleIncludeDirective(Token &Tok,
904                              const DirectoryLookup *LookupFrom = 0,
905                              bool isImport = false);
906  void HandleIncludeNextDirective(Token &Tok);
907  void HandleIncludeMacrosDirective(Token &Tok);
908  void HandleImportDirective(Token &Tok);
909
910  // Macro handling.
911  void HandleDefineDirective(Token &Tok);
912  void HandleUndefDirective(Token &Tok);
913
914  // Conditional Inclusion.
915  void HandleIfdefDirective(Token &Tok, bool isIfndef,
916                            bool ReadAnyTokensBeforeDirective);
917  void HandleIfDirective(Token &Tok, bool ReadAnyTokensBeforeDirective);
918  void HandleEndifDirective(Token &Tok);
919  void HandleElseDirective(Token &Tok);
920  void HandleElifDirective(Token &Tok);
921
922  // Pragmas.
923  void HandlePragmaDirective();
924public:
925  void HandlePragmaOnce(Token &OnceTok);
926  void HandlePragmaMark();
927  void HandlePragmaPoison(Token &PoisonTok);
928  void HandlePragmaSystemHeader(Token &SysHeaderTok);
929  void HandlePragmaDependency(Token &DependencyTok);
930  void HandlePragmaComment(Token &CommentTok);
931  void HandlePragmaMessage(Token &MessageTok);
932  // Return true and store the first token only if any CommentHandler
933  // has inserted some tokens and getCommentRetentionState() is false.
934  bool HandleComment(Token &Token, SourceRange Comment);
935};
936
937/// \brief Abstract base class that describes a handler that will receive
938/// source ranges for each of the comments encountered in the source file.
939class CommentHandler {
940public:
941  virtual ~CommentHandler();
942
943  // The handler shall return true if it has pushed any tokens
944  // to be read using e.g. EnterToken or EnterTokenStream.
945  virtual bool HandleComment(Preprocessor &PP, SourceRange Comment) = 0;
946};
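
/// A concrete CommentHandler sketch, assuming 'PP' is a live Preprocessor; it
/// is deliberately minimal and does not push any tokens back:
/// \code
///   class TodoCommentHandler : public CommentHandler {
///   public:
///     virtual bool HandleComment(Preprocessor &PP, SourceRange Comment) {
///       // Inspect the source range covering the comment here, e.g. via
///       // PP.getSourceManager().
///       return false;  // No tokens were pushed into the preprocessor.
///     }
///   };
///   TodoCommentHandler Handler;
///   PP.AddCommentHandler(&Handler);
///   // ... preprocess ...
///   PP.RemoveCommentHandler(&Handler);
/// \endcode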
947
948}  // end namespace clang
949
950#endif
951