1#!/usr/bin/env python
2#
3# Copyright 2008 The Closure Linter Authors. All Rights Reserved.
4#
5# Licensed under the Apache License, Version 2.0 (the "License");
6# you may not use this file except in compliance with the License.
7# You may obtain a copy of the License at
8#
9#      http://www.apache.org/licenses/LICENSE-2.0
10#
11# Unless required by applicable law or agreed to in writing, software
12# distributed under the License is distributed on an "AS-IS" BASIS,
13# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14# See the License for the specific language governing permissions and
15# limitations under the License.
16
"""Classes to represent tokens and positions within them."""

# Original authors of the Closure Linter token module.
__author__ = ('robbyw@google.com (Robert Walker)',
              'ajp@google.com (Andy Perelson)')
21
22
class TokenType(object):
  """Enumerates the token types shared by every supported language.

  Each value is a short human-readable string used as a tag on Token
  instances; language-specific tokenizers may extend this set.
  """

  # A run of ordinary, non-whitespace characters.
  NORMAL = 'normal'
  # A run of whitespace within a line.
  WHITESPACE = 'whitespace'
  # A line containing nothing but whitespace.
  BLANK_LINE = 'blank line'
28
29
class Token(object):
  """Token class for intelligent text splitting.

  The token class represents a string of characters and an identifying type.
  Tokens form a doubly-linked list (via previous/next) once the file has been
  fully tokenized, so a token can be used as an iterator over the rest of the
  stream in either direction.

  Attributes:
    type: The type of token.
    string: The characters the token comprises.
    length: The length of the token.
    line: The text of the line the token is found in.
    line_number: The number of the line the token is found in.
    values: Dictionary of values returned from the tokens regex match.
    previous: The token before this one.
    next: The token after this one.
    start_index: The character index in the line where this token starts.
    attached_object: Object containing more information about this token.
    metadata: Object containing metadata about this token.  Must be added by
        a separate metadata pass.
  """

  def __init__(self, string, token_type, line, line_number, values=None):
    """Creates a new Token object.

    Args:
      string: The string of input the token contains.
      token_type: The type of token.
      line: The text of the line this token is in.
      line_number: The line number of the token.
      values: A dict of named values within the token.  For instance, a
        function declaration may have a value called 'name' which captures the
        name of the function.
    """
    self.type = token_type
    self.string = string
    self.length = len(string)
    self.line = line
    self.line_number = line_number
    self.values = values

    # These parts can only be computed when the file is fully tokenized.
    self.previous = None
    self.next = None
    self.start_index = None

    # This part is set in statetracker.py
    # TODO(robbyw): Wrap this in to metadata
    self.attached_object = None

    # This part is set in *metadatapass.py
    self.metadata = None

  def IsFirstInLine(self):
    """Tests if this token is the first token in its line.

    Returns:
      Whether the token is the first token in its line.
    """
    return not self.previous or self.previous.line_number != self.line_number

  def IsLastInLine(self):
    """Tests if this token is the last token in its line.

    Returns:
      Whether the token is the last token in its line.
    """
    return not self.next or self.next.line_number != self.line_number

  def IsType(self, token_type):
    """Tests if this token is of the given type.

    Args:
      token_type: The type to test for.

    Returns:
      True if the type of this token matches the type passed in.
    """
    return self.type == token_type

  def IsAnyType(self, *token_types):
    """Tests if this token is any of the given types.

    Args:
      token_types: The types to check.  Also accepts a single container
        (list, tuple, set, or frozenset) of types as the only argument.

    Returns:
      True if the type of this token is any of the types passed in.  Returns
      False when no types are given.
    """
    # Guard against IsAnyType() with no arguments, which previously raised
    # an IndexError on token_types[0].
    if not token_types:
      return False
    # Support the documented IsAnyType([...]) form.  Testing for concrete
    # container types (instead of "not a string" via the Python 2-only
    # basestring) keeps this working on both Python 2 and Python 3.
    if len(token_types) == 1 and isinstance(
        token_types[0], (list, tuple, set, frozenset)):
      return self.type in token_types[0]
    return self.type in token_types

  def __repr__(self):
    return '<Token: %s, "%s", %r, %d, %r>' % (self.type, self.string,
                                              self.values, self.line_number,
                                              self.metadata)

  def __iter__(self):
    """Returns a token iterator.

    Yields this token, then every following token via the next links.
    """
    node = self
    while node:
      yield node
      node = node.next

  def __reversed__(self):
    """Returns a reverse-direction token iterator.

    Yields this token, then every preceding token via the previous links.
    """
    node = self
    while node:
      yield node
      node = node.previous
140