#!/usr/bin/env python
#
# Copyright 2007 The Closure Linter Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Token utility functions."""

__author__ = ('robbyw@google.com (Robert Walker)',
              'ajp@google.com (Andy Perelson)')

import copy

from closure_linter import javascripttokens
from closure_linter.common import tokens

# Shorthand
JavaScriptToken = javascripttokens.JavaScriptToken
Type = tokens.TokenType


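# Note on the token model assumed throughout this module: tokens form a
# doubly-linked stream (token.previous / token.next) and carry .string,
# .line, .line_number, .start_index, and .metadata (whose .last_code points
# at the nearest preceding code token), plus predicates such as
# IsFirstInLine(), IsLastInLine(), IsCode(), and IsAnyType().
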
def GetFirstTokenInSameLine(token):
  """Returns the first token in the same line as token.

  Args:
    token: Any token in the line.

  Returns:
    The first token in the same line as token.
  """
  while not token.IsFirstInLine():
    token = token.previous
  return token


def GetFirstTokenInPreviousLine(token):
  """Returns the first token in the line preceding the given token's line.

  Args:
    token: Any token in the line.

  Returns:
    The first token in the previous line, or None if token is on the first
    line.
  """
  first_in_line = GetFirstTokenInSameLine(token)
  if first_in_line.previous:
    return GetFirstTokenInSameLine(first_in_line.previous)

  return None


def GetLastTokenInSameLine(token):
  """Returns the last token in the same line as token.

  Args:
    token: Any token in the line.

  Returns:
    The last token in the same line as token.
  """
  while not token.IsLastInLine():
    token = token.next
  return token


def GetAllTokensInSameLine(token):
  """Returns all tokens in the same line as the given token.

  Args:
    token: Any token in the line.

  Returns:
    A list of all tokens on the same line as the given token, in order.
  """
  first_token = GetFirstTokenInSameLine(token)
  last_token = GetLastTokenInSameLine(token)

  tokens_in_line = []
  while first_token != last_token:
    tokens_in_line.append(first_token)
    first_token = first_token.next
  tokens_in_line.append(last_token)

  return tokens_in_line
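
# A minimal usage sketch for the line helpers above ('some_token' is a
# hypothetical token taken from a parsed stream, not part of this module):
#
#   line_tokens = GetAllTokensInSameLine(some_token)
#   assert line_tokens[0] is GetFirstTokenInSameLine(some_token)
#   assert line_tokens[-1] is GetLastTokenInSameLine(some_token)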


def CustomSearch(start_token, func, end_func=None, distance=None,
                 reverse=False):
  """Returns the first token where func is True within distance of this token.

  Note that start_token itself is never tested; the search starts from its
  neighbor in the chosen direction.

  Args:
    start_token: The token to start searching from.
    func: The function to call to test a token for applicability.
    end_func: The function to call to test a token to determine whether to
        abort the search.
    distance: The number of tokens to look through before failing search.
        Must be positive.  If unspecified, will search until the end of the
        token chain.
    reverse: When true, search the tokens before this one instead of the
        tokens after it.

  Returns:
    The first token matching func within distance of start_token, or None if
    no such token is found.
  """
  token = start_token
  if reverse:
    while token and (distance is None or distance > 0):
      previous = token.previous
      if previous:
        if func(previous):
          return previous
        if end_func and end_func(previous):
          return None

      token = previous
      if distance is not None:
        distance -= 1

  else:
    while token and (distance is None or distance > 0):
      next_token = token.next
      if next_token:
        if func(next_token):
          return next_token
        if end_func and end_func(next_token):
          return None

      token = next_token
      if distance is not None:
        distance -= 1

  return None
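
# Example sketch: find the next '(' token but give up at a blank line
# ('start' is a hypothetical token; the lambdas use only attributes this
# module already relies on):
#
#   paren = CustomSearch(start,
#                        lambda t: t.string == '(',
#                        end_func=lambda t: t.type == Type.BLANK_LINE)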


def Search(start_token, token_types, distance=None, reverse=False):
  """Returns the first token of type in token_types within distance.

  Args:
    start_token: The token to start searching from.
    token_types: The allowable types of the token being searched for.
    distance: The number of tokens to look through before failing search.
        Must be positive.  If unspecified, will search until the end of the
        token chain.
    reverse: When true, search the tokens before this one instead of the
        tokens after it.

  Returns:
    The first token of any type in token_types within distance of
    start_token, or None if no such token is found.
  """
  return CustomSearch(start_token, lambda token: token.IsAnyType(token_types),
                      None, distance, reverse)


def SearchExcept(start_token, token_types, distance=None, reverse=False):
  """Returns the first token not of any type in token_types within distance.

  Args:
    start_token: The token to start searching from.
    token_types: The unallowable types of the token being searched for.
    distance: The number of tokens to look through before failing search.
        Must be positive.  If unspecified, will search until the end of the
        token chain.
    reverse: When true, search the tokens before this one instead of the
        tokens after it.

  Returns:
    The first token not of any type in token_types within distance of
    start_token, or None if no such token is found.
  """
  return CustomSearch(start_token,
                      lambda token: not token.IsAnyType(token_types),
                      None, distance, reverse)


def SearchUntil(start_token, token_types, end_types, distance=None,
                reverse=False):
  """Returns the first token of a type in token_types before one in end_types.

  Args:
    start_token: The token to start searching from.
    token_types: The allowable types of the token being searched for.
    end_types: Types of tokens to abort search if we find.
    distance: The number of tokens to look through before failing search.
        Must be positive.  If unspecified, will search until the end of the
        token chain.
    reverse: When true, search the tokens before this one instead of the
        tokens after it.

  Returns:
    The first token of any type in token_types within distance of
    start_token and before any token of a type in end_types, or None if no
    such token is found.
  """
  return CustomSearch(start_token, lambda token: token.IsAnyType(token_types),
                      lambda token: token.IsAnyType(end_types),
                      distance, reverse)
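
# Example sketches for the three wrappers above ('start' is a hypothetical
# token; Type.WHITESPACE and Type.BLANK_LINE are the same constants used
# elsewhere in this module):
#
#   next_space = Search(start, [Type.WHITESPACE])
#   next_code = SearchExcept(start, [Type.WHITESPACE, Type.BLANK_LINE])
#   space_on_line = SearchUntil(start, [Type.WHITESPACE], [Type.BLANK_LINE])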


def DeleteToken(token):
  """Deletes the given token from the linked list.

  Args:
    token: The token to delete.
  """
  # Unlink the token from its neighbors.
  if token.previous:
    token.previous.next = token.next

  if token.next:
    token.next.previous = token.previous

    # Any following token whose last_code pointed at the deleted token must
    # be repointed at the deleted token's own last_code.
    following_token = token.next
    while following_token and following_token.metadata.last_code == token:
      following_token.metadata.last_code = token.metadata.last_code
      following_token = following_token.next


def DeleteTokens(token, token_count):
  """Deletes the given number of tokens starting with the given token.

  Args:
    token: The token to start deleting at.
    token_count: The total number of tokens to delete.
  """
  for i in xrange(1, token_count):
    DeleteToken(token.next)
  DeleteToken(token)
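
# Example sketch: unlink a hypothetical token 'tok' and the token after it
# from the stream:
#
#   DeleteTokens(tok, 2)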


def InsertTokenAfter(new_token, token):
  """Inserts new_token after token.

  Args:
    new_token: A token to be added to the stream.
    token: A token already in the stream.
  """
  new_token.previous = token
  new_token.next = token.next

  new_token.metadata = copy.copy(token.metadata)

  if token.IsCode():
    new_token.metadata.last_code = token

  if new_token.IsCode():
    # Tokens that previously pointed at token as their last code token now
    # point at the inserted code token instead.
    following_token = token.next
    while following_token and following_token.metadata.last_code == token:
      following_token.metadata.last_code = new_token
      following_token = following_token.next

  token.next = new_token
  if new_token.next:
    new_token.next.previous = new_token

  if new_token.start_index is None:
    if new_token.line_number == token.line_number:
      new_token.start_index = token.start_index + len(token.string)
    else:
      new_token.start_index = 0

    # Shift the start indices of any tokens that follow on the same line.
    iterator = new_token.next
    while iterator and iterator.line_number == new_token.line_number:
      iterator.start_index += len(new_token.string)
      iterator = iterator.next


def InsertTokensAfter(new_tokens, token):
  """Inserts multiple tokens after token.

  Args:
    new_tokens: An array of tokens to be added to the stream.
    token: A token already in the stream.
  """
  # TODO(user): It would be nicer to have InsertTokenAfter defer to here
  # instead of vice-versa.
  current_token = token
  for new_token in new_tokens:
    InsertTokenAfter(new_token, current_token)
    current_token = new_token


def InsertSpaceTokenAfter(token):
  """Inserts a space token after the given token.

  Args:
    token: The token to insert a space token after.

  Returns:
    A single space token.
  """
  space_token = JavaScriptToken(' ', Type.WHITESPACE, token.line,
                                token.line_number)
  InsertTokenAfter(space_token, token)
  return space_token


def InsertBlankLineAfter(token):
  """Inserts a blank line after the given token.

  Args:
    token: The token to insert a blank line after.
  """
  blank_token = JavaScriptToken('', Type.BLANK_LINE, '',
                                token.line_number + 1)
  InsertLineAfter(token, [blank_token])


def InsertLineAfter(token, new_tokens):
  """Inserts a new line consisting of new_tokens after the given token.

  Args:
    token: The token to insert after.
    new_tokens: The tokens that will make up the new line.
  """
  insert_location = token
  for new_token in new_tokens:
    InsertTokenAfter(new_token, insert_location)
    insert_location = new_token

  # Update all subsequent line numbers.
  next_token = new_tokens[-1].next
  while next_token:
    next_token.line_number += 1
    next_token = next_token.next
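
# Example sketch: insert a blank line after a hypothetical token 'tok',
# mirroring InsertBlankLineAfter above (new tokens are expected to carry
# the line number of the line being created):
#
#   blank = JavaScriptToken('', Type.BLANK_LINE, '', tok.line_number + 1)
#   InsertLineAfter(tok, [blank])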


def SplitToken(token, position):
  """Splits the token into two tokens at position.

  Args:
    token: The token to split.
    position: The position to split at.  Will be the beginning of the second
        token.

  Returns:
    The new second token.
  """
  new_string = token.string[position:]
  token.string = token.string[:position]

  new_token = JavaScriptToken(new_string, token.type, token.line,
                              token.line_number)
  InsertTokenAfter(new_token, token)

  return new_token
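
# Example sketch: split a hypothetical token whose string is 'foobar' at
# position 3:
#
#   second = SplitToken(tok, 3)
#   # tok.string == 'foo', second.string == 'bar', and second now follows
#   # tok in the stream on the same line.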


def Compare(token1, token2):
  """Compares two tokens and determines their relative order.

  Args:
    token1: The first token to compare.
    token2: The second token to compare.

  Returns:
    A negative integer, zero, or a positive integer as the first token is
    before, equal, or after the second in the token stream.
  """
  if token2.line_number != token1.line_number:
    return token1.line_number - token2.line_number
  else:
    return token1.start_index - token2.start_index
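
# Compare is a Python 2 cmp-style function, so it can order token lists
# directly, e.g. (with a hypothetical 'token_list'):
#
#   ordered = sorted(token_list, cmp=Compare)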