# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import imp
import os.path
import sys
import unittest

def _GetDirAbove(dirname):
  """Returns the directory "above" this file that contains |dirname| (which
  must itself be an ancestor directory of this file)."""
  path = os.path.abspath(__file__)
  while True:
    path, tail = os.path.split(path)
    assert tail
    if tail == dirname:
      return path
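# (For example, if this file lived at /src/mojo/a/b/this_file.py, then
# _GetDirAbove("mojo") would return "/src". The path is hypothetical, purely
# for illustration.)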

try:
  imp.find_module("ply")
except ImportError:
  sys.path.append(os.path.join(_GetDirAbove("mojo"), "third_party"))
from ply import lex

try:
  imp.find_module("mojom")
except ImportError:
  sys.path.append(os.path.join(_GetDirAbove("pylib"), "pylib"))
import mojom.parse.lexer


# This (monkey-patching LexToken to make comparison value-based) is evil, but
# we'll do it anyway. (I'm pretty sure ply's lexer never cares about comparing
# for object identity.)
def _LexTokenEq(self, other):
  return self.type == other.type and self.value == other.value and \
         self.lineno == other.lineno and self.lexpos == other.lexpos
setattr(lex.LexToken, '__eq__', _LexTokenEq)
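# (With this in place, two LexTokens compare equal exactly when their type,
# value, lineno, and lexpos fields all match, which lets the tests below check
# lexer output against hand-built tokens with assertEqual.)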


def _MakeLexToken(token_type, value, lineno=1, lexpos=0):
  """Makes a LexToken with the given parameters. (Note that lineno is 1-based,
  but lexpos is 0-based.)"""
  rv = lex.LexToken()
  rv.type, rv.value, rv.lineno, rv.lexpos = token_type, value, lineno, lexpos
  return rv
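# (E.g., _MakeLexToken("NAME", "abcd") builds the token the lexer is expected
# to produce for the input "abcd".)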


def _MakeLexTokenForKeyword(keyword, **kwargs):
  """Makes a LexToken for the given keyword."""
  return _MakeLexToken(keyword.upper(), keyword.lower(), **kwargs)
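# (E.g., _MakeLexTokenForKeyword("module") is equivalent to
# _MakeLexToken("MODULE", "module").)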


class LexerTest(unittest.TestCase):
  """Tests |mojom.parse.lexer.Lexer|."""

  def __init__(self, *args, **kwargs):
    unittest.TestCase.__init__(self, *args, **kwargs)
    # Clone all lexer instances from this one, since making a lexer is slow.
    self._zygote_lexer = lex.lex(mojom.parse.lexer.Lexer("my_file.mojom"))
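    # (PLY's clone(), used in _TokensForInput below, should reuse the tables
    # built here rather than rebuilding them, which is what makes cloning
    # cheap; that understanding of PLY is an assumption, not verified here.)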

  def testValidKeywords(self):
    """Tests valid keywords."""
    self.assertEqual(self._SingleTokenForInput("handle"),
                     _MakeLexTokenForKeyword("handle"))
    self.assertEqual(self._SingleTokenForInput("import"),
                     _MakeLexTokenForKeyword("import"))
    self.assertEqual(self._SingleTokenForInput("module"),
                     _MakeLexTokenForKeyword("module"))
    self.assertEqual(self._SingleTokenForInput("struct"),
                     _MakeLexTokenForKeyword("struct"))
    self.assertEqual(self._SingleTokenForInput("interface"),
                     _MakeLexTokenForKeyword("interface"))
    self.assertEqual(self._SingleTokenForInput("enum"),
                     _MakeLexTokenForKeyword("enum"))
    self.assertEqual(self._SingleTokenForInput("const"),
                     _MakeLexTokenForKeyword("const"))
    self.assertEqual(self._SingleTokenForInput("true"),
                     _MakeLexTokenForKeyword("true"))
    self.assertEqual(self._SingleTokenForInput("false"),
                     _MakeLexTokenForKeyword("false"))
    self.assertEqual(self._SingleTokenForInput("default"),
                     _MakeLexTokenForKeyword("default"))

  def testValidIdentifiers(self):
    """Tests valid identifiers."""
    self.assertEqual(self._SingleTokenForInput("abcd"),
                     _MakeLexToken("NAME", "abcd"))
    self.assertEqual(self._SingleTokenForInput("AbC_d012_"),
                     _MakeLexToken("NAME", "AbC_d012_"))
    self.assertEqual(self._SingleTokenForInput("_0123"),
                     _MakeLexToken("NAME", "_0123"))

  def testInvalidIdentifiers(self):
    with self.assertRaisesRegexp(
        mojom.parse.lexer.LexError,
        r"^my_file\.mojom:1: Error: Illegal character '\$'$"):
      self._TokensForInput("$abc")
    with self.assertRaisesRegexp(
        mojom.parse.lexer.LexError,
        r"^my_file\.mojom:1: Error: Illegal character '\$'$"):
      self._TokensForInput("a$bc")

  def testDecimalIntegerConstants(self):
    self.assertEqual(self._SingleTokenForInput("0"),
                     _MakeLexToken("INT_CONST_DEC", "0"))
    self.assertEqual(self._SingleTokenForInput("1"),
                     _MakeLexToken("INT_CONST_DEC", "1"))
    self.assertEqual(self._SingleTokenForInput("123"),
                     _MakeLexToken("INT_CONST_DEC", "123"))
    self.assertEqual(self._SingleTokenForInput("10"),
                     _MakeLexToken("INT_CONST_DEC", "10"))

  def testValidTokens(self):
    """Tests valid tokens (which aren't tested elsewhere)."""
    # Keywords tested in |testValidKeywords|.
    # NAME tested in |testValidIdentifiers|.
    self.assertEqual(self._SingleTokenForInput("@123"),
                     _MakeLexToken("ORDINAL", "@123"))
    self.assertEqual(self._SingleTokenForInput("456"),
                     _MakeLexToken("INT_CONST_DEC", "456"))
    self.assertEqual(self._SingleTokenForInput("0x01aB2eF3"),
                     _MakeLexToken("INT_CONST_HEX", "0x01aB2eF3"))
    self.assertEqual(self._SingleTokenForInput("123.456"),
                     _MakeLexToken("FLOAT_CONST", "123.456"))
    self.assertEqual(self._SingleTokenForInput("\"hello\""),
                     _MakeLexToken("STRING_LITERAL", "\"hello\""))
    self.assertEqual(self._SingleTokenForInput("+"),
                     _MakeLexToken("PLUS", "+"))
    self.assertEqual(self._SingleTokenForInput("-"),
                     _MakeLexToken("MINUS", "-"))
    self.assertEqual(self._SingleTokenForInput("&"),
                     _MakeLexToken("AMP", "&"))
    self.assertEqual(self._SingleTokenForInput("?"),
                     _MakeLexToken("QSTN", "?"))
    self.assertEqual(self._SingleTokenForInput("="),
                     _MakeLexToken("EQUALS", "="))
    self.assertEqual(self._SingleTokenForInput("=>"),
                     _MakeLexToken("RESPONSE", "=>"))
    self.assertEqual(self._SingleTokenForInput("("),
                     _MakeLexToken("LPAREN", "("))
    self.assertEqual(self._SingleTokenForInput(")"),
                     _MakeLexToken("RPAREN", ")"))
    self.assertEqual(self._SingleTokenForInput("["),
                     _MakeLexToken("LBRACKET", "["))
    self.assertEqual(self._SingleTokenForInput("]"),
                     _MakeLexToken("RBRACKET", "]"))
    self.assertEqual(self._SingleTokenForInput("{"),
                     _MakeLexToken("LBRACE", "{"))
    self.assertEqual(self._SingleTokenForInput("}"),
                     _MakeLexToken("RBRACE", "}"))
    self.assertEqual(self._SingleTokenForInput("<"),
                     _MakeLexToken("LANGLE", "<"))
    self.assertEqual(self._SingleTokenForInput(">"),
                     _MakeLexToken("RANGLE", ">"))
    self.assertEqual(self._SingleTokenForInput(";"),
                     _MakeLexToken("SEMI", ";"))
    self.assertEqual(self._SingleTokenForInput(","),
                     _MakeLexToken("COMMA", ","))
    self.assertEqual(self._SingleTokenForInput("."),
                     _MakeLexToken("DOT", "."))

  def _TokensForInput(self, input_string):
    """Gets a list of tokens for the given input string."""
    lexer = self._zygote_lexer.clone()
    lexer.input(input_string)
    rv = []
    while True:
      tok = lexer.token()
      if not tok:
        return rv
      rv.append(tok)

  def _SingleTokenForInput(self, input_string):
    """Gets the single token for the given input string. (Raises an exception
    if the input string does not result in exactly one token.)"""
    toks = self._TokensForInput(input_string)
    assert len(toks) == 1
    return toks[0]


if __name__ == "__main__":
  unittest.main()