Searched refs:tokens (Results 1 - 25 of 796), sorted by relevance


/external/python/cpython2/Lib/plat-irix5/
panelparser.py
18 # Return a list of tokens (strings).
21 tokens = []
36 tokens.append(s[:i])
39 tokens.append(c)
47 tokens.append(s[:i])
49 return tokens
53 # Return a list of tokens (strings).
56 tokens = []
60 tokens = tokens
[all...]
/external/python/cpython2/Lib/plat-irix6/
panelparser.py
18 # Return a list of tokens (strings).
21 tokens = []
36 tokens.append(s[:i])
39 tokens.append(c)
47 tokens.append(s[:i])
49 return tokens
53 # Return a list of tokens (strings).
56 tokens = []
60 tokens = tokens
[all...]
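Both IRIX results point at the same panelparser.py, a splitter for S-expression-like panel descriptions that appends to a token list as it scans. A minimal sketch of that append-as-you-scan pattern (a simplification for illustration, not the actual panelparser code; the separator set here is hypothetical):

    def tokenize(s, separators="()'"):
        # Return a list of tokens (strings): runs of ordinary characters,
        # plus each separator character as a one-character token.
        tokens = []
        i = 0
        while i < len(s):
            c = s[i]
            if c.isspace():
                i += 1                    # whitespace only delimits tokens
            elif c in separators:
                tokens.append(c)          # separator is a token by itself
                i += 1
            else:
                j = i
                while j < len(s) and not s[j].isspace() and s[j] not in separators:
                    j += 1
                tokens.append(s[i:j])     # run of ordinary characters
                i = j
        return tokens

    print(tokenize("(setq x '10)"))       # ['(', 'setq', 'x', "'", '10', ')']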
/external/antlr/antlr-3.4/runtime/Python/tests/
t048rewrite.py
25 tokens = self._parse("abc")
26 tokens.insertBefore(0, "0")
28 result = tokens.toString()
34 tokens = self._parse("abc")
35 tokens.insertAfter(2, "x")
37 result = tokens.toString()
43 tokens = self._parse("abc")
44 tokens.insertBefore(1, "x")
45 tokens.insertAfter(1, "x")
47 result = tokens
[all...]
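These tests exercise the ANTLR 3 Python runtime's TokenRewriteStream, which records insertBefore/insertAfter edits and only applies them when the stream is rendered with toString(). A hedged sketch of the same calls outside the test harness (the generated lexer class is hypothetical; the test's _parse() helper constructs one internally):

    import antlr3
    from abcLexer import abcLexer             # hypothetical ANTLR3-generated lexer

    lexer = abcLexer(antlr3.ANTLRStringStream("abc"))
    tokens = antlr3.TokenRewriteStream(lexer)
    tokens.insertBefore(0, "0")               # edits are recorded, not applied...
    tokens.insertAfter(2, "x")
    result = tokens.toString()                # ...until rendering: "0abcx"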
/external/chromium-trace/catapult/common/py_vulcanize/py_vulcanize/
strip_js_comments_unittest.py
29 tokens = list(strip_js_comments._TokenizeJS(''))
30 self.assertEquals([], tokens)
33 tokens = list(strip_js_comments._TokenizeJS('\n'))
34 self.assertEquals(['\n'], tokens)
37 tokens = list(strip_js_comments._TokenizeJS('A // foo'))
38 self.assertEquals(['A ', '//', ' foo'], tokens)
41 tokens = list(strip_js_comments._TokenizeJS('A // foo\nbar'))
42 self.assertEquals(['A ', '//', ' foo', '\n', 'bar'], tokens)
45 tokens = list(strip_js_comments._TokenizeJS('A /* foo */'))
46 self.assertEquals(['A ', '/*', ' foo ', '*/'], tokens)
[all...]
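py_vulcanize's _TokenizeJS splits JavaScript source into the comment delimiters ('//', '/*', '*/'), newlines, and the text runs between them, so a later pass can drop the comment spans. A re-implementation sketch of the splitting behavior the assertions above describe (not py_vulcanize's actual code):

    import re

    def tokenize_js(text):
        # Keep the delimiters by capturing them in re.split(); drop the
        # empty strings re.split() yields between adjacent delimiters.
        for token in re.split(r'(\n|//|/\*|\*/)', text):
            if token:
                yield token

    assert list(tokenize_js('A // foo\nbar')) == ['A ', '//', ' foo', '\n', 'bar']
    assert list(tokenize_js('A /* foo */')) == ['A ', '/*', ' foo ', '*/']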
/external/mesa3d/src/compiler/glsl/glcpp/tests/
000-content-with-spaces.c
1 this is four tokens with spaces
/external/antlr/antlr-3.4/runtime/Python/antlr3/
__init__.py
152 from tokens import *
/external/mesa3d/src/gallium/tests/graw/
disasm.c
30 * Small utility to disassemble a memory dump of TGSI tokens.
35 * (gdb) tgsi_dump state->tokens
60 struct tgsi_token *tokens; local
66 tokens = malloc(max_tokens * sizeof *tokens);
67 fread(tokens, sizeof *tokens, max_tokens, fp);
69 tgsi_dump(tokens, 0);
71 free(tokens);
/external/lisa/libs/utils/android/
gfxinfo.py
60 tokens = line.split(':')
62 tokens = [t.strip() for t in tokens]
63 tokens[0] = tokens[0].replace(' ', '_').lower()
66 if tokens[0] == 'janky_frames':
67 (frames, pc) = tokens[1].split(' ')
73 if tokens[1].endswith('ms'):
74 tokens[0] = tokens[
[all...]
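gfxinfo.py parses dumpsys-gfxinfo-style "Key name: value" lines by splitting on ':', stripping both halves, and normalizing the key to lower_snake_case, with special cases such as the janky-frames line. A simplified sketch of that pattern (illustration only, not LISA's actual code; the sample line format is an assumption):

    def parse_line(line):
        tokens = [t.strip() for t in line.split(':')]
        tokens[0] = tokens[0].replace(' ', '_').lower()
        if tokens[0] == 'janky_frames':
            # e.g. "Janky frames: 87 (5.21%)" -> (87, 5.21)
            frames, pc = tokens[1].split(' ')
            return tokens[0], (int(frames), float(pc.strip('()%')))
        return tokens[0], tokens[1]

    print(parse_line('Janky frames: 87 (5.21%)'))   # ('janky_frames', (87, 5.21))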
/external/mesa3d/src/gallium/targets/graw-null/
graw_util.c
19 struct tgsi_token tokens[1024]; local
22 if (!tgsi_text_translate(text, tokens, ARRAY_SIZE(tokens)))
26 state.tokens = tokens;
34 struct tgsi_token tokens[1024]; local
37 if (!tgsi_text_translate(text, tokens, ARRAY_SIZE(tokens)))
41 state.tokens = tokens;
49 struct tgsi_token tokens[1024]; local
[all...]
/external/clang/bindings/python/tests/cindex/
test_tokens.py
15 tokens = list(tu.get_tokens(extent=r))
17 assert len(tokens) == 5
18 assert tokens[1].spelling == 'i'
19 assert tokens[1].kind == TokenKind.IDENTIFIER
21 cursor = tokens[1].cursor
23 assert tokens[1].cursor == tokens[2].cursor
31 tokens = list(tu.get_tokens(extent=r))
32 eq_(len(tokens), 4)
34 loc = tokens[
[all...]
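The clang Python bindings expose the lexer through TranslationUnit.get_tokens(); each Token carries a spelling, a TokenKind, and the cursor of the AST node it belongs to. A hedged sketch of the API the test drives (assumes libclang is installed; the file name and source are made up):

    from clang.cindex import Index, TokenKind

    tu = Index.create().parse('example.c',
                              unsaved_files=[('example.c', 'int i = 5;')])
    toks = list(tu.get_tokens(extent=tu.cursor.extent))
    print([t.spelling for t in toks])                 # ['int', 'i', '=', '5', ';']
    print([t.spelling for t in toks
           if t.kind == TokenKind.IDENTIFIER])        # ['i']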
/external/antlr/antlr-3.4/tool/src/test/java/org/antlr/test/
TestCommonTokenStream.java
52 BufferedTokenStream tokens = new BufferedTokenStream(lexEngine);
54 String result = tokens.LT(1).getText();
73 BufferedTokenStream tokens = new BufferedTokenStream(lexEngine);
75 String result = tokens.LT(2).getText();
94 BufferedTokenStream tokens = new BufferedTokenStream(lexEngine);
97 Token t = tokens.LT(i);
100 t = tokens.LT(i);
102 tokens.LT(i++); // push it past end
103 tokens.LT(i++);
105 String result = tokens
[all...]
TestTokenRewriteStream.java
51 TokenRewriteStream tokens = new TokenRewriteStream(lexEngine);
52 tokens.insertBefore(0, "0");
53 String result = tokens.toString();
66 TokenRewriteStream tokens = new TokenRewriteStream(lexEngine);
67 tokens.insertAfter(2, "x");
68 String result = tokens.toString();
81 TokenRewriteStream tokens = new TokenRewriteStream(lexEngine);
82 tokens.fill();
83 tokens.insertBefore(1, "x");
84 tokens
[all...]
/external/mesa3d/src/gallium/auxiliary/tgsi/
tgsi_emulate.h
36 tgsi_emulate(const struct tgsi_token *tokens, unsigned flags);
tgsi_sanity.h
46 const struct tgsi_token *tokens );
tgsi_text.h
42 struct tgsi_token *tokens,
/external/mesa3d/src/mesa/state_tracker/
st_tgsi_lower_yuv.h
29 extern const struct tgsi_token * st_tgsi_lower_yuv(const struct tgsi_token *tokens,
/external/google-breakpad/src/processor/
tokenize.cc
48 vector<char*> *tokens) {
49 tokens->clear();
50 tokens->reserve(max_tokens);
54 // Split tokens on the separator character.
59 tokens->push_back(token);
66 tokens->push_back(token);
69 return tokens->size() == static_cast<unsigned int>(max_tokens);
45 Tokenize(char *line, const char *separators, int max_tokens, vector<char*> *tokens) argument
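Breakpad's Tokenize() splits a line into at most max_tokens pieces, leaves the remainder of the line in the final token, and returns whether exactly max_tokens were produced. A Python model of that contract (the real function mutates the char buffer in place and treats separators as a character set; this sketch assumes a single separator character):

    def tokenize(line, separator, max_tokens):
        # maxsplit = max_tokens - 1 keeps the tail, separators and all,
        # in the last token, like the C version's final-token handling.
        tokens = line.split(separator, max_tokens - 1)
        return tokens, len(tokens) == max_tokens

    print(tokenize('FUNC 1000 10 0 main', ' ', 5))
    # (['FUNC', '1000', '10', '0', 'main'], True)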
/external/ply/ply/test/
lex_empty.py
10 tokens = [ variable
yacc_rr_unused.py
12 tokens = ('A', 'B', 'C') variable
lex_many_tokens.py
3 # Test lex's ability to handle a large number of tokens (beyond the
11 tokens = ["TOK%d" % i for i in range(1000)] variable
13 for tok in tokens:
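In PLY, a module-level "tokens" sequence declares every token name the lexer may return; lex_many_tokens.py simply generates a thousand of them. A minimal self-contained ply.lex example of the declaration style these tests exercise:

    import ply.lex as lex

    tokens = ('NUMBER', 'PLUS')        # module-level declaration PLY scans for

    t_PLUS = r'\+'
    t_ignore = ' \t'

    def t_NUMBER(t):
        r'\d+'
        t.value = int(t.value)
        return t

    def t_error(t):
        t.lexer.skip(1)

    lexer = lex.lex()
    lexer.input('1 + 22')
    print([tok.value for tok in lexer])   # [1, '+', 22]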
/external/python/cpython3/Tools/scripts/
abitype.py
16 for t,v in tokens:
40 if tokens[pos][1] == 'static':
45 name = tokens[pos][1]
47 while tokens[pos][1] != '{':
51 while tokens[pos][0] in ('ws', 'comment'):
53 if tokens[pos][1] != 'PyVarObject_HEAD_INIT':
55 while tokens[pos][1] != ')':
58 # field definitions: various tokens, comma-separated
61 while tokens[pos][0] in ('ws', 'comment'):
64 while tokens[en
169 tokens = [] variable
[all...]
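abitype.py walks a list of (kind, value) token pairs with a cursor, repeatedly skipping whitespace and comment tokens before inspecting the next significant one. The core idiom, extracted as a sketch (the helper name is mine, not the script's):

    def skip_trivia(tokens, pos):
        # Advance past tokens whose kind is 'ws' or 'comment'.
        while pos < len(tokens) and tokens[pos][0] in ('ws', 'comment'):
            pos += 1
        return pos

    toks = [('ws', ' '), ('comment', '/* x */'), ('ident', 'static')]
    print(toks[skip_trivia(toks, 0)])    # ('ident', 'static')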
/external/antlr/antlr-3.4/runtime/Python/unittests/
testrecognizers.py
33 self.tokens = [
42 return self.tokens.pop(0)
46 tokens = []
48 tokens.append(token.type)
50 self.failUnlessEqual(tokens, [1, 2, 3, 4])
/external/antlr/antlr-3.4/runtime/JavaScript/tests/functional/
rhino-python.extensions
33 tokens should be sent to the parser sometimes without a corresponding
48 This TokenStream normally just passes tokens through to the parser.
63 A queue of tokens is built up to hold multiple DEDENT tokens that
77 /** The queue of tokens */
78 this.tokens = [];
117 if (this.tokens.length>0 ) {
118 var t = this.tokens[0];
119 this.tokens.splice(0,1);
137 this.tokens
[all...]
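The comment block describes the classic Python-style indentation filter: because one physical line can close several blocks, a single NEWLINE may need to be followed by several DEDENT tokens, so extra tokens wait in a queue (this.tokens) until the parser asks for them. A hedged Python sketch of that queue-in-front-of-the-lexer pattern (simplified; the token shapes are made up):

    class IndentFilter:
        def __init__(self, lines):
            self.lines = iter(lines)
            self.indents = [0]       # stack of open indentation widths
            self.tokens = []         # queued tokens not yet consumed

        def next_token(self):
            if self.tokens:                       # drain the queue first
                return self.tokens.pop(0)
            line = next(self.lines, None)
            if line is None:
                while len(self.indents) > 1:      # flush pending DEDENTs
                    self.indents.pop()
                    self.tokens.append(('DEDENT', ''))
                self.tokens.append(('EOF', ''))
                return self.next_token()
            width = len(line) - len(line.lstrip(' '))
            if width > self.indents[-1]:
                self.indents.append(width)
                self.tokens.append(('INDENT', ''))
            while width < self.indents[-1]:       # one line may close
                self.indents.pop()                # several blocks at once
                self.tokens.append(('DEDENT', ''))
            self.tokens.append(('LINE', line.strip()))
            return self.next_token()

    f = IndentFilter(['a:', '  b', 'c'])
    out = []
    while not out or out[-1] != 'EOF':
        out.append(f.next_token()[0])
    print(out)    # ['LINE', 'INDENT', 'LINE', 'DEDENT', 'LINE', 'EOF']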
/external/snakeyaml/src/test/java/org/pyyaml/
CanonicalScanner.java
26 import org.yaml.snakeyaml.tokens.AliasToken;
27 import org.yaml.snakeyaml.tokens.AnchorToken;
28 import org.yaml.snakeyaml.tokens.DirectiveToken;
29 import org.yaml.snakeyaml.tokens.DocumentStartToken;
30 import org.yaml.snakeyaml.tokens.FlowEntryToken;
31 import org.yaml.snakeyaml.tokens.FlowMappingEndToken;
32 import org.yaml.snakeyaml.tokens.FlowMappingStartToken;
33 import org.yaml.snakeyaml.tokens.FlowSequenceEndToken;
34 import org.yaml.snakeyaml.tokens.FlowSequenceStartToken;
35 import org.yaml.snakeyaml.tokens
52 public ArrayList<Token> tokens; field in class:CanonicalScanner
[all...]
/external/webp/src/utils/
huffman_encode_utils.c
262 HuffmanTreeToken* tokens,
266 tokens->code = value;
267 tokens->extra_bits = 0;
268 ++tokens;
275 tokens->code = value;
276 tokens->extra_bits = 0;
277 ++tokens;
281 tokens->code = 16;
282 tokens->extra_bits = repetitions - 3;
283 ++tokens;
261 CodeRepeatedValues(int repetitions, HuffmanTreeToken* tokens, int value, int prev_value) argument
295 CodeRepeatedZeros(int repetitions, HuffmanTreeToken* tokens) argument
326 VP8LCreateCompressedHuffmanTree(const HuffmanTreeCode* const tree, HuffmanTreeToken* tokens, int max_tokens) argument
[all...]
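CodeRepeatedValues/CodeRepeatedZeros emit DEFLATE-style run-length tokens for compressing Huffman code lengths: an ordinary token stores the value itself with extra_bits = 0, while code 16 means "repeat the previous value", with extra_bits = repetitions - 3, as the snippet shows. A simplified Python sketch of the repeat-previous case (illustration only; the 3..6 run limit is standard DEFLATE behavior I'm assuming here, and the real code also uses codes 17/18 for zero runs):

    def code_repeated_values(repetitions, value, prev_value):
        tokens = []
        if value != prev_value:
            tokens.append({'code': value, 'extra_bits': 0})  # emit it once literally
            repetitions -= 1
        while repetitions >= 3:
            run = min(repetitions, 6)            # code 16 covers runs of 3..6
            tokens.append({'code': 16, 'extra_bits': run - 3})
            repetitions -= run
        while repetitions > 0:                   # leftovers too short for code 16
            tokens.append({'code': value, 'extra_bits': 0})
            repetitions -= 1
        return tokens

    print(code_repeated_values(7, 5, 0))
    # [{'code': 5, 'extra_bits': 0}, {'code': 16, 'extra_bits': 3}]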

