Searched refs:tokens (Results 1 - 25 of 599) sorted by relevance

/external/mesa3d/src/glsl/glcpp/tests/
000-content-with-spaces.c
1 this is four tokens
/external/antlr/antlr-3.4/runtime/Python/tests/
t048rewrite.py
25 tokens = self._parse("abc")
26 tokens.insertBefore(0, "0")
28 result = tokens.toString()
34 tokens = self._parse("abc")
35 tokens.insertAfter(2, "x")
37 result = tokens.toString()
43 tokens = self._parse("abc")
44 tokens.insertBefore(1, "x")
45 tokens.insertAfter(1, "x")
47 result = tokens
[all...]
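
Aside: the t048rewrite.py excerpt exercises ANTLR 3's token-rewrite API (insertBefore/insertAfter keyed by token index, rendered by toString). The toy class below is only a sketch of those semantics under the assumption that insertions are buffered and applied at render time; it is not the antlr3 runtime, and all names in it are made up.

    # Toy illustration of the rewrite semantics the t048rewrite.py assertions
    # exercise: insertions are recorded against token indexes and only applied
    # when the stream is rendered. This is NOT the antlr3 runtime, just a sketch.
    class ToyRewriteStream:
        def __init__(self, tokens):
            self.tokens = list(tokens)        # e.g. ["a", "b", "c"]
            self.before = {}                  # token index -> inserted text
            self.after = {}

        def insertBefore(self, index, text):
            self.before[index] = self.before.get(index, "") + text

        def insertAfter(self, index, text):
            self.after[index] = self.after.get(index, "") + text

        def toString(self):
            out = []
            for i, tok in enumerate(self.tokens):
                out.append(self.before.get(i, ""))
                out.append(tok)
                out.append(self.after.get(i, ""))
            return "".join(out)

    stream = ToyRewriteStream("abc")
    stream.insertBefore(0, "0")
    assert stream.toString() == "0abc"        # mirrors the first test case above
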
/external/chromium-trace/catapult/third_party/py_vulcanize/py_vulcanize/
strip_js_comments_unittest.py
29 tokens = list(strip_js_comments._TokenizeJS(''))
30 self.assertEquals([], tokens)
33 tokens = list(strip_js_comments._TokenizeJS('\n'))
34 self.assertEquals(['\n'], tokens)
37 tokens = list(strip_js_comments._TokenizeJS('A // foo'))
38 self.assertEquals(['A ', '//', ' foo'], tokens)
41 tokens = list(strip_js_comments._TokenizeJS('A // foo\nbar'))
42 self.assertEquals(['A ', '//', ' foo', '\n', 'bar'], tokens)
45 tokens = list(strip_js_comments._TokenizeJS('A /* foo */'))
46 self.assertEquals(['A ', '/*', ' foo ', '*/'], tokens)
[all...]
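
Aside: the strip_js_comments tests above fully specify what _TokenizeJS should return for comment markers and newlines. The sketch below is a hypothetical stand-in (not the py_vulcanize implementation) that reproduces exactly those asserted outputs with one regex split; tokenize_js and _JS_TOKEN_RE are names of my own.

    import re

    # Hypothetical re-implementation of the behaviour the unit tests above assert:
    # split on '//', '/*', '*/' and newlines, keeping the separators as tokens.
    _JS_TOKEN_RE = re.compile(r'(//|/\*|\*/|\n)')

    def tokenize_js(text):
        return [tok for tok in _JS_TOKEN_RE.split(text) if tok]

    assert tokenize_js('') == []
    assert tokenize_js('\n') == ['\n']
    assert tokenize_js('A // foo') == ['A ', '//', ' foo']
    assert tokenize_js('A // foo\nbar') == ['A ', '//', ' foo', '\n', 'bar']
    assert tokenize_js('A /* foo */') == ['A ', '/*', ' foo ', '*/']
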
/external/antlr/antlr-3.4/runtime/Python/antlr3/
__init__.py
152 from tokens import *
/external/mesa3d/src/gallium/targets/graw-null/
graw_util.c
18 struct tgsi_token tokens[1024]; local
21 if (!tgsi_text_translate(text, tokens, Elements(tokens)))
24 state.tokens = tokens;
32 struct tgsi_token tokens[1024]; local
35 if (!tgsi_text_translate(text, tokens, Elements(tokens)))
38 state.tokens = tokens;
46 struct tgsi_token tokens[1024]; local
[all...]
/external/clang/bindings/python/tests/cindex/
test_tokens.py
15 tokens = list(tu.get_tokens(extent=r))
17 assert len(tokens) == 5
18 assert tokens[1].spelling == 'i'
19 assert tokens[1].kind == TokenKind.IDENTIFIER
21 cursor = tokens[1].cursor
23 assert tokens[1].cursor == tokens[2].cursor
31 tokens = list(tu.get_tokens(extent=r))
32 eq_(len(tokens), 4)
34 loc = tokens[
[all...]
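
Aside: the cindex test above walks the tokens of a translation-unit extent and checks spelling, kind and owning cursor. Below is a minimal example of the same clang.cindex calls, assuming libclang and the clang Python bindings are importable; the file name 't.c' and the source string are placeholders of mine, not taken from the test.

    from clang.cindex import TranslationUnit, TokenKind

    # Parse an in-memory buffer; 't.c' is only a placeholder name for it.
    src = 'int i = 7;'
    tu = TranslationUnit.from_source('t.c', unsaved_files=[('t.c', src)])

    tokens = list(tu.get_tokens(extent=tu.cursor.extent))
    print([t.spelling for t in tokens])          # ['int', 'i', '=', '7', ';']
    assert tokens[1].kind == TokenKind.IDENTIFIER
    print(tokens[1].cursor.kind)                 # cursor that owns the 'i' token
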
/external/antlr/antlr-3.4/tool/src/test/java/org/antlr/test/
TestCommonTokenStream.java
52 BufferedTokenStream tokens = new BufferedTokenStream(lexEngine);
54 String result = tokens.LT(1).getText();
73 BufferedTokenStream tokens = new BufferedTokenStream(lexEngine);
75 String result = tokens.LT(2).getText();
94 BufferedTokenStream tokens = new BufferedTokenStream(lexEngine);
97 Token t = tokens.LT(i);
100 t = tokens.LT(i);
102 tokens.LT(i++); // push it past end
103 tokens.LT(i++);
105 String result = tokens
[all...]
TestTokenRewriteStream.java
51 TokenRewriteStream tokens = new TokenRewriteStream(lexEngine);
52 tokens.insertBefore(0, "0");
53 String result = tokens.toString();
66 TokenRewriteStream tokens = new TokenRewriteStream(lexEngine);
67 tokens.insertAfter(2, "x");
68 String result = tokens.toString();
81 TokenRewriteStream tokens = new TokenRewriteStream(lexEngine);
82 tokens.fill();
83 tokens.insertBefore(1, "x");
84 tokens
[all...]
/external/mesa3d/src/gallium/auxiliary/tgsi/
tgsi_sanity.h
46 const struct tgsi_token *tokens );
tgsi_build.h
62 struct tgsi_token *tokens,
76 struct tgsi_token *tokens,
90 struct tgsi_token *tokens,
107 struct tgsi_token *tokens,
tgsi_text.h
42 struct tgsi_token *tokens,
/external/mesa3d/src/gallium/drivers/r300/compiler/tests/
rc_test_helpers.c
95 struct src_tokens tokens; local
106 tokens.Negate.String = src_str + matches[1].rm_so;
107 tokens.Negate.Length = match_length(matches, 1);
108 tokens.Abs.String = src_str + matches[2].rm_so;
109 tokens.Abs.Length = match_length(matches, 2);
110 tokens.File.String = src_str + matches[3].rm_so;
111 tokens.File.Length = match_length(matches, 3);
112 tokens.Index.String = src_str + matches[4].rm_so;
113 tokens.Index.Length = match_length(matches, 4);
114 tokens
222 struct dst_tokens tokens; local
321 struct inst_tokens tokens; local
[all...]
/external/chromium-trace/catapult/third_party/polymer/components/web-animations-js/src/
position-handler.js
40 var tokens = result[0];
41 tokens[0] = tokens[0] || 'center';
42 tokens[1] = tokens[1] || 'center';
44 tokens[2] = tokens[2] || {px: 0};
46 if (tokens.length != slots) {
50 if (/top|bottom/.test(tokens[0]) || /left|right/.test(tokens[
[all...]
/external/google-breakpad/src/processor/
tokenize.cc
48 vector<char*> *tokens) {
49 tokens->clear();
50 tokens->reserve(max_tokens);
54 // Split tokens on the separator character.
59 tokens->push_back(token);
66 tokens->push_back(token);
69 return tokens->size() == static_cast<unsigned int>(max_tokens);
45 Tokenize(char *line, const char *separators, int max_tokens, vector<char*> *tokens) argument
/external/antlr/antlr-3.4/runtime/Python/unittests/
testrecognizers.py
33 self.tokens = [
42 return self.tokens.pop(0)
46 tokens = []
48 tokens.append(token.type)
50 self.failUnlessEqual(tokens, [1, 2, 3, 4])
/external/antlr/antlr-3.4/runtime/JavaScript/tests/functional/
rhino-python.extensions
33 tokens should be sent to the parser sometimes without a corresponding
48 This TokenStream normally just passes tokens through to the parser.
63 A queue of tokens is built up to hold multiple DEDENT tokens that
77 /** The queue of tokens */
78 this.tokens = [];
117 if (this.tokens.length>0 ) {
118 var t = this.tokens[0];
119 this.tokens.splice(0,1);
137 this.tokens
[all...]
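
Aside: the rhino-python.extensions excerpt describes a token source that queues up several DEDENT tokens so indentation changes can be fed to the parser one token at a time. The generator below is only a rough Python sketch of that indent-stack-plus-queue idea; the token tuples and names are illustrative, not the JavaScript runtime's.

    # Sketch of the "queue of DEDENT tokens" idea described above: compare each
    # line's indentation against a stack and emit INDENT/DEDENT tokens, queuing
    # several DEDENTs when indentation drops by more than one level.
    def indentation_tokens(lines):
        stack = [0]                    # indentation levels currently open
        pending = []                   # queued tokens, drained in order
        for line in lines:
            if not line.strip():
                continue               # blank lines do not change indentation
            width = len(line) - len(line.lstrip(' '))
            if width > stack[-1]:
                stack.append(width)
                pending.append(('INDENT', line))
            while width < stack[-1]:
                stack.pop()
                pending.append(('DEDENT', line))
            pending.append(('LINE', line.strip()))
            while pending:
                yield pending.pop(0)
        while len(stack) > 1:          # close any still-open blocks at EOF
            stack.pop()
            yield ('DEDENT', '<EOF>')

    for tok in indentation_tokens(['if x:', '    y()', 'z()']):
        print(tok)                     # LINE, INDENT, LINE, DEDENT, LINE
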
/external/snakeyaml/src/test/java/org/pyyaml/
CanonicalScanner.java
26 import org.yaml.snakeyaml.tokens.AliasToken;
27 import org.yaml.snakeyaml.tokens.AnchorToken;
28 import org.yaml.snakeyaml.tokens.DirectiveToken;
29 import org.yaml.snakeyaml.tokens.DocumentStartToken;
30 import org.yaml.snakeyaml.tokens.FlowEntryToken;
31 import org.yaml.snakeyaml.tokens.FlowMappingEndToken;
32 import org.yaml.snakeyaml.tokens.FlowMappingStartToken;
33 import org.yaml.snakeyaml.tokens.FlowSequenceEndToken;
34 import org.yaml.snakeyaml.tokens.FlowSequenceStartToken;
35 import org.yaml.snakeyaml.tokens
52 public ArrayList<Token> tokens; field in class:CanonicalScanner
[all...]
/external/opencv3/3rdparty/libwebp/utils/
huffman_encode.c
283 HuffmanTreeToken* tokens,
287 tokens->code = value;
288 tokens->extra_bits = 0;
289 ++tokens;
296 tokens->code = value;
297 tokens->extra_bits = 0;
298 ++tokens;
302 tokens->code = 16;
303 tokens->extra_bits = repetitions - 3;
304 ++tokens;
282 CodeRepeatedValues(int repetitions, HuffmanTreeToken* tokens, int value, int prev_value) argument
316 CodeRepeatedZeros(int repetitions, HuffmanTreeToken* tokens) argument
347 VP8LCreateCompressedHuffmanTree(const HuffmanTreeCode* const tree, HuffmanTreeToken* tokens, int max_tokens) argument
[all...]
/external/webp/src/utils/
huffman_encode.c
262 HuffmanTreeToken* tokens,
266 tokens->code = value;
267 tokens->extra_bits = 0;
268 ++tokens;
275 tokens->code = value;
276 tokens->extra_bits = 0;
277 ++tokens;
281 tokens->code = 16;
282 tokens->extra_bits = repetitions - 3;
283 ++tokens;
261 CodeRepeatedValues(int repetitions, HuffmanTreeToken* tokens, int value, int prev_value) argument
295 CodeRepeatedZeros(int repetitions, HuffmanTreeToken* tokens) argument
326 VP8LCreateCompressedHuffmanTree(const HuffmanTreeCode* const tree, HuffmanTreeToken* tokens, int max_tokens) argument
[all...]
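
Aside: both copies of huffman_encode.c above tokenize Huffman code lengths with a run-length scheme: a literal length token (extra_bits = 0) followed by a repeat token whose code is 16 and whose extra bits hold repetitions minus 3. The sketch below illustrates that idea only; the 3..6 repeat window is an assumption borrowed from the DEFLATE-style convention, and the zero-run handling done by CodeRepeatedZeros() is not covered.

    # Illustrative run-length tokenizer for Huffman code lengths, modelled on the
    # CodeRepeatedValues() excerpt above: emit the literal length once, then a
    # "repeat previous" token whose extra bits encode (repetitions - 3).
    # The repeat code number (16) and the 3..6 run window are assumptions here.
    def rle_code_lengths(lengths):
        tokens = []                      # (code, extra_bits) pairs
        i = 0
        while i < len(lengths):
            value = lengths[i]
            run = 1
            while i + run < len(lengths) and lengths[i + run] == value:
                run += 1
            i += run
            tokens.append((value, 0))    # literal occurrence of the length
            run -= 1                     # remaining copies after the literal
            while run >= 3:
                chunk = min(run, 6)      # one repeat token covers 3..6 copies
                tokens.append((16, chunk - 3))
                run -= chunk
            tokens.extend((value, 0) for _ in range(run))   # 1-2 leftovers
        return tokens

    print(rle_code_lengths([5, 5, 5, 5, 5, 7, 7]))
    # [(5, 0), (16, 1), (7, 0), (7, 0)]
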
/external/webp/src/enc/
token.c
114 VP8TBuffer* const tokens) {
117 if (!AddToken(tokens, last >= 0, base_id + 0)) {
125 if (!AddToken(tokens, v != 0, base_id + 1)) {
129 if (!AddToken(tokens, v > 1, base_id + 2)) {
132 if (!AddToken(tokens, v > 4, base_id + 3)) {
133 if (AddToken(tokens, v != 2, base_id + 4))
134 AddToken(tokens, v == 4, base_id + 5);
135 } else if (!AddToken(tokens, v > 10, base_id + 6)) {
136 if (!AddToken(tokens, v > 6, base_id + 7)) {
137 AddConstantToken(tokens,
111 VP8RecordCoeffTokens(const int ctx, const int coeff_type, int first, int last, const int16_t* const coeffs, VP8TBuffer* const tokens) argument
232 const token_t* const tokens = TOKEN_DATA(p); local
258 const token_t* const tokens = TOKEN_DATA(p); local
[all...]
/external/antlr/antlr-3.4/runtime/CSharp2/Sources/Antlr3.Runtime/Antlr.Runtime/
LegacyCommonTokenStream.cs
43 * The most common stream of tokens is one where every token is buffered up
44 * and tokens are prefiltered for a certain channel (the parser will only
45 * see these tokens and cannot change the filter channel number during the
49 * <remarks>TODO: how to access the full token stream? How to track all tokens matched per rule?</remarks>
61 protected List<IToken> tokens; field in class:Antlr.Runtime.LegacyCommonTokenStream
66 /** <summary>Set<tokentype>; discard any tokens with this type</summary> */
69 /** <summary>Skip tokens on any channel but this one; this is how we skip whitespace...</summary> */
72 /** <summary>By default, track all incoming tokens</summary> */
79 * The index into the tokens list of the current token (next token
80 * to consume). p==-1 indicates that the tokens lis
[all...]
/external/antlr/antlr-3.4/runtime/CSharp3/Sources/Antlr3.Runtime/
LegacyCommonTokenStream.cs
41 * The most common stream of tokens is one where every token is buffered up
42 * and tokens are prefiltered for a certain channel (the parser will only
43 * see these tokens and cannot change the filter channel number during the
47 * <remarks>TODO: how to access the full token stream? How to track all tokens matched per rule?</remarks>
60 protected List<IToken> tokens; field in class:Antlr.Runtime.LegacyCommonTokenStream
65 /** <summary>Set<tokentype>; discard any tokens with this type</summary> */
68 /** <summary>Skip tokens on any channel but this one; this is how we skip whitespace...</summary> */
71 /** <summary>By default, track all incoming tokens</summary> */
78 * The index into the tokens list of the current token (next token
79 * to consume). p==-1 indicates that the tokens lis
[all...]
/external/antlr/antlr-3.4/runtime/Java/src/main/java/org/antlr/runtime/
BufferedTokenStream.java
35 /** Buffer all input tokens but do on-demand fetching of new tokens from
37 * proper lexing of future tokens. The ST template parser needs this,
43 * tokens to the parser. The stream can't ignore off-channel tokens.
47 * to confuse small moving window of tokens it uses for the full buffer.
57 protected List<Token> tokens = new ArrayList<Token>(100); field in class:BufferedTokenStream
62 /** The index into the tokens list of the current token (next token
63 * to consume). tokens[p] should be LT(1). p=-1 indicates need
108 public int size() { return tokens
[all...]
LegacyCommonTokenStream.java
32 /** The most common stream of tokens is one where every token is buffered up
33 * and tokens are prefiltered for a certain channel (the parser will only
34 * see these tokens and cannot change the filter channel number during the
37 * TODO: how to access the full token stream? How to track all tokens matched per rule?
45 protected List tokens; field in class:LegacyCommonTokenStream
50 /** Set<tokentype>; discard any tokens with this type */
53 /** Skip tokens on any channel but this one; this is how we skip whitespace... */
56 /** By default, track all incoming tokens */
64 /** The index into the tokens list of the current token (next token
65 * to consume). p==-1 indicates that the tokens lis
[all...]
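
Aside: the ANTLR runtime excerpts above (C# and Java) describe the same design: buffer every token the lexer produces, optionally discard some token types, and let the parser see only tokens on a single channel, with p indexing the next token to consume. The class below is a compact Python sketch of that channel-filtered buffer; ToyTokenStream and its tuple token format are inventions for illustration, not the ANTLR API.

    # Sketch of the buffered, channel-filtered token stream described above:
    # all tokens are kept, but LT(k)/consume() skip tokens that are off-channel
    # or whose type has been marked for discarding.
    class ToyTokenStream:
        DEFAULT_CHANNEL = 0

        def __init__(self, tokens, channel=DEFAULT_CHANNEL, discard_types=()):
            # tokens: list of (type, channel, text) tuples from a lexer
            self.tokens = [t for t in tokens if t[0] not in discard_types]
            self.channel = channel
            self.p = 0                       # index of the next token to consume

        def _skip_off_channel(self, i):
            while i < len(self.tokens) and self.tokens[i][1] != self.channel:
                i += 1
            return i

        def LT(self, k):
            """k-th on-channel token of lookahead (k >= 1), or None at EOF."""
            i = self._skip_off_channel(self.p)
            for _ in range(k - 1):
                i = self._skip_off_channel(i + 1)
            return self.tokens[i] if i < len(self.tokens) else None

        def consume(self):
            self.p = self._skip_off_channel(self.p) + 1

    WS_CHANNEL = 99                          # "hidden" channel for whitespace
    stream = ToyTokenStream([(1, 0, 'x'), (2, WS_CHANNEL, ' '), (3, 0, '=')])
    print(stream.LT(1))                      # (1, 0, 'x')
    print(stream.LT(2))                      # (3, 0, '=') -- whitespace skipped
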
/external/clang/test/Preprocessor/
macro_paste_msextensions.c
12 #define comment /##/ dead tokens live here
19 #define nested(x) int x comment cute little dead tokens...
21 nested(baz) rise of the dead tokens

Completed in 1289 milliseconds
