/external/parameter-framework/upstream/test/tokenizer/ |
H A D | Test.cpp | 46 GIVEN ("A default tokenizer") {
49 Tokenizer tokenizer("a bcd ef");
53 CHECK(tokenizer.split() == expected);
58 Tokenizer tokenizer("");
62 CHECK(tokenizer.split() == expected);
67 Tokenizer tokenizer(" a \n\t bc ");
71 CHECK(tokenizer.split() == expected);
76 GIVEN ("A slash-separated string and tokenizer") {
77 Tokenizer tokenizer("/a/bcd/ef g/h/", "/");
81 CHECK(tokenizer [all...]
|
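The Test.cpp excerpt above checks that Tokenizer::split() breaks "a bcd ef" on whitespace and "/a/bcd/ef g/h/" on a caller-supplied "/" delimiter. As a rough illustration only (the expected vectors are not visible in the excerpt, and the class under test is the parameter-framework Tokenizer, not a JDK type), the same splitting behaviour can be sketched with java.util.StringTokenizer:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.StringTokenizer;

    public class SplitSketch {
        // Hypothetical stand-in for Tokenizer::split(): collect every token into a list.
        static List<String> split(String input, String delimiters) {
            List<String> tokens = new ArrayList<>();
            StringTokenizer st = new StringTokenizer(input, delimiters);
            while (st.hasMoreTokens()) {
                tokens.add(st.nextToken());
            }
            return tokens;
        }

        public static void main(String[] args) {
            System.out.println(split("a bcd ef", " \t\n"));   // [a, bcd, ef]
            System.out.println(split("/a/bcd/ef g/h/", "/")); // [a, bcd, ef g, h]
        }
    }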
/external/chromium-trace/catapult/third_party/vinn/third_party/parse5/lib/tokenization/ |
H A D | location_info_mixin.js | 3 exports.assign = function (tokenizer) {
5 var tokenizerProto = Object.getPrototypeOf(tokenizer);
7 tokenizer.tokenStartLoc = -1;
10 tokenizer._attachLocationInfo = function (token) {
18 tokenizer._createStartTagToken = function (tagNameFirstCh) {
23 tokenizer._createEndTagToken = function (tagNameFirstCh) {
28 tokenizer._createCommentToken = function () {
33 tokenizer._createDoctypeToken = function (doctypeNameFirstCh) {
38 tokenizer._createCharacterToken = function (type, ch) {
44 tokenizer [all...]
|
/external/jacoco/org.jacoco.examples/build/src/main/java/org/jacoco/examples/parser/ |
H A D | ExpressionParser.java | 30 private final StreamTokenizer tokenizer; field in class:ExpressionParser
33 tokenizer = new StreamTokenizer(new StringReader(s));
34 tokenizer.ordinaryChar('(');
35 tokenizer.ordinaryChar(')');
36 tokenizer.ordinaryChar('+');
37 tokenizer.ordinaryChar('-');
38 tokenizer.ordinaryChar('*');
39 tokenizer.ordinaryChar('/');
43 tokenizer.nextToken();
82 e = new Const(tokenizer [all...]
|
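ExpressionParser drives java.io.StreamTokenizer: the parentheses and operators are registered as ordinary characters so they come back as single-character tokens, and nextToken() advances the stream. A minimal, self-contained sketch of that setup (the Const node and the rest of the expression grammar from the example are omitted):

    import java.io.IOException;
    import java.io.StreamTokenizer;
    import java.io.StringReader;

    public class StreamTokenizerSketch {
        public static void main(String[] args) throws IOException {
            StreamTokenizer tokenizer = new StreamTokenizer(new StringReader("2 * (3 + 4)"));
            // Make operators and parentheses single-character tokens instead of
            // comment starts, number signs, or word characters.
            tokenizer.ordinaryChar('(');
            tokenizer.ordinaryChar(')');
            tokenizer.ordinaryChar('+');
            tokenizer.ordinaryChar('-');
            tokenizer.ordinaryChar('*');
            tokenizer.ordinaryChar('/');
            while (tokenizer.nextToken() != StreamTokenizer.TT_EOF) {
                if (tokenizer.ttype == StreamTokenizer.TT_NUMBER) {
                    System.out.println("number: " + tokenizer.nval);
                } else {
                    System.out.println("symbol: " + (char) tokenizer.ttype);
                }
            }
        }
    }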
/external/antlr/antlr-3.4/runtime/CSharp2/Sources/Antlr3.Runtime/Antlr.Runtime.Tree/ |
H A D | TreePatternParser.cs | 37 protected TreePatternLexer tokenizer; field in class:Antlr.Runtime.Tree.TreePatternParser
42 public TreePatternParser(TreePatternLexer tokenizer, TreeWizard wizard, ITreeAdaptor adaptor) { argument
43 this.tokenizer = tokenizer;
46 ttype = tokenizer.NextToken(); // kickstart
66 ttype = tokenizer.NextToken();
90 ttype = tokenizer.NextToken();
98 ttype = tokenizer.NextToken();
102 label = tokenizer.sval.ToString();
103 ttype = tokenizer [all...]
|
/external/antlr/antlr-3.4/runtime/CSharp3/Sources/Antlr3.Runtime/Tree/ |
H A D | TreePatternParser.cs | 39 protected TreePatternLexer tokenizer; field in class:Antlr.Runtime.Tree.TreePatternParser
44 public TreePatternParser( TreePatternLexer tokenizer, TreeWizard wizard, ITreeAdaptor adaptor ) argument
46 this.tokenizer = tokenizer;
49 ttype = tokenizer.NextToken(); // kickstart
75 ttype = tokenizer.NextToken();
105 ttype = tokenizer.NextToken();
115 ttype = tokenizer.NextToken();
120 label = tokenizer.sval.ToString();
121 ttype = tokenizer [all...]
|
/external/deqp/framework/opengl/ |
H A D | gluVarTypeUtil.cpp | 92 VarTokenizer tokenizer(nameWithPath);
93 TCU_CHECK(tokenizer.getToken() == VarTokenizer::TOKEN_IDENTIFIER);
94 return tokenizer.getIdentifier();
99 VarTokenizer tokenizer(nameWithPath);
101 if (tokenizer.getToken() == VarTokenizer::TOKEN_IDENTIFIER)
102 tokenizer.advance();
105 while (tokenizer.getToken() != VarTokenizer::TOKEN_END)
109 if (tokenizer.getToken() == VarTokenizer::TOKEN_PERIOD)
111 tokenizer.advance();
112 TCU_CHECK(tokenizer [all...]
|
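gluVarTypeUtil's VarTokenizer walks a shader variable path such as "foo.bar[2].baz" one token at a time (identifier, period, bracketed index, end). The loop structure can be illustrated with a deliberately simplified, hypothetical Java version; none of these names come from the deqp sources, and the real implementation also validates array indices and reports errors through TCU_CHECK:

    import java.util.ArrayList;
    import java.util.List;

    public class VarPathSketch {
        // Split "foo.bar[2].baz" into its components: identifiers and bracketed indices.
        // Assumes well-formed input (every '[' has a matching ']').
        static List<String> parsePath(String nameWithPath) {
            List<String> components = new ArrayList<>();
            int pos = 0;
            while (pos < nameWithPath.length()) {
                char c = nameWithPath.charAt(pos);
                if (c == '.') {
                    pos++; // skip the separator, like advancing past TOKEN_PERIOD
                } else if (c == '[') {
                    int end = nameWithPath.indexOf(']', pos); // bracketed array index
                    components.add(nameWithPath.substring(pos, end + 1));
                    pos = end + 1;
                } else {
                    int end = pos; // identifier: read until the next '.' or '['
                    while (end < nameWithPath.length()
                            && nameWithPath.charAt(end) != '.'
                            && nameWithPath.charAt(end) != '[') {
                        end++;
                    }
                    components.add(nameWithPath.substring(pos, end));
                    pos = end;
                }
            }
            return components;
        }

        public static void main(String[] args) {
            System.out.println(parsePath("foo.bar[2].baz")); // [foo, bar, [2], baz]
        }
    }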
/external/doclava/src/com/google/doclava/apicheck/ |
H A D | ApiFile.java | 70 final Tokenizer tokenizer = new Tokenizer(filename, (new String(buf, 0, size)).toCharArray());
74 String token = tokenizer.getToken();
79 parsePackage(api, tokenizer);
81 throw new ApiParseException("expected package got " + token, tokenizer.getLine());
91 private static void parsePackage(ApiInfo api, Tokenizer tokenizer) argument
97 token = tokenizer.requireToken();
98 assertIdent(tokenizer, token);
100 pkg = new PackageInfo(name, tokenizer.pos());
101 token = tokenizer.requireToken();
103 throw new ApiParseException("expected '{' got " + token, tokenizer
116 parseClass(ApiInfo api, PackageInfo pkg, Tokenizer tokenizer, String token) argument
228 parseConstructor(Tokenizer tokenizer, ClassInfo cl, String token) argument
279 parseMethod(Tokenizer tokenizer, ClassInfo cl, String token) argument
366 parseField(Tokenizer tokenizer, ClassInfo cl, String token, boolean isEnum) argument
491 parseTypeParameterList(Tokenizer tokenizer, List<TypeInfo> methodTypeParameters, ClassInfo cl) argument
530 parseParameterList(Tokenizer tokenizer, AbstractMethodInfo method, HashSet<String> typeParameters, String token) argument
567 parseThrows(Tokenizer tokenizer, AbstractMethodInfo method) argument
599 assertIdent(Tokenizer tokenizer, String token) argument [all...]
|
/external/apache-xml/src/main/java/org/apache/xml/utils/ |
H A D | StylesheetPIHandler.java | 152 StringTokenizer tokenizer = new StringTokenizer(data, " \t=\n", true);
157 while (tokenizer.hasMoreTokens())
160 token = tokenizer.nextToken();
163 if (tokenizer.hasMoreTokens() &&
170 token = tokenizer.nextToken();
171 while (tokenizer.hasMoreTokens() &&
173 token = tokenizer.nextToken();
179 token = tokenizer.nextToken();
180 while (tokenizer.hasMoreTokens() &&
182 token = tokenizer [all...]
|
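StylesheetPIHandler parses the pseudo-attributes of an xml-stylesheet processing instruction with a StringTokenizer that returns its delimiters (the third constructor argument is true), so the '=' between a name and its quoted value comes back as a token of its own. A minimal sketch of that idiom, independent of the Xalan class (the PI data here is illustrative):

    import java.util.StringTokenizer;

    public class PseudoAttributeSketch {
        public static void main(String[] args) {
            String data = "type=\"text/xsl\" href=\"style.xsl\""; // illustrative PI data
            // returnDelims = true: whitespace and '=' are returned as one-character tokens.
            StringTokenizer tokenizer = new StringTokenizer(data, " \t=\n", true);
            while (tokenizer.hasMoreTokens()) {
                String token = tokenizer.nextToken();
                if (!token.trim().isEmpty()) {
                    System.out.println("token: " + token); // name, "=", then quoted value
                }
            }
        }
    }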
/external/protobuf/src/google/protobuf/io/ |
H A D | tokenizer_unittest.cc | 40 #include <google/protobuf/io/tokenizer.h>
249 // Set up the tokenizer.
254 Tokenizer tokenizer(&input, &error_collector);
257 EXPECT_EQ(Tokenizer::TYPE_START, tokenizer.current().type);
258 EXPECT_EQ("", tokenizer.current().text);
259 EXPECT_EQ(0, tokenizer.current().line);
260 EXPECT_EQ(0, tokenizer.current().column);
261 EXPECT_EQ(0, tokenizer.current().end_column);
264 ASSERT_TRUE(tokenizer.Next());
267 EXPECT_EQ(kSimpleTokenCases_case.type, tokenizer [all...]
|
/external/antlr/antlr-3.4/runtime/Java/src/main/java/org/antlr/runtime/tree/ |
H A D | TreePatternParser.java | 34 protected TreePatternLexer tokenizer; field in class:TreePatternParser
39 public TreePatternParser(TreePatternLexer tokenizer, TreeWizard wizard, TreeAdaptor adaptor) { argument
40 this.tokenizer = tokenizer;
43 ttype = tokenizer.nextToken(); // kickstart
64 ttype = tokenizer.nextToken();
89 ttype = tokenizer.nextToken();
97 ttype = tokenizer.nextToken();
101 label = tokenizer.sval.toString();
102 ttype = tokenizer [all...]
|
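TreePatternParser is internal to the ANTLR 3 runtime; application code normally reaches it through TreeWizard, whose create() and parse() methods feed a pattern string through TreePatternLexer/TreePatternParser. The sketch below is an assumption-laden illustration of that entry point, not an excerpt from the ANTLR sources; the constructor overload and pattern syntax should be checked against the 3.4 runtime javadoc:

    import org.antlr.runtime.tree.CommonTree;
    import org.antlr.runtime.tree.CommonTreeAdaptor;
    import org.antlr.runtime.tree.TreeWizard;

    public class TreePatternSketch {
        public static void main(String[] args) {
            // Token-type names; the leading placeholder slots are reserved by the runtime.
            String[] tokenNames = {"<invalid>", "<EOR>", "<DOWN>", "<UP>", "PLUS", "INT"};
            TreeWizard wizard = new TreeWizard(new CommonTreeAdaptor(), tokenNames);

            // create() runs the tree-pattern lexer/parser over the pattern string.
            CommonTree tree = (CommonTree) wizard.create("(PLUS INT INT)");

            // parse() matches an existing tree against a pattern.
            System.out.println(wizard.parse(tree, "(PLUS INT INT)"));
        }
    }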
/external/emma/core/java12/com/vladium/emma/ |
H A D | AppLoggers.java | 59 final StringTokenizer tokenizer = new StringTokenizer (_filter, COMMA_DELIMITERS);
60 if (tokenizer.countTokens () > 0)
62 temp = new HashSet (tokenizer.countTokens ());
63 while (tokenizer.hasMoreTokens ())
65 temp.add (tokenizer.nextToken ());
|
/external/antlr/antlr-3.4/runtime/ObjC/Framework/ |
H A D | ANTLRTreePatternParser.h | 39 ANTLRTreePatternLexer *tokenizer; variable
50 - (id) initWithTokenizer:(ANTLRTreePatternLexer *)tokenizer
59 @property (retain) ANTLRTreePatternLexer *tokenizer; variable
|
/external/protobuf/python/google/protobuf/internal/ |
H A D | text_format_test.py | 608 tokenizer = text_format._Tokenizer(text.splitlines())
609 methods = [(tokenizer.ConsumeIdentifier, 'identifier1'),
611 (tokenizer.ConsumeString, 'string1'),
612 (tokenizer.ConsumeIdentifier, 'identifier2'),
614 (tokenizer.ConsumeInt32, 123),
615 (tokenizer.ConsumeIdentifier, 'identifier3'),
617 (tokenizer.ConsumeString, 'string'),
618 (tokenizer.ConsumeIdentifier, 'identifiER_4'),
620 (tokenizer.ConsumeFloat, 1.1e+2),
621 (tokenizer [all...]
|
/external/antlr/antlr-3.4/runtime/ObjC/ANTLR.framework/Headers/ |
H A D | ANTLRTreePatternParser.h | 39 ANTLRTreePatternLexer *tokenizer; variable
50 - (id) initWithTokenizer:(ANTLRTreePatternLexer *)tokenizer
|
/external/antlr/antlr-3.4/runtime/ObjC/ANTLR.framework/Versions/A/Headers/ |
H A D | ANTLRTreePatternParser.h | 39 ANTLRTreePatternLexer *tokenizer; variable
50 - (id) initWithTokenizer:(ANTLRTreePatternLexer *)tokenizer
|
/external/antlr/antlr-3.4/runtime/ObjC/ANTLR.framework/Versions/Current/Headers/ |
H A D | ANTLRTreePatternParser.h | 39 ANTLRTreePatternLexer *tokenizer; variable
50 - (id) initWithTokenizer:(ANTLRTreePatternLexer *)tokenizer
|
/external/mockftpserver/MockFtpServer/src/main/java/org/mockftpserver/stub/command/ |
H A D | AlloCommandHandler.java | 64 StringTokenizer tokenizer = new StringTokenizer(parametersString, RECORD_SIZE_DELIMITER);
65 invocationRecord.set(NUMBER_OF_BYTES_KEY, Integer.valueOf(tokenizer.nextToken()));
66 Assert.isTrue(tokenizer.hasMoreTokens(), "Missing record size: [" + parametersString + "]");
67 invocationRecord.set(RECORD_SIZE_KEY, Integer.valueOf(tokenizer.nextToken()));
|
/external/mockftpserver/branches/1.x_Branch/src/main/java/org/mockftpserver/stub/command/ |
H A D | AlloCommandHandler.java | 66 StringTokenizer tokenizer = new StringTokenizer(parametersString, RECORD_SIZE_DELIMITER);
67 invocationRecord.set(NUMBER_OF_BYTES_KEY, Integer.valueOf(tokenizer.nextToken()));
68 Assert.isTrue(tokenizer.hasMoreTokens(), "Missing record size: [" + parametersString + "]");
69 invocationRecord.set(RECORD_SIZE_KEY, Integer.valueOf(tokenizer.nextToken()));
|
/external/mockftpserver/tags/1.0/src/main/java/org/mockftpserver/stub/command/ |
H A D | AlloCommandHandler.java | 66 StringTokenizer tokenizer = new StringTokenizer(parametersString, RECORD_SIZE_DELIMITER);
67 invocationRecord.set(NUMBER_OF_BYTES_KEY, Integer.valueOf(tokenizer.nextToken()));
68 Assert.isTrue(tokenizer.hasMoreTokens(), "Missing record size: [" + parametersString + "]");
69 invocationRecord.set(RECORD_SIZE_KEY, Integer.valueOf(tokenizer.nextToken()));
|
/external/mockftpserver/tags/1.1/src/main/java/org/mockftpserver/stub/command/ |
H A D | AlloCommandHandler.java | 66 StringTokenizer tokenizer = new StringTokenizer(parametersString, RECORD_SIZE_DELIMITER);
67 invocationRecord.set(NUMBER_OF_BYTES_KEY, Integer.valueOf(tokenizer.nextToken()));
68 Assert.isTrue(tokenizer.hasMoreTokens(), "Missing record size: [" + parametersString + "]");
69 invocationRecord.set(RECORD_SIZE_KEY, Integer.valueOf(tokenizer.nextToken()));
|
/external/mockftpserver/tags/1.2/src/main/java/org/mockftpserver/stub/command/ |
H A D | AlloCommandHandler.java | 66 StringTokenizer tokenizer = new StringTokenizer(parametersString, RECORD_SIZE_DELIMITER);
67 invocationRecord.set(NUMBER_OF_BYTES_KEY, Integer.valueOf(tokenizer.nextToken()));
68 Assert.isTrue(tokenizer.hasMoreTokens(), "Missing record size: [" + parametersString + "]");
69 invocationRecord.set(RECORD_SIZE_KEY, Integer.valueOf(tokenizer.nextToken()));
|
/external/mockftpserver/tags/1.2.1/src/main/java/org/mockftpserver/stub/command/ |
H A D | AlloCommandHandler.java | 66 StringTokenizer tokenizer = new StringTokenizer(parametersString, RECORD_SIZE_DELIMITER);
67 invocationRecord.set(NUMBER_OF_BYTES_KEY, Integer.valueOf(tokenizer.nextToken()));
68 Assert.isTrue(tokenizer.hasMoreTokens(), "Missing record size: [" + parametersString + "]");
69 invocationRecord.set(RECORD_SIZE_KEY, Integer.valueOf(tokenizer.nextToken()));
|
/external/mockftpserver/tags/1.2.2/src/main/java/org/mockftpserver/stub/command/ |
H A D | AlloCommandHandler.java | 66 StringTokenizer tokenizer = new StringTokenizer(parametersString, RECORD_SIZE_DELIMITER);
67 invocationRecord.set(NUMBER_OF_BYTES_KEY, Integer.valueOf(tokenizer.nextToken()));
68 Assert.isTrue(tokenizer.hasMoreTokens(), "Missing record size: [" + parametersString + "]");
69 invocationRecord.set(RECORD_SIZE_KEY, Integer.valueOf(tokenizer.nextToken()));
|
/external/mockftpserver/tags/1.2.3/src/main/java/org/mockftpserver/stub/command/ |
H A D | AlloCommandHandler.java | 66 StringTokenizer tokenizer = new StringTokenizer(parametersString, RECORD_SIZE_DELIMITER);
67 invocationRecord.set(NUMBER_OF_BYTES_KEY, Integer.valueOf(tokenizer.nextToken()));
68 Assert.isTrue(tokenizer.hasMoreTokens(), "Missing record size: [" + parametersString + "]");
69 invocationRecord.set(RECORD_SIZE_KEY, Integer.valueOf(tokenizer.nextToken()));
|
/external/mockftpserver/tags/1.2.4/src/main/java/org/mockftpserver/stub/command/ |
H A D | AlloCommandHandler.java | 66 StringTokenizer tokenizer = new StringTokenizer(parametersString, RECORD_SIZE_DELIMITER);
67 invocationRecord.set(NUMBER_OF_BYTES_KEY, Integer.valueOf(tokenizer.nextToken()));
68 Assert.isTrue(tokenizer.hasMoreTokens(), "Missing record size: [" + parametersString + "]");
69 invocationRecord.set(RECORD_SIZE_KEY, Integer.valueOf(tokenizer.nextToken()));
|
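The AlloCommandHandler entries above (one per MockFtpServer branch and release tag) all follow the same pattern: tokenize the ALLO parameter string, convert the first token to the byte count, assert that a record-size token follows, and convert it as well. A stand-alone sketch of that parse-and-validate idiom (the delimiter characters and the sample argument are illustrative guesses, not the MockFtpServer constants):

    import java.util.StringTokenizer;

    public class AlloParseSketch {
        public static void main(String[] args) {
            String parametersString = "1024 R 128"; // hypothetical "ALLO <bytes> R <recordSize>" argument
            // ' ' and 'R' both act as delimiters, so only the two numbers come back as tokens.
            StringTokenizer tokenizer = new StringTokenizer(parametersString, " R");
            int numberOfBytes = Integer.parseInt(tokenizer.nextToken());
            if (!tokenizer.hasMoreTokens()) {
                throw new IllegalArgumentException("Missing record size: [" + parametersString + "]");
            }
            int recordSize = Integer.parseInt(tokenizer.nextToken());
            System.out.println("bytes=" + numberOfBytes + " recordSize=" + recordSize);
        }
    }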