Searched refs:tokenize (Results 1 - 25 of 92) sorted by relevance

/external/selinux/libsepol/include/sepol/policydb/
util.h 38  * The tokenize function may be used to
41 extern int tokenize(char *line_buf, char delim, int num_args, ...);
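The declaration above splits a line into a caller-supplied number of output arguments. As a rough, hedged sketch of how such a varargs splitter can be called and implemented, the following self-contained C++ program mimics that signature; it is not libsepol's implementation, and details such as quoting or repeated-delimiter handling are assumptions.

// Illustrative sketch only: a varargs tokenizer with the same general shape as
// the libsepol declaration above. Not the libsepol implementation; behaviour
// for quoting, repeated delimiters, etc. is an assumption.
#include <cstdarg>
#include <cstdio>
#include <cstring>

static int tokenize_sketch(char *line_buf, char delim, int num_args, ...)
{
    va_list ap;
    int found = 0;
    char *cur = line_buf;

    va_start(ap, num_args);
    while (found < num_args && cur != nullptr && *cur != '\0') {
        char **out = va_arg(ap, char **);
        char *end = std::strchr(cur, delim);

        if (end != nullptr)
            *end = '\0';            // terminate the current token in place
        *out = cur;                 // hand back a pointer into line_buf
        ++found;
        cur = (end != nullptr) ? end + 1 : nullptr;
    }
    va_end(ap);
    return found;                   // number of output arguments filled
}

int main()
{
    char line[] = "allow domain type";
    char *a = nullptr, *b = nullptr, *c = nullptr;

    if (tokenize_sketch(line, ' ', 3, &a, &b, &c) == 3)
        std::printf("%s | %s | %s\n", a, b, c);
    return 0;
}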
/external/deqp/framework/randomshaders/
rsgStatement.hpp 44 virtual void tokenize (GeneratorState& state, TokenStream& str) const = DE_NULL;
62 void tokenize (GeneratorState& state, TokenStream& str) const;
78 void tokenize (GeneratorState& state, TokenStream& str) const;
98 void tokenize (GeneratorState& state, TokenStream& str) const;
119 void tokenize (GeneratorState& state, TokenStream& str) const;
145 void tokenize (GeneratorState& state, TokenStream& str) const;
rsgExpression.hpp 57 virtual void tokenize (GeneratorState& state, TokenStream& str) const = DE_NULL;
74 void tokenize (GeneratorState& state, TokenStream& str) const { DE_UNREF(state); str << Token(m_variable->getName()); } function in class:rsg::VariableAccess
113 void tokenize (GeneratorState& state, TokenStream& str) const;
131 void tokenize (GeneratorState& state, TokenStream& str) const;
149 void tokenize (GeneratorState& state, TokenStream& str) const;
167 void tokenize (GeneratorState& state, TokenStream& str) const;
189 void tokenize (GeneratorState& state, TokenStream& str) const;
214 void tokenize (GeneratorState& state, TokenStream& str) const;
233 void tokenize (GeneratorState& state, TokenStream& str) const;
255 void tokenize (GeneratorStat
[all...]
rsgShader.cpp 93 void Shader::tokenize (GeneratorState& state, TokenStream& str) const function in class:rsg::Shader
101 m_globalStatements[ndx]->tokenize(state, str);
107 m_functions[ndx]->tokenize(state, str);
112 m_mainFunction.tokenize(state, str);
125 void Function::tokenize (GeneratorState& state, TokenStream& str) const function in class:rsg::Function
147 m_functionBlock.tokenize(state, str);
rsgStatement.cpp 203 void BlockStatement::tokenize (GeneratorState& state, TokenStream& str) const function in class:rsg::BlockStatement
208 (*i)->tokenize(state, str);
219 void ExpressionStatement::tokenize (GeneratorState& state, TokenStream& str) const function in class:rsg::ExpressionStatement
222 m_expression->tokenize(state, str);
333 void DeclarationStatement::tokenize (GeneratorState& state, TokenStream& str) const function in class:rsg::DeclarationStatement
340 m_expression->tokenize(state, str);
456 void ConditionalStatement::tokenize (GeneratorState& state, TokenStream& str) const function in class:rsg::ConditionalStatement
462 m_condition->tokenize(state, str);
469 m_trueStatement->tokenize(state, str);
473 m_trueStatement->tokenize(stat
556 void AssignStatement::tokenize (GeneratorState& state, TokenStream& str) const function in class:rsg::AssignStatement
[all...]
rsgShader.hpp 54 void tokenize (GeneratorState& state, TokenStream& stream) const;
106 void tokenize (GeneratorState& state, TokenStream& str) const;
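Across rsgStatement.hpp, rsgExpression.hpp, rsgShader.cpp and rsgStatement.cpp the hits share one shape: every node overrides a virtual tokenize(GeneratorState&, TokenStream&) and forwards to its children. A minimal sketch of that pattern, with stand-in Token and TokenStream types rather than deqp's real classes, looks like this.

// Minimal sketch of the tokenize() pattern visible above: each AST node
// overrides a virtual tokenize() and recurses into its children. Token and
// TokenStream here are stand-ins, not deqp's real classes.
#include <iostream>
#include <memory>
#include <string>
#include <vector>

using Token = std::string;

struct TokenStream {
    std::vector<Token> tokens;
    TokenStream& operator<<(const Token& t) { tokens.push_back(t); return *this; }
};

struct Statement {
    virtual ~Statement() = default;
    virtual void tokenize(TokenStream& str) const = 0;  // deqp also passes a GeneratorState&
};

struct ExpressionStatement : Statement {
    std::string expr;
    explicit ExpressionStatement(std::string e) : expr(std::move(e)) {}
    void tokenize(TokenStream& str) const override { str << expr << ";"; }
};

struct BlockStatement : Statement {
    std::vector<std::unique_ptr<Statement>> children;
    void tokenize(TokenStream& str) const override {
        str << "{";
        for (const auto& child : children)
            child->tokenize(str);                       // recurse, as in BlockStatement::tokenize
        str << "}";
    }
};

int main()
{
    BlockStatement block;
    block.children.push_back(std::make_unique<ExpressionStatement>("x = 1"));
    block.children.push_back(std::make_unique<ExpressionStatement>("y = x"));

    TokenStream ts;
    block.tokenize(ts);
    for (const auto& t : ts.tokens)
        std::cout << t << ' ';                          // prints: { x = 1 ; y = x ; }
    std::cout << '\n';
}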
/external/google-breakpad/src/testing/scripts/generator/cpp/
ast.py 46 from cpp import tokenize namespace
549 if parts[-1].token_type == tokenize.NAME:
579 if (type_name and type_name[-1].token_type == tokenize.NAME and
580 p.token_type == tokenize.NAME):
581 type_name.append(tokenize.Token(tokenize.SYNTAX, ' ', 0, 0))
738 if token.token_type == tokenize.NAME:
749 if next.token_type == tokenize.SYNTAX and next.name == '(':
754 syntax = tokenize.SYNTAX
763 new_temp = self._GetTokensUpTo(tokenize
[all...]
/external/chromium-trace/catapult/common/py_utils/py_utils/refactor/
offset_token.py 8 import tokenize namespace
66 tokenize_tokens = tokenize.generate_tokens(f.readline)
90 while offset_tokens[0].type == tokenize.NL:
100 # Convert OffsetTokens to tokenize tokens.
113 # tokenize can't handle whitespace before line continuations.
115 return tokenize.untokenize(tokenize_tokens).replace('\\\n', ' \\\n')
/external/minijail/
util.h 56 char *tokenize(char **stringp, const char *delim);
util.c 99 while ((group = tokenize(&constant_str, "|")) != NULL) {
141 char *tokenize(char **stringp, const char *delim) function
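The util.c hit shows the intended consumption pattern: repeatedly pull the next delimiter-separated token out of a mutable string pointer, strsep(3)-style. Below is a self-contained sketch of that loop built around a local helper; whether minijail's tokenize() skips empty tokens or differs in other details is not assumed here.

// Sketch of the strsep-style loop from the util.c hit. next_token() is a local
// stand-in, not minijail's tokenize().
#include <cstdio>
#include <cstring>

static char *next_token(char **stringp, const char *delim)
{
    char *start = *stringp;
    if (start == nullptr)
        return nullptr;

    char *end = std::strpbrk(start, delim);
    if (end != nullptr) {
        *end = '\0';              // terminate this token
        *stringp = end + 1;       // continue after the delimiter next time
    } else {
        *stringp = nullptr;       // last token: nothing left to scan
    }
    return start;
}

int main()
{
    char buf[] = "O_RDONLY|O_CLOEXEC|O_NONBLOCK";
    char *constant_str = buf;
    char *group;

    // Mirrors: while ((group = tokenize(&constant_str, "|")) != NULL) { ... }
    while ((group = next_token(&constant_str, "|")) != nullptr)
        std::printf("constant: %s\n", group);
    return 0;
}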
/external/autotest/utils/
reindent.py 44 import tokenize namespace
162 # that we can use tokenize's 1-based line numbering easily.
171 # signal that tokenize doesn't know what to do about them;
176 tokenize.tokenize(self.getline, self.tokeneater)
247 # Line-getter for tokenize.
256 # Line-eater for tokenize.
258 INDENT=tokenize.INDENT,
259 DEDENT=tokenize.DEDENT,
260 NEWLINE=tokenize
[all...]
/external/chromium-trace/catapult/third_party/coverage/coverage/
phystokens.py 11 import tokenize namespace
21 tokenize.generate_tokens() doesn't return a token for the backslash that
50 if last_ttype == tokenize.COMMENT:
91 ws_tokens = set([token.INDENT, token.DEDENT, token.NEWLINE, tokenize.NL])
115 tok_class = tokenize.tok_name.get(ttype, 'xx').lower()[:3]
129 """A one-element cache around tokenize.generate_tokens.
136 actually tokenize twice.
145 """A stand-in for `tokenize.generate_tokens`."""
149 self.last_tokens = list(tokenize.generate_tokens(readline))
172 # This is mostly code adapted from Py3.2's tokenize modul
[all...]
/external/webrtc/webrtc/base/
stringencode_unittest.cc 220 EXPECT_EQ(5ul, tokenize("one two three four five", ' ', &fields));
222 EXPECT_EQ(1ul, tokenize("one", ' ', &fields));
226 EXPECT_EQ(5ul, tokenize(" one two three four five ", ' ', &fields));
228 EXPECT_EQ(1ul, tokenize(" one ", ' ', &fields));
230 EXPECT_EQ(0ul, tokenize(" ", ' ', &fields));
237 tokenize("find middle one", ' ', &fields);
243 tokenize(" find middle one ", ' ', &fields);
247 tokenize(" ", ' ', &fields);
267 ASSERT_EQ(0ul, tokenize("D \"A B", ' ', '(', ')', NULL));
270 tokenize("
[all...]
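The unit-test hits pin down the observable behaviour of this tokenize(): split on a single character, drop leading, trailing and repeated delimiters, and return the field count. The stand-alone function below is consistent with those expectations but is not webrtc's implementation, and the quote-aware overload exercised near line 267 is not reproduced.

// Illustrative splitter consistent with the EXPECT_EQ checks above (empty
// fields are dropped, the field count is returned). Not webrtc's actual code.
#include <cassert>
#include <string>
#include <vector>

static size_t tokenize(const std::string& source, char delimiter,
                       std::vector<std::string>* fields)
{
    fields->clear();
    std::string::size_type last = 0;
    for (std::string::size_type i = 0; i < source.size(); ++i) {
        if (source[i] == delimiter) {
            if (i != last)                        // skip empty fields
                fields->push_back(source.substr(last, i - last));
            last = i + 1;
        }
    }
    if (last != source.size())
        fields->push_back(source.substr(last));   // trailing field, if any
    return fields->size();
}

int main()
{
    std::vector<std::string> fields;
    assert(tokenize("one two three four five", ' ', &fields) == 5u);
    assert(tokenize(" one two three four five ", ' ', &fields) == 5u);
    assert(tokenize(" one ", ' ', &fields) == 1u && fields[0] == "one");
    assert(tokenize(" ", ' ', &fields) == 0u);
    return 0;
}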
/external/apache-xml/src/main/java/org/apache/xpath/compiler/
Lexer.java 96 void tokenize(String pat) throws javax.xml.transform.TransformerException method in class:Lexer
98 tokenize(pat, null);
109 void tokenize(String pat, Vector targetStrings) method in class:Lexer
/external/chromium-trace/catapult/third_party/vinn/third_party/parse5/test/fixtures/
tokenizer_test.js 6 function tokenize(html, initialState, lastStartTag) { function
173 var out = tokenize(test.input, test.initialState, test.lastStartTag);
/external/mockftpserver/MockFtpServer/src/test/groovy/org/mockftpserver/fake/command/
NlstCommandHandlerTest.groovy 51 def actualLines = session.sentData[0].tokenize(endOfLine()) as Set
/external/mockftpserver/tags/2.0/src/test/groovy/org/mockftpserver/fake/command/
NlstCommandHandlerTest.groovy 51 def actualLines = session.sentData[0].tokenize(endOfLine()) as Set
/external/mockftpserver/tags/2.0-rc1/MockFtpServer/src/test/groovy/org/mockftpserver/fake/command/
NlstCommandHandlerTest.groovy 51 def actualLines = session.sentData[0].tokenize(endOfLine()) as Set
/external/mockftpserver/tags/2.0-rc1/src/test/groovy/org/mockftpserver/fake/command/
NlstCommandHandlerTest.groovy 51 def actualLines = session.sentData[0].tokenize(endOfLine()) as Set
/external/mockftpserver/tags/2.0-rc3/MockFtpServer/src/test/groovy/org/mockftpserver/fake/command/
NlstCommandHandlerTest.groovy 51 def actualLines = session.sentData[0].tokenize(endOfLine()) as Set
/external/mockftpserver/tags/2.0.1/src/test/groovy/org/mockftpserver/fake/command/
NlstCommandHandlerTest.groovy 51 def actualLines = session.sentData[0].tokenize(endOfLine()) as Set
/external/mockftpserver/tags/2.0.2/src/test/groovy/org/mockftpserver/fake/command/
NlstCommandHandlerTest.groovy 51 def actualLines = session.sentData[0].tokenize(endOfLine()) as Set
/external/mockftpserver/tags/2.1/src/test/groovy/org/mockftpserver/fake/command/
NlstCommandHandlerTest.groovy 51 def actualLines = session.sentData[0].tokenize(endOfLine()) as Set
/external/mockftpserver/tags/2.2/src/test/groovy/org/mockftpserver/fake/command/
NlstCommandHandlerTest.groovy 51 def actualLines = session.sentData[0].tokenize(endOfLine()) as Set
/external/mockftpserver/tags/2.3/src/test/groovy/org/mockftpserver/fake/command/
NlstCommandHandlerTest.groovy 51 def actualLines = session.sentData[0].tokenize(endOfLine()) as Set

Completed in 1778 milliseconds
