Searched refs:Tokenizer (Results 1 - 25 of 85) sorted by relevance


/external/protobuf/src/google/protobuf/io/
tokenizer_unittest.cc:184 EXPECT_TRUE(Tokenizer::ParseInteger(text, kuint64max, &result));
199 Tokenizer::TokenType type;
209 { "hello", Tokenizer::TYPE_IDENTIFIER },
212 { "123", Tokenizer::TYPE_INTEGER },
213 { "0xab6", Tokenizer::TYPE_INTEGER },
214 { "0XAB6", Tokenizer::TYPE_INTEGER },
215 { "0X1234567", Tokenizer::TYPE_INTEGER },
216 { "0x89abcdef", Tokenizer::TYPE_INTEGER },
217 { "0x89ABCDEF", Tokenizer::TYPE_INTEGER },
218 { "01234567", Tokenizer
[all...]
tokenizer.h:53 class Tokenizer;
91 class LIBPROTOBUF_EXPORT Tokenizer {
93 // Construct a Tokenizer that reads and tokenizes text from the given
96 Tokenizer(ZeroCopyInputStream* input, ErrorCollector* error_collector);
97 ~Tokenizer();
200 // comes from a TYPE_FLOAT token parsed by Tokenizer. If it doesn't, the
205 // comes from a TYPE_STRING token parsed by Tokenizer. If it doesn't, the
215 // parsed by a Tokenizer, the result is undefined (possibly an assert
257 GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(Tokenizer);
394 inline const Tokenizer
[all...]
tokenizer.cc:110 // For instance, Tokenizer::ConsumeZeroOrMore<Whitespace>() will eat
187 Tokenizer::Tokenizer(ZeroCopyInputStream* input,
212 Tokenizer::~Tokenizer() {
223 void Tokenizer::NextChar() {
244 void Tokenizer::Refresh() {
274 inline void Tokenizer::RecordTo(string* target) {
279 inline void Tokenizer::StopRecording() {
291 inline void Tokenizer
[all...]
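
Pulling the pieces above together, a minimal sketch of driving google::protobuf::io::Tokenizer directly: the ArrayInputStream source, the sample text, and the error-collector subclass are illustrative assumptions, not part of these results.

    #include <iostream>
    #include <string>
    #include <google/protobuf/io/tokenizer.h>
    #include <google/protobuf/io/zero_copy_stream_impl_lite.h>

    using namespace google::protobuf;

    // Illustrative collector; the Tokenizer reports malformed input through
    // ErrorCollector::AddError (line/column/message).
    class StderrErrorCollector : public io::ErrorCollector {
     public:
      void AddError(int line, int column, const std::string& message) override {
        std::cerr << line << ":" << column << ": " << message << std::endl;
      }
    };

    int main() {
      const std::string text = "foo = 0xAB6  // trailing comment";
      io::ArrayInputStream input(text.data(), static_cast<int>(text.size()));
      StderrErrorCollector errors;
      io::Tokenizer tokenizer(&input, &errors);

      // Next() advances to the next token; current() exposes its type and text
      // (TYPE_IDENTIFIER, TYPE_INTEGER, ... as exercised in the unittest above).
      while (tokenizer.Next()) {
        std::cout << tokenizer.current().text << std::endl;
      }
      return 0;
    }
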
/external/parameter-framework/upstream/utility/
Tokenizer.h:37 /** Tokenizer class
40 * of delimiters (@see Tokenizer::defaultDelimiters).
42 class Tokenizer : private utility::NonCopyable
45 /** Constructs a Tokenizer
54 Tokenizer(const std::string &input, const std::string &delimiters = defaultDelimiters,
56 ~Tokenizer(){};
Tokenizer.cpp:30 #include "Tokenizer.h"
35 const string Tokenizer::defaultDelimiters = " \n\r\t\v\f";
37 Tokenizer::Tokenizer(const string &input, const string &delimiters, bool mergeDelimiters)
42 vector<string> Tokenizer::split()
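
Given the constructor defaults and the split() signature above, a small usage sketch; the input string and the expected output in the comment are illustrative, and the third parameter is presumed to default to merging consecutive delimiters (the tests below only pass false explicitly).

    #include <iostream>
    #include <string>
    #include <vector>
    #include "Tokenizer.h"   // the parameter-framework utility class above

    int main()
    {
        // Default delimiters are " \n\r\t\v\f" (defaultDelimiters above).
        Tokenizer tokenizer("path  navigation\texample");
        std::vector<std::string> tokens = tokenizer.split();

        for (const std::string &token : tokens) {
            std::cout << token << "\n";   // expected: path, navigation, example
        }
        return 0;
    }
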
/external/swiftshader/src/OpenGL/compiler/preprocessor/
Tokenizer.h:27 class Tokenizer : public Lexer
44 Tokenizer(Diagnostics *diagnostics);
45 ~Tokenizer() override;
56 PP_DISALLOW_COPY_AND_ASSIGN(Tokenizer);
generate_parser.sh:35 run_flex Tokenizer.l Tokenizer.cpp
DirectiveParser.h:28 class Tokenizer;
33 DirectiveParser(Tokenizer *tokenizer,
87 Tokenizer *mTokenizer;
Tokenizer.l:46 #include "Tokenizer.h"
103 %option extra-type="pp::Tokenizer::Context*"
315 Tokenizer::Tokenizer(Diagnostics *diagnostics) : mHandle(nullptr), mMaxTokenSize(1024)
320 Tokenizer::~Tokenizer()
325 bool Tokenizer::init(size_t count, const char * const string[], const int length[])
334 void Tokenizer::setFileNumber(int file)
341 void Tokenizer::setLineNumber(int line)
346 void Tokenizer
[all...]
Preprocessor.cpp:24 #include "Tokenizer.h"
33 Tokenizer tokenizer;
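
A rough sketch of feeding this flex-generated pp::Tokenizer, using the constructor and init() shown above; the lex(pp::Token*) call (inherited from the Lexer base in this ANGLE-derived preprocessor), the Token.h include, and the `type == 0` end-of-input check are assumptions.

    #include "Token.h"       // assumed to declare pp::Token
    #include "Tokenizer.h"

    // `diagnostics` stands in for a concrete pp::Diagnostics sink; the real
    // Preprocessor (Preprocessor.cpp above) owns both objects.
    void dumpTokens(pp::Diagnostics *diagnostics, const char *source, int length)
    {
        pp::Tokenizer tokenizer(diagnostics);

        const char *strings[1] = { source };
        const int lengths[1] = { length };
        if (!tokenizer.init(1, strings, lengths))
            return;

        pp::Token token;
        tokenizer.lex(&token);        // assumption: lex() comes from Lexer
        while (token.type != 0)       // assumption: 0 marks end of input
        {
            // token.text carries the raw spelling of the token
            tokenizer.lex(&token);
        }
    }
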
/external/parameter-framework/upstream/test/tokenizer/
Test.cpp:31 #include "Tokenizer.h"
44 SCENARIO("Tokenizer tests")
49 Tokenizer tokenizer("a bcd ef");
58 Tokenizer tokenizer("");
67 Tokenizer tokenizer(" a \n\t bc ");
77 Tokenizer tokenizer("/a/bcd/ef g/h/", "/");
88 Tokenizer tokenizer("", Tokenizer::defaultDelimiters, false);
97 Tokenizer tokenizer(",", ",", false);
106 Tokenizer tokenize
[all...]
/external/deqp/executor/
xeXMLParser.cpp:57 Tokenizer::Tokenizer (void)
65 Tokenizer::~Tokenizer (void)
69 void Tokenizer::clear (void)
77 void Tokenizer::error (const std::string& what)
82 void Tokenizer::feed (const deUint8* bytes, int numBytes)
98 int Tokenizer::getChar (int offset) const
108 void Tokenizer::advance (void)
338 void Tokenizer
[all...]
xeXMLParser.hpp:83 class Tokenizer
86 Tokenizer (void);
87 ~Tokenizer (void);
103 Tokenizer (const Tokenizer& other);
104 Tokenizer& operator= (const Tokenizer& other);
190 Tokenizer m_tokenizer;
203 inline void Tokenizer::getTokenStr (std::string& dst) const
211 inline void Tokenizer
[all...]
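
The deqp XML Tokenizer above is push-fed: bytes go in through feed() and tokens come out via advance()/getTokenStr(). A very rough sketch using only the members visible in this listing; real callers go through xe::xml::Parser (which owns m_tokenizer), and the single advance()/getTokenStr() step here is an assumption about the calling convention.

    #include <iostream>
    #include <string>
    #include "xeXMLParser.hpp"

    int main()
    {
        xe::xml::Tokenizer tokenizer;

        // feed() pushes raw bytes; the Parser streams a document in chunks.
        const std::string xml = "<Result StatusCode=\"Pass\"/>";
        tokenizer.feed(reinterpret_cast<const deUint8*>(xml.data()),
                       (int)xml.size());

        // Assumption: advance() steps to the next token and getTokenStr()
        // copies its text; the real parser also inspects the token type.
        tokenizer.advance();

        std::string token;
        tokenizer.getTokenStr(token);
        std::cout << token << std::endl;
        return 0;
    }
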
/external/clang/lib/ASTMatchers/Dynamic/
Parser.cpp:294 const TokenInfo NameToken = Tokenizer->consumeNextToken();
296 if (Tokenizer->nextTokenKind() != TokenInfo::TK_OpenParen) {
306 if ((Tokenizer->nextTokenKind() == TokenInfo::TK_Comma ||
307 Tokenizer->nextTokenKind() == TokenInfo::TK_CloseParen ||
308 Tokenizer->nextTokenKind() == TokenInfo::TK_Eof) &&
328 const TokenInfo OpenToken = Tokenizer->consumeNextToken();
349 while (Tokenizer->nextTokenKind() != TokenInfo::TK_Eof) {
350 if (Tokenizer->nextTokenKind() == TokenInfo::TK_CloseParen) {
352 EndToken = Tokenizer->consumeNextToken();
357 const TokenInfo CommaToken = Tokenizer
524 Parser(CodeTokenizer *Tokenizer, Sema *S, const NamedValueMap *NamedValues, Diagnostics *Error)
[all...]
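
The Tokenizer used here is the parser-internal CodeTokenizer; external code normally goes through Parser's static entry points instead. A hedged sketch assuming the public parseMatcherExpression()/Diagnostics API from clang/ASTMatchers/Dynamic; the matcher string is only an example.

    #include "clang/ASTMatchers/Dynamic/Diagnostics.h"
    #include "clang/ASTMatchers/Dynamic/Parser.h"
    #include "llvm/Support/raw_ostream.h"

    using namespace clang::ast_matchers::dynamic;

    int main() {
      // parseMatcherExpression() tokenizes and parses the expression,
      // reporting problems (unbalanced parens, unknown matchers, ...)
      // through the Diagnostics object.
      Diagnostics Err;
      auto Matcher =
          Parser::parseMatcherExpression("functionDecl(hasName(\"main\"))", &Err);

      if (!Matcher)
        llvm::errs() << Err.toStringFull() << "\n";
      return 0;
    }
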
/external/libtextclassifier/
tokenizer.h:33 // Tokenizer splits the input string into a sequence of tokens, according to the
35 class Tokenizer {
37 explicit Tokenizer(
tokenizer.cc:26 Tokenizer::Tokenizer(
41 const TokenizationCodepointRangeT* Tokenizer::FindTokenizationRange(
66 void Tokenizer::GetScriptAndRole(char32 codepoint,
79 std::vector<Token> Tokenizer::Tokenize(const std::string& text) const {
84 std::vector<Token> Tokenizer::Tokenize(const UnicodeText& text_unicode) const {
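
Tokenize() above is const and returns std::vector<Token>, so a caller only needs a configured instance; construction (the flatbuffer codepoint-range options hinted at by FindTokenizationRange) is left out here and assumed to happen elsewhere.

    #include <iostream>
    #include <string>
    #include <vector>
    #include "tokenizer.h"   // libtextclassifier2::Tokenizer / Token

    // `tokenizer` is assumed to be constructed elsewhere with the
    // model-supplied codepoint-range configuration.
    void PrintTokenCount(const libtextclassifier2::Tokenizer &tokenizer,
                         const std::string &text) {
      std::vector<libtextclassifier2::Token> tokens = tokenizer.Tokenize(text);
      std::cout << tokens.size() << " tokens" << std::endl;
    }
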
/external/ImageMagick/MagickCore/
token.h:32 Tokenizer(TokenInfo *,const unsigned int,char *,const size_t,const char *,
/external/libedit/src/
histedit.h:234 typedef struct tokenizer Tokenizer;
239 Tokenizer *tok_init(const char *);
240 void tok_end(Tokenizer *);
241 void tok_reset(Tokenizer *);
242 int tok_line(Tokenizer *, const LineInfo *,
244 int tok_str(Tokenizer *, const char *,
tokenizer.c:86 private void FUN(tok,finish)(TYPE(Tokenizer) *);
93 FUN(tok,finish)(TYPE(Tokenizer) *tok)
109 public TYPE(Tokenizer) *
112 TYPE(Tokenizer) *tok = tok_malloc(sizeof(*tok));
151 FUN(tok,reset)(TYPE(Tokenizer) *tok)
166 FUN(tok,end)(TYPE(Tokenizer) *tok)
195 FUN(tok,line)(TYPE(Tokenizer) *tok, const TYPE(LineInfo) *line,
444 FUN(tok,str)(TYPE(Tokenizer) *tok, const Char *line, int *argc,
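
The libedit tokenizer is a plain C interface (histedit.h above); a short sketch of the tok_init()/tok_str()/tok_end() round trip. The NULL-means-default-separators behaviour and the return-value convention in the comment are the usual libedit semantics, stated here as assumptions.

    #include <cstdio>
    #include <histedit.h>

    int main()
    {
        // NULL selects the default whitespace separators.
        Tokenizer *tok = tok_init(NULL);

        int argc = 0;
        const char **argv = NULL;

        // tok_str() splits a whole string: 0 means a complete line was
        // tokenized, negative values are errors, positive values mean more
        // input is needed (e.g. an unterminated quote).
        if (tok_str(tok, "ls -l \"My Documents\"", &argc, &argv) == 0) {
            for (int i = 0; i < argc; ++i)
                std::printf("argv[%d] = %s\n", i, argv[i]);
        }

        tok_end(tok);
        return 0;
    }
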
/external/tensorflow/tensorflow/contrib/keras/api/keras/preprocessing/text/
__init__.py:23 from tensorflow.python.keras._impl.keras.preprocessing.text import Tokenizer
/external/tensorflow/tensorflow/python/keras/preprocessing/text/
__init__.py:23 from tensorflow.python.keras._impl.keras.preprocessing.text import Tokenizer
/external/tensorflow/tensorflow/python/keras/_impl/keras/preprocessing/
text_test.py:49 tokenizer = keras.preprocessing.text.Tokenizer(num_words=10)
84 tokenizer = keras.preprocessing.text.Tokenizer()
90 tokenizer = keras.preprocessing.text.Tokenizer(oov_token='<unk>')
/external/protobuf/src/google/protobuf/compiler/
parser.cc:141 inline bool Parser::LookingAtType(io::Tokenizer::TokenType token_type) {
146 return LookingAtType(io::Tokenizer::TYPE_END);
177 if (LookingAtType(io::Tokenizer::TYPE_IDENTIFIER)) {
188 if (LookingAtType(io::Tokenizer::TYPE_INTEGER)) {
190 if (!io::Tokenizer::ParseInteger(input_->current().text,
220 if (LookingAtType(io::Tokenizer::TYPE_INTEGER)) {
221 if (!io::Tokenizer::ParseInteger(input_->current().text, max_value,
236 if (LookingAtType(io::Tokenizer::TYPE_FLOAT)) {
237 *output = io::Tokenizer::ParseFloat(input_->current().text);
240 } else if (LookingAtType(io::Tokenizer
[all...]
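
The proto compiler's Parser leans on io::Tokenizer's static helpers seen above; a small standalone example of ParseInteger()/ParseFloat() (kuint64max comes from protobuf's stubs, as in tokenizer_unittest.cc). The specific literals are just examples.

    #include <google/protobuf/io/tokenizer.h>
    #include <google/protobuf/stubs/common.h>   // uint64, kuint64max

    using google::protobuf::kuint64max;
    using google::protobuf::uint64;
    using google::protobuf::io::Tokenizer;

    int main() {
      uint64 value = 0;
      // ParseInteger() handles decimal, hex (0x...) and octal (0...) spellings,
      // matching the TYPE_INTEGER cases in tokenizer_unittest.cc above; it
      // fails if the value exceeds the given maximum.
      if (Tokenizer::ParseInteger("0x89abcdef", kuint64max, &value)) {
        // value == 0x89abcdef
      }

      // ParseFloat() expects text that already lexed as TYPE_FLOAT; otherwise
      // the result is undefined (see the tokenizer.h comments above).
      double d = Tokenizer::ParseFloat("1.25e3");   // 1250.0
      (void)d;
      return 0;
    }
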
/external/parameter-framework/upstream/parameter/
MappingData.cpp:31 #include "Tokenizer.h"
37 Tokenizer mappingTok(rawMapping, ",");
PathNavigator.cpp:31 #include "Tokenizer.h"
40 Tokenizer tokenizer(strPath, "/");
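
Mirroring PathNavigator.cpp above, which splits a parameter path on "/", a short hedged example with a non-default delimiter; the sample path and the expected segments are illustrative.

    #include <iostream>
    #include <string>
    #include "Tokenizer.h"

    int main()
    {
        // PathNavigator-style split: "/" instead of the default whitespace set.
        Tokenizer pathTok("/Subsystem/Component/Parameter", "/");

        for (const std::string &segment : pathTok.split()) {
            std::cout << segment << "\n";   // expected: Subsystem, Component, Parameter
        }
        return 0;
    }
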
