/*
 [The "BSD license"]
 Copyright (c) 2005-2009 Jim Idle, Temporal Wave LLC
 http://www.temporal-wave.com
 http://www.linkedin.com/in/jimidle

 All rights reserved.

 Redistribution and use in source and binary forms, with or without
 modification, are permitted provided that the following conditions
 are met:
 1. Redistributions of source code must retain the above copyright
    notice, this list of conditions and the following disclaimer.
 2. Redistributions in binary form must reproduce the above copyright
    notice, this list of conditions and the following disclaimer in the
    documentation and/or other materials provided with the distribution.
 3. The name of the author may not be used to endorse or promote products
    derived from this software without specific prior written permission.

 THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

/*
 * This code generating template and the associated C runtime was produced by:
 * Jim Idle jimi|hereisanat|idle|dotgoeshere|ws.
 * If it causes the destruction of the Universe, it will be pretty cool so long as
 * I am in a different one at the time.
37 */ 38cTypeInitMap ::= [ 39 "int" : "0", // Integers start out being 0 40 "long" : "0", // Longs start out being 0 41 "float" : "0.0", // Floats start out being 0 42 "double" : "0.0", // Doubles start out being 0 43 "ANTLR3_BOOLEAN" : "ANTLR3_FALSE", // Booleans start out being Antlr C for false 44 "byte" : "0", // Bytes start out being 0 45 "short" : "0", // Shorts start out being 0 46 "char" : "0" // Chars start out being 0 47] 48 49leadIn(type) ::= 50<< 51/** \file 52 * This <type> file was generated by $ANTLR version <ANTLRVersion> 53 * 54 * - From the grammar source file : <fileName> 55 * - On : <generatedTimestamp> 56<if(LEXER)> 57 * - for the lexer : <name>Lexer 58<endif> 59<if(PARSER)> 60 * - for the parser : <name>Parser 61<endif> 62<if(TREE_PARSER)> 63 * - for the tree parser : <name>TreeParser 64<endif> 65 * 66 * Editing it, at least manually, is not wise. 67 * 68 * C language generator and runtime by Jim Idle, jimi|hereisanat|idle|dotgoeshere|ws. 69 * 70 * 71>> 72 73/** The overall file structure of a recognizer; stores methods for rules 74 * and cyclic DFAs plus support code. 75 */ 76outputFile( LEXER, 77 PARSER, 78 TREE_PARSER, 79 actionScope, 80 actions, 81 docComment, 82 recognizer, 83 name, 84 tokens, 85 tokenNames, 86 rules, 87 cyclicDFAs, 88 bitsets, 89 buildTemplate, 90 buildAST, 91 rewriteMode, 92 profile, 93 backtracking, 94 synpreds, 95 memoize, 96 numRules, 97 fileName, 98 ANTLRVersion, 99 generatedTimestamp, 100 trace, 101 scopes, 102 superClass, 103 literals 104 ) ::= 105<< 106<leadIn("C source")> 107*/ 108// [The "BSD license"] 109// Copyright (c) 2005-2009 Jim Idle, Temporal Wave LLC 110// http://www.temporal-wave.com 111// http://www.linkedin.com/in/jimidle 112// 113// All rights reserved. 114// 115// Redistribution and use in source and binary forms, with or without 116// modification, are permitted provided that the following conditions 117// are met: 118// 1. 
Redistributions of source code must retain the above copyright 119// notice, this list of conditions and the following disclaimer. 120// 2. Redistributions in binary form must reproduce the above copyright 121// notice, this list of conditions and the following disclaimer in the 122// documentation and/or other materials provided with the distribution. 123// 3. The name of the author may not be used to endorse or promote products 124// derived from this software without specific prior written permission. 125// 126// THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR 127// IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES 128// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 129// IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, 130// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT 131// NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 132// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 133// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 134// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF 135// THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 136 137<if(actions.(actionScope).header)> 138 139/* ============================================================================= 140 * This is what the grammar programmer asked us to put at the top of every file. 141 */ 142<actions.(actionScope).header> 143/* End of Header action. 144 * ============================================================================= 145 */ 146<endif> 147 148/* ----------------------------------------- 149 * Include the ANTLR3 generated header file. 
150 */ 151#include "<name>.h" 152<actions.(actionScope).postinclude> 153/* ----------------------------------------- */ 154 155<docComment> 156 157<if(literals)> 158/** String literals used by <name> that we must do things like MATCHS() with. 159 * C will normally just lay down 8 bit characters, and you can use L"xxx" to 160 * get wchar_t, but wchar_t is 16 bits on Windows, which is not UTF32 and so 161 * we perform this little trick of defining the literals as arrays of UINT32 162 * and passing in the address of these. 163 */ 164<literals:{it | static ANTLR3_UCHAR lit_<i>[] = <it>;}; separator="\n"> 165 166<endif> 167 168 169 170 171/* MACROS that hide the C interface implementations from the 172 * generated code, which makes it a little more understandable to the human eye. 173 * I am very much against using C pre-processor macros for function calls and bits 174 * of code as you cannot see what is happening when single stepping in debuggers 175 * and so on. The exception (in my book at least) is for generated code, where you are 176 * not maintaining it, but may wish to read and understand it. If you single step it, you know that input() 177 * hides some indirect calls, but is always referring to the input stream. This is 178 * probably more readable than ctx->input->istream->input(snarfle0->blarg) and allows me to rejig 179 * the runtime interfaces without changing the generated code too often, without 180 * confusing the reader of the generated output, who may not wish to know the gory 181 * details of the interface inheritance. 
182 */ 183 184#define CTX ctx 185 186/* Aids in accessing scopes for grammar programmers 187 */ 188#undef SCOPE_TYPE 189#undef SCOPE_STACK 190#undef SCOPE_TOP 191#define SCOPE_TYPE(scope) p<name>_##scope##_SCOPE 192#define SCOPE_STACK(scope) p<name>_##scope##Stack 193#define SCOPE_TOP(scope) ctx->p<name>_##scope##Top 194#define SCOPE_SIZE(scope) ctx->p<name>_##scope##Stack_limit 195#define SCOPE_INSTANCE(scope, i) (ctx->SCOPE_STACK(scope)->get(ctx->SCOPE_STACK(scope),i)) 196 197<if(LEXER)> 198 199/* Macros for accessing things in a lexer 200 */ 201#undef LEXER 202#undef RECOGNIZER 203#undef RULEMEMO 204#undef GETCHARINDEX 205#undef GETLINE 206#undef GETCHARPOSITIONINLINE 207#undef EMIT 208#undef EMITNEW 209#undef MATCHC 210#undef MATCHS 211#undef MATCHRANGE 212#undef LTOKEN 213#undef HASFAILED 214#undef FAILEDFLAG 215#undef INPUT 216#undef STRSTREAM 217#undef LA 218#undef HASEXCEPTION 219#undef EXCEPTION 220#undef CONSTRUCTEX 221#undef CONSUME 222#undef LRECOVER 223#undef MARK 224#undef REWIND 225#undef REWINDLAST 226#undef BACKTRACKING 227#undef MATCHANY 228#undef MEMOIZE 229#undef HAVEPARSEDRULE 230#undef GETTEXT 231#undef INDEX 232#undef SEEK 233#undef PUSHSTREAM 234#undef POPSTREAM 235#undef SETTEXT 236#undef SETTEXT8 237 238#define LEXER ctx->pLexer 239#define RECOGNIZER LEXER->rec 240#define LEXSTATE RECOGNIZER->state 241#define TOKSOURCE LEXSTATE->tokSource 242#define GETCHARINDEX() LEXER->getCharIndex(LEXER) 243#define GETLINE() LEXER->getLine(LEXER) 244#define GETTEXT() LEXER->getText(LEXER) 245#define GETCHARPOSITIONINLINE() LEXER->getCharPositionInLine(LEXER) 246#define EMIT() LEXSTATE->type = _type; LEXER->emit(LEXER) 247#define EMITNEW(t) LEXER->emitNew(LEXER, t) 248#define MATCHC(c) LEXER->matchc(LEXER, c) 249#define MATCHS(s) LEXER->matchs(LEXER, s) 250#define MATCHRANGE(c1,c2) LEXER->matchRange(LEXER, c1, c2) 251#define MATCHANY() LEXER->matchAny(LEXER) 252#define LTOKEN LEXSTATE->token 253#define HASFAILED() (LEXSTATE->failed == ANTLR3_TRUE) 
#define BACKTRACKING LEXSTATE->backtracking
#define FAILEDFLAG LEXSTATE->failed
#define INPUT LEXER->input
#define STRSTREAM INPUT
#define ISTREAM INPUT->istream
#define INDEX() ISTREAM->index(ISTREAM)
#define SEEK(n) ISTREAM->seek(ISTREAM, n)
#define EOF_TOKEN &(LEXSTATE->tokSource->eofToken)
#define HASEXCEPTION() (LEXSTATE->error == ANTLR3_TRUE)
#define EXCEPTION LEXSTATE->exception
#define CONSTRUCTEX() RECOGNIZER->exConstruct(RECOGNIZER)
#define LRECOVER() LEXER->recover(LEXER)
#define MARK() ISTREAM->mark(ISTREAM)
#define REWIND(m) ISTREAM->rewind(ISTREAM, m)
#define REWINDLAST() ISTREAM->rewindLast(ISTREAM)
#define MEMOIZE(ri,si) RECOGNIZER->memoize(RECOGNIZER, ri, si)
#define HAVEPARSEDRULE(r) RECOGNIZER->alreadyParsedRule(RECOGNIZER, r)
#define PUSHSTREAM(str) LEXER->pushCharStream(LEXER, str)
#define POPSTREAM() LEXER->popCharStream(LEXER)
#define SETTEXT(str) LEXSTATE->text = str
#define SKIP() LEXSTATE->token = &(TOKSOURCE->skipToken)
#define USER1 LEXSTATE->user1
#define USER2 LEXSTATE->user2
#define USER3 LEXSTATE->user3
#define CUSTOM LEXSTATE->custom
#define RULEMEMO LEXSTATE->ruleMemo
#define DBG RECOGNIZER->debugger

/* If we have been told we can rely on the standard 8 bit or UTF16 input
 * stream, then we can define our macros to use the direct pointers
 * in the input object, which is much faster than indirect calls. This
 * is really only significant to lexers with a lot of fragment rules (which
 * do not place LA(1) in a temporary at the moment) and even then
 * only if there is a lot of input (order of say 1M or so).
288 */ 289#if defined(ANTLR3_INLINE_INPUT_8BIT) || defined(ANTLR3_INLINE_INPUT_UTF16) 290 291# ifdef ANTLR3_INLINE_INPUT_8BIT 292 293/* 8 bit character set */ 294 295# define NEXTCHAR ((pANTLR3_UINT8)(INPUT->nextChar)) 296# define DATAP ((pANTLR3_UINT8)(INPUT->data)) 297 298# else 299 300# define NEXTCHAR ((pANTLR3_UINT16)(INPUT->nextChar)) 301# define DATAP ((pANTLR3_UINT16)(INPUT->data)) 302 303# endif 304 305# define LA(n) ((NEXTCHAR + n) > (DATAP + INPUT->sizeBuf) ? ANTLR3_CHARSTREAM_EOF : (ANTLR3_UCHAR)(*(NEXTCHAR + n - 1))) 306# define CONSUME() \\ 307{ \\ 308 if (NEXTCHAR \< (DATAP + INPUT->sizeBuf)) \\ 309 { \\ 310 INPUT->charPositionInLine++; \\ 311 if ((ANTLR3_UCHAR)(*NEXTCHAR) == INPUT->newlineChar) \\ 312 { \\ 313 INPUT->line++; \\ 314 INPUT->charPositionInLine = 0; \\ 315 INPUT->currentLine = (void *)(NEXTCHAR + 1); \\ 316 } \\ 317 INPUT->nextChar = (void *)(NEXTCHAR + 1); \\ 318 } \\ 319} 320 321#else 322 323// Pick up the input character by calling the input stream implementation. 
//
#define CONSUME() INPUT->istream->consume(INPUT->istream)
#define LA(n) INPUT->istream->_LA(INPUT->istream, n)

#endif
<endif>

<if(PARSER)>
/* Macros for accessing things in the parser
 */

#undef PARSER
#undef RECOGNIZER
#undef HAVEPARSEDRULE
#undef MEMOIZE
#undef INPUT
#undef STRSTREAM
#undef HASEXCEPTION
#undef EXCEPTION
#undef MATCHT
#undef MATCHANYT
#undef FOLLOWSTACK
#undef FOLLOWPUSH
#undef FOLLOWPOP
#undef PRECOVER
#undef PREPORTERROR
#undef LA
#undef LT
#undef CONSTRUCTEX
#undef CONSUME
#undef MARK
#undef REWIND
#undef REWINDLAST
#undef PERRORRECOVERY
#undef HASFAILED
#undef FAILEDFLAG
#undef RECOVERFROMMISMATCHEDSET
#undef RECOVERFROMMISMATCHEDELEMENT
#undef INDEX
#undef ADAPTOR
#undef SEEK
#undef RULEMEMO
#undef DBG

#define PARSER ctx->pParser
#define RECOGNIZER PARSER->rec
#define PSRSTATE RECOGNIZER->state
#define HAVEPARSEDRULE(r) RECOGNIZER->alreadyParsedRule(RECOGNIZER, r)
#define MEMOIZE(ri,si) RECOGNIZER->memoize(RECOGNIZER, ri, si)
#define INPUT PARSER->tstream
#define STRSTREAM INPUT
#define ISTREAM INPUT->istream
#define INDEX() ISTREAM->index(INPUT->istream)
#define HASEXCEPTION() (PSRSTATE->error == ANTLR3_TRUE)
#define EXCEPTION PSRSTATE->exception
#define MATCHT(t, fs) RECOGNIZER->match(RECOGNIZER, t, fs)
#define MATCHANYT() RECOGNIZER->matchAny(RECOGNIZER)
#define FOLLOWSTACK PSRSTATE->following
#ifdef SKIP_FOLLOW_SETS
#define FOLLOWPUSH(x)
#define FOLLOWPOP()
#else
#define FOLLOWPUSH(x) FOLLOWSTACK->push(FOLLOWSTACK, ((void *)(&(x))), NULL)
#define FOLLOWPOP() FOLLOWSTACK->pop(FOLLOWSTACK)
#endif
#define PRECOVER() RECOGNIZER->recover(RECOGNIZER)
#define PREPORTERROR() RECOGNIZER->reportError(RECOGNIZER)
#define LA(n) INPUT->istream->_LA(ISTREAM, n)
#define LT(n) INPUT->_LT(INPUT, n)
#define CONSTRUCTEX() RECOGNIZER->exConstruct(RECOGNIZER)
#define CONSUME() ISTREAM->consume(ISTREAM)
#define MARK() ISTREAM->mark(ISTREAM)
#define REWIND(m) ISTREAM->rewind(ISTREAM, m)
#define REWINDLAST() ISTREAM->rewindLast(ISTREAM)
#define SEEK(n) ISTREAM->seek(ISTREAM, n)
#define PERRORRECOVERY PSRSTATE->errorRecovery
#define FAILEDFLAG PSRSTATE->failed
#define HASFAILED() (FAILEDFLAG == ANTLR3_TRUE)
#define BACKTRACKING PSRSTATE->backtracking
#define RECOVERFROMMISMATCHEDSET(s) RECOGNIZER->recoverFromMismatchedSet(RECOGNIZER, s)
// Fixed: the expansion previously referenced 's' instead of the macro parameter 'e',
// which would not compile if the macro were ever expanded.
#define RECOVERFROMMISMATCHEDELEMENT(e) RECOGNIZER->recoverFromMismatchedElement(RECOGNIZER, e)
#define ADAPTOR ctx->adaptor
#define RULEMEMO PSRSTATE->ruleMemo
#define DBG RECOGNIZER->debugger

<endif>

<if(TREE_PARSER)>
/* Macros for accessing things in the tree parser
 */

#undef PARSER
#undef RECOGNIZER
#undef HAVEPARSEDRULE
#undef INPUT
#undef STRSTREAM
#undef HASEXCEPTION
#undef EXCEPTION
#undef MATCHT
#undef MATCHANYT
#undef FOLLOWSTACK
#undef FOLLOWPUSH
#undef FOLLOWPOP
#undef PRECOVER
#undef PREPORTERROR
#undef LA
#undef LT
#undef CONSTRUCTEX
#undef CONSUME
#undef MARK
#undef REWIND
#undef REWINDLAST
#undef PERRORRECOVERY
#undef HASFAILED
#undef FAILEDFLAG
#undef RECOVERFROMMISMATCHEDSET
#undef RECOVERFROMMISMATCHEDELEMENT
#undef BACKTRACKING
#undef ADAPTOR
#undef RULEMEMO
#undef SEEK
#undef INDEX
#undef DBG

#define PARSER ctx->pTreeParser
#define RECOGNIZER PARSER->rec
#define PSRSTATE RECOGNIZER->state
#define HAVEPARSEDRULE(r) RECOGNIZER->alreadyParsedRule(RECOGNIZER, r)
#define INPUT PARSER->ctnstream
#define ISTREAM INPUT->tnstream->istream
#define STRSTREAM INPUT->tnstream
#define HASEXCEPTION() (PSRSTATE->error == ANTLR3_TRUE)
#define EXCEPTION PSRSTATE->exception
#define MATCHT(t, fs) RECOGNIZER->match(RECOGNIZER, t, fs)
#define MATCHANYT() RECOGNIZER->matchAny(RECOGNIZER)
#define FOLLOWSTACK PSRSTATE->following
#define FOLLOWPUSH(x) FOLLOWSTACK->push(FOLLOWSTACK, ((void *)(&(x))), NULL)
#define FOLLOWPOP() FOLLOWSTACK->pop(FOLLOWSTACK)
#define PRECOVER() RECOGNIZER->recover(RECOGNIZER)
#define PREPORTERROR() RECOGNIZER->reportError(RECOGNIZER)
#define LA(n) ISTREAM->_LA(ISTREAM, n)
#define LT(n) INPUT->tnstream->_LT(INPUT->tnstream, n)
#define CONSTRUCTEX() RECOGNIZER->exConstruct(RECOGNIZER)
#define CONSUME() ISTREAM->consume(ISTREAM)
#define MARK() ISTREAM->mark(ISTREAM)
#define REWIND(m) ISTREAM->rewind(ISTREAM, m)
#define REWINDLAST() ISTREAM->rewindLast(ISTREAM)
#define PERRORRECOVERY PSRSTATE->errorRecovery
#define FAILEDFLAG PSRSTATE->failed
#define HASFAILED() (FAILEDFLAG == ANTLR3_TRUE)
#define BACKTRACKING PSRSTATE->backtracking
#define RECOVERFROMMISMATCHEDSET(s) RECOGNIZER->recoverFromMismatchedSet(RECOGNIZER, s)
// Fixed: the expansion previously referenced 's' instead of the macro parameter 'e',
// which would not compile if the macro were ever expanded.
#define RECOVERFROMMISMATCHEDELEMENT(e) RECOGNIZER->recoverFromMismatchedElement(RECOGNIZER, e)
#define ADAPTOR INPUT->adaptor
#define RULEMEMO PSRSTATE->ruleMemo
#define SEEK(n) ISTREAM->seek(ISTREAM, n)
#define INDEX() ISTREAM->index(ISTREAM)
#define DBG RECOGNIZER->debugger


<endif>

#define TOKTEXT(tok, txt) tok, (pANTLR3_UINT8)txt

/* The 4 tokens defined below may well clash with your own #defines or token types. If so
 * then for the present you must use different names for your defines as these are hard coded
 * in the code generator. It would be better not to use such names internally, and maybe
 * we can change this in a forthcoming release. I deliberately do not #undef these
 * here as this will at least give you a redefined error somewhere if they clash.
493 */ 494#define UP ANTLR3_TOKEN_UP 495#define DOWN ANTLR3_TOKEN_DOWN 496#define EOR ANTLR3_TOKEN_EOR 497#define INVALID ANTLR3_TOKEN_INVALID 498 499 500/* ============================================================================= 501 * Functions to create and destroy scopes. First come the rule scopes, followed 502 * by the global declared scopes. 503 */ 504 505<rules: {r |<if(r.ruleDescriptor.ruleScope)> 506<ruleAttributeScopeFuncDecl(scope=r.ruleDescriptor.ruleScope)> 507<ruleAttributeScopeFuncs(scope=r.ruleDescriptor.ruleScope)> 508<endif>}> 509 510<recognizer.scopes:{it | <if(it.isDynamicGlobalScope)> 511<globalAttributeScopeFuncDecl(it)> 512<globalAttributeScopeFuncs(it)> 513<endif>}> 514 515/* ============================================================================= */ 516 517/* ============================================================================= 518 * Start of recognizer 519 */ 520 521<recognizer> 522 523/* End of code 524 * ============================================================================= 525 */ 526 527>> 528headerFileExtension() ::= ".h" 529 530headerFile( LEXER, 531 PARSER, 532 TREE_PARSER, 533 actionScope, 534 actions, 535 docComment, 536 recognizer, 537 name, 538 tokens, 539 tokenNames, 540 rules, 541 cyclicDFAs, 542 bitsets, 543 buildTemplate, 544 buildAST, 545 rewriteMode, 546 profile, 547 backtracking, 548 synpreds, 549 memoize, 550 numRules, 551 fileName, 552 ANTLRVersion, 553 generatedTimestamp, 554 trace, 555 scopes, 556 superClass, 557 literals 558 ) ::= 559<< 560<leadIn("C header")> 561<if(PARSER)> 562 * The parser <mainName()> 563<endif> 564<if(LEXER)> 565 * The lexer <mainName()> 566<endif> 567<if(TREE_PARSER)> 568 * The tree parser <mainName()> 569<endif> 570has the callable functions (rules) shown below, 571 * which will invoke the code for the associated rule in the source grammar 572 * assuming that the input stream is pointing to a token/text stream that could begin 573 * this rule. 
574 * 575 * For instance if you call the first (topmost) rule in a parser grammar, you will 576 * get the results of a full parse, but calling a rule half way through the grammar will 577 * allow you to pass part of a full token stream to the parser, such as for syntax checking 578 * in editors and so on. 579 * 580 * The parser entry points are called indirectly (by function pointer to function) via 581 * a parser context typedef p<name>, which is returned from a call to <name>New(). 582 * 583<if(LEXER)> 584 * As this is a generated lexer, it is unlikely you will call it 'manually'. However 585 * the methods are provided anyway. 586 * 587<endif> 588 * The methods in p<name> are as follows: 589 * 590 * <rules:{r | <if(!r.ruleDescriptor.isSynPred)> - <headerReturnType(ruleDescriptor=r.ruleDescriptor,...)> p<name>-><r.ruleDescriptor.name>(p<name>)<endif>}; separator="\n * "> 591 * 592 * The return type for any particular rule is of course determined by the source 593 * grammar file. 594 */ 595// [The "BSD license"] 596// Copyright (c) 2005-2009 Jim Idle, Temporal Wave LLC 597// http://www.temporal-wave.com 598// http://www.linkedin.com/in/jimidle 599// 600// All rights reserved. 601// 602// Redistribution and use in source and binary forms, with or without 603// modification, are permitted provided that the following conditions 604// are met: 605// 1. Redistributions of source code must retain the above copyright 606// notice, this list of conditions and the following disclaimer. 607// 2. Redistributions in binary form must reproduce the above copyright 608// notice, this list of conditions and the following disclaimer in the 609// documentation and/or other materials provided with the distribution. 610// 3. The name of the author may not be used to endorse or promote products 611// derived from this software without specific prior written permission. 
//
// THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
// IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
// IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
// NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
// THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef _<name>_H
#define _<name>_H
<actions.(actionScope).preincludes>
/* =============================================================================
 * Standard antlr3 C runtime definitions
 */
#include \<antlr3.h>

/* End of standard antlr 3 runtime definitions
 * =============================================================================
 */
<actions.(actionScope).includes>

#ifdef __cplusplus
extern "C" {
#endif

// Forward declare the context typedef so that we can use it before it is
// properly defined. Delegators and delegates (from import statements) are
// interdependent and their context structures contain pointers to each other
// C only allows such things to be declared if you pre-declare the typedef.
//
typedef struct <name>_Ctx_struct <name>, * p<name>;

<if(recognizer.grammar.delegates)>
// Include delegate definition header files
//
<recognizer.grammar.delegates: {g|#include \<<g.recognizerName>.h>}; separator="\n">

<endif>


<actions.(actionScope).header>

#ifdef ANTLR3_WINDOWS
// Disable: Unreferenced parameter, - Rules with parameters that are not used
// constant conditional, - ANTLR realizes that a prediction is always true (synpred usually)
// initialized but unused variable - tree rewrite variables declared but not needed
// Unreferenced local variable - lexer rule declares but does not always use _type
// potentially uninitialized variable used - retval always returned from a rule
// unreferenced local function has been removed - usually getTokenNames or freeScope, they can go without warnings
//
// These are only really displayed at warning level /W4 but that is the code ideal I am aiming at
// and the codegen must generate some of these warnings by necessity, apart from 4100, which is
// usually generated when a parser rule is given a parameter that it does not use. Mostly though
// this is a matter of orthogonality hence I disable that one.
//
#pragma warning( disable : 4100 )
#pragma warning( disable : 4101 )
#pragma warning( disable : 4127 )
#pragma warning( disable : 4189 )
#pragma warning( disable : 4505 )
#pragma warning( disable : 4701 )
#endif
<if(backtracking)>

/* ========================
 * BACKTRACKING IS ENABLED
 * ========================
 */
<endif>

<rules:{r |<headerReturnScope(ruleDescriptor=r.ruleDescriptor,...)>}>

<scopes:{it | <if(it.isDynamicGlobalScope)><globalAttributeScopeDecl(it)><endif>}>
<rules:{r |<ruleAttributeScopeDecl(scope=r.ruleDescriptor.ruleScope)>}>
<if(recognizer.grammar.delegators)>
// Include delegator definition header files
//
<recognizer.grammar.delegators: {g|#include \<<g.recognizerName>.h>}; separator="\n">

<endif>

/** Context tracking structure for <mainName()>
 */
struct <name>_Ctx_struct
{
    /** Built in ANTLR3 context tracker contains all the generic elements
     * required for context tracking.
703 */ 704<if(PARSER)> 705 pANTLR3_PARSER pParser; 706<endif> 707<if(LEXER)> 708 pANTLR3_LEXER pLexer; 709<endif> 710<if(TREE_PARSER)> 711 pANTLR3_TREE_PARSER pTreeParser; 712<endif> 713 714<if(recognizer.grammar.delegates)> 715 <recognizer.grammar.delegates: 716 {g|p<g.recognizerName> <g:delegateName()>;}; separator="\n"> 717<endif> 718<if(recognizer.grammar.delegators)> 719 <recognizer.grammar.delegators: 720 {g|p<g.recognizerName> <g:delegateName()>;}; separator="\n"> 721<endif> 722<scopes:{it | <if(it.isDynamicGlobalScope)> 723 <globalAttributeScopeDef(it)> 724<endif>}; separator="\n\n"> 725<rules: {r |<if(r.ruleDescriptor.ruleScope)> 726 <ruleAttributeScopeDef(scope=r.ruleDescriptor.ruleScope)> 727<endif>}> 728 729<if(LEXER)> 730 <rules:{r | <if(!r.ruleDescriptor.isSynPred)><headerReturnType(ruleDescriptor=r.ruleDescriptor)> (*m<r.ruleDescriptor.name>) (struct <name>_Ctx_struct * ctx<if(r.ruleDescriptor.parameterScope)>, <endif><r.ruleDescriptor.parameterScope:parameterScope()>);<endif>}; separator="\n"> 731<endif> 732<if(!LEXER)> 733 <rules:{r | <headerReturnType(ruleDescriptor=r.ruleDescriptor)> (*<r.ruleDescriptor.name>) (struct <name>_Ctx_struct * ctx<if(r.ruleDescriptor.parameterScope)>, <endif><r.ruleDescriptor.parameterScope:parameterScope()>);}; separator="\n"> 734<! generate rule/method definitions for imported rules so they 735 appear to be defined in this recognizer. 
!> 736 // Delegated rules 737<recognizer.grammar.delegatedRules:{ruleDescriptor| 738 <headerReturnType(ruleDescriptor)> (*<ruleDescriptor.name>)(struct <name>_Ctx_struct * ctx<if(ruleDescriptor.parameterScope)>, <endif><ruleDescriptor.parameterScope:parameterScope()>);}; separator="\n"> 739<endif> 740 741 const char * (*getGrammarFileName)(); 742 void (*reset) (struct <name>_Ctx_struct * ctx); 743 void (*free) (struct <name>_Ctx_struct * ctx); 744 <@members> 745 <@end> 746 <actions.(actionScope).context> 747}; 748 749// Function protoypes for the constructor functions that external translation units 750// such as delegators and delegates may wish to call. 751// 752ANTLR3_API p<name> <name>New (<inputType()> instream<recognizer.grammar.delegators:{g|, p<g.recognizerName> <g:delegateName()>}>); 753ANTLR3_API p<name> <name>NewSSD (<inputType()> instream, pANTLR3_RECOGNIZER_SHARED_STATE state<recognizer.grammar.delegators:{g|, p<g.recognizerName> <g:delegateName()>}>); 754<if(!recognizer.grammar.grammarIsRoot)> 755extern pANTLR3_UINT8 <recognizer.grammar.composite.rootGrammar.recognizerName>TokenNames[]; 756<endif> 757 758 759/** Symbolic definitions of all the tokens that the <grammarType()> will work with. 760 * \{ 761 * 762 * Antlr will define EOF, but we can't use that as it it is too common in 763 * in C header files and that would be confusing. There is no way to filter this out at the moment 764 * so we just undef it here for now. That isn't the value we get back from C recognizers 765 * anyway. We are looking for ANTLR3_TOKEN_EOF. 
766 */ 767#ifdef EOF 768#undef EOF 769#endif 770#ifdef Tokens 771#undef Tokens 772#endif 773<tokens:{it | #define <it.name> <it.type>}; separator="\n"> 774#ifdef EOF 775#undef EOF 776#define EOF ANTLR3_TOKEN_EOF 777#endif 778 779#ifndef TOKENSOURCE 780#define TOKENSOURCE(lxr) lxr->pLexer->rec->state->tokSource 781#endif 782 783/* End of token definitions for <name> 784 * ============================================================================= 785 */ 786/** \} */ 787 788#ifdef __cplusplus 789} 790#endif 791 792#endif 793 794/* END - Note:Keep extra line feed to satisfy UNIX systems */ 795 796>> 797 798inputType() ::=<< 799<if(LEXER)> 800pANTLR3_INPUT_STREAM 801<endif> 802<if(PARSER)> 803pANTLR3_COMMON_TOKEN_STREAM 804<endif> 805<if(TREE_PARSER)> 806pANTLR3_COMMON_TREE_NODE_STREAM 807<endif> 808>> 809 810grammarType() ::= << 811<if(PARSER)> 812parser 813<endif> 814<if(LEXER)> 815lexer 816<endif> 817<if(TREE_PARSER)> 818tree parser 819<endif> 820>> 821 822mainName() ::= << 823<if(PARSER)> 824<name> 825<endif> 826<if(LEXER)> 827<name> 828<endif> 829<if(TREE_PARSER)> 830<name> 831<endif> 832>> 833 834headerReturnScope(ruleDescriptor) ::= "<returnScope(...)>" 835 836headerReturnType(ruleDescriptor) ::= << 837<if(LEXER)> 838<if(!ruleDescriptor.isSynPred)> 839 void 840<else> 841 <returnType()> 842<endif> 843<else> 844 <returnType()> 845<endif> 846>> 847 848// Produce the lexer output 849// 850lexer( grammar, 851 name, 852 tokens, 853 scopes, 854 rules, 855 numRules, 856 filterMode, 857 superClass, 858 labelType="pANTLR3_COMMON_TOKEN") ::= << 859 860<if(filterMode)> 861/* Forward declare implementation function for ANTLR3_TOKEN_SOURCE interface when 862 * this is a filter mode lexer. 863 */ 864static pANTLR3_COMMON_TOKEN <name>NextToken (pANTLR3_TOKEN_SOURCE toksource); 865 866/* Override the normal MEMOIZE and HAVEALREADYPARSED macros as this is a filtering 867 * lexer. 
In filter mode, the memoizing and backtracking are gated at BACKTRACKING > 1 rather 868 * than just BACKTRACKING. In some cases this might generate code akin to: 869 * if (BACKTRACKING) if (BACKTRACKING > 1) memoize. 870 * However, I assume that the C compilers/optimizers are smart enough to work this one out 871 * these days - Jim 872 */ 873#undef MEMOIZE 874#define MEMOIZE(ri,si) if (BACKTRACKING>1) { RECOGNIZER->memoize(RECOGNIZER, ri, si) } 875#undef HAVEPARSEDRULE 876#define HAVEPARSEDRULE(r) if (BACKTRACKING>1) { RECOGNIZER->alreadyParsedRule(RECOGNIZER, r) } 877<endif> 878 879/* Forward declare the locally static matching functions we have generated and any predicate functions. 880 */ 881<rules:{r | static ANTLR3_INLINE <headerReturnType(ruleDescriptor=r.ruleDescriptor)> <if(!r.ruleDescriptor.isSynPred)>m<endif><r.ruleDescriptor.name> (p<name> ctx<if(r.ruleDescriptor.parameterScope)>, <endif><r.ruleDescriptor.parameterScope:parameterScope()>);}; separator="\n"> 882static void <name>Free(p<name> ctx); 883 884/* ========================================================================= 885 * Lexer matching rules end. 886 * ========================================================================= 887 */ 888 889<scopes:{it |<if(it.isDynamicGlobalScope)><globalAttributeScope(it)><endif>}> 890 891<actions.lexer.members> 892 893static void 894<name>Free (p<name> ctx) 895{ 896<if(memoize)> 897 if (RULEMEMO != NULL) 898 { 899 RULEMEMO->free(RULEMEMO); 900 RULEMEMO = NULL; 901 } 902<endif> 903<if(grammar.directDelegates)> 904 // Free the lexers that we delegated to 905 // functions to. NULL the state so we only free it once. 
906 // 907 <grammar.directDelegates: 908 {g|ctx-><g:delegateName()>->pLexer->rec->state = NULL; 909 ctx-><g:delegateName()>->free(ctx-><g:delegateName()>);}; separator="\n"> 910<endif> 911 LEXER->free(LEXER); 912 913 ANTLR3_FREE(ctx); 914} 915 916static void 917<name>Reset (p<name> ctx) 918{ 919 RECOGNIZER->reset(RECOGNIZER); 920} 921 922/** \brief Name of the grammar file that generated this code 923 */ 924static const char fileName[] = "<fileName>"; 925 926/** \brief Return the name of the grammar file that generated this code. 927 */ 928static const char * getGrammarFileName() 929{ 930 return fileName; 931} 932 933<if(filterMode)> 934 <filteringNextToken()> 935<endif> 936 937/** \brief Create a new lexer called <name> 938 * 939 * \param[in] instream Pointer to an initialized input stream 940 * \return 941 * - Success p<name> initialized for the lex start 942 * - Fail NULL 943 */ 944ANTLR3_API p<name> <name>New 945(<inputType()> instream<grammar.delegators:{g|, p<g.recognizerName> <g:delegateName()>}>) 946{ 947 // See if we can create a new lexer with the standard constructor 948 // 949 return <name>NewSSD(instream, NULL<grammar.delegators:{g|, <g:delegateName()>}>); 950} 951 952/** \brief Create a new lexer called <name> 953 * 954 * \param[in] instream Pointer to an initialized input stream 955 * \param[state] state Previously created shared recognizer stat 956 * \return 957 * - Success p<name> initialized for the lex start 958 * - Fail NULL 959 */ 960ANTLR3_API p<name> <name>NewSSD 961(pANTLR3_INPUT_STREAM instream, pANTLR3_RECOGNIZER_SHARED_STATE state<grammar.delegators:{g|, p<g.recognizerName> <g:delegateName()>}>) 962{ 963 p<name> ctx; // Context structure we will build and return 964 965 ctx = (p<name>) ANTLR3_CALLOC(1, sizeof(<name>)); 966 967 if (ctx == NULL) 968 { 969 // Failed to allocate memory for lexer context 970 return NULL; 971 } 972 973 /* ------------------------------------------------------------------- 974 * Memory for basic structure is 
allocated, now to fill in 975 * in base ANTLR3 structures. We initialize the function pointers 976 * for the standard ANTLR3 lexer function set, but upon return 977 * from here, the programmer may set the pointers to provide custom 978 * implementations of each function. 979 * 980 * We don't use the macros defined in <name>.h here so you can get a sense 981 * of what goes where. 982 */ 983 984 /* Create a base lexer, using the supplied input stream 985 */ 986 ctx->pLexer = antlr3LexerNewStream(ANTLR3_SIZE_HINT, instream, state); 987 988 /* Check that we allocated the memory correctly 989 */ 990 if (ctx->pLexer == NULL) 991 { 992 ANTLR3_FREE(ctx); 993 return NULL; 994 } 995<if(memoize)> 996<if(grammar.grammarIsRoot)> 997 // Create a LIST for recording rule memos. 998 // 999 ctx->pLexer->rec->ruleMemo = antlr3IntTrieNew(15); /* 16 bit depth is enough for 32768 rules! */ 1000<endif> 1001<endif> 1002 1003 /* Install the implementation of our <name> interface 1004 */ 1005 <rules:{r | <if(!r.ruleDescriptor.isSynPred)>ctx->m<r.ruleDescriptor.name> = m<r.ruleDescriptor.name>;<endif>}; separator="\n"> 1006 1007 /** When the nextToken() call is made to this lexer's pANTLR3_TOKEN_SOURCE 1008 * it will call mTokens() in this generated code, and will pass it the ctx 1009 * pointer of this lexer, not the context of the base lexer, so store that now. 1010 */ 1011 ctx->pLexer->ctx = ctx; 1012 1013 /**Install the token matching function 1014 */ 1015 ctx->pLexer->mTokens = (void (*) (void *))(mTokens); 1016 1017 ctx->getGrammarFileName = getGrammarFileName; 1018 ctx->free = <name>Free; 1019 ctx->reset = <name>Reset; 1020 1021<if(grammar.directDelegates)> 1022 // Initialize the lexers that we are going to delegate some 1023 // functions to. 
1024 // 1025 <grammar.directDelegates: 1026 {g|ctx-><g:delegateName()> = <g.recognizerName>NewSSD(instream, ctx->pLexer->rec->state, ctx<grammar.delegators:{g|, <g:delegateName()>}>);}; separator="\n"> 1027<endif> 1028<if(grammar.delegators)> 1029 // Install the pointers back to lexers that will delegate us to perform certain functions 1030 // for them. 1031 // 1032 <grammar.delegators: 1033 {g|ctx-><g:delegateName()> = <g:delegateName()>;}; separator="\n"> 1034<endif> 1035<if(filterMode)> 1036 /* We have filter mode turned on, so install the filtering nextToken function 1037 */ 1038 ctx->pLexer->rec->state->tokSource->nextToken = <name>NextToken; 1039<endif> 1040 <actions.lexer.apifuncs> 1041 1042 /* Return the newly built lexer to the caller 1043 */ 1044 return ctx; 1045} 1046<if(cyclicDFAs)> 1047 1048/* ========================================================================= 1049 * DFA tables for the lexer 1050 */ 1051<cyclicDFAs:cyclicDFA()> <! dump tables for all DFA !> 1052/* ========================================================================= 1053 * End of DFA tables for the lexer 1054 */ 1055<endif> 1056 1057/* ========================================================================= 1058 * Functions to match the lexer grammar defined tokens from the input stream 1059 */ 1060 1061<rules; separator="\n\n"> 1062 1063/* ========================================================================= 1064 * Lexer matching rules end. 1065 * ========================================================================= 1066 */ 1067<if(synpreds)> 1068 1069/* ========================================================================= 1070 * Lexer syntactic predicates 1071 */ 1072<synpreds:{p | <lexerSynpred(predname=p)>}> 1073/* ========================================================================= 1074 * Lexer syntactic predicates end. 
 * =========================================================================
 */
<endif>

/* End of Lexer code
 * ================================================
 * ================================================
 */

>>


filteringNextToken() ::= <<
/** An override of the lexer's nextToken() method that backtracks over mTokens() looking
 *  for matches in lexer filterMode. No error can be generated upon error; just rewind, consume
 *  a token and then try again. BACKTRACKING needs to be set as well.
 *  Make rule memoization happen only at levels above 1 as we start mTokens
 *  at BACKTRACKING==1.
 */
static pANTLR3_COMMON_TOKEN
<name>NextToken(pANTLR3_TOKEN_SOURCE toksource)
{
    pANTLR3_LEXER                   lexer;
    pANTLR3_RECOGNIZER_SHARED_STATE state;

    lexer = (pANTLR3_LEXER)(toksource->super);
    state = lexer->rec->state;

    /* Get rid of any previous token (token factory takes care of
     * any deallocation when this token is finally used up.
     */
    state ->token  = NULL;
    state ->error  = ANTLR3_FALSE;  /* Start out without an exception    */
    state ->failed = ANTLR3_FALSE;

    /* Record the start of the token in our input stream.
     */
    state->tokenStartCharIndex          = lexer->input->istream->index(lexer->input->istream);
    state->tokenStartCharPositionInLine = lexer->input->getCharPositionInLine(lexer->input);
    state->tokenStartLine               = lexer->input->getLine(lexer->input);
    state->text                         = NULL;

    /* Now call the matching rules and see if we can generate a new token
     */
    for (;;)
    {
        if (lexer->input->istream->_LA(lexer->input->istream, 1) == ANTLR3_CHARSTREAM_EOF)
        {
            /* Reached the end of the stream, nothing more to do.
             */
            pANTLR3_COMMON_TOKEN teof = &(toksource->eofToken);

            teof->setStartIndex (teof, lexer->getCharIndex(lexer));
            teof->setStopIndex  (teof, lexer->getCharIndex(lexer));
            teof->setLine       (teof, lexer->getLine(lexer));
            return teof;
        }

        state->token  = NULL;
        state->error  = ANTLR3_FALSE;   /* Start out without an exception    */

        {
            ANTLR3_MARKER m;

            m                     = lexer->input->istream->mark(lexer->input->istream);
            state->backtracking   = 1;                /* No exceptions        */
            state->failed         = ANTLR3_FALSE;

            /* Call the generated lexer, see if it can get a new token together.
             */
            lexer->mTokens(lexer->ctx);
            state->backtracking = 0;

            <! mTokens backtracks with synpred at BACKTRACKING==2
               and we set the synpredgate to allow actions at level 1. !>

            if (state->failed == ANTLR3_TRUE)
            {
                /* Match failed: rewind to where we started, skip one character
                 * and try the whole token set again from the next position.
                 */
                lexer->input->istream->rewind(lexer->input->istream, m);
                lexer->input->istream->consume(lexer->input->istream); <! advance one char and try again !>
            }
            else
            {
                lexer->emit(lexer);     /* Assemble the token and emit it to the stream */
                return state->token;
            }
        }
    }
}
>>

// Gate used around embedded grammar actions: in normal mode actions only
// fire when we are not backtracking at all.
actionGate() ::= "BACKTRACKING==0"

// Gate used in filter mode: mTokens always runs at BACKTRACKING==1, so
// actions are allowed at exactly that level.
filteringActionGate() ::= "BACKTRACKING==1"

/** How to generate a parser */
genericParser( grammar,
               name,
               scopes,
               tokens,
               tokenNames,
               rules,
               numRules,
               bitsets,
               inputStreamType,
               superClass,
               labelType,
               members,
               rewriteElementType, filterMode,
               ASTLabelType="pANTLR3_BASE_TREE"
             ) ::= <<


<if(grammar.grammarIsRoot)>
/** \brief Table of all token names in symbolic order, mainly used for
 *         error reporting.
 */
pANTLR3_UINT8   <name>TokenNames[<length(tokenNames)>+4]
     = {
        (pANTLR3_UINT8) "\<invalid>",       /* String to print to indicate an invalid token */
        (pANTLR3_UINT8) "\<EOR>",
        (pANTLR3_UINT8) "\<DOWN>",
        (pANTLR3_UINT8) "\<UP>",
        <tokenNames:{it |(pANTLR3_UINT8) <it>}; separator=",\n">
       };
<endif>

    <@members>

    <@end>
<rules:{r |<ruleAttributeScopeFuncMacro(scope=r.ruleDescriptor.ruleScope)>}>
<scopes:{it |<if(it.isDynamicGlobalScope)><globalAttributeScopeFuncMacro(it)><endif>}>

// Forward declare the locally static matching functions we have generated.
//
<rules:{r | static <headerReturnType(ruleDescriptor=r.ruleDescriptor)> <r.ruleDescriptor.name> (p<name> ctx<if(r.ruleDescriptor.parameterScope)>, <endif><r.ruleDescriptor.parameterScope:parameterScope()>);}; separator="\n">
static void <name>Free(p<name> ctx);
static void <name>Reset (p<name> ctx);

<if(!LEXER)>
<! generate rule/method definitions for imported rules so they
   appear to be defined in this recognizer. !>
<if(recognizer.grammar.delegatedRules)>
// Delegated rules
//
<recognizer.grammar.delegatedRules:{ruleDescriptor|static <headerReturnType(ruleDescriptor)> <ruleDescriptor.name>(p<name> ctx<if(ruleDescriptor.parameterScope)>, <endif><ruleDescriptor.parameterScope:parameterScope()>);}; separator="\n">

<endif>
<endif>

/* For use in tree output where we are accumulating rule labels via label += ruleRef
 * we need a function that knows how to free a return scope when the list is destroyed.
 * We cannot just use ANTLR3_FREE because in debug tracking mode, this is a macro.
 */
static void ANTLR3_CDECL freeScope(void * scope)
{
    ANTLR3_FREE(scope);
}

/** \brief Name of the grammar file that generated this code
 */
static const char fileName[] = "<fileName>";

/** \brief Return the name of the grammar file that generated this code.
 */
static const char * getGrammarFileName()
{
    return fileName;
}
/** \brief Create a new <name> parser and return a context for it.
 *
 * \param[in] instream Pointer to an input stream interface.
 *
 * \return Pointer to new parser context upon success.
 */
ANTLR3_API p<name>
<name>New   (<inputStreamType> instream<grammar.delegators:{g|, p<g.recognizerName> <g:delegateName()>}>)
{
    // See if we can create a new parser with the standard constructor
    //
    return <name>NewSSD(instream, NULL<grammar.delegators:{g|, <g:delegateName()>}>);
}

/** \brief Create a new <name> parser and return a context for it.
 *
 * \param[in] instream Pointer to an input stream interface.
 *
 * \return Pointer to new parser context upon success.
 */
ANTLR3_API p<name>
<name>NewSSD (<inputStreamType> instream, pANTLR3_RECOGNIZER_SHARED_STATE state<grammar.delegators:{g|, p<g.recognizerName> <g:delegateName()>}>)
{
    p<name> ctx;        /* Context structure we will build and return   */

    ctx = (p<name>) ANTLR3_CALLOC(1, sizeof(<name>));

    if  (ctx == NULL)
    {
        // Failed to allocate memory for parser context
        //
        return  NULL;
    }

    /* -------------------------------------------------------------------
     * Memory for basic structure is allocated, now to fill in
     * the base ANTLR3 structures. We initialize the function pointers
     * for the standard ANTLR3 parser function set, but upon return
     * from here, the programmer may set the pointers to provide custom
     * implementations of each function.
     *
     * We don't use the macros defined in <name>.h here, in order that you can get a sense
     * of what goes where.
     */

<if(PARSER)>
    /* Create a base parser/recognizer, using the supplied token stream
     */
    ctx->pParser        = antlr3ParserNewStream(ANTLR3_SIZE_HINT, instream->tstream, state);
<endif>
<if(TREE_PARSER)>
    /* Create a base Tree parser/recognizer, using the supplied tree node stream
     */
    ctx->pTreeParser    = antlr3TreeParserNewStream(ANTLR3_SIZE_HINT, instream, state);
<endif>

    /* Install the implementation of our <name> interface
     */
    <rules:{r | ctx-><r.ruleDescriptor.name> = <r.ruleDescriptor.name>;}; separator="\n">
<if(grammar.delegatedRules)>
    // Install the delegated methods so that they appear to be a part of this
    // parser
    //
    <grammar.delegatedRules:{ruleDescriptor | ctx-><ruleDescriptor.name> = <ruleDescriptor.name>;}; separator="\n">
<endif>

    ctx->free           = <name>Free;
    ctx->reset          = <name>Reset;
    ctx->getGrammarFileName = getGrammarFileName;

    /* Install the scope pushing methods.
     */
    <rules: {r |<if(r.ruleDescriptor.ruleScope)>
<ruleAttributeScope(scope=r.ruleDescriptor.ruleScope)><\n>
<endif>}>
    <recognizer.scopes:{it |<if(it.isDynamicGlobalScope)>
<globalAttributeScope(it)><\n>
<endif>}>
    <@apifuncs>

    <@end>
<if(grammar.directDelegates)>
    // Initialize the parsers that we are going to delegate some
    // functions to.
    //
    <grammar.directDelegates:
        {g|ctx-><g:delegateName()> = <g.recognizerName>NewSSD(instream, PSRSTATE, ctx<grammar.delegators:{g|, <g:delegateName()>}>);}; separator="\n">
<endif>
<if(grammar.delegators)>
    // Install the pointers back to parsers that will delegate us to perform certain functions
    // for them.
    //
    <grammar.delegators:
        {g|ctx-><g:delegateName()> = <g:delegateName()>;}; separator="\n">
<endif>
    <actions.parser.apifuncs>
    <actions.treeparser.apifuncs>
<if(memoize)>
<if(grammar.grammarIsRoot)>
    /* Create a LIST for recording rule memos.
     */
    RULEMEMO    = antlr3IntTrieNew(15); /* 16 bit depth is enough for 32768 rules! */<\n>
<endif>
<endif>
    /* Install the token table
     */
    PSRSTATE->tokenNames = <grammar.composite.rootGrammar.recognizerName>TokenNames;

    <@debugStuff()>

    /* Return the newly built parser to the caller
     */
    return  ctx;
}

static void
<name>Reset (p<name> ctx)
{
    RECOGNIZER->reset(RECOGNIZER);
}

/** Free the parser resources
 */
 static void
 <name>Free(p<name> ctx)
 {
    /* Free any scope memory
     */
    <rules: {r |<if(r.ruleDescriptor.ruleScope)><ruleAttributeScopeFree(scope=r.ruleDescriptor.ruleScope)><\n><endif>}>
    <recognizer.scopes:{it |<if(it.isDynamicGlobalScope)><globalAttributeScopeFree(it)><\n><endif>}>

    <@cleanup>
    <@end>
<if(grammar.directDelegates)>
    // Free the parsers that we delegated to
    // functions to.NULL the state so we only free it once.
    //
    <grammar.directDelegates:
        {g| ctx-><g:delegateName()>-><if(TREE_PARSER)>pTreeParser<else>pParser<endif>->rec->state = NULL;
        ctx-><g:delegateName()>->free(ctx-><g:delegateName()>);}; separator="\n">
<endif>
<if(memoize)>
<if(grammar.grammarIsRoot)>
    if  (RULEMEMO != NULL)
    {
        RULEMEMO->free(RULEMEMO);
        RULEMEMO = NULL;
    }
<endif>
<endif>
    // Free this parser
    //
<if(TREE_PARSER)>
    ctx->pTreeParser->free(ctx->pTreeParser);<\n>
<else>
    ctx->pParser->free(ctx->pParser);<\n>
<endif>

    ANTLR3_FREE(ctx);

    /* Everything is released, so we can return
     */
    return;
 }

/** Return token names used by this <grammarType()>
 *
 * The returned pointer is used as an index into the token names table (using the token
 * number as the index).
 *
 * \return Pointer to first char * in the table.
 */
static pANTLR3_UINT8    *getTokenNames()
{
        return <grammar.composite.rootGrammar.recognizerName>TokenNames;
}

    <members>

/* Declare the bitsets
 */
<bitsets:{it | <bitsetDeclare(name={FOLLOW_<it.name>_in_<it.inName><it.tokenIndex>},
                    words64=it.bits)>}>


<if(cyclicDFAs)>

/* =========================================================================
 * DFA tables for the parser
 */
<cyclicDFAs:cyclicDFA()> <! dump tables for all DFA !>
/* =========================================================================
 * End of DFA tables for the parser
 */
<endif>

/* ==============================================
 * Parsing rules
 */
<rules; separator="\n\n">
<if(grammar.delegatedRules)>
    // Delegated methods that appear to be a part of this
    // parser
    //
<grammar.delegatedRules:{ruleDescriptor|
    <returnType()> <ruleDescriptor.name>(p<name> ctx<if(ruleDescriptor.parameterScope.attributes)>, <endif><ruleDescriptor.parameterScope:parameterScope()>)
    {
        <if(ruleDescriptor.hasReturnValue)>return <endif>       ctx-><ruleDescriptor.grammar:delegateName()>-><ruleDescriptor.name>(ctx-><ruleDescriptor.grammar:delegateName()><if(ruleDescriptor.parameterScope.attributes)>, <endif><ruleDescriptor.parameterScope.attributes:{a|<a.name>}; separator=", ">);
    \}}; separator="\n">

<endif>
/* End of parsing rules
 * ==============================================
 */

/* ==============================================
 * Syntactic predicates
 */
<synpreds:{p | <synpred(predname=p)>}>
/* End of syntactic predicates
 * ==============================================
 */





>>

// Concrete parser entry: binds genericParser to a common token stream.
parser( grammar,
        name,
        scopes,
        tokens,
        tokenNames,
        rules,
        numRules,
        bitsets,
        ASTLabelType,
        superClass="Parser",
        labelType="pANTLR3_COMMON_TOKEN",
        members={<actions.parser.members>}
      ) ::= <<
<genericParser(inputStreamType="pANTLR3_COMMON_TOKEN_STREAM", rewriteElementType="TOKEN", ...)>
>>

/** How to generate a tree parser; same as parser except the input
 *  stream is a different type.
 */
treeParser( grammar,
            name,
            scopes,
            tokens,
            tokenNames,
            globalAction,
            rules,
            numRules,
            bitsets,
            filterMode,
            labelType={<ASTLabelType>},
            ASTLabelType="pANTLR3_BASE_TREE",
            superClass="TreeParser",
            members={<actions.treeparser.members>}
          ) ::= <<
<genericParser(inputStreamType="pANTLR3_COMMON_TREE_NODE_STREAM", rewriteElementType="NODE", ...)>
>>

/** A simpler version of a rule template that is specific to the imaginary
 *  rules created for syntactic predicates. As they never have return values
 *  nor parameters etc..., just give simplest possible method. Don't do
 *  any of the normal memoization stuff in here either; it's a waste.
 *  As predicates cannot be inlined into the invoking rule, they need to
 *  be in a rule by themselves.
 */
synpredRule(ruleName, ruleDescriptor, block, description, nakedBlock) ::=
<<
// $ANTLR start <ruleName>
static void <ruleName>_fragment(p<name> ctx <ruleDescriptor.parameterScope:parameterScope()>)
{
    <ruleLabelDefs()>
    <ruleLabelInitializations()>
<if(trace)>
    ANTLR3_PRINTF("enter <ruleName> %d failed = %d, backtracking = %d\\n",LT(1),failed,BACKTRACKING);
    <block>
    ANTLR3_PRINTF("exit <ruleName> %d, failed = %d, backtracking = %d\\n",LT(1),failed,BACKTRACKING);

<else>
    <block>
<endif>
<ruleCleanUp()>
}
// $ANTLR end <ruleName>
>>

// Driver for a syntactic predicate: bumps the backtracking level, runs the
// predicate fragment against a marked stream position, then rewinds and
// reports whether the fragment matched.
synpred(predname) ::= <<
static ANTLR3_BOOLEAN <predname>(p<name> ctx)
{
    ANTLR3_MARKER   start;
    ANTLR3_BOOLEAN  success;

    BACKTRACKING++;
    <@start()>
    start   = MARK();
    <predname>_fragment(ctx);       // can never throw exception
    success = !(FAILEDFLAG);
    REWIND(start);
    <@stop()>
    BACKTRACKING--;
    FAILEDFLAG = ANTLR3_FALSE;
    return success;
}<\n>
>>

// Lexer syntactic predicates share the parser implementation.
lexerSynpred(predname) ::= <<
<synpred(predname)>
>>
/** Emit the memoization fast-path: if this rule was already attempted at the
 *  current input position while backtracking, bail out immediately without
 *  re-parsing.
 */
ruleMemoization(rname) ::= <<
<if(memoize)>
if ( (BACKTRACKING>0) && (HAVEPARSEDRULE(<ruleDescriptor.index>)) )
{
<if(ruleDescriptor.hasMultipleReturnValues)>
<if(!ruleDescriptor.isSynPred)>
    retval.start = 0;<\n>
<endif>
<endif>
    <(ruleDescriptor.actions.after):execAfter()>
    <finalCode(finalBlock=finally)>
<if(!ruleDescriptor.isSynPred)>
    <scopeClean()><\n>
<endif>
    return <ruleReturnValue()>;
}
<endif>
>>

/** How to test for failure and return from rule */
checkRuleBacktrackFailure() ::= <<
if  (HASEXCEPTION())
{
    goto rule<ruleDescriptor.name>Ex;
}
<if(backtracking)>
if (HASFAILED())
{
    <scopeClean()>
    <@debugClean()>
    return <ruleReturnValue()>;
}
<endif>
>>

/** This rule has failed, exit indicating failure during backtrack */
ruleBacktrackFailure() ::= <<
<if(backtracking)>
if (BACKTRACKING>0)
{
    FAILEDFLAG = <true_value()>;
    <scopeClean()>
    return <ruleReturnValue()>;
}
<endif>
>>

/** How to generate code for a rule.  This includes any return type
 *  data aggregates required for multiple return values.
 */
rule(ruleName,ruleDescriptor,block,emptyRule,description,exceptions,finally,memoize) ::= <<
/**
 * $ANTLR start <ruleName>
 * <fileName>:<description>
 */
static <returnType()>
<ruleName>(p<name> ctx<if(ruleDescriptor.parameterScope)>, <endif><ruleDescriptor.parameterScope:parameterScope()>)
{
    <! Fixed trace: the original format string had three conversions but only
       two arguments, and the exit line referenced the nonexistent C symbol
       'failed'; use the FAILEDFLAG macro and matching %p/%d conversions. !>
    <if(trace)>ANTLR3_PRINTF("enter <ruleName> %p failed=%d, backtracking=%d\n", LT(1), FAILEDFLAG, BACKTRACKING);<endif>
    <ruleDeclarations()>
    <ruleDescriptor.actions.declarations>
    <ruleLabelDefs()>
    <ruleInitializations()>
    <ruleDescriptor.actions.init>
    <ruleMemoization(rname=ruleName)>
    <ruleLabelInitializations()>
    <@preamble()>
    {
        <block>
    }

    <ruleCleanUp()>
<if(exceptions)>
    if  (HASEXCEPTION())
    {
        <exceptions:{e|<catch(decl=e.decl,action=e.action)><\n>}>
    }
    else
    {
        <(ruleDescriptor.actions.after):execAfter()>
    }
<else>
    <if(!emptyRule)>
    <if(actions.(actionScope).rulecatch)>
    <actions.(actionScope).rulecatch>
    <else>
    if (HASEXCEPTION())
    {
        PREPORTERROR();
        PRECOVER();
        <@setErrorReturnValue()>
    }
    <if(ruleDescriptor.actions.after)>
    else
    {
        <(ruleDescriptor.actions.after):execAfter()>
    }<\n>
    <endif>
    <endif>
    <endif>
<endif>

    <if(trace)>ANTLR3_PRINTF("exit <ruleName> %p failed=%d, backtracking=%d\n", LT(1), FAILEDFLAG, BACKTRACKING);<endif>
    <memoize()>
<if(finally)>
    <finalCode(finalBlock=finally)>
<endif>
    <scopeClean()>
    <@postamble()>
    return <ruleReturnValue()>;
}
/* $ANTLR end <ruleName> */
>>

/** Wrap a rule's finally action in its own C scope. */
finalCode(finalBlock) ::= <<
{
    <finalBlock>
}

>>

/** Emit the user-supplied exception-catch action for a rule.
 *  Uses the declared 'action' formal argument directly rather than reaching
 *  through StringTemplate dynamic scoping for the enclosing 'e' iterator
 *  (the original '<e.action>' only worked by accident of dynamic lookup).
 */
catch(decl,action) ::= <<
/* catch(decl,action)
 */
{
    <action>
}

>>

/** Declare the rule's return storage (single attributes or an aggregate
 *  'retval') plus the memoization start index when memoizing.
 */
ruleDeclarations() ::= <<
<if(ruleDescriptor.hasMultipleReturnValues)>
<returnType()> retval;<\n>
<else>
<ruleDescriptor.returnScope.attributes:{ a |
<a.type> <a.name>;
}>
<endif>
<if(memoize)>
ANTLR3_UINT32 <ruleDescriptor.name>_StartIndex;
<endif>
>>

/** Initialize return attributes, record the memoization start index and
 *  push any dynamic scopes the rule uses.
 */
ruleInitializations() ::= <<
/* Initialize rule variables
 */
<if(ruleDescriptor.hasMultipleReturnValues)>
<ruleDescriptor.returnScope.attributes:{ a |
<if(a.initValue)>retval.<a.name> = <a.initValue>;<endif>
}>
<else>
<ruleDescriptor.returnScope.attributes:{ a |
<if(a.initValue)><a.name> = <a.initValue>;<endif>
}>
<endif>
<if(memoize)>
<ruleDescriptor.name>_StartIndex = INDEX();<\n>
<endif>
<ruleDescriptor.useScopes:{it |<scopeTop(it)> = <scopePush(it)>;}; separator="\n">
<ruleDescriptor.ruleScope:{it |<scopeTop(it.name)> = <scopePush(it.name)>;}; separator="\n">
>>

/** Declare C locals for token, list and rule labels used by a parser rule. */
ruleLabelDefs() ::= <<
<[ruleDescriptor.tokenLabels,ruleDescriptor.tokenListLabels]
    :{it |<labelType> <it.label.text>;}; separator="\n"
>
<[ruleDescriptor.tokenListLabels,ruleDescriptor.ruleListLabels]
    :{it |pANTLR3_VECTOR list_<it.label.text>;}; separator="\n"
>
<[ruleDescriptor.ruleLabels,ruleDescriptor.ruleListLabels]
    :ruleLabelDef(); separator="\n"
>
>>

/** NULL-initialize all labels and set up retval.start/stop for rules with
 *  aggregate return values.
 */
ruleLabelInitializations() ::= <<
<[ruleDescriptor.tokenLabels,ruleDescriptor.tokenListLabels]
    :{it |<it.label.text> = NULL;}; separator="\n"
>
<[ruleDescriptor.tokenListLabels,ruleDescriptor.ruleListLabels]
    :{it |list_<it.label.text> = NULL;}; separator="\n"
>
<[ruleDescriptor.ruleLabels,ruleDescriptor.ruleListLabels]
    :ruleLabelInitVal(); separator="\n"
>
<if(ruleDescriptor.hasMultipleReturnValues)>
<if(!ruleDescriptor.isSynPred)>
retval.start = LT(1); retval.stop = retval.start;<\n>
<endif>
<endif>
>>

/** Declare labels for a lexer rule.
 *  NB: the original template listed ruleDescriptor.ruleListLabels twice in
 *  the list-label concatenation, which emitted each pANTLR3_INT_TRIE
 *  declaration twice (a C compile error); the duplicate is removed here.
 */
lexerRuleLabelDefs() ::= <<
<[ruleDescriptor.tokenLabels,
  ruleDescriptor.tokenListLabels,
  ruleDescriptor.ruleLabels]
    :{it |<labelType> <it.label.text>;}; separator="\n"
>
<ruleDescriptor.charLabels:{it |ANTLR3_UINT32 <it.label.text>;}; separator="\n">
<[ruleDescriptor.tokenListLabels,
  ruleDescriptor.ruleListLabels]
    :{it |pANTLR3_INT_TRIE list_<it.label.text>;}; separator="\n"
>
>>

/** Initialize lexer rule labels; list labels get a fresh INT_TRIE.
 *  Duplicate ruleListLabels entry removed (it double-initialized each trie,
 *  leaking the first allocation).
 */
lexerRuleLabelInit() ::= <<
<[ruleDescriptor.tokenLabels,
  ruleDescriptor.tokenListLabels,
  ruleDescriptor.ruleLabels]
    :{it |<it.label.text> = NULL;}; separator="\n"
>
<[ruleDescriptor.tokenListLabels,
  ruleDescriptor.ruleListLabels]
    :{it |list_<it.label.text> = antlr3IntTrieNew(31);}; separator="\n"
>
>>

/** Release lexer rule label resources.
 *  Duplicate ruleListLabels entry removed (it emitted a second
 *  list_X->free(list_X) call, i.e. a double free at runtime).
 */
lexerRuleLabelFree() ::= <<
<[ruleDescriptor.tokenLabels,
  ruleDescriptor.tokenListLabels,
  ruleDescriptor.ruleLabels]
    :{it |<it.label.text> = NULL;}; separator="\n"
>
<[ruleDescriptor.tokenListLabels,
  ruleDescriptor.ruleListLabels]
    :{it |list_<it.label.text>->free(list_<it.label.text>);}; separator="\n"
>
>>

/** Expression returned from a rule function: the single named return value,
 *  the aggregate 'retval', or nothing for synpreds/void rules.
 */
ruleReturnValue() ::= <%
<if(!ruleDescriptor.isSynPred)>
<if(ruleDescriptor.hasReturnValue)>
<if(ruleDescriptor.hasSingleReturnValue)>
<ruleDescriptor.singleValueReturnName>
<else>
retval
<endif>
<endif>
<endif>
%>

/** Record the rule result in the memoization cache when backtracking. */
memoize() ::= <<
<if(memoize)>
<if(backtracking)>
if ( BACKTRACKING>0 ) { MEMOIZE(<ruleDescriptor.index>, <ruleDescriptor.name>_StartIndex); }
<endif>
<endif>
>>

/** Common rule exit label plus retval.stop capture for parser rules. */
ruleCleanUp() ::= <<

// This is where rules clean up and exit
//
goto rule<ruleDescriptor.name>Ex; /* Prevent compiler warnings */
rule<ruleDescriptor.name>Ex: ;
<if(ruleDescriptor.hasMultipleReturnValues)>
<if(!TREE_PARSER)>
<if(!ruleDescriptor.isSynPred)>
retval.stop = LT(-1);<\n>
<endif>
<endif>
<endif>
>>

/** Pop any dynamic scopes the rule pushed in ruleInitializations(). */
scopeClean() ::= <<
<ruleDescriptor.useScopes:{it |<scopePop(it)>}; separator="\n">
<ruleDescriptor.ruleScope:{it |<scopePop(it.name)>}; separator="\n">

>>
/** How to generate a rule in the lexer; naked blocks are used for
 *  fragment rules, which do not produce tokens.
 */
lexerRule(ruleName,nakedBlock,ruleDescriptor,block,memoize) ::= <<
//   Comes from: <block.description>
/** \brief Lexer rule generated by ANTLR3
 *
 * $ANTLR start <ruleName>
 *
 * Looks to match the characters the constitute the token <ruleName>
 * from the attached input stream.
 *
 *
 * \remark
 *  - lexer->error == ANTLR3_TRUE if an exception was thrown.
 */
static ANTLR3_INLINE
void m<ruleName>(p<name> ctx<if(ruleDescriptor.parameterScope)>, <endif><ruleDescriptor.parameterScope:parameterScope()>)
{
    ANTLR3_UINT32 _type;
    <ruleDeclarations()>
    <ruleDescriptor.actions.declarations>
    <lexerRuleLabelDefs()>
    <! Fixed trace: the original emitted a Java System.out.println() call
       into the generated C source; use ANTLR3_FPRINTF and the FAILEDFLAG
       macro (the bare identifier 'failed' does not exist in C). !>
    <if(trace)>ANTLR3_FPRINTF(stderr, "enter <ruleName> '%c' line=%d:%d failed = %d, backtracking = %d\n", LA(1), GETLINE(), GETCHARPOSITIONINLINE(), FAILEDFLAG, BACKTRACKING);<endif>

<if(nakedBlock)>
    <ruleMemoization(rname=ruleName)>
    <lexerRuleLabelInit()>
    <ruleDescriptor.actions.init>

    <block><\n>
<else>
    <ruleMemoization(rname=ruleName)>
    <lexerRuleLabelInit()>
    _type = <ruleName>;

    <ruleDescriptor.actions.init>

    <block>
    LEXSTATE->type = _type;
<endif>
    <if(trace)> ANTLR3_FPRINTF(stderr, "exit <ruleName> '%c' line=%d:%d failed = %d, backtracking =%d\n",LA(1),GETLINE(),GETCHARPOSITIONINLINE(),FAILEDFLAG,BACKTRACKING);<endif>
    <ruleCleanUp()>
    <lexerRuleLabelFree()>
    <(ruleDescriptor.actions.after):execAfter()>
    <! Fixed: '<memoize>' rendered the boolean formal argument into the C
       output; invoke the memoize() template as rule() does. !>
    <memoize()>
}
// $ANTLR end <ruleName>
>>

/** How to generate code for the implicitly-defined lexer grammar rule
 *  that chooses between lexer rules.
 */
tokensRule(ruleName,nakedBlock,args,block,ruleDescriptor) ::= <<
/** This is the entry point in to the lexer from an object that
 *  wants to generate the next token, such as a pCOMMON_TOKEN_STREAM
 */
static void
mTokens(p<name> ctx)
{
    <block><\n>

    goto ruleTokensEx; /* Prevent compiler warnings */
ruleTokensEx: ;
}
>>

// S U B R U L E S

/** A (...) subrule with multiple alternatives */
block(alts,decls,decision,enclosingBlockLevel,blockLevel,decisionNumber,maxK,maxAlt,description) ::= <<

// <fileName>:<description>
{
    int alt<decisionNumber>=<maxAlt>;
    <decls>
    <@predecision()>
    <decision>
    <@postdecision()>
    <@prebranch()>
    switch (alt<decisionNumber>)
    {
        <alts:{a | <altSwitchCase(i,a)>}>
    }
    <@postbranch()>
}
>>

/** A rule block with multiple alternatives */
ruleBlock(alts,decls,decision,enclosingBlockLevel,blockLevel,decisionNumber,maxK,maxAlt,description) ::= <<
{
    //  <fileName>:<description>

    ANTLR3_UINT32 alt<decisionNumber>;

    alt<decisionNumber>=<maxAlt>;

    <decls>
    <@predecision()>
    <decision>
    <@postdecision()>
    switch (alt<decisionNumber>)
    {
        <alts:{a | <altSwitchCase(i,a)>}>
    }
}
>>

// A rule block with exactly one alternative: no decision switch is needed.
ruleBlockSingleAlt(alts,decls,decision,enclosingBlockLevel,blockLevel,decisionNumber,description) ::= <<
// <fileName>:<description>
<decls>
<@prealt()>
<alts>
<@postalt()>
>>

/** A special case of a (...) subrule with a single alternative */
blockSingleAlt(alts,decls,decision,enclosingBlockLevel,blockLevel,decisionNumber,description) ::= <<
// <fileName>:<description>
<decls>
<@prealt()>
<alts>
<@postalt()>
>>

/** A (..)+ block with 1 or more alternatives */
positiveClosureBlock(alts,decls,decision,enclosingBlockLevel,blockLevel,decisionNumber,maxK,maxAlt,description) ::= <<
// <fileName>:<description>
{
    int cnt<decisionNumber>=0;
    <decls>
    <@preloop()>

    for (;;)
    {
        int alt<decisionNumber>=<maxAlt>;
        <@predecision()>
        <decision>
        <@postdecision()>
        switch (alt<decisionNumber>)
        {
            <alts:{a | <altSwitchCase(i,a)>}>
            default:

                /* Default: no alternative matched; legal exit only if we have
                 * already matched at least one iteration of the (...)+ block.
                 */
                if ( cnt<decisionNumber> >= 1 )
                {
                    goto loop<decisionNumber>;
                }
                <ruleBacktrackFailure()>
                <earlyExitEx()>
                <@earlyExitException()>
                goto rule<ruleDescriptor.name>Ex;
        }
        cnt<decisionNumber>++;
    }
    loop<decisionNumber>: ;    /* Jump to here if this rule does not match */
    <@postloop()>
}
>>

// Construct an early-exit exception for a (...)+ block that matched nothing.
earlyExitEx() ::= <<
/* mismatchedSetEx()
 */
CONSTRUCTEX();
EXCEPTION->type = ANTLR3_EARLY_EXIT_EXCEPTION;
EXCEPTION->name = (void *)ANTLR3_EARLY_EXIT_NAME;
<\n>
>>
positiveClosureBlockSingleAlt ::= positiveClosureBlock

/** A (..)* block with 1 or more alternatives */
closureBlock(alts,decls,decision,enclosingBlockLevel,blockLevel,decisionNumber,maxK,maxAlt,description) ::= <<

// <fileName>:<description>
<decls>

<@preloop()>
for (;;)
{
    int alt<decisionNumber>=<maxAlt>;
    <@predecision()>
    <decision>
    <@postdecision()>
    switch (alt<decisionNumber>)
    {
        <alts:{a | <altSwitchCase(i,a)>}>
        default:
            goto loop<decisionNumber>;    /* break out of the loop */
            break;
    }
}
loop<decisionNumber>: ; /* Jump out to here if this rule does not match */
<@postloop()>
>>

closureBlockSingleAlt ::= closureBlock

/** Optional blocks (x)? are translated to (x|) by antlr before code generation
 *  so we can just use the normal block template
 */
optionalBlock ::= block

optionalBlockSingleAlt ::= block

/** A case in a switch that jumps to an alternative given the alternative
 *  number. A DFA predicts the alternative and then a simple switch
 *  does the jump to the code that actually matches that alternative.
 */
altSwitchCase(altNum,alt) ::= <<
case <altNum>:
    <@prealt()>
    <alt>
    break;<\n>
>>

/** An alternative is just a list of elements; at outermost level */
alt(elements,altNum,description,autoAST,outerAlt,treeLevel,rew) ::= <<
// <fileName>:<description>
{
    <@declarations()>
    <@initializations()>
    <elements:element()>
    <rew>
    <@cleanup()>
}
>>

// E L E M E N T S
/** What to emit when there is no rewrite. For auto build
 *  mode, does nothing.
 */
noRewrite(rewriteBlockLevel, treeLevel) ::= ""

/** Dump the elements one per line */
element(e) ::= <<
<@prematch()>
<e.el><\n>
>>

/** match a token optionally with a label in front */
tokenRef(token,label,elementIndex,terminalOptions) ::= <<
<if(label)><label> = (<labelType>)<endif> MATCHT(<token>, &FOLLOW_<token>_in_<ruleName><elementIndex>);
<checkRuleBacktrackFailure()>
>>

/** ids+=ID */
tokenRefAndListLabel(token,label,elementIndex,terminalOptions) ::= <<
<tokenRef(...)>
<listLabel(elem=label,...)>
>>

// Append a matched element to a += list label, lazily creating the vector
// on first use.
listLabel(label,elem) ::= <<
if (list_<label> == NULL)
{
    list_<label>=ctx->vectors->newVector(ctx->vectors);
}
list_<label>->add(list_<label>, <elem>, NULL);
>>


/** match a character */
charRef(char,label) ::= <<
<if(label)>
<label> = LA(1);<\n>
<endif>
MATCHC(<char>);
<checkRuleBacktrackFailure()>
>>

/** match a character range */
charRangeRef(a,b,label) ::= <<
<if(label)>
<label> = LA(1);<\n>
<endif>
MATCHRANGE(<a>, <b>);
<checkRuleBacktrackFailure()>
>>

/** For now, sets are interval tests and must be tested inline */
matchSet(s,label,elementIndex,terminalOptions,postmatchCode="") ::= <<
<if(label)>
<if(LEXER)>
<label>= LA(1);<\n>
<else>
<label>=(<labelType>)LT(1);<\n>
<endif>
<endif>
if ( <s> )
{
    CONSUME();
    <postmatchCode>
<if(!LEXER)>
    PERRORRECOVERY=ANTLR3_FALSE;
<endif>
    <if(backtracking)>FAILEDFLAG=ANTLR3_FALSE;<\n><endif>
}
else
{
    <ruleBacktrackFailure()>
    <mismatchedSetEx()>
    <@mismatchedSetException()>
<if(LEXER)>
    LRECOVER();
<else>
<! use following code to make it recover inline;
    RECOVERFROMMISMATCHEDSET(&FOLLOW_set_in_<ruleName><elementIndex>);
!>
<endif>
    goto rule<ruleDescriptor.name>Ex;
}<\n>
>>

// Construct a mismatched-set exception; parsers leave expectingSet NULL by
// default (inline-recovery variant is kept commented out below).
mismatchedSetEx() ::= <<
CONSTRUCTEX();
EXCEPTION->type = ANTLR3_MISMATCHED_SET_EXCEPTION;
EXCEPTION->name = (void *)ANTLR3_MISMATCHED_SET_NAME;
<if(PARSER)>
EXCEPTION->expectingSet = NULL;
<! use following code to make it recover inline;
EXCEPTION->expectingSet = &FOLLOW_set_in_<ruleName><elementIndex>;
!>
<endif>
>>

matchRuleBlockSet ::= matchSet

matchSetAndListLabel(s,label,elementIndex,postmatchCode) ::= <<
<matchSet(...)>
<listLabel(elem=label,...)>
>>

/** Match a string literal */
lexerStringRef(string,label,elementIndex) ::= <<
<if(label)>
<label>Start = GETCHARINDEX();
MATCHS(<string>);
<checkRuleBacktrackFailure()>
<label> = LEXSTATE->tokFactory->newToken(LEXSTATE->tokFactory);
<label>->setType(<label>, ANTLR3_TOKEN_INVALID);
<label>->setStartIndex(<label>, <label>Start);
<label>->setStopIndex(<label>, GETCHARINDEX()-1);
<! NOTE(review): 'INPUT->tnstream' looks like tree-parser plumbing inside a
   lexer template — confirm the intended input-stream accessor here. !>
<label>->input = INPUT->tnstream->istream;
<else>
MATCHS(<string>);
<checkRuleBacktrackFailure()><\n>
<endif>
>>

// Match any single token (parser-level wildcard '.').
wildcard(token,label,elementIndex,terminalOptions) ::= <<
<if(label)>
<label>=(<labelType>)LT(1);<\n>
<endif>
MATCHANYT();
<checkRuleBacktrackFailure()>
>>

wildcardAndListLabel(token,label,elementIndex,terminalOptions) ::= <<
<wildcard(...)>
<listLabel(elem=label,...)>
>>

/** Match .
wildcard in lexer */ 2193wildcardChar(label, elementIndex) ::= << 2194<if(label)> 2195<label> = LA(1);<\n> 2196<endif> 2197MATCHANY(); 2198<checkRuleBacktrackFailure()> 2199>> 2200 2201wildcardCharListLabel(label, elementIndex) ::= << 2202<wildcardChar(...)> 2203<listLabel(elem=label,...)> 2204>> 2205 2206/** Match a rule reference by invoking it possibly with arguments 2207 * and a return value or values. The 'rule' argument was the 2208 * target rule name, but now is type Rule, whose toString is 2209 * same: the rule name. Now though you can access full rule 2210 * descriptor stuff. 2211 */ 2212ruleRef(rule,label,elementIndex,args,scope) ::= << 2213FOLLOWPUSH(FOLLOW_<rule.name>_in_<ruleName><elementIndex>); 2214<if(label)><label>=<endif><if(scope)>ctx-><scope:delegateName()>-><endif><rule.name>(ctx<if(scope)>-><scope:delegateName()><endif><if(args)>, <args; separator=", "><endif>);<\n> 2215FOLLOWPOP(); 2216<checkRuleBacktrackFailure()> 2217>> 2218 2219/** ids+=r */ 2220ruleRefAndListLabel(rule,label,elementIndex,args,scope) ::= << 2221<ruleRef(...)> 2222<listLabel(elem=label,...)> 2223>> 2224 2225/** A lexer rule reference 2226 * The 'rule' argument was the target rule name, but now 2227 * is type Rule, whose toString is same: the rule name. 2228 * Now though you can access full rule descriptor stuff. 
2229 */ 2230lexerRuleRef(rule,label,args,elementIndex,scope) ::= << 2231/* <description> */ 2232<if(label)> 2233{ 2234 ANTLR3_MARKER <label>Start<elementIndex> = GETCHARINDEX(); 2235 <if(scope)>ctx-><scope:delegateName()>-><endif>m<rule.name>(ctx<if(scope)>-><scope:delegateName()><endif> <if(args)>, <endif><args; separator=", ">); 2236 <checkRuleBacktrackFailure()> 2237 <label> = LEXSTATE->tokFactory->newToken(LEXSTATE->tokFactory); 2238 <label>->setType(<label>, ANTLR3_TOKEN_INVALID); 2239 <label>->setStartIndex(<label>, <label>Start<elementIndex>); 2240 <label>->setStopIndex(<label>, GETCHARINDEX()-1); 2241 <label>->input = INPUT; 2242} 2243<else> 2244<if(scope)>ctx-><scope:delegateName()>-><endif>m<rule.name>(ctx<if(scope)>-><scope:delegateName()><endif> <if(args)>, <endif><args; separator=", ">); 2245<checkRuleBacktrackFailure()> 2246<endif> 2247>> 2248 2249/** i+=INT in lexer */ 2250lexerRuleRefAndListLabel(rule,label,args,elementIndex,scope) ::= << 2251<lexerRuleRef(...)> 2252<listLabel(elem=label,...)> 2253>> 2254 2255/** EOF in the lexer */ 2256lexerMatchEOF(label,elementIndex) ::= << 2257<if(label)> 2258{ 2259 ANTLR3_UINT32 <label>Start<elementIndex>; 2260 <labelType> <label>; 2261 <label>Start<elementIndex> = GETCHARINDEX(); 2262 MATCHC(ANTLR3_CHARSTREAM_EOF); 2263 <checkRuleBacktrackFailure()> 2264 <label> = LEXSTATE->tokFactory->newToken(LEXSTATE->tokFactory); 2265 <label>->setType(<label>, ANTLR3_TOKEN_EOF); 2266 <label>->setStartIndex(<label>, <label>Start<elementIndex>); 2267 <label>->setStopIndex(<label>, GETCHARINDEX()-1); 2268 <label>->input = INPUT->tnstream->istream; 2269} 2270<else> 2271 MATCHC(ANTLR3_CHARSTREAM_EOF); 2272 <checkRuleBacktrackFailure()> 2273 <endif> 2274>> 2275 2276// used for left-recursive rules 2277recRuleDefArg() ::= "int <recRuleArg()>" 2278recRuleArg() ::= "_p" 2279recRuleAltPredicate(ruleName,opPrec) ::= "<recRuleArg()> \<= <opPrec>" 2280recRuleSetResultAction() ::= "root_0=$<ruleName>_primary.tree;" 
2281recRuleSetReturnAction(src,name) ::= "$<name>=$<src>.<name>;" 2282 2283/** match ^(root children) in tree parser */ 2284tree(root, actionsAfterRoot, children, nullableChildList, enclosingTreeLevel, treeLevel) ::= << 2285<root:element()> 2286<actionsAfterRoot:element()> 2287<if(nullableChildList)> 2288if ( LA(1)==ANTLR3_TOKEN_DOWN ) { 2289 MATCHT(ANTLR3_TOKEN_DOWN, NULL); 2290 <checkRuleBacktrackFailure()> 2291 <children:element()> 2292 MATCHT(ANTLR3_TOKEN_UP, NULL); 2293 <checkRuleBacktrackFailure()> 2294} 2295<else> 2296MATCHT(ANTLR3_TOKEN_DOWN, NULL); 2297<checkRuleBacktrackFailure()> 2298<children:element()> 2299MATCHT(ANTLR3_TOKEN_UP, NULL); 2300<checkRuleBacktrackFailure()> 2301<endif> 2302>> 2303 2304/** Every predicate is used as a validating predicate (even when it is 2305 * also hoisted into a prediction expression). 2306 */ 2307validateSemanticPredicate(pred,description) ::= << 2308if ( !(<evalPredicate(...)>) ) 2309{ 2310 <ruleBacktrackFailure()> 2311 <newFPE(...)> 2312} 2313>> 2314 2315newFPE() ::= << 2316 CONSTRUCTEX(); 2317 EXCEPTION->type = ANTLR3_FAILED_PREDICATE_EXCEPTION; 2318 EXCEPTION->message = (void *)"<description>"; 2319 EXCEPTION->ruleName = (void *)"<ruleName>"; 2320 <\n> 2321>> 2322 2323// F i x e d D F A (if-then-else) 2324 2325dfaState(k,edges,eotPredictsAlt,description,stateNumber,semPredState) ::= << 2326 2327{ 2328 int LA<decisionNumber>_<stateNumber> = LA(<k>); 2329 <edges; separator="\nelse "> 2330 else 2331 { 2332<if(eotPredictsAlt)> 2333 alt<decisionNumber>=<eotPredictsAlt>; 2334<else> 2335 <ruleBacktrackFailure()> 2336 2337 <newNVException()> 2338 goto rule<ruleDescriptor.name>Ex; 2339 2340<endif> 2341 } 2342} 2343>> 2344 2345newNVException() ::= << 2346CONSTRUCTEX(); 2347EXCEPTION->type = ANTLR3_NO_VIABLE_ALT_EXCEPTION; 2348EXCEPTION->message = (void *)"<description>"; 2349EXCEPTION->decisionNum = <decisionNumber>; 2350EXCEPTION->state = <stateNumber>; 2351<@noViableAltException()> 2352<\n> 2353>> 2354 2355/** Same as a 
normal DFA state except that we don't examine lookahead 2356 * for the bypass alternative. It delays error detection but this 2357 * is faster, smaller, and more what people expect. For (X)? people 2358 * expect "if ( LA(1)==X ) match(X);" and that's it. 2359 */ 2360dfaOptionalBlockState(k,edges,eotPredictsAlt,description,stateNumber,semPredState) ::= << 2361{ 2362 int LA<decisionNumber>_<stateNumber> = LA(<k>); 2363 <edges; separator="\nelse "> 2364} 2365>> 2366 2367/** A DFA state that is actually the loopback decision of a closure 2368 * loop. If end-of-token (EOT) predicts any of the targets then it 2369 * should act like a default clause (i.e., no error can be generated). 2370 * This is used only in the lexer so that for ('a')* on the end of a rule 2371 * anything other than 'a' predicts exiting. 2372 */ 2373 2374dfaLoopbackStateDecls()::= << 2375ANTLR3_UINT32 LA<decisionNumber>_<stateNumber>; 2376>> 2377dfaLoopbackState(k,edges,eotPredictsAlt,description,stateNumber,semPredState) ::= << 2378{ 2379 /* dfaLoopbackState(k,edges,eotPredictsAlt,description,stateNumber,semPredState) 2380 */ 2381 int LA<decisionNumber>_<stateNumber> = LA(<k>); 2382 <edges; separator="\nelse "><\n> 2383 <if(eotPredictsAlt)> 2384 <if(!edges)> 2385 alt<decisionNumber>=<eotPredictsAlt>; <! if no edges, don't gen ELSE !> 2386 <else> 2387 else 2388 { 2389 alt<decisionNumber>=<eotPredictsAlt>; 2390 }<\n> 2391 <endif> 2392 <endif> 2393} 2394>> 2395 2396/** An accept state indicates a unique alternative has been predicted */ 2397dfaAcceptState(alt) ::= "alt<decisionNumber>=<alt>;" 2398 2399/** A simple edge with an expression. If the expression is satisfied, 2400 * enter to the target state. To handle gated productions, we may 2401 * have to evaluate some predicates for this edge. 
2402 */ 2403dfaEdge(labelExpr, targetState, predicates) ::= << 2404if ( <if(predicates)>(<predicates>) && <endif>(<labelExpr>)) 2405{ 2406 <targetState> 2407} 2408>> 2409 2410// F i x e d D F A (switch case) 2411 2412/** A DFA state where a SWITCH may be generated. The code generator 2413 * decides if this is possible: CodeGenerator.canGenerateSwitch(). 2414 */ 2415dfaStateSwitch(k,edges,eotPredictsAlt,description,stateNumber,semPredState) ::= << 2416switch ( LA(<k>) ) 2417{ 2418<edges; separator="\n"> 2419 2420default: 2421<if(eotPredictsAlt)> 2422 alt<decisionNumber>=<eotPredictsAlt>; 2423<else> 2424 <ruleBacktrackFailure()> 2425 <newNVException()> 2426 goto rule<ruleDescriptor.name>Ex;<\n> 2427<endif> 2428}<\n> 2429>> 2430 2431dfaOptionalBlockStateSwitch(k,edges,eotPredictsAlt,description,stateNumber,semPredState) ::= << 2432switch ( LA(<k>) ) 2433{ 2434 <edges; separator="\n"> 2435}<\n> 2436>> 2437 2438dfaLoopbackStateSwitch(k, edges,eotPredictsAlt,description,stateNumber,semPredState) ::= << 2439switch ( LA(<k>) ) 2440{ 2441<edges; separator="\n"><\n> 2442<if(eotPredictsAlt)> 2443default: 2444 alt<decisionNumber>=<eotPredictsAlt>; 2445 break;<\n> 2446<endif> 2447}<\n> 2448>> 2449 2450dfaEdgeSwitch(labels, targetState) ::= << 2451<labels:{it |case <it>:}; separator="\n"> 2452 { 2453 <targetState> 2454 } 2455 break; 2456>> 2457 2458// C y c l i c D F A 2459 2460/** The code to initiate execution of a cyclic DFA; this is used 2461 * in the rule to predict an alt just like the fixed DFA case. 2462 * The <name> attribute is inherited via the parser, lexer, ... 
2463 */ 2464dfaDecision(decisionNumber,description) ::= << 2465alt<decisionNumber> = cdfa<decisionNumber>.predict(ctx, RECOGNIZER, ISTREAM, &cdfa<decisionNumber>); 2466<checkRuleBacktrackFailure()> 2467>> 2468 2469/* Dump DFA tables as static initialized arrays of shorts(16 bits)/characters(8 bits) 2470 * which are then used to statically initialize the dfa structure, which means that there 2471 * is no runtime initialization whatsoever, other than anything the C compiler might 2472 * need to generate. In general the C compiler will lay out memory such that there is no 2473 * runtime code required. 2474 */ 2475cyclicDFA(dfa) ::= << 2476/** Static dfa state tables for Cyclic dfa: 2477 * <dfa.description> 2478 */ 2479static const ANTLR3_INT32 dfa<dfa.decisionNumber>_eot[<dfa.numberOfStates>] = 2480 { 2481 <dfa.eot; wrap="\n", separator=", ", null="-1"> 2482 }; 2483static const ANTLR3_INT32 dfa<dfa.decisionNumber>_eof[<dfa.numberOfStates>] = 2484 { 2485 <dfa.eof; wrap="\n", separator=", ", null="-1"> 2486 }; 2487static const ANTLR3_INT32 dfa<dfa.decisionNumber>_min[<dfa.numberOfStates>] = 2488 { 2489 <dfa.min; wrap="\n", separator=", ", null="-1"> 2490 }; 2491static const ANTLR3_INT32 dfa<dfa.decisionNumber>_max[<dfa.numberOfStates>] = 2492 { 2493 <dfa.max; wrap="\n", separator=", ", null="-1"> 2494 }; 2495static const ANTLR3_INT32 dfa<dfa.decisionNumber>_accept[<dfa.numberOfStates>] = 2496 { 2497 <dfa.accept; wrap="\n", separator=", ", null="-1"> 2498 }; 2499static const ANTLR3_INT32 dfa<dfa.decisionNumber>_special[<dfa.numberOfStates>] = 2500 { 2501 <dfa.special; wrap="\n", separator=", ", null="-1"> 2502 }; 2503 2504/** Used when there is no transition table entry for a particular state */ 2505#define dfa<dfa.decisionNumber>_T_empty NULL 2506 2507<dfa.edgeTransitionClassMap.keys:{ table | 2508static const ANTLR3_INT32 dfa<dfa.decisionNumber>_T<i0>[] = 2509 { 2510 <table; separator=", ", wrap="\n", null="-1"> 2511 \};<\n>}; null = ""> 2512 2513/* Transition tables 
are a table of sub tables, with some tables 2514 * reused for efficiency. 2515 */ 2516static const ANTLR3_INT32 * const dfa<dfa.decisionNumber>_transitions[] = 2517{ 2518 <dfa.transitionEdgeTables:{xref|dfa<dfa.decisionNumber>_T<xref>}; separator=", ", wrap="\n", null="NULL"> 2519}; 2520 2521<if(dfa.specialStateSTs)> 2522static ANTLR3_INT32 dfa<dfa.decisionNumber>_sst(p<name> ctx, pANTLR3_BASE_RECOGNIZER recognizer, pANTLR3_INT_STREAM is, pANTLR3_CYCLIC_DFA dfa, ANTLR3_INT32 s) 2523{ 2524 ANTLR3_INT32 _s; 2525 2526 _s = s; 2527 switch (s) 2528 { 2529 <dfa.specialStateSTs:{state | 2530 case <i0>: 2531 2532 <state>}; separator="\n"> 2533 } 2534<if(backtracking)> 2535 if (BACKTRACKING > 0) 2536 { 2537 FAILEDFLAG = ANTLR3_TRUE; 2538 return -1; 2539 } 2540<endif> 2541 2542 CONSTRUCTEX(); 2543 EXCEPTION->type = ANTLR3_NO_VIABLE_ALT_EXCEPTION; 2544 EXCEPTION->message = (void *)"<dfa.description>"; 2545 EXCEPTION->decisionNum = <dfa.decisionNumber>; 2546 EXCEPTION->state = _s; 2547 <@noViableAltException()> 2548 return -1; 2549} 2550<endif> 2551 2552<@errorMethod()> 2553 2554/* Declare tracking structure for Cyclic DFA <dfa.decisionNumber> 2555 */ 2556static 2557ANTLR3_CYCLIC_DFA cdfa<dfa.decisionNumber> 2558 = { 2559 <dfa.decisionNumber>, /* Decision number of this dfa */ 2560 /* Which decision this represents: */ 2561 (const pANTLR3_UCHAR)"<dfa.description>", 2562<if(dfa.specialStateSTs)> 2563 (CDFA_SPECIAL_FUNC) dfa<dfa.decisionNumber>_sst, 2564<else> 2565 (CDFA_SPECIAL_FUNC) antlr3dfaspecialStateTransition, /* Default special state transition function */ 2566<endif> 2567 2568 antlr3dfaspecialTransition, /* DFA specialTransition is currently just a default function in the runtime */ 2569 antlr3dfapredict, /* DFA simulator function is in the runtime */ 2570 dfa<dfa.decisionNumber>_eot, /* EOT table */ 2571 dfa<dfa.decisionNumber>_eof, /* EOF table */ 2572 dfa<dfa.decisionNumber>_min, /* Minimum tokens for each state */ 2573 dfa<dfa.decisionNumber>_max, /* Maximum tokens 
for each state */ 2574 dfa<dfa.decisionNumber>_accept, /* Accept table */ 2575 dfa<dfa.decisionNumber>_special, /* Special transition states */ 2576 dfa<dfa.decisionNumber>_transitions /* Table of transition tables */ 2577 2578 }; 2579/* End of Cyclic DFA <dfa.decisionNumber> 2580 * --------------------- 2581 */ 2582>> 2583 2584/** A state in a cyclic DFA; it's a special state and part of a big switch on 2585 * state. 2586 */ 2587cyclicDFAState(decisionNumber,stateNumber,edges,needErrorClause,semPredState) ::= << 2588{ 2589 ANTLR3_UINT32 LA<decisionNumber>_<stateNumber>;<\n> 2590 ANTLR3_MARKER index<decisionNumber>_<stateNumber>;<\n> 2591 2592 LA<decisionNumber>_<stateNumber> = LA(1);<\n> 2593 <if(semPredState)> <! get next lookahead symbol to test edges, then rewind !> 2594 index<decisionNumber>_<stateNumber> = INDEX();<\n> 2595 REWINDLAST();<\n> 2596 <endif> 2597 s = -1; 2598 <edges; separator="\nelse "> 2599 <if(semPredState)> <! return input cursor to state before we rewound !> 2600 SEEK(index<decisionNumber>_<stateNumber>);<\n> 2601 <endif> 2602 if ( s>=0 ) 2603 { 2604 return s; 2605 } 2606} 2607break; 2608>> 2609 2610/** Just like a fixed DFA edge, test the lookahead and indicate what 2611 * state to jump to next if successful. 2612 */ 2613cyclicDFAEdge(labelExpr, targetStateNumber, edgeNumber, predicates) ::= << 2614if ( <if(predicates)>(<predicates>) && <endif>(<labelExpr>) ) 2615{ 2616 s = <targetStateNumber>; 2617}<\n> 2618>> 2619 2620/** An edge pointing at end-of-token; essentially matches any char; 2621 * always jump to the target. 
2622 */ 2623eotDFAEdge(targetStateNumber,edgeNumber, predicates) ::= << 2624 s = <targetStateNumber>;<\n> 2625>> 2626 2627 2628// D F A E X P R E S S I O N S 2629 2630andPredicates(left,right) ::= "( (<left>) && (<right>) )" 2631 2632orPredicates(operands) ::= "((<first(operands)>)<rest(operands):{o | ||(<o>)}>)" 2633 2634notPredicate(pred) ::= "!( <evalPredicate(pred,{})> )" 2635 2636evalPredicate(pred,description) ::= "(<pred>)" 2637 2638evalSynPredicate(pred,description) ::= "<pred>(ctx)" 2639 2640lookaheadTest(atom,k,atomAsInt) ::= "LA<decisionNumber>_<stateNumber> == <atom>" 2641 2642/** Sometimes a lookahead test cannot assume that LA(k) is in a temp variable 2643 * somewhere. Must ask for the lookahead directly. 2644 */ 2645isolatedLookaheadTest(atom,k,atomAsInt) ::= "LA(<k>) == <atom>" 2646 2647lookaheadRangeTest(lower,upper,k,rangeNumber,lowerAsInt,upperAsInt) ::= <% 2648((LA<decisionNumber>_<stateNumber> >= <lower>) && (LA<decisionNumber>_<stateNumber> \<= <upper>)) 2649%> 2650 2651isolatedLookaheadRangeTest(lower,upper,k,rangeNumber,lowerAsInt,upperAsInt) ::= "((LA(<k>) >= <lower>) && (LA(<k>) \<= <upper>))" 2652 2653setTest(ranges) ::= "<ranges; separator=\" || \">" 2654 2655// A T T R I B U T E S 2656 2657makeScopeSet() ::= << 2658/* makeScopeSet() 2659 */ 2660 /** Definition of the <scope.name> scope variable tracking 2661 * structure. An instance of this structure is created by calling 2662 * <name>_<scope.name>Push(). 2663 */ 2664typedef struct <scopeStruct(sname=scope.name,...)>_struct 2665{ 2666 /** Function that the user may provide to be called when the 2667 * scope is destroyed (so you can free pANTLR3_HASH_TABLES and so on) 2668 * 2669 * \param POinter to an instance of this typedef/struct 2670 */ 2671 void (ANTLR3_CDECL *free) (struct <scopeStruct(sname=scope.name,...)>_struct * frame); 2672 2673 /* ============================================================================= 2674 * Programmer defined variables... 
2675 */ 2676 <scope.attributes:{it |<it.decl>;}; separator="\n"> 2677 2678 /* End of programmer defined variables 2679 * ============================================================================= 2680 */ 2681} 2682 <scopeStruct(sname=scope.name,...)>, * <scopeType(sname=scope.name,...)>; 2683 2684>> 2685 2686globalAttributeScopeDecl(scope) ::= << 2687<if(scope.attributes)> 2688/* globalAttributeScopeDecl(scope) 2689 */ 2690<makeScopeSet(...)> 2691<endif> 2692>> 2693 2694ruleAttributeScopeDecl(scope) ::= << 2695<if(scope.attributes)> 2696/* ruleAttributeScopeDecl(scope) 2697 */ 2698<makeScopeSet(...)> 2699<endif> 2700>> 2701 2702globalAttributeScopeFuncDecl(scope) ::= 2703<< 2704/* globalAttributeScopeFuncDecl(scope) 2705 */ 2706<if(scope.attributes)> 2707/* ----------------------------------------------------------------------------- 2708 * Function declaration for creating a <name>_<scope.name> scope set 2709 */ 2710static <scopeType(sname=scope.name,...)> <scopePushName(sname=scope.name,...)>(p<name> ctx); 2711static void ANTLR3_CDECL <scope.name>Free(<scopeType(sname=scope.name)> scope); 2712/* ----------------------------------------------------------------------------- */ 2713 2714<endif> 2715>> 2716 2717globalAttributeScopeFuncMacro(scope) ::= << 2718<if(scope.attributes)> 2719/* globalAttributeScopeFuncMacro(scope) 2720 */ 2721/** Function for popping the top value from a <scopeStack(sname=scope.name)> 2722 */ 2723void 2724<scopePopName(sname=scope.name,...)>(p<name> ctx) 2725{ 2726 // First see if the user defined a function they want to be called when a 2727 // scope is popped/freed. 2728 // 2729 // If the user supplied the scope entries with a free function,then call it first 2730 // 2731 if (SCOPE_TOP(<scope.name>)->free != NULL) 2732 { 2733 SCOPE_TOP(<scope.name>)->free(SCOPE_TOP(<scope.name>)); 2734 } 2735 2736 // Now we decrement the scope's upper limit bound. 
We do not actually pop the scope as
    // we want to reuse scope entries if we do continuous push and pops. Most scopes don't
    // next too far so we don't want to keep freeing and allocating them
    //
    ctx-><scopeStack(sname=scope.name,...)>_limit--;
    <! FIX: the second argument of get() previously referenced a non-existent
       template 'bscopeStack'; it must read from the same <scopeStack(...)>
       stack that was decremented above, matching ruleAttributeScopeFuncMacro. !>
    SCOPE_TOP(<scope.name>) = (<scopeType(sname=scope.name)>)(ctx-><scopeStack(sname=scope.name,...)>->get(ctx-><scopeStack(sname=scope.name,...)>, ctx-><scopeStack(sname=scope.name,...)>_limit - 1));
}
<endif>
>>

<! Declares (prototypes only) the push function and the default free routine
   for a rule-level attribute scope; definitions come from attributeFuncs(). !>
ruleAttributeScopeFuncDecl(scope) ::= <<
<if(scope.attributes)>
/* ruleAttributeScopeFuncDecl(scope)
 */
/* -----------------------------------------------------------------------------
 * Function declarations for creating a <name>_<scope.name> scope set
 */
static <scopeType(sname=scope.name,...)> <scopePushName(sname=scope.name,...)>(p<name> ctx);
static void ANTLR3_CDECL <scope.name>Free(<scopeType(sname=scope.name)> scope);
/* ----------------------------------------------------------------------------- */

<endif>
>>

<! Emits the pop function for a rule-level attribute scope: runs any user
   supplied free hook, then lowers the stack limit so the entry can be reused. !>
ruleAttributeScopeFuncMacro(scope) ::= <<
<if(scope.attributes)>
/* ruleAttributeScopeFuncMacro(scope)
 */
/** Function for popping the top value from a <scopeStack(sname=scope.name,...)>
 */
void
<scopePopName(sname=scope.name,...)>(p<name> ctx)
{
    // First see if the user defined a function they want to be called when a
    // scope is popped/freed.
    //
    // If the user supplied the scope entries with a free function,then call it first
    //
    if (SCOPE_TOP(<scope.name>)->free != NULL)
    {
        SCOPE_TOP(<scope.name>)->free(SCOPE_TOP(<scope.name>));
    }

    // Now we decrement the scope's upper limit bound. We do not actually pop the scope as
    // we want to reuse scope entries if we do continuous push and pops.
Most scopes don't 2781 // next too far so we don't want to keep freeing and allocating them 2782 // 2783 ctx-><scopeStack(sname=scope.name,...)>_limit--; 2784 SCOPE_TOP(<scope.name>) = (<scopeType(sname=scope.name)>)(ctx-><scopeStack(sname=scope.name,...)>->get(ctx-><scopeStack(sname=scope.name,...)>, ctx-><scopeStack(sname=scope.name,...)>_limit - 1)); 2785} 2786 2787<endif> 2788>> 2789 2790globalAttributeScopeDef(scope) ::= 2791<< 2792/* globalAttributeScopeDef(scope) 2793 */ 2794<if(scope.attributes)> 2795/** Pointer to the <scope.name> stack for use by <scopePushName(sname=scope.name)>() 2796 * and <scopePopName(sname=scope.name,...)>() 2797 */ 2798pANTLR3_STACK <scopeStack(sname=scope.name)>; 2799ANTLR3_UINT32 <scopeStack(sname=scope.name)>_limit; 2800/** Pointer to the top of the stack for the global scope <scopeStack(sname=scope.name)> 2801 */ 2802<scopeType(sname=scope.name,...)> (*<scopePushName(sname=scope.name,...)>)(struct <name>_Ctx_struct * ctx); 2803<scopeType(sname=scope.name,...)> <scopeTopDecl(sname=scope.name,...)>; 2804 2805<endif> 2806>> 2807 2808ruleAttributeScopeDef(scope) ::= << 2809<if(scope.attributes)> 2810/* ruleAttributeScopeDef(scope) 2811 */ 2812/** Pointer to the <scope.name> stack for use by <scopePushName(sname=scope.name)>() 2813 * and <scopePopName(sname=scope.name,...)>() 2814 */ 2815pANTLR3_STACK <scopeStack(sname=scope.name,...)>; 2816ANTLR3_UINT32 <scopeStack(sname=scope.name,...)>_limit; 2817<scopeType(sname=scope.name,...)> (*<scopePushName(sname=scope.name,...)>)(struct <name>_Ctx_struct * ctx); 2818<scopeType(sname=scope.name,...)> <scopeTopDecl(sname=scope.name,...)>; 2819 2820<endif> 2821>> 2822 2823globalAttributeScopeFuncs(scope) ::= << 2824<if(scope.attributes)> 2825/* globalAttributeScopeFuncs(scope) 2826 */ 2827<attributeFuncs(scope)> 2828<endif> 2829>> 2830 2831ruleAttributeScopeFuncs(scope) ::= << 2832<if(scope.attributes)> 2833/* ruleAttributeScopeFuncs(scope) 2834 */ 2835<attributeFuncs(scope)> 2836<endif> 
2837>> 2838 2839globalAttributeScope(scope) ::= << 2840<if(scope.attributes)> 2841/* globalAttributeScope(scope) 2842 */ 2843ctx-><scopePushName(sname=scope.name,...)> = <scopePushName(sname=scope.name,...)>; 2844ctx-><scopeStack(sname=scope.name,...)> = antlr3StackNew(0); 2845ctx-><scopeStack(sname=scope.name,...)>_limit = 0; 2846<scopeTop(sname=scope.name,...)> = NULL; 2847<endif> 2848>> 2849 2850ruleAttributeScope(scope) ::= 2851<< 2852<if(scope.attributes)> 2853/* ruleAttributeScope(scope) 2854 */ 2855ctx-><scopePushName(sname=scope.name,...)> = <scopePushName(sname=scope.name,...)>; 2856ctx-><scopeStack(sname=scope.name,...)> = antlr3StackNew(0); 2857ctx-><scopeStack(sname=scope.name,...)>_limit = 0; 2858<scopeTop(sname=scope.name,...)> = NULL; 2859<endif> 2860>> 2861globalAttributeScopeFree(scope) ::= << 2862<if(scope.attributes)> 2863/* globalAttributeScope(scope) 2864 */ 2865ctx-><scopeStack(sname=scope.name,...)>->free(ctx-><scopeStack(sname=scope.name,...)>); 2866<endif> 2867>> 2868 2869ruleAttributeScopeFree(scope) ::= 2870<< 2871<if(scope.attributes)> 2872/* ruleAttributeScope(scope) 2873 */ 2874ctx-><scopeStack(sname=scope.name,...)>->free(ctx-><scopeStack(sname=scope.name,...)>); 2875<endif> 2876>> 2877 2878scopeTopDecl(sname) ::= << 2879p<name>_<sname>Top 2880>> 2881 2882scopeTop(sname) ::= << 2883ctx-><scopeTopDecl(sname=sname,...)> 2884>> 2885 2886scopePop(sname) ::= << 2887<scopePopName(sname=sname,...)>(ctx); 2888>> 2889 2890scopePush(sname) ::= << 2891p<name>_<sname>Push(ctx) 2892>> 2893 2894scopePopName(sname) ::= << 2895p<name>_<sname>Pop 2896>> 2897 2898scopePushName(sname) ::= << 2899p<name>_<sname>Push 2900>> 2901 2902scopeType(sname) ::= << 2903p<name>_<sname>_SCOPE 2904>> 2905 2906scopeStruct(sname) ::= << 2907<name>_<sname>_SCOPE 2908>> 2909 2910scopeStack(sname) ::= << 2911p<name>_<sname>Stack 2912>> 2913 2914attributeFuncs(scope) ::= << 2915<if(scope.attributes)> 2916/* attributeFuncs(scope) 2917 */ 2918 2919static void ANTLR3_CDECL 
<scope.name>Free(<scopeType(sname=scope.name)> scope) 2920{ 2921 ANTLR3_FREE(scope); 2922} 2923 2924/** \brief Allocate initial memory for a <name> <scope.name> scope variable stack entry and 2925 * add it to the top of the stack. 2926 * 2927 * \remark 2928 * By default the structure is freed with ANTLR_FREE(), but you can use the 2929 * the \@init action to install a pointer to a custom free() routine by 2930 * adding the code: 2931 * \code 2932 * <scopeTop(sname=scope.name)>->free = myroutine; 2933 * \endcode 2934 * 2935 * With lots of comments of course! The routine should be declared in 2936 * \@members { } as: 2937 * \code 2938 * void ANTLR3_CDECL myfunc( <scopeType(sname=scope.name)> ptr). 2939 * \endcode 2940 * 2941 * It should perform any custom freeing stuff that you need (call ANTLR_FREE3, not free() 2942 * NB: It should not free the pointer it is given, which is the scope stack entry itself 2943 * and will be freed by the function that calls your custom free routine. 2944 * 2945 */ 2946static <scopeType(sname=scope.name)> 2947<scopePushName(sname=scope.name)>(p<name> ctx) 2948{ 2949 /* Pointer used to create a new set of attributes 2950 */ 2951 <scopeType(sname=scope.name)> newAttributes; 2952 2953 /* Allocate the memory for a new structure if we need one. 2954 */ 2955 if (ctx-><scopeStack(sname=scope.name)>->size(ctx-><scopeStack(sname=scope.name)>) > ctx-><scopeStack(sname=scope.name)>_limit) 2956 { 2957 // The current limit value was less than the number of scopes available on the stack so 2958 // we can just reuse one. Our limit tracks the stack count, so the index of the entry we want 2959 // is one less than that, or conveniently, the current value of limit. 
2960 // 2961 newAttributes = (<scopeType(sname=scope.name)>)ctx-><scopeStack(sname=scope.name)>->get(ctx-><scopeStack(sname=scope.name)>, ctx-><scopeStack(sname=scope.name)>_limit); 2962 } 2963 else 2964 { 2965 // Need a new allocation 2966 // 2967 newAttributes = (<scopeType(sname=scope.name)>) ANTLR3_MALLOC(sizeof(<scopeStruct(sname=scope.name)>)); 2968 if (newAttributes != NULL) 2969 { 2970 /* Standard ANTLR3 library implementation 2971 */ 2972 ctx-><scopeStack(sname=scope.name)>->push(ctx-><scopeStack(sname=scope.name)>, newAttributes, (void (*)(void *))<scope.name>Free); 2973 } 2974 } 2975 2976 // Blank out any previous free pointer, the user might or might install a new one. 2977 // 2978 newAttributes->free = NULL; 2979 2980 // Indicate the position in the available stack that the current level is at 2981 // 2982 ctx-><scopeStack(sname=scope.name)>_limit++; 2983 2984 /* Return value is the pointer to the new entry, which may be used locally 2985 * without de-referencing via the context. 2986 */ 2987 return newAttributes; 2988}<\n> 2989 2990<endif> 2991>> 2992returnStructName(r) ::= "<r.name>_return" 2993 2994returnType() ::= <% 2995<if(!ruleDescriptor.isSynPred)> 2996<if(ruleDescriptor.hasMultipleReturnValues)> 2997<ruleDescriptor.grammar.recognizerName>_<ruleDescriptor:returnStructName()> 2998<else> 2999<if(ruleDescriptor.hasSingleReturnValue)> 3000<ruleDescriptor.singleValueReturnType> 3001<else> 3002void 3003<endif> 3004<endif> 3005<else> 3006ANTLR3_BOOLEAN 3007<endif> 3008%> 3009 3010/** Generate the C type associated with a single or multiple return 3011 * value(s). 
3012 */ 3013ruleLabelType(referencedRule) ::= <% 3014<if(referencedRule.hasMultipleReturnValues)> 3015<referencedRule.grammar.recognizerName>_<referencedRule.name>_return 3016<else> 3017<if(referencedRule.hasSingleReturnValue)> 3018<referencedRule.singleValueReturnType> 3019<else> 3020void 3021<endif> 3022<endif> 3023%> 3024 3025delegateName(d) ::= << 3026<if(d.label)><d.label><else>g<d.name><endif> 3027>> 3028 3029/** Using a type to init value map, try to init a type; if not in table 3030 * must be an object, default value is "0". 3031 */ 3032initValue(typeName) ::= << 3033 = <cTypeInitMap.(typeName)> 3034>> 3035 3036/** Define a rule label */ 3037ruleLabelDef(label) ::= << 3038<ruleLabelType(referencedRule=label.referencedRule)> <label.label.text>; 3039#undef RETURN_TYPE_<label.label.text> 3040#define RETURN_TYPE_<label.label.text> <ruleLabelType(referencedRule=label.referencedRule)><\n> 3041>> 3042/** Rule label default value */ 3043ruleLabelInitVal(label) ::= << 3044>> 3045 3046ASTLabelType() ::= "<if(recognizer.ASTLabelType)><recognizer.ASTLabelType><else>pANTLR3_BASE_TREE<endif>" 3047 3048/** Define a return struct for a rule if the code needs to access its 3049 * start/stop tokens, tree stuff, attributes, ... Leave a hole for 3050 * subgroups to stick in members. 
 */
returnScope(scope) ::= <<
<if(!ruleDescriptor.isSynPred)>
<if(ruleDescriptor.hasMultipleReturnValues)>
typedef struct <ruleDescriptor.grammar.recognizerName>_<ruleDescriptor:returnStructName()>_struct
{
<if(!TREE_PARSER)>
    /** Generic return elements for ANTLR3 rules that are not in tree parsers or returning trees
     */
    pANTLR3_COMMON_TOKEN    start;
    pANTLR3_COMMON_TOKEN    stop;
<else>
    <recognizer.ASTLabelType>    start;
    <recognizer.ASTLabelType>    stop;
<endif>
    <@ruleReturnMembers()>
    <ruleDescriptor.returnScope.attributes:{it |<it.type> <it.name>;}; separator="\n">
}
    <ruleDescriptor.grammar.recognizerName>_<ruleDescriptor:returnStructName()>;<\n><\n>
<endif>
<endif>
>>

/** Render a rule's declared parameters as a comma-separated C parameter list. */
parameterScope(scope) ::= <<
<scope.attributes:{it |<it.decl>}; separator=", ">
>>

/** Reading a rule parameter is just the bare C identifier. */
parameterAttributeRef(attr) ::= "<attr.name>"
/** Assigning a rule parameter is a plain C assignment statement. */
parameterSetAttributeRef(attr,expr) ::= "<attr.name>=<expr>;"

/** Note that the scopeAttributeRef does not have access to the
 * grammar name directly
 */
scopeAttributeRef(scope,attr,index,negIndex) ::= <%
<if(negIndex)>
((SCOPE_TYPE(<scope>))(ctx->SCOPE_STACK(<scope>)->get( ctx->SCOPE_STACK(<scope>), ctx->SCOPE_STACK(<scope>)->size(ctx->SCOPE_STACK(<scope>)) - <negIndex> - 1) ))-><attr.name>
<else>
<if(index)>
((SCOPE_TYPE(<scope>))(ctx->SCOPE_STACK(<scope>)->get(ctx->SCOPE_STACK(<scope>), (ANTLR3_UINT32)<index> ) ))-><attr.name>
<else>
(SCOPE_TOP(<scope>))-><attr.name>
<endif>
<endif>
%>

/** Write an attribute of a dynamic scope. Mirrors scopeAttributeRef:
 *  negIndex counts back from the top of the scope stack, index addresses
 *  an absolute stack slot, and the default writes the top-of-stack entry.
 */
scopeSetAttributeRef(scope,attr,expr,index,negIndex) ::= <%
<if(negIndex)>
((SCOPE_TYPE(<scope>))(ctx->SCOPE_STACK(<scope>)->get( ctx->SCOPE_STACK(<scope>), ctx->SCOPE_STACK(<scope>)->size(ctx->SCOPE_STACK(<scope>)) - <negIndex> - 1) ))-><attr.name> = <expr>;
<else>
<if(index)>
((SCOPE_TYPE(<scope>))(ctx->SCOPE_STACK(<scope>)->get(ctx->SCOPE_STACK(<scope>), (ANTLR3_UINT32)<index> ) ))-><attr.name> = <expr>;
<else>
(SCOPE_TOP(<scope>))-><attr.name>=<expr>;
<endif>
<endif>
%>

/** $x is either global scope or x is rule with dynamic scope; refers
 * to stack itself not top of stack. This is useful for predicates
 * like {$function.size()>0 && $function::name.equals("foo")}?
 */
isolatedDynamicScopeRef(scope) ::= "ctx->SCOPE_STACK(<scope>)"

/** reference an attribute of rule; might only have single return value */
ruleLabelRef(referencedRule,scope,attr) ::= <<
<if(referencedRule.hasMultipleReturnValues)>
<scope>.<attr.name>
<else>
<scope>
<endif>
>>

/** Read one of the current rule's own return attributes: through the retval
 *  struct when there are multiple return values, else the bare local.
 */
returnAttributeRef(ruleDescriptor,attr) ::= <<
<if(ruleDescriptor.hasMultipleReturnValues)>
retval.<attr.name>
<else>
<attr.name>
<endif>
>>

/** Write one of the current rule's own return attributes; same retval-vs-local
 *  split as returnAttributeRef.
 */
returnSetAttributeRef(ruleDescriptor,attr,expr) ::= <<
<if(ruleDescriptor.hasMultipleReturnValues)>
retval.<attr.name>=<expr>;
<else>
<attr.name>=<expr>;
<endif>
>>

/** How to translate $tokenLabel */
tokenLabelRef(label) ::= "<label>"

/** ids+=ID {$ids} or e+=expr {$e} */
listLabelRef(label) ::= "list_<label>"


// not sure the next are the right approach
//
tokenLabelPropertyRef_text(scope,attr) ::= "(<scope>->getText(<scope>))"
tokenLabelPropertyRef_type(scope,attr) ::= "(<scope>->getType(<scope>))"
tokenLabelPropertyRef_line(scope,attr) ::= "(<scope>->getLine(<scope>))"
tokenLabelPropertyRef_pos(scope,attr) ::= "(<scope>->getCharPositionInLine(<scope>))"
tokenLabelPropertyRef_channel(scope,attr) ::= "(<scope>->getChannel(<scope>))"
tokenLabelPropertyRef_index(scope,attr) ::= "(<scope>->getTokenIndex(<scope>))"
tokenLabelPropertyRef_tree(scope,attr) ::= "(<scope>->tree)"
tokenLabelPropertyRef_int(scope,attr) ::= "(<scope>->getText(<scope>)->toInt32(<scope>->getText(<scope>)))"

// $label.start / $label.stop / $label.tree on a rule label read the
// corresponding members of the referenced rule's return struct.
ruleLabelPropertyRef_start(scope,attr) ::= "(<scope>.start)"
ruleLabelPropertyRef_stop(scope,attr) ::= "(<scope>.stop)"
ruleLabelPropertyRef_tree(scope,attr) ::= "(<scope>.tree)"
// $label.text: recovered from the input stream between the label's start and
// stop markers.
// NOTE(review): the TREE_PARSER branch passes <scope>.start twice to
// toStringSS while the parser branch uses start..stop — confirm against the
// C runtime's toStringSS contract that this is intentional for tree nodes.
ruleLabelPropertyRef_text(scope,attr) ::= <<
<if(TREE_PARSER)>
(STRSTREAM->toStringSS(STRSTREAM, <scope>.start, <scope>.start))
<else>
(STRSTREAM->toStringTT(STRSTREAM, <scope>.start, <scope>.stop))
<endif>
>>

// $label.st (StringTemplate output) on a rule label.
ruleLabelPropertyRef_st(scope,attr) ::= "<scope>.st"

/** Isolated $RULE ref ok in lexer as it's a Token */
lexerRuleLabel(label) ::= "<label>"

// Properties of a token produced by a lexer rule reference ($label.type etc.).
lexerRuleLabelPropertyRef_type(scope,attr) ::= "(<scope>->getType(<scope>))"
lexerRuleLabelPropertyRef_line(scope,attr) ::= "(<scope>->getLine(<scope>))"
lexerRuleLabelPropertyRef_pos(scope,attr) ::= "(<scope>->getCharPositionInLine(<scope>))"
lexerRuleLabelPropertyRef_channel(scope,attr) ::= "(<scope>->getChannel(<scope>))"
lexerRuleLabelPropertyRef_index(scope,attr) ::= "(<scope>->getTokenIndex(<scope>))"
lexerRuleLabelPropertyRef_text(scope,attr) ::= "(<scope>->getText(<scope>))"

// Somebody may ref $template or $tree or $stop within a rule:
rulePropertyRef_start(scope,attr) ::= "retval.start"
rulePropertyRef_stop(scope,attr) ::= "retval.stop"
rulePropertyRef_tree(scope,attr) ::= "retval.tree"
// $text of the current rule: in a tree parser the token range covered by the
// start node is rendered from INPUT; in a parser the token stream is rendered
// from retval.start up to the last consumed token (LT(-1)).
rulePropertyRef_text(scope,attr) ::= <<
<if(TREE_PARSER)>
INPUT->toStringSS(INPUT, ADAPTOR->getTokenStartIndex(ADAPTOR, retval.start), ADAPTOR->getTokenStopIndex(ADAPTOR, retval.start))
<else>
STRSTREAM->toStringTT(STRSTREAM, retval.start, LT(-1))
<endif>
>>
rulePropertyRef_st(scope,attr) ::= "retval.st"

// Properties of the token currently being matched inside a lexer rule.
lexerRulePropertyRef_text(scope,attr) ::= "LEXER->getText(LEXER)"
lexerRulePropertyRef_type(scope,attr) ::= "_type"
lexerRulePropertyRef_line(scope,attr) ::= "LEXSTATE->tokenStartLine"
lexerRulePropertyRef_pos(scope,attr) ::= "LEXSTATE->tokenStartCharPositionInLine"
lexerRulePropertyRef_channel(scope,attr) ::= "LEXSTATE->channel"
lexerRulePropertyRef_start(scope,attr) ::= "LEXSTATE->tokenStartCharIndex"
// Stop index is the character before the lexer's current position.
lexerRulePropertyRef_stop(scope,attr) ::= "(LEXER->getCharIndex(LEXER)-1)"
lexerRulePropertyRef_index(scope,attr) ::= "-1" // undefined token index in lexer
lexerRulePropertyRef_int(scope,attr) ::= "LEXER->getText(LEXER)->toInt32(LEXER->getText(LEXER))"


// setting $st and $tree is allowed in local rule. everything else is flagged as error
ruleSetPropertyRef_tree(scope,attr,expr) ::= "retval.tree=<expr>;"
ruleSetPropertyRef_st(scope,attr,expr) ::= "retval.st=<expr>;"


/** How to deal with an @after for C targets. Because we cannot rely on
 * any garbage collection, after code is executed even in backtracking
 * mode. Must be documented clearly.
 */
execAfter(action) ::= <<
{
    <action>
}
>>

/** How to execute an action (when not backtracking) */
// With backtracking enabled, actions are guarded: by the grammar's custom
// synpredgate action if one was declared, otherwise by BACKTRACKING == 0.
// Without backtracking the action runs unconditionally in its own block.
execAction(action) ::= <<
<if(backtracking)>
<if(actions.(actionScope).synpredgate)>
if ( <actions.(actionScope).synpredgate> )
{
    <action>
}
<else>
if ( BACKTRACKING == 0 )
{
    <action>
}
<endif>
<else>
{
    <action>
}
<endif>
>>

// M I S C (properties, etc...)

// Emit the static storage for a FOLLOW bitset: the raw 64-bit words plus the
// ANTLR3_BITSET_LIST wrapper recording the word count.
bitsetDeclare(name, words64) ::= <<

/** Bitset defining follow set for error recovery in rule state: <name> */
static ANTLR3_BITWORD <name>_bits[] = { <words64:{it |ANTLR3_UINT64_LIT(<it>)}; separator=", "> };
static ANTLR3_BITSET_LIST <name> = { <name>_bits, <length(words64)> };
>>

// Runtime call that attaches the bitset API to a declared bitset list.
bitset(name, words64) ::= <<
antlr3BitsetSetAPI(&<name>);<\n>
>>

// Generated C source files use the .c extension.
codeFileExtension() ::= ".c"

// Boolean literals in the ANTLR3 C runtime.
true_value() ::= "ANTLR3_TRUE"
false_value() ::= "ANTLR3_FALSE"