hb-ot-layout-gsubgpos-private.hh revision 458ecbb60bb7e8e32aca62a562586d921d5396aa
/*
 * Copyright (C) 2007,2008,2009,2010  Red Hat, Inc.
 *
 *  This is part of HarfBuzz, a text shaping library.
 *
 * Permission is hereby granted, without written agreement and without
 * license or royalty fees, to use, copy, modify, and distribute this
 * software and its documentation for any purpose, provided that the
 * above copyright notice and the following two paragraphs appear in
 * all copies of this software.
 *
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 *
 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 *
 * Red Hat Author(s): Behdad Esfahbod
 */

#ifndef HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH
#define HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH

#include "hb-buffer-private.h"
#include "hb-ot-layout-gdef-private.hh"


#ifndef HB_DEBUG_APPLY
#define HB_DEBUG_APPLY HB_DEBUG+0
#endif

/* Emits an APPLY trace entry for the current call when HB_DEBUG_APPLY is
 * enabled; expects a `context` variable (hb_apply_context_t *) in scope. */
#define TRACE_APPLY() \
	hb_trace_t<HB_DEBUG_APPLY> trace (&context->debug_depth, "APPLY", HB_FUNC, this); \


/* State threaded through every lookup-application call: the layout context,
 * the glyph buffer being edited, and per-lookup matching constraints. */
struct hb_apply_context_t
{
  unsigned int debug_depth;        /* Current nesting depth, for TRACE_APPLY */
  hb_ot_layout_context_t *layout;
  hb_buffer_t *buffer;
  unsigned int context_length;     /* Max number of glyphs a match may consume */
  unsigned int nesting_level_left; /* Remaining recursion budget for sublookups */
  unsigned int lookup_flag;
  unsigned int property; /* property of first glyph (TODO remove) */
};




#undef BUFFER
#define BUFFER context->buffer


/* Tests one glyph against one match value; `data` meaning depends on the
 * concrete matcher (NULL, ClassDef, or coverage base -- see below). */
typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const USHORT &value, const void *data);
/* Recursively applies a nested lookup by index (GSUB or GPOS supplies it). */
typedef bool (*apply_lookup_func_t) (hb_apply_context_t *context, unsigned int lookup_index);

/* Pair of callbacks parameterizing the generic (chain-)context machinery. */
struct ContextFuncs
{
  match_func_t match;
  apply_lookup_func_t apply;
};


/* Matcher for Context/ChainContext format 1: value is a glyph id. */
static inline bool match_glyph (hb_codepoint_t glyph_id, const USHORT &value, const void *data HB_UNUSED)
{
  return glyph_id == value;
}

/* Matcher for format 2: value is a class number, data is the ClassDef. */
static inline bool match_class (hb_codepoint_t glyph_id, const USHORT &value, const void *data)
{
  const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
  return class_def.get_class (glyph_id) == value;
}

/* Matcher for format 3: value is an offset to a Coverage table, resolved
 * relative to `data` (the enclosing subtable). */
static inline bool match_coverage (hb_codepoint_t glyph_id, const USHORT &value, const void *data)
{
  const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
  return (data+coverage) (glyph_id) != NOT_COVERED;
}


/* Matches `count - 1` input values against the glyphs after the current
 * position, skipping ignorable marks per lookup_flag.  On success writes the
 * total glyph span consumed (including skipped marks) to *context_length_out.
 * NOTE(review): the match_func results are wrapped in likely(!...), i.e. the
 * compiler is hinted that matches usually FAIL -- presumably intentional since
 * most rules don't match; confirm before "fixing". */
static inline bool match_input (hb_apply_context_t *context,
				unsigned int count, /* Including the first glyph (not matched) */
				const USHORT input[], /* Array of input values--start with second glyph */
				match_func_t match_func,
				const void *match_data,
				unsigned int *context_length_out)
{
  unsigned int i, j;
  unsigned int end = MIN (context->buffer->in_length, context->buffer->in_pos + context->context_length);
  if (unlikely (context->buffer->in_pos + count > end))
    return false;

  for (i = 1, j = context->buffer->in_pos + 1; i < count; i++, j++)
  {
    while (_hb_ot_layout_skip_mark (context->layout->face, IN_INFO (j), context->lookup_flag, NULL))
    {
      /* Not enough glyphs left to still fit the remaining input values. */
      if (unlikely (j + count - i == end))
	return false;
      j++;
    }

    if (likely (!match_func (IN_GLYPH (j), input[i - 1], match_data)))
      return false;
  }

  *context_length_out = j - context->buffer->in_pos;

  return true;
}

/* Matches `count` backtrack values against already-output glyphs, walking
 * backwards from just before out_pos and skipping ignorable marks. */
static inline bool match_backtrack (hb_apply_context_t *context,
				    unsigned int count,
				    const USHORT backtrack[],
				    match_func_t match_func,
				    const void *match_data)
{
  if (unlikely (context->buffer->out_pos < count))
    return false;

  for (unsigned int i = 0, j = context->buffer->out_pos - 1; i < count; i++, j--)
  {
    while (_hb_ot_layout_skip_mark (context->layout->face, OUT_INFO (j), context->lookup_flag, NULL))
    {
      /* Ran out of backtrack glyphs before satisfying all values. */
      if (unlikely (j + 1 == count - i))
	return false;
      j--;
    }

    if (likely (!match_func (OUT_GLYPH (j), backtrack[i], match_data)))
      return false;
  }

  return true;
}

/* Matches `count` lookahead values against the glyphs starting `offset` past
 * the current input position.
 * NOTE(review): the skip test reads OUT_INFO (j) while the match reads
 * IN_GLYPH (j), unlike match_input which uses IN_INFO/IN_GLYPH consistently.
 * This asymmetry looks suspicious -- confirm against the in/out buffer
 * synchronization rules before changing. */
static inline bool match_lookahead (hb_apply_context_t *context,
				    unsigned int count,
				    const USHORT lookahead[],
				    match_func_t match_func,
				    const void *match_data,
				    unsigned int offset)
{
  unsigned int i, j;
  unsigned int end = MIN (context->buffer->in_length, context->buffer->in_pos + context->context_length);
  if (unlikely (context->buffer->in_pos + offset + count > end))
    return false;

  for (i = 0, j = context->buffer->in_pos + offset; i < count; i++, j++)
  {
    while (_hb_ot_layout_skip_mark (context->layout->face, OUT_INFO (j), context->lookup_flag, NULL))
    {
      if (unlikely (j + count - i == end))
	return false;
      j++;
    }

    if (likely (!match_func (IN_GLYPH (j), lookahead[i], match_data)))
      return false;
  }

  return true;
}


/* (sequence position, lookup index) pair: which nested lookup to apply at
 * which glyph of a matched (chain-)context rule. */
struct LookupRecord
{
  inline bool sanitize (hb_sanitize_context_t *context) {
    TRACE_SANITIZE ();
    return context->check_struct (this);
  }

  USHORT	sequenceIndex;		/* Index into current glyph
					 * sequence--first glyph = 0 */
  USHORT	lookupListIndex;	/* Lookup to apply to that
					 * position--zero--based */
  public:
  DEFINE_SIZE_STATIC (4);
};

/* Walks `count` glyphs from the current position, applying the nested lookup
 * recorded for each sequence index (records must be in increasing order) and
 * copying unaffected glyphs through to the output. */
static inline bool apply_lookup (hb_apply_context_t *context,
				 unsigned int count, /* Including the first glyph */
				 unsigned int lookupCount,
				 const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */
				 apply_lookup_func_t apply_func)
{
  unsigned int end = MIN (context->buffer->in_length, context->buffer->in_pos + context->context_length);
  if (unlikely (context->buffer->in_pos + count > end))
    return false;

  /* TODO We don't support lookupRecord arrays that are not increasing:
   * Should be easy for in_place ones at least. */

  /* Note: If sublookup is reverse, i will underflow after the first loop
   * and we jump out of it.  Not entirely disastrous.  So we don't check
   * for reverse lookup here.
   */
  for (unsigned int i = 0; i < count; /* NOP */)
  {
    while (_hb_ot_layout_skip_mark (context->layout->face, IN_CURINFO (), context->lookup_flag, NULL))
    {
      if (unlikely (context->buffer->in_pos == end))
	return true;
      /* No lookup applied for this index */
      _hb_buffer_next_glyph (context->buffer);
    }

    if (lookupCount && i == lookupRecord->sequenceIndex)
    {
      unsigned int old_pos = context->buffer->in_pos;

      /* Apply a lookup */
      bool done = apply_func (context, lookupRecord->lookupListIndex);

      lookupRecord++;
      lookupCount--;
      /* Err, this is wrong if the lookup jumped over some glyphs */
      i += context->buffer->in_pos - old_pos;
      if (unlikely (context->buffer->in_pos == end))
	return true;

      if (!done)
	goto not_applied;
    }
    else
    {
    not_applied:
      /* No lookup applied for this index */
      _hb_buffer_next_glyph (context->buffer);
      i++;
    }
  }

  return true;
}


/* Contextual lookups */

struct ContextLookupContext
{
  ContextFuncs funcs;
  const void *match_data; /* Matcher-specific: NULL / ClassDef / table base */
};

/* Shared driver for all Context formats: match the input sequence, then
 * apply the nested lookups within the matched span.  Uses a copy of the
 * apply context so the narrowed context_length doesn't leak to the caller. */
static inline bool context_lookup (hb_apply_context_t *context,
				   unsigned int inputCount, /* Including the first glyph (not matched) */
				   const USHORT input[], /* Array of input values--start with second glyph */
				   unsigned int lookupCount,
				   const LookupRecord lookupRecord[],
				   ContextLookupContext &lookup_context)
{
  hb_apply_context_t new_context = *context;
  return match_input (context,
		      inputCount, input,
		      lookup_context.funcs.match, lookup_context.match_data,
		      &new_context.context_length)
      && apply_lookup (&new_context,
		       inputCount,
		       lookupCount, lookupRecord,
		       lookup_context.funcs.apply);
}

/* One contextual rule: input sequence plus the LookupRecords to fire. */
struct Rule
{
  friend struct RuleSet;

  private:
  inline bool apply (hb_apply_context_t *context, ContextLookupContext &lookup_context) const
  {
    TRACE_APPLY ();
    /* lookupRecord[] starts right after the inputCount-1 input values. */
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (input, input[0].static_size * (inputCount ? inputCount - 1 : 0));
    return context_lookup (context,
			   inputCount, input,
			   lookupCount, lookupRecord,
			   lookup_context);
  }

  public:
  inline bool sanitize (hb_sanitize_context_t *context) {
    TRACE_SANITIZE ();
    return inputCount.sanitize (context)
	&& lookupCount.sanitize (context)
	&& context->check_range (input,
				 input[0].static_size * inputCount
				 + lookupRecordX[0].static_size * lookupCount);
  }

  private:
  USHORT	inputCount;		/* Total number of glyphs in input
					 * glyph sequence--includes the first
					 * glyph */
  USHORT	lookupCount;		/* Number of LookupRecords */
  USHORT	input[VAR];		/* Array of match inputs--start with
					 * second glyph */
  LookupRecord	lookupRecordX[VAR];	/* Array of LookupRecords--in
					 * design order */
  public:
  DEFINE_SIZE_ARRAY2 (4, input, lookupRecordX);
};

/* A set of Rules sharing a first glyph; first rule that applies wins. */
struct RuleSet
{
  inline bool apply (hb_apply_context_t *context, ContextLookupContext &lookup_context) const
  {
    TRACE_APPLY ();
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
    {
      if ((this+rule[i]).apply (context, lookup_context))
	return true;
    }

    return false;
  }

  inline bool sanitize (hb_sanitize_context_t *context) {
    TRACE_SANITIZE ();
    return rule.sanitize (context, this);
  }

  private:
  OffsetArrayOf<Rule>
		rule;			/* Array of Rule tables
					 * ordered by preference */
  public:
  DEFINE_SIZE_ARRAY (2, rule);
};


/* Context Substitution/Positioning Format 1: rules keyed by exact glyph ids. */
struct ContextFormat1
{
  friend struct Context;

  private:
  inline bool apply (hb_apply_context_t *context, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    unsigned int index = (this+coverage) (IN_CURGLYPH ());
    /* NOTE(review): likely() on the NOT_COVERED test hints the common case is
     * "no match" -- consistent with the matchers above; confirm intent. */
    if (likely (index == NOT_COVERED))
      return false;

    const RuleSet &rule_set = this+ruleSet[index];
    struct ContextLookupContext lookup_context = {
      {match_glyph, apply_func},
      NULL
    };
    return rule_set.apply (context, lookup_context);
  }

  inline bool sanitize (hb_sanitize_context_t *context) {
    TRACE_SANITIZE ();
    return coverage.sanitize (context, this)
	&& ruleSet.sanitize (context, this);
  }

  private:
  USHORT	format;			/* Format identifier--format = 1 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  OffsetArrayOf<RuleSet>
		ruleSet;		/* Array of RuleSet tables
					 * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (6, ruleSet);
};


/* Context Format 2: rules keyed by glyph classes from a ClassDef. */
struct ContextFormat2
{
  friend struct Context;

  private:
  inline bool apply (hb_apply_context_t *context, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    unsigned int index = (this+coverage) (IN_CURGLYPH ());
    if (likely (index == NOT_COVERED))
      return false;

    const ClassDef &class_def = this+classDef;
    index = class_def (IN_CURGLYPH ());
    const RuleSet &rule_set = this+ruleSet[index];
    /* LONGTERMTODO: Old code fetches glyph classes at most once and caches
     * them across subrule lookups.  Not sure it's worth it.
     */
    struct ContextLookupContext lookup_context = {
      {match_class, apply_func},
      &class_def
    };
    return rule_set.apply (context, lookup_context);
  }

  inline bool sanitize (hb_sanitize_context_t *context) {
    TRACE_SANITIZE ();
    return coverage.sanitize (context, this)
	&& classDef.sanitize (context, this)
	&& ruleSet.sanitize (context, this);
  }

  private:
  USHORT	format;			/* Format identifier--format = 2 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  OffsetTo<ClassDef>
		classDef;		/* Offset to glyph ClassDef table--from
					 * beginning of table */
  OffsetArrayOf<RuleSet>
		ruleSet;		/* Array of RuleSet tables
					 * ordered by class */
  public:
  DEFINE_SIZE_ARRAY (8, ruleSet);
};


/* Context Format 3: a single rule keyed by per-position Coverage tables. */
struct ContextFormat3
{
  friend struct Context;

  private:
  inline bool apply (hb_apply_context_t *context, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    unsigned int index = (this+coverage[0]) (IN_CURGLYPH ());
    if (likely (index == NOT_COVERED))
      return false;

    /* LookupRecords follow the glyphCount coverage offsets. */
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * glyphCount);
    struct ContextLookupContext lookup_context = {
      {match_coverage, apply_func},
      this
    };
    /* Coverage offsets double as the "input values"; first one already
     * matched above, hence coverage + 1. */
    return context_lookup (context,
			   glyphCount, (const USHORT *) (coverage + 1),
			   lookupCount, lookupRecord,
			   lookup_context);
  }

  inline bool sanitize (hb_sanitize_context_t *context) {
    TRACE_SANITIZE ();
    if (!context->check_struct (this)) return false;
    unsigned int count = glyphCount;
    if (!context->check_array (coverage, coverage[0].static_size, count)) return false;
    for (unsigned int i = 0; i < count; i++)
      if (!coverage[i].sanitize (context, this)) return false;
    LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * count);
    return context->check_array (lookupRecord, lookupRecord[0].static_size, lookupCount);
  }

  private:
  USHORT	format;			/* Format identifier--format = 3 */
  USHORT	glyphCount;		/* Number of glyphs in the input glyph
					 * sequence */
  USHORT	lookupCount;		/* Number of LookupRecords */
  OffsetTo<Coverage>
		coverage[VAR];		/* Array of offsets to Coverage
					 * table in glyph sequence order */
  LookupRecord	lookupRecordX[VAR];	/* Array of LookupRecords--in
					 * design order */
  public:
  DEFINE_SIZE_ARRAY2 (6, coverage, lookupRecordX);
};

/* Format dispatcher for contextual subtables. */
struct Context
{
  protected:
  inline bool apply (hb_apply_context_t *context, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    switch (u.format) {
    case 1: return u.format1.apply (context, apply_func);
    case 2: return u.format2.apply (context, apply_func);
    case 3: return u.format3.apply (context, apply_func);
    default:return false;
    }
  }

  inline bool sanitize (hb_sanitize_context_t *context) {
    TRACE_SANITIZE ();
    if (!u.format.sanitize (context)) return false;
    switch (u.format) {
    case 1: return u.format1.sanitize (context);
    case 2: return u.format2.sanitize (context);
    case 3: return u.format3.sanitize (context);
    default:return true;
    }
  }

  private:
  union {
  USHORT		format;		/* Format identifier */
  ContextFormat1	format1;
  ContextFormat2	format2;
  ContextFormat3	format3;
  } u;
};


/* Chaining Contextual lookups */

struct ChainContextLookupContext
{
  ContextFuncs funcs;
  const void *match_data[3]; /* Per-sequence data: backtrack, input, lookahead */
};

/* Shared driver for all ChainContext formats: backtrack + input + lookahead
 * must all match before the nested lookups are applied to the input span. */
static inline bool chain_context_lookup (hb_apply_context_t *context,
					 unsigned int backtrackCount,
					 const USHORT backtrack[],
					 unsigned int inputCount, /* Including the first glyph (not matched) */
					 const USHORT input[], /* Array of input values--start with second glyph */
					 unsigned int lookaheadCount,
					 const USHORT lookahead[],
					 unsigned int lookupCount,
					 const LookupRecord lookupRecord[],
					 ChainContextLookupContext &lookup_context)
{
  /* First guess */
  if (unlikely (context->buffer->out_pos < backtrackCount ||
		context->buffer->in_pos + inputCount + lookaheadCount > context->buffer->in_length ||
		inputCount + lookaheadCount > context->context_length))
    return false;

  hb_apply_context_t new_context = *context;
  return match_backtrack (context,
			  backtrackCount, backtrack,
			  lookup_context.funcs.match, lookup_context.match_data[0])
      && match_input (context,
		      inputCount, input,
		      lookup_context.funcs.match, lookup_context.match_data[1],
		      &new_context.context_length)
      && match_lookahead (context,
			  lookaheadCount, lookahead,
			  lookup_context.funcs.match, lookup_context.match_data[2],
			  new_context.context_length)
      && apply_lookup (&new_context,
		       inputCount,
		       lookupCount, lookupRecord,
		       lookup_context.funcs.apply);
}

/* One chaining rule: four variable-length arrays laid out back to back
 * (backtrack, input, lookahead, lookup records), located via StructAfter. */
struct ChainRule
{
  friend struct ChainRuleSet;

  private:
  inline bool apply (hb_apply_context_t *context, ChainContextLookupContext &lookup_context) const
  {
    TRACE_APPLY ();
    const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
    const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return chain_context_lookup (context,
				 backtrack.len, backtrack.array,
				 input.len, input.array,
				 lookahead.len, lookahead.array,
				 lookup.len, lookup.array,
				 lookup_context);
    return false; /* NOTE(review): unreachable -- dead code after the return above */
  }

  public:
  inline bool sanitize (hb_sanitize_context_t *context) {
    TRACE_SANITIZE ();
    /* Each array must be validated before the next one's position is known. */
    if (!backtrack.sanitize (context)) return false;
    HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
    if (!input.sanitize (context)) return false;
    ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
    if (!lookahead.sanitize (context)) return false;
    ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return lookup.sanitize (context);
  }

  private:
  ArrayOf<USHORT>
		backtrack;		/* Array of backtracking values
					 * (to be matched before the input
					 * sequence) */
  HeadlessArrayOf<USHORT>
		inputX;			/* Array of input values (start with
					 * second glyph) */
  ArrayOf<USHORT>
		lookaheadX;		/* Array of lookahead values's (to be
					 * matched after the input sequence) */
  ArrayOf<LookupRecord>
		lookupX;		/* Array of LookupRecords--in
					 * design order) */
  public:
  DEFINE_SIZE_MIN (8);
};

/* A set of ChainRules sharing a first glyph; first rule that applies wins. */
struct ChainRuleSet
{
  inline bool apply (hb_apply_context_t *context, ChainContextLookupContext &lookup_context) const
  {
    TRACE_APPLY ();
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
    {
      if ((this+rule[i]).apply (context, lookup_context))
	return true;
    }

    return false;
  }

  inline bool sanitize (hb_sanitize_context_t *context) {
    TRACE_SANITIZE ();
    return rule.sanitize (context, this);
  }

  private:
  OffsetArrayOf<ChainRule>
		rule;			/* Array of ChainRule tables
					 * ordered by preference */
  public:
  DEFINE_SIZE_ARRAY (2, rule);
};

/* ChainContext Format 1: chain rules keyed by exact glyph ids. */
struct ChainContextFormat1
{
  friend struct ChainContext;

  private:
  inline bool apply (hb_apply_context_t *context, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    unsigned int index = (this+coverage) (IN_CURGLYPH ());
    if (likely (index == NOT_COVERED))
      return false;

    const ChainRuleSet &rule_set = this+ruleSet[index];
    struct ChainContextLookupContext lookup_context = {
      {match_glyph, apply_func},
      {NULL, NULL, NULL}
    };
    return rule_set.apply (context, lookup_context);
  }

  inline bool sanitize (hb_sanitize_context_t *context) {
    TRACE_SANITIZE ();
    return coverage.sanitize (context, this)
	&& ruleSet.sanitize (context, this);
  }

  private:
  USHORT	format;			/* Format identifier--format = 1 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  OffsetArrayOf<ChainRuleSet>
		ruleSet;		/* Array of ChainRuleSet tables
					 * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (6, ruleSet);
};

/* ChainContext Format 2: chain rules keyed by three separate ClassDefs,
 * one each for the backtrack, input, and lookahead sequences. */
struct ChainContextFormat2
{
  friend struct ChainContext;

  private:
  inline bool apply (hb_apply_context_t *context, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    unsigned int index = (this+coverage) (IN_CURGLYPH ());
    if (likely (index == NOT_COVERED))
      return false;

    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    index = input_class_def (IN_CURGLYPH ());
    const ChainRuleSet &rule_set = this+ruleSet[index];
    /* LONGTERMTODO: Old code fetches glyph classes at most once and caches
     * them across subrule lookups.  Not sure it's worth it.
     */
    struct ChainContextLookupContext lookup_context = {
      {match_class, apply_func},
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def}
    };
    return rule_set.apply (context, lookup_context);
  }

  inline bool sanitize (hb_sanitize_context_t *context) {
    TRACE_SANITIZE ();
    return coverage.sanitize (context, this)
	&& backtrackClassDef.sanitize (context, this)
	&& inputClassDef.sanitize (context, this)
	&& lookaheadClassDef.sanitize (context, this)
	&& ruleSet.sanitize (context, this);
  }

  private:
  USHORT	format;			/* Format identifier--format = 2 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  OffsetTo<ClassDef>
		backtrackClassDef;	/* Offset to glyph ClassDef table
					 * containing backtrack sequence
					 * data--from beginning of table */
  OffsetTo<ClassDef>
		inputClassDef;		/* Offset to glyph ClassDef
					 * table containing input sequence
					 * data--from beginning of table */
  OffsetTo<ClassDef>
		lookaheadClassDef;	/* Offset to glyph ClassDef table
					 * containing lookahead sequence
					 * data--from beginning of table */
  OffsetArrayOf<ChainRuleSet>
		ruleSet;		/* Array of ChainRuleSet tables
					 * ordered by class */
  public:
  DEFINE_SIZE_ARRAY (12, ruleSet);
};

/* ChainContext Format 3: one chain rule with per-position Coverage tables
 * for each of the three sequences, laid out back to back. */
struct ChainContextFormat3
{
  friend struct ChainContext;

  private:

  inline bool apply (hb_apply_context_t *context, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);

    unsigned int index = (this+input[0]) (IN_CURGLYPH ());
    if (likely (index == NOT_COVERED))
      return false;

    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    struct ChainContextLookupContext lookup_context = {
      {match_coverage, apply_func},
      {this, this, this}
    };
    /* Coverage offsets double as match values; input's first entry was
     * already matched above, hence input.array + 1. */
    return chain_context_lookup (context,
				 backtrack.len, (const USHORT *) backtrack.array,
				 input.len, (const USHORT *) input.array + 1,
				 lookahead.len, (const USHORT *) lookahead.array,
				 lookup.len, lookup.array,
				 lookup_context);
    return false; /* NOTE(review): unreachable -- dead code after the return above */
  }

  inline bool sanitize (hb_sanitize_context_t *context) {
    TRACE_SANITIZE ();
    /* Each array must be validated before the next one's position is known. */
    if (!backtrack.sanitize (context, this)) return false;
    OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
    if (!input.sanitize (context, this)) return false;
    OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
    if (!lookahead.sanitize (context, this)) return false;
    ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return lookup.sanitize (context);
  }

  private:
  USHORT	format;			/* Format identifier--format = 3 */
  OffsetArrayOf<Coverage>
		backtrack;		/* Array of coverage tables
					 * in backtracking sequence, in glyph
					 * sequence order */
  OffsetArrayOf<Coverage>
		inputX	;		/* Array of coverage
					 * tables in input sequence, in glyph
					 * sequence order */
  OffsetArrayOf<Coverage>
		lookaheadX;		/* Array of coverage tables
					 * in lookahead sequence, in glyph
					 * sequence order */
  ArrayOf<LookupRecord>
		lookupX;		/* Array of LookupRecords--in
					 * design order) */
  public:
  DEFINE_SIZE_MIN (10);
};

/* Format dispatcher for chaining contextual subtables. */
struct ChainContext
{
  protected:
  inline bool apply (hb_apply_context_t *context, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    switch (u.format) {
    case 1: return u.format1.apply (context, apply_func);
    case 2: return u.format2.apply (context, apply_func);
    case 3: return u.format3.apply (context, apply_func);
    default:return false;
    }
  }

  inline bool sanitize (hb_sanitize_context_t *context) {
    TRACE_SANITIZE ();
    if (!u.format.sanitize (context)) return false;
    switch (u.format) {
    case 1: return u.format1.sanitize (context);
    case 2: return u.format2.sanitize (context);
    case 3: return u.format3.sanitize (context);
    default:return true;
    }
  }

  private:
  union {
  USHORT		format;	/* Format identifier */
  ChainContextFormat1	format1;
  ChainContextFormat2	format2;
  ChainContextFormat3	format3;
  } u;
};


/* Extension lookup subtable, format 1: redirects to a subtable of another
 * lookup type via a 32-bit offset (lets 16-bit offset tables grow large). */
struct ExtensionFormat1
{
  friend struct Extension;

  protected:
  inline unsigned int get_type (void) const { return extensionLookupType; }
  inline unsigned int get_offset (void) const { return extensionOffset; }

  inline bool sanitize (hb_sanitize_context_t *context) {
    TRACE_SANITIZE ();
    return context->check_struct (this);
  }

  private:
  USHORT	format;			/* Format identifier. Set to 1. */
  USHORT	extensionLookupType;	/* Lookup type of subtable referenced
					 * by ExtensionOffset (i.e. the
					 * extension subtable). */
  ULONG		extensionOffset;	/* Offset to the extension subtable,
					 * of lookup type subtable. */
  public:
  DEFINE_SIZE_STATIC (8);
};

/* Format dispatcher for extension subtables (0 means unknown format). */
struct Extension
{
  inline unsigned int get_type (void) const
  {
    switch (u.format) {
    case 1: return u.format1.get_type ();
    default:return 0;
    }
  }
  inline unsigned int get_offset (void) const
  {
    switch (u.format) {
    case 1: return u.format1.get_offset ();
    default:return 0;
    }
  }

  inline bool sanitize (hb_sanitize_context_t *context) {
    TRACE_SANITIZE ();
    if (!u.format.sanitize (context)) return false;
    switch (u.format) {
    case 1: return u.format1.sanitize (context);
    default:return true;
    }
  }

  private:
  union {
  USHORT		format;		/* Format identifier */
  ExtensionFormat1	format1;
  } u;
};


/*
 * GSUB/GPOS Common
 */

/* Common header shared by the GSUB and GPOS tables: version plus offsets to
 * the script, feature, and lookup lists.  Accessors simply delegate to the
 * respective list tables. */
struct GSUBGPOS
{
  static const hb_tag_t GSUBTag	= HB_OT_TAG_GSUB;
  static const hb_tag_t GPOSTag	= HB_OT_TAG_GPOS;

  inline unsigned int get_script_count (void) const
  { return (this+scriptList).len; }
  inline const Tag& get_script_tag (unsigned int i) const
  { return (this+scriptList).get_tag (i); }
  inline unsigned int get_script_tags (unsigned int start_offset,
				       unsigned int *script_count /* IN/OUT */,
				       hb_tag_t     *script_tags /* OUT */) const
  { return (this+scriptList).get_tags (start_offset, script_count, script_tags); }
  inline const Script& get_script (unsigned int i) const
  { return (this+scriptList)[i]; }
  inline bool find_script_index (hb_tag_t tag, unsigned int *index) const
  { return (this+scriptList).find_index (tag, index); }

  inline unsigned int get_feature_count (void) const
  { return (this+featureList).len; }
  inline const Tag& get_feature_tag (unsigned int i) const
  { return (this+featureList).get_tag (i); }
  inline unsigned int get_feature_tags (unsigned int start_offset,
					unsigned int *feature_count /* IN/OUT */,
					hb_tag_t     *feature_tags /* OUT */) const
  { return (this+featureList).get_tags (start_offset, feature_count, feature_tags); }
  inline const Feature& get_feature (unsigned int i) const
  { return (this+featureList)[i]; }
  inline bool find_feature_index (hb_tag_t tag, unsigned int *index) const
  { return (this+featureList).find_index (tag, index); }

  inline unsigned int get_lookup_count (void) const
  { return (this+lookupList).len; }
  inline const Lookup& get_lookup (unsigned int i) const
  { return (this+lookupList)[i]; }

  inline bool sanitize (hb_sanitize_context_t *context) {
    TRACE_SANITIZE ();
    /* Only major version 1 is understood. */
    return version.sanitize (context) && likely (version.major == 1)
	&& scriptList.sanitize (context, this)
	&& featureList.sanitize (context, this)
	&& lookupList.sanitize (context, this);
  }

  protected:
  FixedVersion	version;	/* Version of the GSUB/GPOS table--initially set
				 * to 0x00010000 */
  OffsetTo<ScriptList>
		scriptList;  	/* ScriptList table */
  OffsetTo<FeatureList>
		featureList; 	/* FeatureList table */
  OffsetTo<LookupList>
		lookupList; 	/* LookupList table */
  public:
  DEFINE_SIZE_STATIC (10);
};


#endif /* HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH */