hb-ot-layout-gsubgpos-private.hh revision 6d08c7f1b3601095f9a12630045331dd0fe75380
1/* 2 * Copyright © 2007,2008,2009,2010 Red Hat, Inc. 3 * Copyright © 2010,2012 Google, Inc. 4 * 5 * This is part of HarfBuzz, a text shaping library. 6 * 7 * Permission is hereby granted, without written agreement and without 8 * license or royalty fees, to use, copy, modify, and distribute this 9 * software and its documentation for any purpose, provided that the 10 * above copyright notice and the following two paragraphs appear in 11 * all copies of this software. 12 * 13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR 14 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES 15 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN 16 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH 17 * DAMAGE. 18 * 19 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, 20 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND 21 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS 22 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO 23 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. 
24 * 25 * Red Hat Author(s): Behdad Esfahbod 26 * Google Author(s): Behdad Esfahbod 27 */ 28 29#ifndef HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH 30#define HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH 31 32#include "hb-buffer-private.hh" 33#include "hb-ot-layout-gdef-table.hh" 34 35 36 37/* unique ligature id */ 38/* component number in the ligature (0 = base) */ 39static inline void 40set_lig_props (hb_glyph_info_t &info, unsigned int lig_id, unsigned int lig_comp) 41{ 42 info.lig_props() = (lig_id << 4) | (lig_comp & 0x0F); 43} 44static inline unsigned int 45get_lig_id (hb_glyph_info_t &info) 46{ 47 return info.lig_props() >> 4; 48} 49static inline unsigned int 50get_lig_comp (hb_glyph_info_t &info) 51{ 52 return info.lig_props() & 0x0F; 53} 54 55static inline uint8_t allocate_lig_id (hb_buffer_t *buffer) { 56 uint8_t lig_id = buffer->next_serial () & 0x0F; 57 if (unlikely (!lig_id)) 58 lig_id = allocate_lig_id (buffer); /* in case of overflow */ 59 return lig_id; 60} 61 62 63 64#ifndef HB_DEBUG_CLOSURE 65#define HB_DEBUG_CLOSURE (HB_DEBUG+0) 66#endif 67 68#define TRACE_CLOSURE() \ 69 hb_auto_trace_t<HB_DEBUG_CLOSURE> trace (&c->debug_depth, "CLOSURE", this, HB_FUNC, ""); 70 71 72/* TODO Add TRACE_RETURN annotation for would_apply */ 73 74 75struct hb_closure_context_t 76{ 77 hb_face_t *face; 78 hb_set_t *glyphs; 79 unsigned int nesting_level_left; 80 unsigned int debug_depth; 81 82 83 hb_closure_context_t (hb_face_t *face_, 84 hb_set_t *glyphs_, 85 unsigned int nesting_level_left_ = MAX_NESTING_LEVEL) : 86 face (face_), glyphs (glyphs_), 87 nesting_level_left (nesting_level_left_), 88 debug_depth (0) {} 89}; 90 91 92 93#ifndef HB_DEBUG_APPLY 94#define HB_DEBUG_APPLY (HB_DEBUG+0) 95#endif 96 97#define TRACE_APPLY() \ 98 hb_auto_trace_t<HB_DEBUG_APPLY> trace (&c->debug_depth, "APPLY", this, HB_FUNC, "idx %d codepoint %u", c->buffer->idx, c->buffer->cur().codepoint); 99 100 101 102struct hb_apply_context_t 103{ 104 hb_font_t *font; 105 hb_face_t *face; 106 hb_buffer_t *buffer; 107 
hb_direction_t direction; 108 hb_mask_t lookup_mask; 109 unsigned int nesting_level_left; 110 unsigned int lookup_props; 111 unsigned int property; /* propety of first glyph */ 112 unsigned int debug_depth; 113 114 115 hb_apply_context_t (hb_font_t *font_, 116 hb_face_t *face_, 117 hb_buffer_t *buffer_, 118 hb_mask_t lookup_mask_) : 119 font (font_), face (face_), buffer (buffer_), 120 direction (buffer_->props.direction), 121 lookup_mask (lookup_mask_), 122 nesting_level_left (MAX_NESTING_LEVEL), 123 lookup_props (0), property (0), debug_depth (0) {} 124 125 void set_lookup (const Lookup &l) { 126 lookup_props = l.get_props (); 127 } 128 129 struct mark_skipping_forward_iterator_t 130 { 131 inline mark_skipping_forward_iterator_t (hb_apply_context_t *c_, 132 unsigned int start_index_, 133 unsigned int num_items_, 134 bool context_match = false) 135 { 136 c = c_; 137 idx = start_index_; 138 num_items = num_items_; 139 mask = context_match ? -1 : c->lookup_mask; 140 syllable = context_match ? 
0 : c->buffer->cur().syllable (); 141 end = c->buffer->len; 142 } 143 inline bool has_no_chance (void) const 144 { 145 return unlikely (num_items && idx + num_items >= end); 146 } 147 inline void reject (void) 148 { 149 num_items++; 150 } 151 inline bool next (unsigned int *property_out, 152 unsigned int lookup_props) 153 { 154 assert (num_items > 0); 155 do 156 { 157 if (has_no_chance ()) 158 return false; 159 idx++; 160 } while (_hb_ot_layout_skip_mark (c->face, &c->buffer->info[idx], lookup_props, property_out)); 161 num_items--; 162 return (c->buffer->info[idx].mask & mask) && (!syllable || syllable == c->buffer->info[idx].syllable ()); 163 } 164 inline bool next (unsigned int *property_out = NULL) 165 { 166 return next (property_out, c->lookup_props); 167 } 168 169 unsigned int idx; 170 private: 171 hb_apply_context_t *c; 172 unsigned int num_items; 173 hb_mask_t mask; 174 uint8_t syllable; 175 unsigned int end; 176 }; 177 178 struct mark_skipping_backward_iterator_t 179 { 180 inline mark_skipping_backward_iterator_t (hb_apply_context_t *c_, 181 unsigned int start_index_, 182 unsigned int num_items_, 183 hb_mask_t mask_ = 0, 184 bool match_syllable_ = true) 185 { 186 c = c_; 187 idx = start_index_; 188 num_items = num_items_; 189 mask = mask_ ? mask_ : c->lookup_mask; 190 syllable = match_syllable_ ? 
c->buffer->cur().syllable () : 0; 191 } 192 inline bool has_no_chance (void) const 193 { 194 return unlikely (idx < num_items); 195 } 196 inline void reject (void) 197 { 198 num_items++; 199 } 200 inline bool prev (unsigned int *property_out, 201 unsigned int lookup_props) 202 { 203 assert (num_items > 0); 204 do 205 { 206 if (has_no_chance ()) 207 return false; 208 idx--; 209 } while (_hb_ot_layout_skip_mark (c->face, &c->buffer->out_info[idx], lookup_props, property_out)); 210 num_items--; 211 return (c->buffer->out_info[idx].mask & mask) && (!syllable || syllable == c->buffer->out_info[idx].syllable ()); 212 } 213 inline bool prev (unsigned int *property_out = NULL) 214 { 215 return prev (property_out, c->lookup_props); 216 } 217 218 unsigned int idx; 219 private: 220 hb_apply_context_t *c; 221 unsigned int num_items; 222 hb_mask_t mask; 223 uint8_t syllable; 224 }; 225 226 inline bool should_mark_skip_current_glyph (void) const 227 { 228 unsigned int property; 229 return _hb_ot_layout_skip_mark (face, &buffer->cur(), lookup_props, &property); 230 } 231 232 233 inline void output_glyph (hb_codepoint_t glyph_index, 234 unsigned int klass = 0) const 235 { 236 buffer->cur().props_cache() = klass; /*XXX if has gdef? */ 237 buffer->output_glyph (glyph_index); 238 } 239 inline void replace_glyph (hb_codepoint_t glyph_index, 240 unsigned int klass = 0) const 241 { 242 buffer->cur().props_cache() = klass; /*XXX if has gdef? */ 243 buffer->replace_glyph (glyph_index); 244 } 245 inline void replace_glyphs (unsigned int num_in, 246 unsigned int num_out, 247 hb_codepoint_t *glyph_data, 248 unsigned int klass = 0) const 249 { 250 buffer->cur().props_cache() = klass; /* XXX if has gdef? 
*/ 251 buffer->replace_glyphs (num_in, num_out, glyph_data); 252 } 253}; 254 255 256 257typedef bool (*intersects_func_t) (hb_set_t *glyphs, const USHORT &value, const void *data); 258typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const USHORT &value, const void *data); 259typedef void (*closure_lookup_func_t) (hb_closure_context_t *c, unsigned int lookup_index); 260typedef bool (*apply_lookup_func_t) (hb_apply_context_t *c, unsigned int lookup_index); 261 262struct ContextClosureFuncs 263{ 264 intersects_func_t intersects; 265 closure_lookup_func_t closure; 266}; 267struct ContextApplyFuncs 268{ 269 match_func_t match; 270 apply_lookup_func_t apply; 271}; 272 273static inline bool intersects_glyph (hb_set_t *glyphs, const USHORT &value, const void *data HB_UNUSED) 274{ 275 return glyphs->has (value); 276} 277static inline bool intersects_class (hb_set_t *glyphs, const USHORT &value, const void *data) 278{ 279 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data); 280 return class_def.intersects_class (glyphs, value); 281} 282static inline bool intersects_coverage (hb_set_t *glyphs, const USHORT &value, const void *data) 283{ 284 const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value; 285 return (data+coverage).intersects (glyphs); 286} 287 288static inline bool intersects_array (hb_closure_context_t *c, 289 unsigned int count, 290 const USHORT values[], 291 intersects_func_t intersects_func, 292 const void *intersects_data) 293{ 294 for (unsigned int i = 0; i < count; i++) 295 if (likely (!intersects_func (c->glyphs, values[i], intersects_data))) 296 return false; 297 return true; 298} 299 300 301static inline bool match_glyph (hb_codepoint_t glyph_id, const USHORT &value, const void *data HB_UNUSED) 302{ 303 return glyph_id == value; 304} 305static inline bool match_class (hb_codepoint_t glyph_id, const USHORT &value, const void *data) 306{ 307 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data); 308 return 
class_def.get_class (glyph_id) == value; 309} 310static inline bool match_coverage (hb_codepoint_t glyph_id, const USHORT &value, const void *data) 311{ 312 const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value; 313 return (data+coverage).get_coverage (glyph_id) != NOT_COVERED; 314} 315 316 317static inline bool match_input (hb_apply_context_t *c, 318 unsigned int count, /* Including the first glyph (not matched) */ 319 const USHORT input[], /* Array of input values--start with second glyph */ 320 match_func_t match_func, 321 const void *match_data, 322 unsigned int *end_offset = NULL) 323{ 324 hb_apply_context_t::mark_skipping_forward_iterator_t skippy_iter (c, c->buffer->idx, count - 1); 325 if (skippy_iter.has_no_chance ()) 326 return false; 327 328 for (unsigned int i = 1; i < count; i++) 329 { 330 if (!skippy_iter.next ()) 331 return false; 332 333 if (likely (!match_func (c->buffer->info[skippy_iter.idx].codepoint, input[i - 1], match_data))) 334 return false; 335 } 336 337 if (end_offset) 338 *end_offset = skippy_iter.idx - c->buffer->idx + 1; 339 340 return true; 341} 342 343static inline bool match_backtrack (hb_apply_context_t *c, 344 unsigned int count, 345 const USHORT backtrack[], 346 match_func_t match_func, 347 const void *match_data) 348{ 349 hb_apply_context_t::mark_skipping_backward_iterator_t skippy_iter (c, c->buffer->backtrack_len (), count, true); 350 if (skippy_iter.has_no_chance ()) 351 return false; 352 353 for (unsigned int i = 0; i < count; i++) 354 { 355 if (!skippy_iter.prev ()) 356 return false; 357 358 if (likely (!match_func (c->buffer->out_info[skippy_iter.idx].codepoint, backtrack[i], match_data))) 359 return false; 360 } 361 362 return true; 363} 364 365static inline bool match_lookahead (hb_apply_context_t *c, 366 unsigned int count, 367 const USHORT lookahead[], 368 match_func_t match_func, 369 const void *match_data, 370 unsigned int offset) 371{ 372 hb_apply_context_t::mark_skipping_forward_iterator_t 
skippy_iter (c, c->buffer->idx + offset - 1, count, true); 373 if (skippy_iter.has_no_chance ()) 374 return false; 375 376 for (unsigned int i = 0; i < count; i++) 377 { 378 if (!skippy_iter.next ()) 379 return false; 380 381 if (likely (!match_func (c->buffer->info[skippy_iter.idx].codepoint, lookahead[i], match_data))) 382 return false; 383 } 384 385 return true; 386} 387 388 389 390struct LookupRecord 391{ 392 inline bool sanitize (hb_sanitize_context_t *c) { 393 TRACE_SANITIZE (); 394 return TRACE_RETURN (c->check_struct (this)); 395 } 396 397 USHORT sequenceIndex; /* Index into current glyph 398 * sequence--first glyph = 0 */ 399 USHORT lookupListIndex; /* Lookup to apply to that 400 * position--zero--based */ 401 public: 402 DEFINE_SIZE_STATIC (4); 403}; 404 405 406static inline void closure_lookup (hb_closure_context_t *c, 407 unsigned int lookupCount, 408 const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */ 409 closure_lookup_func_t closure_func) 410{ 411 for (unsigned int i = 0; i < lookupCount; i++) 412 closure_func (c, lookupRecord->lookupListIndex); 413} 414 415static inline bool apply_lookup (hb_apply_context_t *c, 416 unsigned int count, /* Including the first glyph */ 417 unsigned int lookupCount, 418 const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */ 419 apply_lookup_func_t apply_func) 420{ 421 unsigned int end = c->buffer->len; 422 if (unlikely (count == 0 || c->buffer->idx + count > end)) 423 return false; 424 425 /* TODO We don't support lookupRecord arrays that are not increasing: 426 * Should be easy for in_place ones at least. */ 427 428 /* Note: If sublookup is reverse, it will underflow after the first loop 429 * and we jump out of it. Not entirely disastrous. So we don't check 430 * for reverse lookup here. 
431 */ 432 for (unsigned int i = 0; i < count; /* NOP */) 433 { 434 if (unlikely (c->buffer->idx == end)) 435 return true; 436 while (c->should_mark_skip_current_glyph ()) 437 { 438 /* No lookup applied for this index */ 439 c->buffer->next_glyph (); 440 if (unlikely (c->buffer->idx == end)) 441 return true; 442 } 443 444 if (lookupCount && i == lookupRecord->sequenceIndex) 445 { 446 unsigned int old_pos = c->buffer->idx; 447 448 /* Apply a lookup */ 449 bool done = apply_func (c, lookupRecord->lookupListIndex); 450 451 lookupRecord++; 452 lookupCount--; 453 /* Err, this is wrong if the lookup jumped over some glyphs */ 454 i += c->buffer->idx - old_pos; 455 if (unlikely (c->buffer->idx == end)) 456 return true; 457 458 if (!done) 459 goto not_applied; 460 } 461 else 462 { 463 not_applied: 464 /* No lookup applied for this index */ 465 c->buffer->next_glyph (); 466 i++; 467 } 468 } 469 470 return true; 471} 472 473 474 475/* Contextual lookups */ 476 477struct ContextClosureLookupContext 478{ 479 ContextClosureFuncs funcs; 480 const void *intersects_data; 481}; 482 483struct ContextApplyLookupContext 484{ 485 ContextApplyFuncs funcs; 486 const void *match_data; 487}; 488 489static inline void context_closure_lookup (hb_closure_context_t *c, 490 unsigned int inputCount, /* Including the first glyph (not matched) */ 491 const USHORT input[], /* Array of input values--start with second glyph */ 492 unsigned int lookupCount, 493 const LookupRecord lookupRecord[], 494 ContextClosureLookupContext &lookup_context) 495{ 496 if (intersects_array (c, 497 inputCount ? 
inputCount - 1 : 0, input, 498 lookup_context.funcs.intersects, lookup_context.intersects_data)) 499 closure_lookup (c, 500 lookupCount, lookupRecord, 501 lookup_context.funcs.closure); 502} 503 504 505static inline bool context_apply_lookup (hb_apply_context_t *c, 506 unsigned int inputCount, /* Including the first glyph (not matched) */ 507 const USHORT input[], /* Array of input values--start with second glyph */ 508 unsigned int lookupCount, 509 const LookupRecord lookupRecord[], 510 ContextApplyLookupContext &lookup_context) 511{ 512 return match_input (c, 513 inputCount, input, 514 lookup_context.funcs.match, lookup_context.match_data) 515 && apply_lookup (c, 516 inputCount, 517 lookupCount, lookupRecord, 518 lookup_context.funcs.apply); 519} 520 521struct Rule 522{ 523 friend struct RuleSet; 524 525 private: 526 527 inline void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const 528 { 529 TRACE_CLOSURE (); 530 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (input, input[0].static_size * (inputCount ? inputCount - 1 : 0)); 531 context_closure_lookup (c, 532 inputCount, input, 533 lookupCount, lookupRecord, 534 lookup_context); 535 } 536 537 inline bool apply (hb_apply_context_t *c, ContextApplyLookupContext &lookup_context) const 538 { 539 TRACE_APPLY (); 540 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (input, input[0].static_size * (inputCount ? 
inputCount - 1 : 0)); 541 return TRACE_RETURN (context_apply_lookup (c, inputCount, input, lookupCount, lookupRecord, lookup_context)); 542 } 543 544 public: 545 inline bool sanitize (hb_sanitize_context_t *c) { 546 TRACE_SANITIZE (); 547 return inputCount.sanitize (c) 548 && lookupCount.sanitize (c) 549 && c->check_range (input, 550 input[0].static_size * inputCount 551 + lookupRecordX[0].static_size * lookupCount); 552 } 553 554 private: 555 USHORT inputCount; /* Total number of glyphs in input 556 * glyph sequence--includes the first 557 * glyph */ 558 USHORT lookupCount; /* Number of LookupRecords */ 559 USHORT input[VAR]; /* Array of match inputs--start with 560 * second glyph */ 561 LookupRecord lookupRecordX[VAR]; /* Array of LookupRecords--in 562 * design order */ 563 public: 564 DEFINE_SIZE_ARRAY2 (4, input, lookupRecordX); 565}; 566 567struct RuleSet 568{ 569 inline void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const 570 { 571 TRACE_CLOSURE (); 572 unsigned int num_rules = rule.len; 573 for (unsigned int i = 0; i < num_rules; i++) 574 (this+rule[i]).closure (c, lookup_context); 575 } 576 577 inline bool apply (hb_apply_context_t *c, ContextApplyLookupContext &lookup_context) const 578 { 579 TRACE_APPLY (); 580 unsigned int num_rules = rule.len; 581 for (unsigned int i = 0; i < num_rules; i++) 582 { 583 if ((this+rule[i]).apply (c, lookup_context)) 584 return TRACE_RETURN (true); 585 } 586 return TRACE_RETURN (false); 587 } 588 589 inline bool sanitize (hb_sanitize_context_t *c) { 590 TRACE_SANITIZE (); 591 return TRACE_RETURN (rule.sanitize (c, this)); 592 } 593 594 private: 595 OffsetArrayOf<Rule> 596 rule; /* Array of Rule tables 597 * ordered by preference */ 598 public: 599 DEFINE_SIZE_ARRAY (2, rule); 600}; 601 602 603struct ContextFormat1 604{ 605 friend struct Context; 606 607 private: 608 609 inline void closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const 610 { 611 TRACE_CLOSURE (); 612 
613 const Coverage &cov = (this+coverage); 614 615 struct ContextClosureLookupContext lookup_context = { 616 {intersects_glyph, closure_func}, 617 NULL 618 }; 619 620 unsigned int count = ruleSet.len; 621 for (unsigned int i = 0; i < count; i++) 622 if (cov.intersects_coverage (c->glyphs, i)) { 623 const RuleSet &rule_set = this+ruleSet[i]; 624 rule_set.closure (c, lookup_context); 625 } 626 } 627 628 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const 629 { 630 TRACE_APPLY (); 631 unsigned int index = (this+coverage) (c->buffer->cur().codepoint); 632 if (likely (index == NOT_COVERED)) 633 return TRACE_RETURN (false); 634 635 const RuleSet &rule_set = this+ruleSet[index]; 636 struct ContextApplyLookupContext lookup_context = { 637 {match_glyph, apply_func}, 638 NULL 639 }; 640 return TRACE_RETURN (rule_set.apply (c, lookup_context)); 641 } 642 643 inline bool sanitize (hb_sanitize_context_t *c) { 644 TRACE_SANITIZE (); 645 return TRACE_RETURN (coverage.sanitize (c, this) && ruleSet.sanitize (c, this)); 646 } 647 648 private: 649 USHORT format; /* Format identifier--format = 1 */ 650 OffsetTo<Coverage> 651 coverage; /* Offset to Coverage table--from 652 * beginning of table */ 653 OffsetArrayOf<RuleSet> 654 ruleSet; /* Array of RuleSet tables 655 * ordered by Coverage Index */ 656 public: 657 DEFINE_SIZE_ARRAY (6, ruleSet); 658}; 659 660 661struct ContextFormat2 662{ 663 friend struct Context; 664 665 private: 666 667 inline void closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const 668 { 669 TRACE_CLOSURE (); 670 if (!(this+coverage).intersects (c->glyphs)) 671 return; 672 673 const ClassDef &class_def = this+classDef; 674 675 struct ContextClosureLookupContext lookup_context = { 676 {intersects_class, closure_func}, 677 NULL 678 }; 679 680 unsigned int count = ruleSet.len; 681 for (unsigned int i = 0; i < count; i++) 682 if (class_def.intersects_class (c->glyphs, i)) { 683 const RuleSet &rule_set = this+ruleSet[i]; 
684 rule_set.closure (c, lookup_context); 685 } 686 } 687 688 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const 689 { 690 TRACE_APPLY (); 691 unsigned int index = (this+coverage) (c->buffer->cur().codepoint); 692 if (likely (index == NOT_COVERED)) return TRACE_RETURN (false); 693 694 const ClassDef &class_def = this+classDef; 695 index = class_def (c->buffer->cur().codepoint); 696 const RuleSet &rule_set = this+ruleSet[index]; 697 struct ContextApplyLookupContext lookup_context = { 698 {match_class, apply_func}, 699 &class_def 700 }; 701 return TRACE_RETURN (rule_set.apply (c, lookup_context)); 702 } 703 704 inline bool sanitize (hb_sanitize_context_t *c) { 705 TRACE_SANITIZE (); 706 return TRACE_RETURN (coverage.sanitize (c, this) && classDef.sanitize (c, this) && ruleSet.sanitize (c, this)); 707 } 708 709 private: 710 USHORT format; /* Format identifier--format = 2 */ 711 OffsetTo<Coverage> 712 coverage; /* Offset to Coverage table--from 713 * beginning of table */ 714 OffsetTo<ClassDef> 715 classDef; /* Offset to glyph ClassDef table--from 716 * beginning of table */ 717 OffsetArrayOf<RuleSet> 718 ruleSet; /* Array of RuleSet tables 719 * ordered by class */ 720 public: 721 DEFINE_SIZE_ARRAY (8, ruleSet); 722}; 723 724 725struct ContextFormat3 726{ 727 friend struct Context; 728 729 private: 730 731 inline void closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const 732 { 733 TRACE_CLOSURE (); 734 if (!(this+coverage[0]).intersects (c->glyphs)) 735 return; 736 737 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * glyphCount); 738 struct ContextClosureLookupContext lookup_context = { 739 {intersects_coverage, closure_func}, 740 this 741 }; 742 context_closure_lookup (c, 743 glyphCount, (const USHORT *) (coverage + 1), 744 lookupCount, lookupRecord, 745 lookup_context); 746 } 747 748 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const 
749 { 750 TRACE_APPLY (); 751 unsigned int index = (this+coverage[0]) (c->buffer->cur().codepoint); 752 if (likely (index == NOT_COVERED)) return TRACE_RETURN (false); 753 754 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * glyphCount); 755 struct ContextApplyLookupContext lookup_context = { 756 {match_coverage, apply_func}, 757 this 758 }; 759 return TRACE_RETURN (context_apply_lookup (c, glyphCount, (const USHORT *) (coverage + 1), lookupCount, lookupRecord, lookup_context)); 760 } 761 762 inline bool sanitize (hb_sanitize_context_t *c) { 763 TRACE_SANITIZE (); 764 if (!c->check_struct (this)) return TRACE_RETURN (false); 765 unsigned int count = glyphCount; 766 if (!c->check_array (coverage, coverage[0].static_size, count)) return TRACE_RETURN (false); 767 for (unsigned int i = 0; i < count; i++) 768 if (!coverage[i].sanitize (c, this)) return TRACE_RETURN (false); 769 LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * count); 770 return TRACE_RETURN (c->check_array (lookupRecord, lookupRecord[0].static_size, lookupCount)); 771 } 772 773 private: 774 USHORT format; /* Format identifier--format = 3 */ 775 USHORT glyphCount; /* Number of glyphs in the input glyph 776 * sequence */ 777 USHORT lookupCount; /* Number of LookupRecords */ 778 OffsetTo<Coverage> 779 coverage[VAR]; /* Array of offsets to Coverage 780 * table in glyph sequence order */ 781 LookupRecord lookupRecordX[VAR]; /* Array of LookupRecords--in 782 * design order */ 783 public: 784 DEFINE_SIZE_ARRAY2 (6, coverage, lookupRecordX); 785}; 786 787struct Context 788{ 789 protected: 790 791 inline void closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const 792 { 793 TRACE_CLOSURE (); 794 switch (u.format) { 795 case 1: u.format1.closure (c, closure_func); break; 796 case 2: u.format2.closure (c, closure_func); break; 797 case 3: u.format3.closure (c, closure_func); break; 798 default: 
break; 799 } 800 } 801 802 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const 803 { 804 TRACE_APPLY (); 805 switch (u.format) { 806 case 1: return TRACE_RETURN (u.format1.apply (c, apply_func)); 807 case 2: return TRACE_RETURN (u.format2.apply (c, apply_func)); 808 case 3: return TRACE_RETURN (u.format3.apply (c, apply_func)); 809 default:return TRACE_RETURN (false); 810 } 811 } 812 813 inline bool sanitize (hb_sanitize_context_t *c) { 814 TRACE_SANITIZE (); 815 if (!u.format.sanitize (c)) return TRACE_RETURN (false); 816 switch (u.format) { 817 case 1: return TRACE_RETURN (u.format1.sanitize (c)); 818 case 2: return TRACE_RETURN (u.format2.sanitize (c)); 819 case 3: return TRACE_RETURN (u.format3.sanitize (c)); 820 default:return TRACE_RETURN (true); 821 } 822 } 823 824 private: 825 union { 826 USHORT format; /* Format identifier */ 827 ContextFormat1 format1; 828 ContextFormat2 format2; 829 ContextFormat3 format3; 830 } u; 831}; 832 833 834/* Chaining Contextual lookups */ 835 836struct ChainContextClosureLookupContext 837{ 838 ContextClosureFuncs funcs; 839 const void *intersects_data[3]; 840}; 841 842struct ChainContextApplyLookupContext 843{ 844 ContextApplyFuncs funcs; 845 const void *match_data[3]; 846}; 847 848static inline void chain_context_closure_lookup (hb_closure_context_t *c, 849 unsigned int backtrackCount, 850 const USHORT backtrack[], 851 unsigned int inputCount, /* Including the first glyph (not matched) */ 852 const USHORT input[], /* Array of input values--start with second glyph */ 853 unsigned int lookaheadCount, 854 const USHORT lookahead[], 855 unsigned int lookupCount, 856 const LookupRecord lookupRecord[], 857 ChainContextClosureLookupContext &lookup_context) 858{ 859 if (intersects_array (c, 860 backtrackCount, backtrack, 861 lookup_context.funcs.intersects, lookup_context.intersects_data[0]) 862 && intersects_array (c, 863 inputCount ? 
inputCount - 1 : 0, input, 864 lookup_context.funcs.intersects, lookup_context.intersects_data[1]) 865 && intersects_array (c, 866 lookaheadCount, lookahead, 867 lookup_context.funcs.intersects, lookup_context.intersects_data[2])) 868 closure_lookup (c, 869 lookupCount, lookupRecord, 870 lookup_context.funcs.closure); 871} 872 873static inline bool chain_context_apply_lookup (hb_apply_context_t *c, 874 unsigned int backtrackCount, 875 const USHORT backtrack[], 876 unsigned int inputCount, /* Including the first glyph (not matched) */ 877 const USHORT input[], /* Array of input values--start with second glyph */ 878 unsigned int lookaheadCount, 879 const USHORT lookahead[], 880 unsigned int lookupCount, 881 const LookupRecord lookupRecord[], 882 ChainContextApplyLookupContext &lookup_context) 883{ 884 unsigned int lookahead_offset; 885 return match_input (c, 886 inputCount, input, 887 lookup_context.funcs.match, lookup_context.match_data[1], 888 &lookahead_offset) 889 && match_backtrack (c, 890 backtrackCount, backtrack, 891 lookup_context.funcs.match, lookup_context.match_data[0]) 892 && match_lookahead (c, 893 lookaheadCount, lookahead, 894 lookup_context.funcs.match, lookup_context.match_data[2], 895 lookahead_offset) 896 && apply_lookup (c, 897 inputCount, 898 lookupCount, lookupRecord, 899 lookup_context.funcs.apply); 900} 901 902struct ChainRule 903{ 904 friend struct ChainRuleSet; 905 906 private: 907 908 inline void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const 909 { 910 TRACE_CLOSURE (); 911 const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack); 912 const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input); 913 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead); 914 chain_context_closure_lookup (c, 915 backtrack.len, backtrack.array, 916 input.len, input.array, 917 lookahead.len, lookahead.array, 918 lookup.len, lookup.array, 919 
/* (continuation) tail of ChainRule::closure — the struct's opening and the
 * beginning of closure() lie before this chunk; the call below forwards the
 * rule's backtrack/input/lookahead arrays to the shared closure helper. */
				  lookup_context);
  }

  /* Try to apply this single chain rule at the current buffer position:
   * match backtrack, (headless) input and lookahead glyph sequences, then
   * run the nested lookup records on success. */
  inline bool apply (hb_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
  {
    TRACE_APPLY ();
    /* The four arrays are stored back-to-back with no offsets; locate each
     * one immediately after the previous via StructAfter<>. */
    const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
    const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return TRACE_RETURN (chain_context_apply_lookup (c,
						     backtrack.len, backtrack.array,
						     input.len, input.array,
						     lookahead.len, lookahead.array,
						     lookup.len, lookup.array,
						     lookup_context));
  }

  public:
  /* Sanitize each trailing array in storage order; each StructAfter<> is only
   * valid once the preceding array's length has been verified. */
  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    if (!backtrack.sanitize (c)) return TRACE_RETURN (false);
    HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
    if (!input.sanitize (c)) return TRACE_RETURN (false);
    ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
    if (!lookahead.sanitize (c)) return TRACE_RETURN (false);
    ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return TRACE_RETURN (lookup.sanitize (c));
  }

  private:
  ArrayOf<USHORT>
		backtrack;		/* Array of backtracking values
					 * (to be matched before the input
					 * sequence) */
  HeadlessArrayOf<USHORT>
		inputX;			/* Array of input values (start with
					 * second glyph) */
  ArrayOf<USHORT>
		lookaheadX;		/* Array of lookahead values's (to be
					 * matched after the input sequence) */
  ArrayOf<LookupRecord>
		lookupX;		/* Array of LookupRecords--in
					 * design order) */
  public:
  DEFINE_SIZE_MIN (8);
};

/* A set of ChainRule tables sharing the same first-glyph condition. */
struct ChainRuleSet
{
  /* Closure: recurse into every rule unconditionally. */
  inline void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const
  {
    TRACE_CLOSURE ();
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
      (this+rule[i]).closure (c, lookup_context);
  }

  /* Apply rules in array order; the first rule that matches wins. */
  inline bool apply (hb_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
  {
    TRACE_APPLY ();
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
      if ((this+rule[i]).apply (c, lookup_context))
	return TRACE_RETURN (true);

    return TRACE_RETURN (false);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return TRACE_RETURN (rule.sanitize (c, this));
  }

  private:
  OffsetArrayOf<ChainRule>
		rule;			/* Array of ChainRule tables
					 * ordered by preference */
  public:
  DEFINE_SIZE_ARRAY (2, rule);
};

/* Chaining context, format 1: sequences expressed as individual glyph ids;
 * rule sets are selected by the coverage index of the current glyph. */
struct ChainContextFormat1
{
  friend struct ChainContext;

  private:

  inline void closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const
  {
    TRACE_CLOSURE ();
    const Coverage &cov = (this+coverage);

    /* Glyph-id matching: no ClassDef tables, hence the NULL match data. */
    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_glyph, closure_func},
      {NULL, NULL, NULL}
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      if (cov.intersects_coverage (c->glyphs, i)) {
	const ChainRuleSet &rule_set = this+ruleSet[i];
	rule_set.closure (c, lookup_context);
      }
  }

  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    unsigned int index = (this+coverage) (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    const ChainRuleSet &rule_set = this+ruleSet[index];
    struct ChainContextApplyLookupContext lookup_context = {
      {match_glyph, apply_func},
      {NULL, NULL, NULL}
    };
    return TRACE_RETURN (rule_set.apply (c, lookup_context));
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return TRACE_RETURN (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
  }

  private:
  USHORT	format;			/* Format identifier--format = 1 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  OffsetArrayOf<ChainRuleSet>
		ruleSet;		/* Array of ChainRuleSet tables
					 * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (6, ruleSet);
};

/* Chaining context, format 2: sequences expressed as glyph classes, with
 * separate ClassDef tables for backtrack, input, and lookahead. */
struct ChainContextFormat2
{
  friend struct ChainContext;

  private:

  inline void closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const
  {
    TRACE_CLOSURE ();
    if (!(this+coverage).intersects (c->glyphs))
      return;

    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_class, closure_func},
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def}
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      if (input_class_def.intersects_class (c->glyphs, i)) {
	const ChainRuleSet &rule_set = this+ruleSet[i];
	rule_set.closure (c, lookup_context);
      }
  }

  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    unsigned int index = (this+coverage) (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    /* The rule set is chosen by the *class* of the current glyph, not by its
     * coverage index; the coverage lookup above is only a gate. */
    index = input_class_def (c->buffer->cur().codepoint);
    const ChainRuleSet &rule_set = this+ruleSet[index];
    struct ChainContextApplyLookupContext lookup_context = {
      {match_class, apply_func},
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def}
    };
    return TRACE_RETURN (rule_set.apply (c, lookup_context));
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return TRACE_RETURN (coverage.sanitize (c, this) && backtrackClassDef.sanitize (c, this) &&
			 inputClassDef.sanitize (c, this) && lookaheadClassDef.sanitize (c, this) &&
			 ruleSet.sanitize (c, this));
  }

  private:
  USHORT	format;			/* Format identifier--format = 2 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  OffsetTo<ClassDef>
		backtrackClassDef;	/* Offset to glyph ClassDef table
					 * containing backtrack sequence
					 * data--from beginning of table */
  OffsetTo<ClassDef>
		inputClassDef;		/* Offset to glyph ClassDef
					 * table containing input sequence
					 * data--from beginning of table */
  OffsetTo<ClassDef>
		lookaheadClassDef;	/* Offset to glyph ClassDef table
					 * containing lookahead sequence
					 * data--from beginning of table */
  OffsetArrayOf<ChainRuleSet>
		ruleSet;		/* Array of ChainRuleSet tables
					 * ordered by class */
  public:
  DEFINE_SIZE_ARRAY (12, ruleSet);
};

/* Chaining context, format 3: one rule only, each sequence position given by
 * its own Coverage table; offsets are relative to this struct (hence the
 * {this, this, this} match data below). */
struct ChainContextFormat3
{
  friend struct ChainContext;

  private:

  inline void closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const
  {
    TRACE_CLOSURE ();
    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);

    if (!(this+input[0]).intersects (c->glyphs))
      return;

    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_coverage, closure_func},
      {this, this, this}
    };
    chain_context_closure_lookup (c,
				  backtrack.len, (const USHORT *) backtrack.array,
				  input.len, (const USHORT *) input.array + 1,
				  lookahead.len, (const USHORT *) lookahead.array,
				  lookup.len, lookup.array,
				  lookup_context);
  }

  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);

    /* The current glyph is matched against input[0] here; the helper is then
     * handed input.array + 1, i.e. matching resumes at the second coverage. */
    unsigned int index = (this+input[0]) (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    struct ChainContextApplyLookupContext lookup_context = {
      {match_coverage, apply_func},
      {this, this, this}
    };
    return TRACE_RETURN (chain_context_apply_lookup (c,
						     backtrack.len, (const USHORT *) backtrack.array,
						     input.len, (const USHORT *) input.array + 1,
						     lookahead.len, (const USHORT *) lookahead.array,
						     lookup.len, lookup.array, lookup_context));
  }

  /* Same storage-order constraint as ChainRule::sanitize: each array must be
   * validated before StructAfter<> can locate the next one. */
  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    if (!backtrack.sanitize (c, this)) return TRACE_RETURN (false);
    OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
    if (!input.sanitize (c, this)) return TRACE_RETURN (false);
    OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
    if (!lookahead.sanitize (c, this)) return TRACE_RETURN (false);
    ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return TRACE_RETURN (lookup.sanitize (c));
  }

  private:
  USHORT	format;			/* Format identifier--format = 3 */
  OffsetArrayOf<Coverage>
		backtrack;		/* Array of coverage tables
					 * in backtracking sequence, in glyph
					 * sequence order */
  OffsetArrayOf<Coverage>
		inputX;			/* Array of coverage
					 * tables in input sequence, in glyph
					 * sequence order */
  OffsetArrayOf<Coverage>
		lookaheadX;		/* Array of coverage tables
					 * in lookahead sequence, in glyph
					 * sequence order */
  ArrayOf<LookupRecord>
		lookupX;		/* Array of LookupRecords--in
					 * design order) */
  public:
  DEFINE_SIZE_MIN (10);
};

/* Dispatcher over the three ChainContext subtable formats. */
struct ChainContext
{
  protected:

  inline void closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const
  {
    TRACE_CLOSURE ();
    switch (u.format) {
    case 1: u.format1.closure (c, closure_func); break;
    case 2: u.format2.closure (c, closure_func); break;
    case 3: u.format3.closure (c, closure_func); break;
    default:                                     break;
    }
  }

  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.apply (c, apply_func));
    case 2: return TRACE_RETURN (u.format2.apply (c, apply_func));
    case 3: return TRACE_RETURN (u.format3.apply (c, apply_func));
    default:return TRACE_RETURN (false);
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    /* Validate the format tag first; an unknown format sanitizes as true
     * (and is simply ignored by closure/apply above). */
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    case 2: return TRACE_RETURN (u.format2.sanitize (c));
    case 3: return TRACE_RETURN (u.format3.sanitize (c));
    default:return TRACE_RETURN (true);
    }
  }

  private:
  union {
  USHORT		format;	/* Format identifier */
  ChainContextFormat1	format1;
  ChainContextFormat2	format2;
  ChainContextFormat3	format3;
  } u;
};


/* Extension subtable, format 1: indirection holding the actual lookup type
 * and a 32-bit offset to the real subtable. */
struct ExtensionFormat1
{
  friend struct Extension;

  protected:
  inline unsigned int get_type (void) const { return extensionLookupType; }
  inline unsigned int get_offset (void) const { return extensionOffset; }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    /* Only the fixed-size header is checked here; the pointed-to subtable is
     * not followed by this sanitize. */
    return TRACE_RETURN (c->check_struct (this));
  }

  private:
  USHORT	format;			/* Format identifier. Set to 1. */
  USHORT	extensionLookupType;	/* Lookup type of subtable referenced
					 * by ExtensionOffset (i.e. the
					 * extension subtable). */
  ULONG		extensionOffset;	/* Offset to the extension subtable,
					 * of lookup type subtable. */
  public:
  DEFINE_SIZE_STATIC (8);
};

/* Format dispatcher for Extension subtables; unknown formats report
 * type/offset 0. */
struct Extension
{
  inline unsigned int get_type (void) const
  {
    switch (u.format) {
    case 1: return u.format1.get_type ();
    default:return 0;
    }
  }
  inline unsigned int get_offset (void) const
  {
    switch (u.format) {
    case 1: return u.format1.get_offset ();
    default:return 0;
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    default:return TRACE_RETURN (true);
    }
  }

  private:
  union {
  USHORT		format;		/* Format identifier */
  ExtensionFormat1	format1;
  } u;
};


/*
 * GSUB/GPOS Common
 */

/* Shared header of the GSUB and GPOS tables: version plus offsets to the
 * script, feature, and lookup lists, with accessor wrappers for each. */
struct GSUBGPOS
{
  static const hb_tag_t GSUBTag	= HB_OT_TAG_GSUB;
  static const hb_tag_t GPOSTag	= HB_OT_TAG_GPOS;

  inline unsigned int get_script_count (void) const
  { return (this+scriptList).len; }
  inline const Tag& get_script_tag (unsigned int i) const
  { return (this+scriptList).get_tag (i); }
  inline unsigned int get_script_tags (unsigned int start_offset,
				       unsigned int *script_count /* IN/OUT */,
				       hb_tag_t     *script_tags /* OUT */) const
  { return (this+scriptList).get_tags (start_offset, script_count, script_tags); }
  inline const Script& get_script (unsigned int i) const
  { return (this+scriptList)[i]; }
  inline bool find_script_index (hb_tag_t tag, unsigned int *index) const
  { return (this+scriptList).find_index (tag, index); }

  inline unsigned int get_feature_count (void) const
  { return (this+featureList).len; }
  inline const Tag& get_feature_tag (unsigned int i) const
  { return (this+featureList).get_tag (i); }
  inline unsigned int get_feature_tags (unsigned int start_offset,
					unsigned int *feature_count /* IN/OUT */,
					hb_tag_t     *feature_tags /* OUT */) const
  { return (this+featureList).get_tags (start_offset, feature_count, feature_tags); }
  inline const Feature& get_feature (unsigned int i) const
  { return (this+featureList)[i]; }
  inline bool find_feature_index (hb_tag_t tag, unsigned int *index) const
  { return (this+featureList).find_index (tag, index); }

  inline unsigned int get_lookup_count (void) const
  { return (this+lookupList).len; }
  inline const Lookup& get_lookup (unsigned int i) const
  { return (this+lookupList)[i]; }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    /* Only major version 1 is accepted. */
    return TRACE_RETURN (version.sanitize (c) && likely (version.major == 1) &&
			 scriptList.sanitize (c, this) &&
			 featureList.sanitize (c, this) &&
			 lookupList.sanitize (c, this));
  }

  protected:
  FixedVersion	version;	/* Version of the GSUB/GPOS table--initially set
				 * to 0x00010000 */
  OffsetTo<ScriptList>
		scriptList;  	/* ScriptList table */
  OffsetTo<FeatureList>
		featureList; 	/* FeatureList table */
  OffsetTo<LookupList>
		lookupList; 	/* LookupList table */
  public:
  DEFINE_SIZE_STATIC (10);
};



#endif /* HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH */