hb-ot-layout-gsubgpos-private.hh revision 8fd83aaf6e50c2c25002c51fee26d82847a61769
1/* 2 * Copyright © 2007,2008,2009,2010 Red Hat, Inc. 3 * Copyright © 2010,2012 Google, Inc. 4 * 5 * This is part of HarfBuzz, a text shaping library. 6 * 7 * Permission is hereby granted, without written agreement and without 8 * license or royalty fees, to use, copy, modify, and distribute this 9 * software and its documentation for any purpose, provided that the 10 * above copyright notice and the following two paragraphs appear in 11 * all copies of this software. 12 * 13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR 14 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES 15 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN 16 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH 17 * DAMAGE. 18 * 19 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, 20 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND 21 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS 22 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO 23 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. 
 *
 * Red Hat Author(s): Behdad Esfahbod
 * Google Author(s): Behdad Esfahbod
 */

#ifndef HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH
#define HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH

#include "hb-buffer-private.hh"
#include "hb-ot-layout-gdef-table.hh"


/* Ligature bookkeeping.
 *
 * lig_props() packs two values into one byte:
 *   high nibble: unique ligature id,
 *   low nibble:  component number within the ligature (0 = base).
 */
static inline void
set_lig_props (hb_glyph_info_t &info, unsigned int lig_id, unsigned int lig_comp)
{
  info.lig_props() = (lig_id << 4) | (lig_comp & 0x0F);
}
/* Returns the ligature id stored in the high nibble of lig_props(). */
static inline unsigned int
get_lig_id (hb_glyph_info_t &info)
{
  return info.lig_props() >> 4;
}
/* Returns the component number stored in the low nibble of lig_props(). */
static inline unsigned int
get_lig_comp (hb_glyph_info_t &info)
{
  return info.lig_props() & 0x0F;
}

/* Returns a fresh non-zero 4-bit ligature id drawn from the buffer's serial
 * counter.  Zero is reserved (means "not a ligature"), so on wrap-around we
 * simply draw again; recursion terminates because the serial advances. */
static inline uint8_t allocate_lig_id (hb_buffer_t *buffer) {
  uint8_t lig_id = buffer->next_serial () & 0x0F;
  if (unlikely (!lig_id))
    lig_id = allocate_lig_id (buffer); /* in case of overflow */
  return lig_id;
}



#ifndef HB_DEBUG_CLOSURE
#define HB_DEBUG_CLOSURE (HB_DEBUG+0)
#endif

/* Expects a local `c` (hb_closure_context_t *) and `this` in scope. */
#define TRACE_CLOSURE() \
  hb_auto_trace_t<HB_DEBUG_CLOSURE> trace (&c->debug_depth, "CLOSURE", this, HB_FUNC, "");


/* TODO Add TRACE_RETURN annotation for would_apply */


/* Context threaded through glyph-closure: collects into `glyphs` every glyph
 * that the lookups being walked could possibly produce. */
struct hb_closure_context_t
{
  hb_face_t *face;
  hb_set_t *glyphs;                    /* grows as lookups are traversed */
  unsigned int nesting_level_left;     /* recursion budget for nested lookups */
  unsigned int debug_depth;


  hb_closure_context_t (hb_face_t *face_,
			hb_set_t *glyphs_,
			unsigned int nesting_level_left_ = MAX_NESTING_LEVEL) :
			  face (face_), glyphs (glyphs_),
			  nesting_level_left (nesting_level_left_),
			  debug_depth (0) {}
};



#ifndef HB_DEBUG_APPLY
#define HB_DEBUG_APPLY (HB_DEBUG+0)
#endif

/* Expects a local `c` (hb_apply_context_t *) and `this` in scope. */
#define TRACE_APPLY() \
  hb_auto_trace_t<HB_DEBUG_APPLY> trace (&c->debug_depth, "APPLY", this, HB_FUNC, "idx %d codepoint %u", c->buffer->idx, c->buffer->info[c->buffer->idx].codepoint);



/* Context threaded through lookup application over a buffer. */
struct hb_apply_context_t
{
  hb_font_t *font;
  hb_face_t *face;
  hb_buffer_t
*buffer;
  hb_direction_t direction;
  hb_mask_t lookup_mask;          /* only glyphs whose mask intersects this participate */
  unsigned int context_length;    /* remaining glyphs a contextual lookup may consume */
  unsigned int nesting_level_left;
  unsigned int lookup_props;
  unsigned int property;          /* property of first glyph */
  unsigned int debug_depth;


  hb_apply_context_t (hb_font_t *font_,
		      hb_face_t *face_,
		      hb_buffer_t *buffer_,
		      hb_mask_t lookup_mask_,
		      unsigned int context_length_ = NO_CONTEXT,
		      unsigned int nesting_level_left_ = MAX_NESTING_LEVEL) :
			font (font_), face (face_), buffer (buffer_),
			direction (buffer_->props.direction),
			lookup_mask (lookup_mask_),
			context_length (context_length_),
			nesting_level_left (nesting_level_left_),
			lookup_props (0), property (0), debug_depth (0) {}

  /* Caches the lookup flags of the lookup about to be applied. */
  void set_lookup (const Lookup &l) {
    lookup_props = l.get_props ();
  }

  /* Walks forward over buffer->info, skipping glyphs that the current
   * lookup_props say to ignore (e.g. marks), bounded by context_length. */
  struct mark_skipping_forward_iterator_t
  {
    inline mark_skipping_forward_iterator_t (hb_apply_context_t *c_,
					     unsigned int start_index_,
					     unsigned int num_items_,
					     hb_mask_t mask_ = 0)
    {
      c = c_;
      idx = start_index_;
      num_items = num_items_;
      mask = mask_ ? mask_ : c->lookup_mask;
      end = MIN (c->buffer->len, c->buffer->idx + c->context_length);
    }
    /* True when fewer than num_items glyphs remain before `end`. */
    inline bool has_no_chance (void) const
    {
      return unlikely (num_items && idx + num_items >= end);
    }
    /* Advances to the next non-skipped glyph; returns whether it matches mask. */
    inline bool next (unsigned int *property_out,
		      unsigned int lookup_props)
    {
      assert (num_items > 0);
      do
      {
	if (has_no_chance ())
	  return false;
	idx++;
      } while (_hb_ot_layout_skip_mark (c->face, &c->buffer->info[idx], lookup_props, property_out));
      num_items--;
      return !!(c->buffer->info[idx].mask & mask);
    }
    inline bool next (unsigned int *property_out = NULL)
    {
      return next (property_out, c->lookup_props);
    }

    unsigned int idx;
    private:
    hb_apply_context_t *c;
    unsigned int num_items;
    hb_mask_t mask;
    unsigned int end;
  };

  /* Walks backward over buffer->out_info (already-output glyphs),
   * skipping ignored glyphs analogously to the forward iterator. */
  struct mark_skipping_backward_iterator_t
  {
    inline mark_skipping_backward_iterator_t (hb_apply_context_t *c_,
					      unsigned int start_index_,
					      unsigned int num_items_,
					      hb_mask_t mask_ = 0)
    {
      c = c_;
      idx = start_index_;
      num_items = num_items_;
      mask = mask_ ? mask_ : c->lookup_mask;
    }
    inline bool has_no_chance (void) const
    {
      return unlikely (idx < num_items);
    }
    /* Steps to the previous non-skipped glyph; returns whether it matches mask. */
    inline bool prev (unsigned int *property_out,
		      unsigned int lookup_props)
    {
      assert (num_items > 0);
      do
      {
	if (has_no_chance ())
	  return false;
	idx--;
      } while (_hb_ot_layout_skip_mark (c->face, &c->buffer->out_info[idx], lookup_props, property_out));
      num_items--;
      return !!(c->buffer->out_info[idx].mask & mask);
    }
    inline bool prev (unsigned int *property_out = NULL)
    {
      return prev (property_out, c->lookup_props);
    }

    unsigned int idx;
    private:
    hb_apply_context_t *c;
    unsigned int num_items;
    hb_mask_t mask;
  };

  /* True if the glyph at buffer->idx is ignored by the current lookup. */
  inline bool should_mark_skip_current_glyph (void) const
  {
    return _hb_ot_layout_skip_mark (face, &buffer->info[buffer->idx], lookup_props, NULL);
  }



  /* Replaces the current glyph, invalidating its cached GDEF property. */
  inline void replace_glyph (hb_codepoint_t glyph_index) const
  {
    clear_property ();
    buffer->replace_glyph (glyph_index);
  }
  /* Replaces num_in glyphs with num_out glyphs given as big-endian 16-bit ids. */
  inline void replace_glyphs_be16 (unsigned int num_in,
				   unsigned int num_out,
				   const uint16_t *glyph_data_be) const
  {
    clear_property ();
    buffer->replace_glyphs_be16 (num_in, num_out, glyph_data_be);
  }

  /* Seeds the glyph-property cache with a guessed GDEF class. */
  inline void guess_glyph_class (unsigned int klass)
  {
    /* XXX if !has gdef */
    buffer->info[buffer->idx].props_cache() = klass;
  }

  private:
  /* Drops the cached GDEF property so it is re-fetched next time. */
  inline void clear_property (void) const
  {
    /* XXX if has gdef */
    buffer->info[buffer->idx].props_cache() = 0;
  }
};



/* Callback types shared by the (Chain)Context closure/apply machinery.
 * `value` is interpreted per-callback: a glyph id, a class value, or an
 * offset to a Coverage table. */
typedef bool (*intersects_func_t) (hb_set_t *glyphs, const USHORT &value, const void *data);
typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const USHORT &value, const void *data);
typedef void (*closure_lookup_func_t) (hb_closure_context_t *c, unsigned int lookup_index);
typedef bool (*apply_lookup_func_t) (hb_apply_context_t *c, unsigned int lookup_index);

struct ContextClosureFuncs
{
  intersects_func_t intersects;
  closure_lookup_func_t closure;
};
struct ContextApplyFuncs
{
  match_func_t match;
  apply_lookup_func_t apply;
};

/* value is a glyph id; data unused. */
static inline bool intersects_glyph (hb_set_t *glyphs, const USHORT &value, const void *data HB_UNUSED)
{
  return glyphs->has (value);
}
/* value is a class number; data must point at the ClassDef. */
static inline bool intersects_class (hb_set_t *glyphs, const USHORT &value, const void *data)
{
  const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
  return class_def.intersects_class (glyphs, value);
}
/* value is an offset to a Coverage table, relative to `data` (the subtable base). */
static inline bool intersects_coverage (hb_set_t *glyphs, const USHORT &value, const void *data)
{
  const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
  return (data+coverage).intersects (glyphs);
}

/* True iff every position in `values` can intersect the closure glyph set. */
static inline bool intersects_array (hb_closure_context_t *c,
				     unsigned int count,
				     const USHORT values[],
				     intersects_func_t intersects_func,
				     const void *intersects_data)
{
  for (unsigned int i = 0; i < count; i++)
    if (likely (!intersects_func (c->glyphs, values[i], intersects_data)))
      return false;
  return true;
}


/* value is a glyph id; data unused. */
static inline bool match_glyph (hb_codepoint_t glyph_id, const USHORT &value, const void *data HB_UNUSED)
{
  return glyph_id == value;
}
/* value is a class number; data must point at the ClassDef. */
static inline bool match_class
(hb_codepoint_t glyph_id, const USHORT &value, const void *data)
{
  const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
  return class_def.get_class (glyph_id) == value;
}
/* value is an offset to a Coverage table, relative to `data` (the subtable base). */
static inline bool match_coverage (hb_codepoint_t glyph_id, const USHORT &value, const void *data)
{
  const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
  return (data+coverage).get_coverage (glyph_id) != NOT_COVERED;
}


/* Matches the input sequence of a (chain-)context rule starting at the
 * current buffer position, skipping ignorable glyphs.  On success stores the
 * matched span length (in buffer positions) into *context_length_out. */
static inline bool match_input (hb_apply_context_t *c,
				unsigned int count, /* Including the first glyph (not matched) */
				const USHORT input[], /* Array of input values--start with second glyph */
				match_func_t match_func,
				const void *match_data,
				unsigned int *context_length_out)
{
  hb_apply_context_t::mark_skipping_forward_iterator_t skippy_iter (c, c->buffer->idx, count - 1);
  if (skippy_iter.has_no_chance ())
    return false;

  for (unsigned int i = 1; i < count; i++)
  {
    if (!skippy_iter.next ())
      return false;

    if (likely (!match_func (c->buffer->info[skippy_iter.idx].codepoint, input[i - 1], match_data)))
      return false;
  }

  *context_length_out = skippy_iter.idx - c->buffer->idx + 1;

  return true;
}

/* Matches `count` backtrack values against already-output glyphs
 * (buffer->out_info), walking backward.  Mask check is disabled by passing
 * an all-ones mask. */
static inline bool match_backtrack (hb_apply_context_t *c,
				    unsigned int count,
				    const USHORT backtrack[],
				    match_func_t match_func,
				    const void *match_data)
{
  hb_apply_context_t::mark_skipping_backward_iterator_t skippy_iter (c, c->buffer->backtrack_len (), count, (hb_mask_t) -1);
  if (skippy_iter.has_no_chance ())
    return false;

  for (unsigned int i = 0; i < count; i++)
  {
    if (!skippy_iter.prev ())
      return false;

    if (likely (!match_func (c->buffer->out_info[skippy_iter.idx].codepoint, backtrack[i], match_data)))
      return false;
  }

  return true;
}

/* Matches `count` lookahead values after the matched input span
 * (`offset` = length of the matched input). */
static inline bool match_lookahead (hb_apply_context_t *c,
				    unsigned int count,
				    const USHORT
lookahead[], 363 match_func_t match_func, 364 const void *match_data, 365 unsigned int offset) 366{ 367 hb_apply_context_t::mark_skipping_forward_iterator_t skippy_iter (c, c->buffer->idx + offset - 1, count, (hb_mask_t) -1); 368 if (skippy_iter.has_no_chance ()) 369 return false; 370 371 for (unsigned int i = 0; i < count; i++) 372 { 373 if (!skippy_iter.next ()) 374 return false; 375 376 if (likely (!match_func (c->buffer->info[skippy_iter.idx].codepoint, lookahead[i], match_data))) 377 return false; 378 } 379 380 return true; 381} 382 383 384 385struct LookupRecord 386{ 387 inline bool sanitize (hb_sanitize_context_t *c) { 388 TRACE_SANITIZE (); 389 return TRACE_RETURN (c->check_struct (this)); 390 } 391 392 USHORT sequenceIndex; /* Index into current glyph 393 * sequence--first glyph = 0 */ 394 USHORT lookupListIndex; /* Lookup to apply to that 395 * position--zero--based */ 396 public: 397 DEFINE_SIZE_STATIC (4); 398}; 399 400 401static inline void closure_lookup (hb_closure_context_t *c, 402 unsigned int lookupCount, 403 const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */ 404 closure_lookup_func_t closure_func) 405{ 406 for (unsigned int i = 0; i < lookupCount; i++) 407 closure_func (c, lookupRecord->lookupListIndex); 408} 409 410static inline bool apply_lookup (hb_apply_context_t *c, 411 unsigned int count, /* Including the first glyph */ 412 unsigned int lookupCount, 413 const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */ 414 apply_lookup_func_t apply_func) 415{ 416 unsigned int end = MIN (c->buffer->len, c->buffer->idx + c->context_length); 417 if (unlikely (count == 0 || c->buffer->idx + count > end)) 418 return false; 419 420 /* TODO We don't support lookupRecord arrays that are not increasing: 421 * Should be easy for in_place ones at least. */ 422 423 /* Note: If sublookup is reverse, it will underflow after the first loop 424 * and we jump out of it. Not entirely disastrous. 
So we don't check 425 * for reverse lookup here. 426 */ 427 for (unsigned int i = 0; i < count; /* NOP */) 428 { 429 if (unlikely (c->buffer->idx == end)) 430 return true; 431 while (c->should_mark_skip_current_glyph ()) 432 { 433 /* No lookup applied for this index */ 434 c->buffer->next_glyph (); 435 if (unlikely (c->buffer->idx == end)) 436 return true; 437 } 438 439 if (lookupCount && i == lookupRecord->sequenceIndex) 440 { 441 unsigned int old_pos = c->buffer->idx; 442 443 /* Apply a lookup */ 444 bool done = apply_func (c, lookupRecord->lookupListIndex); 445 446 lookupRecord++; 447 lookupCount--; 448 /* Err, this is wrong if the lookup jumped over some glyphs */ 449 i += c->buffer->idx - old_pos; 450 if (unlikely (c->buffer->idx == end)) 451 return true; 452 453 if (!done) 454 goto not_applied; 455 } 456 else 457 { 458 not_applied: 459 /* No lookup applied for this index */ 460 c->buffer->next_glyph (); 461 i++; 462 } 463 } 464 465 return true; 466} 467 468 469 470/* Contextual lookups */ 471 472struct ContextClosureLookupContext 473{ 474 ContextClosureFuncs funcs; 475 const void *intersects_data; 476}; 477 478struct ContextApplyLookupContext 479{ 480 ContextApplyFuncs funcs; 481 const void *match_data; 482}; 483 484static inline void context_closure_lookup (hb_closure_context_t *c, 485 unsigned int inputCount, /* Including the first glyph (not matched) */ 486 const USHORT input[], /* Array of input values--start with second glyph */ 487 unsigned int lookupCount, 488 const LookupRecord lookupRecord[], 489 ContextClosureLookupContext &lookup_context) 490{ 491 if (intersects_array (c, 492 inputCount ? 
inputCount - 1 : 0, input,
			lookup_context.funcs.intersects, lookup_context.intersects_data))
    closure_lookup (c,
		    lookupCount, lookupRecord,
		    lookup_context.funcs.closure);
}


/* Applies one context rule: match the input sequence, then apply its
 * LookupRecords inside a copy of the context bounded to the matched span. */
static inline bool context_apply_lookup (hb_apply_context_t *c,
					 unsigned int inputCount, /* Including the first glyph (not matched) */
					 const USHORT input[], /* Array of input values--start with second glyph */
					 unsigned int lookupCount,
					 const LookupRecord lookupRecord[],
					 ContextApplyLookupContext &lookup_context)
{
  hb_apply_context_t new_context = *c;
  return match_input (c,
		      inputCount, input,
		      lookup_context.funcs.match, lookup_context.match_data,
		      &new_context.context_length)
      && apply_lookup (&new_context,
		       inputCount,
		       lookupCount, lookupRecord,
		       lookup_context.funcs.apply);
}

/* One context rule: an input sequence followed by its LookupRecords.
 * The LookupRecord array starts right after `input` in the binary data. */
struct Rule
{
  friend struct RuleSet;

  private:

  inline void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const
  {
    TRACE_CLOSURE ();
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (input, input[0].static_size * (inputCount ? inputCount - 1 : 0));
    context_closure_lookup (c,
			    inputCount, input,
			    lookupCount, lookupRecord,
			    lookup_context);
  }

  inline bool apply (hb_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
  {
    TRACE_APPLY ();
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (input, input[0].static_size * (inputCount ?
inputCount - 1 : 0));
    return TRACE_RETURN (context_apply_lookup (c, inputCount, input, lookupCount, lookupRecord, lookup_context));
  }

  public:
  /* NOTE(review): unlike sibling sanitizers this one is not wrapped in
   * TRACE_RETURN — cosmetic inconsistency only. */
  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return inputCount.sanitize (c)
	&& lookupCount.sanitize (c)
	&& c->check_range (input,
			   input[0].static_size * inputCount
			   + lookupRecordX[0].static_size * lookupCount);
  }

  private:
  USHORT	inputCount;		/* Total number of glyphs in input
					 * glyph sequence--includes the first
					 * glyph */
  USHORT	lookupCount;		/* Number of LookupRecords */
  USHORT	input[VAR];		/* Array of match inputs--start with
					 * second glyph */
  LookupRecord	lookupRecordX[VAR];	/* Array of LookupRecords--in
					 * design order */
  public:
  DEFINE_SIZE_ARRAY2 (4, input, lookupRecordX);
};

/* A preference-ordered collection of Rules; apply() stops at the first
 * rule that matches. */
struct RuleSet
{
  inline void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const
  {
    TRACE_CLOSURE ();
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
      (this+rule[i]).closure (c, lookup_context);
  }

  inline bool apply (hb_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
  {
    TRACE_APPLY ();
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
    {
      if ((this+rule[i]).apply (c, lookup_context))
	return TRACE_RETURN (true);
    }
    return TRACE_RETURN (false);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return TRACE_RETURN (rule.sanitize (c, this));
  }

  private:
  OffsetArrayOf<Rule>
		rule;			/* Array of Rule tables
					 * ordered by preference */
  public:
  DEFINE_SIZE_ARRAY (2, rule);
};


/* Context Substitution/Positioning Format 1: rule sets indexed by the
 * Coverage index of the first glyph; input positions match glyph ids. */
struct ContextFormat1
{
  friend struct Context;

  private:

  inline void closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const
  {
    TRACE_CLOSURE ();
610 const Coverage &cov = (this+coverage); 611 612 struct ContextClosureLookupContext lookup_context = { 613 {intersects_glyph, closure_func}, 614 NULL 615 }; 616 617 unsigned int count = ruleSet.len; 618 for (unsigned int i = 0; i < count; i++) 619 if (cov.intersects_coverage (c->glyphs, i)) { 620 const RuleSet &rule_set = this+ruleSet[i]; 621 rule_set.closure (c, lookup_context); 622 } 623 } 624 625 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const 626 { 627 TRACE_APPLY (); 628 unsigned int index = (this+coverage) (c->buffer->info[c->buffer->idx].codepoint); 629 if (likely (index == NOT_COVERED)) 630 return TRACE_RETURN (false); 631 632 const RuleSet &rule_set = this+ruleSet[index]; 633 struct ContextApplyLookupContext lookup_context = { 634 {match_glyph, apply_func}, 635 NULL 636 }; 637 return TRACE_RETURN (rule_set.apply (c, lookup_context)); 638 } 639 640 inline bool sanitize (hb_sanitize_context_t *c) { 641 TRACE_SANITIZE (); 642 return TRACE_RETURN (coverage.sanitize (c, this) && ruleSet.sanitize (c, this)); 643 } 644 645 private: 646 USHORT format; /* Format identifier--format = 1 */ 647 OffsetTo<Coverage> 648 coverage; /* Offset to Coverage table--from 649 * beginning of table */ 650 OffsetArrayOf<RuleSet> 651 ruleSet; /* Array of RuleSet tables 652 * ordered by Coverage Index */ 653 public: 654 DEFINE_SIZE_ARRAY (6, ruleSet); 655}; 656 657 658struct ContextFormat2 659{ 660 friend struct Context; 661 662 private: 663 664 inline void closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const 665 { 666 TRACE_CLOSURE (); 667 if (!(this+coverage).intersects (c->glyphs)) 668 return; 669 670 const ClassDef &class_def = this+classDef; 671 672 struct ContextClosureLookupContext lookup_context = { 673 {intersects_class, closure_func}, 674 NULL 675 }; 676 677 unsigned int count = ruleSet.len; 678 for (unsigned int i = 0; i < count; i++) 679 if (class_def.intersects_class (c->glyphs, i)) { 680 const RuleSet &rule_set = 
this+ruleSet[i];
	rule_set.closure (c, lookup_context);
      }
  }

  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    unsigned int index = (this+coverage) (c->buffer->info[c->buffer->idx].codepoint);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    const ClassDef &class_def = this+classDef;
    /* Re-index by the first glyph's class to pick the rule set. */
    index = class_def (c->buffer->info[c->buffer->idx].codepoint);
    const RuleSet &rule_set = this+ruleSet[index];
    struct ContextApplyLookupContext lookup_context = {
      {match_class, apply_func},
      &class_def
    };
    return TRACE_RETURN (rule_set.apply (c, lookup_context));
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return TRACE_RETURN (coverage.sanitize (c, this) && classDef.sanitize (c, this) && ruleSet.sanitize (c, this));
  }

  private:
  USHORT	format;			/* Format identifier--format = 2 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  OffsetTo<ClassDef>
		classDef;		/* Offset to glyph ClassDef table--from
					 * beginning of table */
  OffsetArrayOf<RuleSet>
		ruleSet;		/* Array of RuleSet tables
					 * ordered by class */
  public:
  DEFINE_SIZE_ARRAY (8, ruleSet);
};


/* Context Substitution/Positioning Format 3: a single rule whose input
 * positions each match against their own Coverage table. */
struct ContextFormat3
{
  friend struct Context;

  private:

  inline void closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const
  {
    TRACE_CLOSURE ();
    if (!(this+coverage[0]).intersects (c->glyphs))
      return;

    /* LookupRecords start right after the coverage-offset array. */
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * glyphCount);
    struct ContextClosureLookupContext lookup_context = {
      {intersects_coverage, closure_func},
      this
    };
    context_closure_lookup (c,
			    glyphCount, (const USHORT *) (coverage + 1),
			    lookupCount, lookupRecord,
			    lookup_context);
  }

  inline bool apply
(hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    unsigned int index = (this+coverage[0]) (c->buffer->info[c->buffer->idx].codepoint);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * glyphCount);
    struct ContextApplyLookupContext lookup_context = {
      {match_coverage, apply_func},
      this
    };
    return TRACE_RETURN (context_apply_lookup (c, glyphCount, (const USHORT *) (coverage + 1), lookupCount, lookupRecord, lookup_context));
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    if (!c->check_struct (this)) return TRACE_RETURN (false);
    unsigned int count = glyphCount;
    if (!c->check_array (coverage, coverage[0].static_size, count)) return TRACE_RETURN (false);
    for (unsigned int i = 0; i < count; i++)
      if (!coverage[i].sanitize (c, this)) return TRACE_RETURN (false);
    LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * count);
    return TRACE_RETURN (c->check_array (lookupRecord, lookupRecord[0].static_size, lookupCount));
  }

  private:
  USHORT	format;			/* Format identifier--format = 3 */
  USHORT	glyphCount;		/* Number of glyphs in the input glyph
					 * sequence */
  USHORT	lookupCount;		/* Number of LookupRecords */
  OffsetTo<Coverage>
		coverage[VAR];		/* Array of offsets to Coverage
					 * table in glyph sequence order */
  LookupRecord	lookupRecordX[VAR];	/* Array of LookupRecords--in
					 * design order */
  public:
  DEFINE_SIZE_ARRAY2 (6, coverage, lookupRecordX);
};

/* Dispatcher over the three Context subtable formats. */
struct Context
{
  protected:

  inline void closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const
  {
    TRACE_CLOSURE ();
    switch (u.format) {
    case 1: u.format1.closure (c, closure_func); break;
    case 2: u.format2.closure (c, closure_func); break;
    case 3: u.format3.closure (c, closure_func); break;
    default: break;
    }
  }

  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.apply (c, apply_func));
    case 2: return TRACE_RETURN (u.format2.apply (c, apply_func));
    case 3: return TRACE_RETURN (u.format3.apply (c, apply_func));
    default: return TRACE_RETURN (false);
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    /* Validate the format tag before trusting the union member. */
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    case 2: return TRACE_RETURN (u.format2.sanitize (c));
    case 3: return TRACE_RETURN (u.format3.sanitize (c));
    default: return TRACE_RETURN (true);
    }
  }

  private:
  union {
  USHORT		format;		/* Format identifier */
  ContextFormat1	format1;
  ContextFormat2	format2;
  ContextFormat3	format3;
  } u;
};


/* Chaining Contextual lookups */

struct ChainContextClosureLookupContext
{
  ContextClosureFuncs funcs;
  const void *intersects_data[3];  /* backtrack / input / lookahead */
};

struct ChainContextApplyLookupContext
{
  ContextApplyFuncs funcs;
  const void *match_data[3];       /* backtrack / input / lookahead */
};

/* Closure for one chain-context rule: all three sequences must be able to
 * intersect the glyph set before recursing into the rule's lookups. */
static inline void chain_context_closure_lookup (hb_closure_context_t *c,
						 unsigned int backtrackCount,
						 const USHORT backtrack[],
						 unsigned int inputCount, /* Including the first glyph (not matched) */
						 const USHORT input[], /* Array of input values--start with second glyph */
						 unsigned int lookaheadCount,
						 const USHORT lookahead[],
						 unsigned int lookupCount,
						 const LookupRecord lookupRecord[],
						 ChainContextClosureLookupContext &lookup_context)
{
  if (intersects_array (c,
			backtrackCount, backtrack,
			lookup_context.funcs.intersects, lookup_context.intersects_data[0])
   && intersects_array (c,
			inputCount
? inputCount - 1 : 0, input,
			lookup_context.funcs.intersects, lookup_context.intersects_data[1])
   && intersects_array (c,
			lookaheadCount, lookahead,
			lookup_context.funcs.intersects, lookup_context.intersects_data[2]))
    closure_lookup (c,
		    lookupCount, lookupRecord,
		    lookup_context.funcs.closure);
}

/* Applies one chain-context rule: match backtrack, input, and lookahead,
 * then apply the rule's LookupRecords over the matched input span. */
static inline bool chain_context_apply_lookup (hb_apply_context_t *c,
					       unsigned int backtrackCount,
					       const USHORT backtrack[],
					       unsigned int inputCount, /* Including the first glyph (not matched) */
					       const USHORT input[], /* Array of input values--start with second glyph */
					       unsigned int lookaheadCount,
					       const USHORT lookahead[],
					       unsigned int lookupCount,
					       const LookupRecord lookupRecord[],
					       ChainContextApplyLookupContext &lookup_context)
{
  /* First guess */
  if (unlikely (c->buffer->backtrack_len () < backtrackCount ||
		c->buffer->idx + inputCount + lookaheadCount > c->buffer->len ||
		inputCount + lookaheadCount > c->context_length))
    return false;

  hb_apply_context_t new_context = *c;
  return match_backtrack (c,
			  backtrackCount, backtrack,
			  lookup_context.funcs.match, lookup_context.match_data[0])
      && match_input (c,
		      inputCount, input,
		      lookup_context.funcs.match, lookup_context.match_data[1],
		      &new_context.context_length)
      && match_lookahead (c,
			  lookaheadCount, lookahead,
			  lookup_context.funcs.match, lookup_context.match_data[2],
			  new_context.context_length)
      && apply_lookup (&new_context,
		       inputCount,
		       lookupCount, lookupRecord,
		       lookup_context.funcs.apply);
}

/* One chain-context rule: four variable-length arrays laid out back to back
 * (backtrack, input, lookahead, LookupRecords); the trailing three are
 * located with StructAfter since their offsets depend on earlier lengths. */
struct ChainRule
{
  friend struct ChainRuleSet;

  private:

  inline void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const
  {
    TRACE_CLOSURE ();
    const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
    const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    chain_context_closure_lookup (c,
				  backtrack.len, backtrack.array,
				  input.len, input.array,
				  lookahead.len, lookahead.array,
				  lookup.len, lookup.array,
				  lookup_context);
  }

  inline bool apply (hb_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
  {
    TRACE_APPLY ();
    const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
    const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return TRACE_RETURN (chain_context_apply_lookup (c,
						     backtrack.len, backtrack.array,
						     input.len, input.array,
						     lookahead.len, lookahead.array,
						     lookup.len, lookup.array, lookup_context));
  }

  public:
  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    /* Each array must be validated before the next can even be located. */
    if (!backtrack.sanitize (c)) return TRACE_RETURN (false);
    HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
    if (!input.sanitize (c)) return TRACE_RETURN (false);
    ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
    if (!lookahead.sanitize (c)) return TRACE_RETURN (false);
    ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return TRACE_RETURN (lookup.sanitize (c));
  }

  private:
  ArrayOf<USHORT>
		backtrack;		/* Array of backtracking values
					 * (to be matched before the input
					 * sequence) */
  HeadlessArrayOf<USHORT>
		inputX;			/* Array of input values (start with
					 * second glyph) */
  ArrayOf<USHORT>
		lookaheadX;		/* Array of lookahead values (to be
					 * matched after the input sequence) */
  ArrayOf<LookupRecord>
		lookupX;		/* Array of LookupRecords--in
					 * design order */
  public:
  DEFINE_SIZE_MIN (8);
};

/* A preference-ordered collection of ChainRules; apply() stops at the
 * first rule that matches. */
struct ChainRuleSet
{
  inline void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const
  {
    TRACE_CLOSURE ();
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
      (this+rule[i]).closure (c, lookup_context);
  }

  inline bool apply (hb_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
  {
    TRACE_APPLY ();
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
      if ((this+rule[i]).apply (c, lookup_context))
	return TRACE_RETURN (true);

    return TRACE_RETURN (false);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return TRACE_RETURN (rule.sanitize (c, this));
  }

  private:
  OffsetArrayOf<ChainRule>
		rule;			/* Array of ChainRule tables
					 * ordered by preference */
  public:
  DEFINE_SIZE_ARRAY (2, rule);
};

/* Chain Context Format 1: rule sets indexed by the Coverage index of the
 * first input glyph; sequence positions match glyph ids. */
struct ChainContextFormat1
{
  friend struct ChainContext;

  private:

  inline void closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const
  {
    TRACE_CLOSURE ();
    const Coverage &cov = (this+coverage);

    /* intersects_glyph ignores its data argument, so NULLs are fine. */
    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_glyph, closure_func},
      {NULL, NULL, NULL}
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      if (cov.intersects_coverage (c->glyphs, i)) {
	const ChainRuleSet &rule_set = this+ruleSet[i];
	rule_set.closure (c, lookup_context);
      }
  }

  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    unsigned int index = (this+coverage) (c->buffer->info[c->buffer->idx].codepoint);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    const ChainRuleSet &rule_set = this+ruleSet[index];
    struct ChainContextApplyLookupContext lookup_context = {
      {match_glyph,
       apply_func},
      {NULL, NULL, NULL}
    };
    return TRACE_RETURN (rule_set.apply (c, lookup_context));
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return TRACE_RETURN (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
  }

  private:
  USHORT	format;			/* Format identifier--format = 1 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  OffsetArrayOf<ChainRuleSet>
		ruleSet;		/* Array of ChainRuleSet tables
					 * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (6, ruleSet);
};

/* Chain Context Format 2: rule sets indexed by input-glyph class, with
 * separate ClassDefs for the backtrack, input, and lookahead sequences. */
struct ChainContextFormat2
{
  friend struct ChainContext;

  private:

  inline void closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const
  {
    TRACE_CLOSURE ();
    if (!(this+coverage).intersects (c->glyphs))
      return;

    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    /* Per-sequence ClassDefs for intersects_class. */
    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_class, closure_func},
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def}
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      if (input_class_def.intersects_class (c->glyphs, i)) {
	const ChainRuleSet &rule_set = this+ruleSet[i];
	rule_set.closure (c, lookup_context);
      }
  }

  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    unsigned int index = (this+coverage) (c->buffer->info[c->buffer->idx].codepoint);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const
ClassDef &lookahead_class_def = this+lookaheadClassDef; 1097 1098 index = input_class_def (c->buffer->info[c->buffer->idx].codepoint); 1099 const ChainRuleSet &rule_set = this+ruleSet[index]; 1100 struct ChainContextApplyLookupContext lookup_context = { 1101 {match_class, apply_func}, 1102 {&backtrack_class_def, 1103 &input_class_def, 1104 &lookahead_class_def} 1105 }; 1106 return TRACE_RETURN (rule_set.apply (c, lookup_context)); 1107 } 1108 1109 inline bool sanitize (hb_sanitize_context_t *c) { 1110 TRACE_SANITIZE (); 1111 return TRACE_RETURN (coverage.sanitize (c, this) && backtrackClassDef.sanitize (c, this) && 1112 inputClassDef.sanitize (c, this) && lookaheadClassDef.sanitize (c, this) && 1113 ruleSet.sanitize (c, this)); 1114 } 1115 1116 private: 1117 USHORT format; /* Format identifier--format = 2 */ 1118 OffsetTo<Coverage> 1119 coverage; /* Offset to Coverage table--from 1120 * beginning of table */ 1121 OffsetTo<ClassDef> 1122 backtrackClassDef; /* Offset to glyph ClassDef table 1123 * containing backtrack sequence 1124 * data--from beginning of table */ 1125 OffsetTo<ClassDef> 1126 inputClassDef; /* Offset to glyph ClassDef 1127 * table containing input sequence 1128 * data--from beginning of table */ 1129 OffsetTo<ClassDef> 1130 lookaheadClassDef; /* Offset to glyph ClassDef table 1131 * containing lookahead sequence 1132 * data--from beginning of table */ 1133 OffsetArrayOf<ChainRuleSet> 1134 ruleSet; /* Array of ChainRuleSet tables 1135 * ordered by class */ 1136 public: 1137 DEFINE_SIZE_ARRAY (12, ruleSet); 1138}; 1139 1140struct ChainContextFormat3 1141{ 1142 friend struct ChainContext; 1143 1144 private: 1145 1146 inline void closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const 1147 { 1148 TRACE_CLOSURE (); 1149 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack); 1150 1151 if (!(this+input[0]).intersects (c->glyphs)) 1152 return; 1153 1154 const OffsetArrayOf<Coverage> &lookahead = 
StructAfter<OffsetArrayOf<Coverage> > (input); 1155 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead); 1156 struct ChainContextClosureLookupContext lookup_context = { 1157 {intersects_coverage, closure_func}, 1158 {this, this, this} 1159 }; 1160 chain_context_closure_lookup (c, 1161 backtrack.len, (const USHORT *) backtrack.array, 1162 input.len, (const USHORT *) input.array + 1, 1163 lookahead.len, (const USHORT *) lookahead.array, 1164 lookup.len, lookup.array, 1165 lookup_context); 1166 } 1167 1168 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const 1169 { 1170 TRACE_APPLY (); 1171 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack); 1172 1173 unsigned int index = (this+input[0]) (c->buffer->info[c->buffer->idx].codepoint); 1174 if (likely (index == NOT_COVERED)) return TRACE_RETURN (false); 1175 1176 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input); 1177 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead); 1178 struct ChainContextApplyLookupContext lookup_context = { 1179 {match_coverage, apply_func}, 1180 {this, this, this} 1181 }; 1182 return TRACE_RETURN (chain_context_apply_lookup (c, 1183 backtrack.len, (const USHORT *) backtrack.array, 1184 input.len, (const USHORT *) input.array + 1, 1185 lookahead.len, (const USHORT *) lookahead.array, 1186 lookup.len, lookup.array, lookup_context)); 1187 } 1188 1189 inline bool sanitize (hb_sanitize_context_t *c) { 1190 TRACE_SANITIZE (); 1191 if (!backtrack.sanitize (c, this)) return TRACE_RETURN (false); 1192 OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack); 1193 if (!input.sanitize (c, this)) return TRACE_RETURN (false); 1194 OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input); 1195 if (!lookahead.sanitize (c, this)) return TRACE_RETURN (false); 1196 ArrayOf<LookupRecord> &lookup 
= StructAfter<ArrayOf<LookupRecord> > (lookahead); 1197 return TRACE_RETURN (lookup.sanitize (c)); 1198 } 1199 1200 private: 1201 USHORT format; /* Format identifier--format = 3 */ 1202 OffsetArrayOf<Coverage> 1203 backtrack; /* Array of coverage tables 1204 * in backtracking sequence, in glyph 1205 * sequence order */ 1206 OffsetArrayOf<Coverage> 1207 inputX ; /* Array of coverage 1208 * tables in input sequence, in glyph 1209 * sequence order */ 1210 OffsetArrayOf<Coverage> 1211 lookaheadX; /* Array of coverage tables 1212 * in lookahead sequence, in glyph 1213 * sequence order */ 1214 ArrayOf<LookupRecord> 1215 lookupX; /* Array of LookupRecords--in 1216 * design order) */ 1217 public: 1218 DEFINE_SIZE_MIN (10); 1219}; 1220 1221struct ChainContext 1222{ 1223 protected: 1224 1225 inline void closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const 1226 { 1227 TRACE_CLOSURE (); 1228 switch (u.format) { 1229 case 1: u.format1.closure (c, closure_func); break; 1230 case 2: u.format2.closure (c, closure_func); break; 1231 case 3: u.format3.closure (c, closure_func); break; 1232 default: break; 1233 } 1234 } 1235 1236 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const 1237 { 1238 TRACE_APPLY (); 1239 switch (u.format) { 1240 case 1: return TRACE_RETURN (u.format1.apply (c, apply_func)); 1241 case 2: return TRACE_RETURN (u.format2.apply (c, apply_func)); 1242 case 3: return TRACE_RETURN (u.format3.apply (c, apply_func)); 1243 default:return TRACE_RETURN (false); 1244 } 1245 } 1246 1247 inline bool sanitize (hb_sanitize_context_t *c) { 1248 TRACE_SANITIZE (); 1249 if (!u.format.sanitize (c)) return TRACE_RETURN (false); 1250 switch (u.format) { 1251 case 1: return TRACE_RETURN (u.format1.sanitize (c)); 1252 case 2: return TRACE_RETURN (u.format2.sanitize (c)); 1253 case 3: return TRACE_RETURN (u.format3.sanitize (c)); 1254 default:return TRACE_RETURN (true); 1255 } 1256 } 1257 1258 private: 1259 union { 1260 USHORT format; 
/* Format identifier */ 1261 ChainContextFormat1 format1; 1262 ChainContextFormat2 format2; 1263 ChainContextFormat3 format3; 1264 } u; 1265}; 1266 1267 1268struct ExtensionFormat1 1269{ 1270 friend struct Extension; 1271 1272 protected: 1273 inline unsigned int get_type (void) const { return extensionLookupType; } 1274 inline unsigned int get_offset (void) const { return extensionOffset; } 1275 1276 inline bool sanitize (hb_sanitize_context_t *c) { 1277 TRACE_SANITIZE (); 1278 return TRACE_RETURN (c->check_struct (this)); 1279 } 1280 1281 private: 1282 USHORT format; /* Format identifier. Set to 1. */ 1283 USHORT extensionLookupType; /* Lookup type of subtable referenced 1284 * by ExtensionOffset (i.e. the 1285 * extension subtable). */ 1286 ULONG extensionOffset; /* Offset to the extension subtable, 1287 * of lookup type subtable. */ 1288 public: 1289 DEFINE_SIZE_STATIC (8); 1290}; 1291 1292struct Extension 1293{ 1294 inline unsigned int get_type (void) const 1295 { 1296 switch (u.format) { 1297 case 1: return u.format1.get_type (); 1298 default:return 0; 1299 } 1300 } 1301 inline unsigned int get_offset (void) const 1302 { 1303 switch (u.format) { 1304 case 1: return u.format1.get_offset (); 1305 default:return 0; 1306 } 1307 } 1308 1309 inline bool sanitize (hb_sanitize_context_t *c) { 1310 TRACE_SANITIZE (); 1311 if (!u.format.sanitize (c)) return TRACE_RETURN (false); 1312 switch (u.format) { 1313 case 1: return TRACE_RETURN (u.format1.sanitize (c)); 1314 default:return TRACE_RETURN (true); 1315 } 1316 } 1317 1318 private: 1319 union { 1320 USHORT format; /* Format identifier */ 1321 ExtensionFormat1 format1; 1322 } u; 1323}; 1324 1325 1326/* 1327 * GSUB/GPOS Common 1328 */ 1329 1330struct GSUBGPOS 1331{ 1332 static const hb_tag_t GSUBTag = HB_OT_TAG_GSUB; 1333 static const hb_tag_t GPOSTag = HB_OT_TAG_GPOS; 1334 1335 inline unsigned int get_script_count (void) const 1336 { return (this+scriptList).len; } 1337 inline const Tag& get_script_tag (unsigned int i) 
const 1338 { return (this+scriptList).get_tag (i); } 1339 inline unsigned int get_script_tags (unsigned int start_offset, 1340 unsigned int *script_count /* IN/OUT */, 1341 hb_tag_t *script_tags /* OUT */) const 1342 { return (this+scriptList).get_tags (start_offset, script_count, script_tags); } 1343 inline const Script& get_script (unsigned int i) const 1344 { return (this+scriptList)[i]; } 1345 inline bool find_script_index (hb_tag_t tag, unsigned int *index) const 1346 { return (this+scriptList).find_index (tag, index); } 1347 1348 inline unsigned int get_feature_count (void) const 1349 { return (this+featureList).len; } 1350 inline const Tag& get_feature_tag (unsigned int i) const 1351 { return (this+featureList).get_tag (i); } 1352 inline unsigned int get_feature_tags (unsigned int start_offset, 1353 unsigned int *feature_count /* IN/OUT */, 1354 hb_tag_t *feature_tags /* OUT */) const 1355 { return (this+featureList).get_tags (start_offset, feature_count, feature_tags); } 1356 inline const Feature& get_feature (unsigned int i) const 1357 { return (this+featureList)[i]; } 1358 inline bool find_feature_index (hb_tag_t tag, unsigned int *index) const 1359 { return (this+featureList).find_index (tag, index); } 1360 1361 inline unsigned int get_lookup_count (void) const 1362 { return (this+lookupList).len; } 1363 inline const Lookup& get_lookup (unsigned int i) const 1364 { return (this+lookupList)[i]; } 1365 1366 inline bool sanitize (hb_sanitize_context_t *c) { 1367 TRACE_SANITIZE (); 1368 return TRACE_RETURN (version.sanitize (c) && likely (version.major == 1) && 1369 scriptList.sanitize (c, this) && 1370 featureList.sanitize (c, this) && 1371 lookupList.sanitize (c, this)); 1372 } 1373 1374 protected: 1375 FixedVersion version; /* Version of the GSUB/GPOS table--initially set 1376 * to 0x00010000 */ 1377 OffsetTo<ScriptList> 1378 scriptList; /* ScriptList table */ 1379 OffsetTo<FeatureList> 1380 featureList; /* FeatureList table */ 1381 OffsetTo<LookupList> 
1382 lookupList; /* LookupList table */ 1383 public: 1384 DEFINE_SIZE_STATIC (10); 1385}; 1386 1387 1388 1389#endif /* HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH */ 1390