hb-ot-layout-gsubgpos-private.hh revision 31081f7390e5130df72f89acc609ccab5dc77a48
/*
 * Copyright © 2007,2008,2009,2010  Red Hat, Inc.
 * Copyright © 2010  Google, Inc.
 *
 *  This is part of HarfBuzz, a text shaping library.
 *
 * Permission is hereby granted, without written agreement and without
 * license or royalty fees, to use, copy, modify, and distribute this
 * software and its documentation for any purpose, provided that the
 * above copyright notice and the following two paragraphs appear in
 * all copies of this software.
 *
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 *
 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 *
 * Red Hat Author(s): Behdad Esfahbod
 * Google Author(s): Behdad Esfahbod
 */

#ifndef HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH
#define HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH

#include "hb-buffer-private.hh"
#include "hb-ot-layout-gdef-table.hh"



/* buffer var allocations */
#define lig_id()	var2.u8[2] /* unique ligature id */
#define lig_comp()	var2.u8[3] /* component number in the ligature (0 = base) */

/* Returns a fresh, non-zero ligature id.  next_serial() is consulted a
 * second time when it wraps around to zero, so callers may rely on the
 * returned id being non-zero. */
static inline uint8_t allocate_lig_id (hb_buffer_t *buffer) {
  uint8_t lig_id = buffer->next_serial ();
  if (unlikely (!lig_id)) lig_id = buffer->next_serial (); /* in case of overflow */
  return lig_id;
}



#ifndef HB_DEBUG_CLOSURE
#define HB_DEBUG_CLOSURE (HB_DEBUG+0)
#endif

#define TRACE_CLOSURE() \
	hb_auto_trace_t<HB_DEBUG_CLOSURE> trace (&c->debug_depth, "CLOSURE", this, NULL, HB_FUNC);



/* Context threaded through the glyph-closure computation: `glyphs` is the
 * (in/out) set being closed over; `nesting_level_left` bounds recursion
 * into nested lookups. */
struct hb_closure_context_t
{
  hb_face_t *face;
  hb_glyph_map_t *glyphs;
  unsigned int nesting_level_left;
  unsigned int debug_depth;


  hb_closure_context_t (hb_face_t *face_,
			hb_glyph_map_t *glyphs_,
			unsigned int nesting_level_left_ = MAX_NESTING_LEVEL) :
			  face (face_), glyphs (glyphs_),
			  nesting_level_left (nesting_level_left_),
			  debug_depth (0) {}
};



#ifndef HB_DEBUG_APPLY
#define HB_DEBUG_APPLY (HB_DEBUG+0)
#endif

#define TRACE_APPLY() \
	hb_auto_trace_t<HB_DEBUG_APPLY> trace (&c->debug_depth, "APPLY", this, NULL, HB_FUNC);



/* Context threaded through lookup application: the buffer being shaped,
 * the active lookup's mask/props, and `context_length`, the number of
 * glyphs the current (possibly nested) match is still allowed to consume. */
struct hb_apply_context_t
{
  hb_font_t *font;
  hb_face_t *face;
  hb_buffer_t *buffer;
  hb_direction_t direction;
  hb_mask_t lookup_mask;
  unsigned int context_length;
  unsigned int nesting_level_left;
  unsigned int lookup_props;
  unsigned int property; /* property of first glyph */
  unsigned int debug_depth;


  hb_apply_context_t (hb_font_t *font_,
		      hb_face_t *face_,
		      hb_buffer_t *buffer_,
		      hb_mask_t lookup_mask_,
		      unsigned int context_length_ = NO_CONTEXT,
		      unsigned int
nesting_level_left_ = MAX_NESTING_LEVEL) :
			font (font_), face (face_), buffer (buffer_),
			direction (buffer_->props.direction),
			lookup_mask (lookup_mask_),
			context_length (context_length_),
			nesting_level_left (nesting_level_left_),
			lookup_props (0), property (0), debug_depth (0) {}

  void set_lookup (const Lookup &l) {
    lookup_props = l.get_props ();
  }

  /* Iterates forward over buffer->info from start_index_, transparently
   * skipping glyphs that _hb_ot_layout_skip_mark() says to ignore for the
   * given lookup_props.  Visits at most num_items_ glyphs and never walks
   * past the context_length window computed at construction. */
  struct mark_skipping_forward_iterator_t
  {
    inline mark_skipping_forward_iterator_t (hb_apply_context_t *c_,
					     unsigned int start_index_,
					     unsigned int num_items_)
    {
      c = c_;
      idx = start_index_;
      num_items = num_items_;
      end = MIN (c->buffer->len, c->buffer->idx + c->context_length);
    }
    inline bool has_no_chance (void) const
    {
      /* Too few glyphs remain before `end` to ever match num_items more. */
      return unlikely (num_items && idx + num_items >= end);
    }
    inline bool next (unsigned int *property_out,
		      unsigned int lookup_props)
    {
      assert (num_items > 0);
      do
      {
	if (has_no_chance ())
	  return false;
	idx++;
      } while (_hb_ot_layout_skip_mark (c->face, &c->buffer->info[idx], lookup_props, property_out));
      num_items--;
      return true;
    }
    inline bool next (unsigned int *property_out = NULL)
    {
      return next (property_out, c->lookup_props);
    }

    unsigned int idx;
    private:
    hb_apply_context_t *c;
    unsigned int num_items;
    unsigned int end;
  };

  /* Mirror of the forward iterator, walking backward over the already
   * output glyphs (buffer->out_info). */
  struct mark_skipping_backward_iterator_t
  {
    inline mark_skipping_backward_iterator_t (hb_apply_context_t *c_,
					      unsigned int start_index_,
					      unsigned int num_items_)
    {
      c = c_;
      idx = start_index_;
      num_items = num_items_;
    }
    inline bool has_no_chance (void) const
    {
      /* Fewer glyphs before idx than items still to match. */
      return unlikely (idx < num_items);
    }
    inline bool prev (unsigned int *property_out,
		      unsigned int lookup_props)
    {
      assert (num_items > 0);
      do
      {
	if (has_no_chance ())
	  return false;
	idx--;
      } while (_hb_ot_layout_skip_mark (c->face, &c->buffer->out_info[idx], lookup_props, property_out));
      num_items--;
      return true;
    }
    inline bool prev (unsigned int *property_out = NULL)
    {
      return prev (property_out, c->lookup_props);
    }

    unsigned int idx;
    private:
    hb_apply_context_t *c;
    unsigned int num_items;
  };

  inline bool should_mark_skip_current_glyph (void) const
  {
    return _hb_ot_layout_skip_mark (face, &buffer->info[buffer->idx], lookup_props, NULL);
  }



  /* Replace current glyph, invalidating its cached GDEF properties. */
  inline void replace_glyph (hb_codepoint_t glyph_index) const
  {
    clear_property ();
    buffer->replace_glyph (glyph_index);
  }
  inline void replace_glyphs_be16 (unsigned int num_in,
				   unsigned int num_out,
				   const uint16_t *glyph_data_be) const
  {
    clear_property ();
    buffer->replace_glyphs_be16 (num_in, num_out, glyph_data_be);
  }

  inline void guess_glyph_class (unsigned int klass)
  {
    /* XXX if ! has gdef */
    buffer->info[buffer->idx].props_cache() = klass;
  }

  private:
  inline void clear_property (void) const
  {
    /* XXX if has gdef */
    buffer->info[buffer->idx].props_cache() = 0;
  }
};



/* The three callbacks below are parameterized over how a match value is
 * interpreted: as a bare glyph id, as a ClassDef class, or as an offset
 * to a Coverage table.  `data` carries the interpretation-specific table. */
typedef bool (*intersects_func_t) (hb_glyph_map_t *glyphs, const USHORT &value, const void *data);
typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const USHORT &value, const void *data);
typedef bool (*closure_lookup_func_t) (hb_closure_context_t *c, unsigned int lookup_index);
typedef bool (*apply_lookup_func_t) (hb_apply_context_t *c, unsigned int lookup_index);

struct ContextClosureFuncs
{
  intersects_func_t intersects;
  closure_lookup_func_t closure;
};
struct ContextApplyFuncs
{
  match_func_t match;
  apply_lookup_func_t apply;
};

/* value is a glyph id; data is unused. */
static inline bool intersects_glyph (hb_glyph_map_t *glyphs, const USHORT &value, const void *data HB_UNUSED)
{
  return glyphs->has (value);
}
/* value is a glyph class; data points to the ClassDef table. */
static inline bool intersects_class (hb_glyph_map_t *glyphs,
const USHORT &value, const void *data)
{
  const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
  return class_def.intersects_class (glyphs, value);
}
/* value is an offset to a Coverage table, relative to data. */
static inline bool intersects_coverage (hb_glyph_map_t *glyphs, const USHORT &value, const void *data)
{
  const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
  return (data+coverage).intersects (glyphs);
}

/* True iff every one of the count values intersects the closure glyph set. */
static inline bool intersects_array (hb_closure_context_t *c,
				     unsigned int count,
				     const USHORT values[],
				     intersects_func_t intersects_func,
				     const void *intersects_data)
{
  for (unsigned int i = 0; i < count; i++)
    if (likely (!intersects_func (c->glyphs, values[i], intersects_data)))
      return false;
  return true;
}


static inline bool match_glyph (hb_codepoint_t glyph_id, const USHORT &value, const void *data HB_UNUSED)
{
  return glyph_id == value;
}
static inline bool match_class (hb_codepoint_t glyph_id, const USHORT &value, const void *data)
{
  const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
  return class_def.get_class (glyph_id) == value;
}
static inline bool match_coverage (hb_codepoint_t glyph_id, const USHORT &value, const void *data)
{
  const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
  return (data+coverage).get_coverage (glyph_id) != NOT_COVERED;
}


/* Matches the glyphs following the current buffer position against input[],
 * skipping ignorable marks.  On success, *context_length_out receives the
 * total span matched, in glyphs, including the current one. */
static inline bool match_input (hb_apply_context_t *c,
				unsigned int count, /* Including the first glyph (not matched) */
				const USHORT input[], /* Array of input values--start with second glyph */
				match_func_t match_func,
				const void *match_data,
				unsigned int *context_length_out)
{
  hb_apply_context_t::mark_skipping_forward_iterator_t skippy_iter (c, c->buffer->idx, count - 1);
  if (skippy_iter.has_no_chance ())
    return false;

  for (unsigned int i = 1; i < count; i++)
  {
    if (!skippy_iter.next ())
      return false;

    if (likely (!match_func (c->buffer->info[skippy_iter.idx].codepoint, input[i - 1], match_data)))
      return false;
  }

  *context_length_out = skippy_iter.idx - c->buffer->idx + 1;

  return true;
}

/* Matches count glyphs preceding the current position (read from the
 * already-output out_info array) against backtrack[]. */
static inline bool match_backtrack (hb_apply_context_t *c,
				    unsigned int count,
				    const USHORT backtrack[],
				    match_func_t match_func,
				    const void *match_data)
{
  hb_apply_context_t::mark_skipping_backward_iterator_t skippy_iter (c, c->buffer->backtrack_len (), count);
  if (skippy_iter.has_no_chance ())
    return false;

  for (unsigned int i = 0; i < count; i++)
  {
    if (!skippy_iter.prev ())
      return false;

    if (likely (!match_func (c->buffer->out_info[skippy_iter.idx].codepoint, backtrack[i], match_data)))
      return false;
  }

  return true;
}

/* Matches count glyphs against lookahead[], starting `offset` glyphs past
 * the current position (i.e. right after the matched input sequence). */
static inline bool match_lookahead (hb_apply_context_t *c,
				    unsigned int count,
				    const USHORT lookahead[],
				    match_func_t match_func,
				    const void *match_data,
				    unsigned int offset)
{
  hb_apply_context_t::mark_skipping_forward_iterator_t skippy_iter (c, c->buffer->idx + offset - 1, count);
  if (skippy_iter.has_no_chance ())
    return false;

  for (unsigned int i = 0; i < count; i++)
  {
    if (!skippy_iter.next ())
      return false;

    if (likely (!match_func (c->buffer->info[skippy_iter.idx].codepoint, lookahead[i], match_data)))
      return false;
  }

  return true;
}



/* A (sequence index, lookup index) pair: apply the given lookup at the
 * given position of the matched sequence. */
struct LookupRecord
{
  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return c->check_struct (this);
  }

  USHORT	sequenceIndex;		/* Index into current glyph
					 * sequence--first glyph = 0 */
  USHORT	lookupListIndex;	/* Lookup to apply to that
					 * position--zero-based */
  public:
  DEFINE_SIZE_STATIC (4);
};


static inline bool closure_lookup (hb_closure_context_t *c,
				   unsigned int lookupCount,
				   const LookupRecord lookupRecord[], /* Array of
LookupRecords--in design order */ 381 closure_lookup_func_t closure_func) 382{ 383 bool ret = false; 384 for (unsigned int i = 0; i < lookupCount; i++) 385 ret = closure_func (c, lookupRecord->lookupListIndex) || ret; 386 return ret; 387} 388 389static inline bool apply_lookup (hb_apply_context_t *c, 390 unsigned int count, /* Including the first glyph */ 391 unsigned int lookupCount, 392 const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */ 393 apply_lookup_func_t apply_func) 394{ 395 unsigned int end = MIN (c->buffer->len, c->buffer->idx + c->context_length); 396 if (unlikely (count == 0 || c->buffer->idx + count > end)) 397 return false; 398 399 /* TODO We don't support lookupRecord arrays that are not increasing: 400 * Should be easy for in_place ones at least. */ 401 402 /* Note: If sublookup is reverse, it will underflow after the first loop 403 * and we jump out of it. Not entirely disastrous. So we don't check 404 * for reverse lookup here. 405 */ 406 for (unsigned int i = 0; i < count; /* NOP */) 407 { 408 if (unlikely (c->buffer->idx == end)) 409 return true; 410 while (c->should_mark_skip_current_glyph ()) 411 { 412 /* No lookup applied for this index */ 413 c->buffer->next_glyph (); 414 if (unlikely (c->buffer->idx == end)) 415 return true; 416 } 417 418 if (lookupCount && i == lookupRecord->sequenceIndex) 419 { 420 unsigned int old_pos = c->buffer->idx; 421 422 /* Apply a lookup */ 423 bool done = apply_func (c, lookupRecord->lookupListIndex); 424 425 lookupRecord++; 426 lookupCount--; 427 /* Err, this is wrong if the lookup jumped over some glyphs */ 428 i += c->buffer->idx - old_pos; 429 if (unlikely (c->buffer->idx == end)) 430 return true; 431 432 if (!done) 433 goto not_applied; 434 } 435 else 436 { 437 not_applied: 438 /* No lookup applied for this index */ 439 c->buffer->next_glyph (); 440 i++; 441 } 442 } 443 444 return true; 445} 446 447 448 449/* Contextual lookups */ 450 451struct ContextClosureLookupContext 452{ 
ContextClosureFuncs funcs;
  const void *intersects_data;
};

struct ContextApplyLookupContext
{
  ContextApplyFuncs funcs;
  const void *match_data;
};

/* Closure for one context rule: succeeds (and recurses into the records'
 * lookups) only if every input value can intersect the glyph set. */
static inline bool context_closure_lookup (hb_closure_context_t *c,
					   unsigned int inputCount, /* Including the first glyph (not matched) */
					   const USHORT input[], /* Array of input values--start with second glyph */
					   unsigned int lookupCount,
					   const LookupRecord lookupRecord[],
					   ContextClosureLookupContext &lookup_context)
{
  return intersects_array (c,
			   inputCount ? inputCount - 1 : 0, input,
			   lookup_context.funcs.intersects, lookup_context.intersects_data)
      && closure_lookup (c,
			 lookupCount, lookupRecord,
			 lookup_context.funcs.closure);
}


/* Matches the input sequence, then applies the records inside a copy of
 * the context whose context_length is clamped to the matched span. */
static inline bool context_apply_lookup (hb_apply_context_t *c,
					 unsigned int inputCount, /* Including the first glyph (not matched) */
					 const USHORT input[], /* Array of input values--start with second glyph */
					 unsigned int lookupCount,
					 const LookupRecord lookupRecord[],
					 ContextApplyLookupContext &lookup_context)
{
  hb_apply_context_t new_context = *c;
  return match_input (c,
		      inputCount, input,
		      lookup_context.funcs.match, lookup_context.match_data,
		      &new_context.context_length)
      && apply_lookup (&new_context,
		       inputCount,
		       lookupCount, lookupRecord,
		       lookup_context.funcs.apply);
}

/* One context rule: inputCount-1 match values followed, in memory, by
 * lookupCount LookupRecords. */
struct Rule
{
  friend struct RuleSet;

  private:

  inline bool closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const
  {
    TRACE_CLOSURE ();
    /* The records start right after the inputCount-1 input values. */
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (input, input[0].static_size * (inputCount ? inputCount - 1 : 0));
    return context_closure_lookup (c,
				   inputCount, input,
				   lookupCount, lookupRecord,
				   lookup_context);
  }

  inline bool apply (hb_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
  {
    TRACE_APPLY ();
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (input, input[0].static_size * (inputCount ? inputCount - 1 : 0));
    return context_apply_lookup (c,
				 inputCount, input,
				 lookupCount, lookupRecord,
				 lookup_context);
  }

  public:
  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return inputCount.sanitize (c)
	&& lookupCount.sanitize (c)
	&& c->check_range (input,
			   input[0].static_size * inputCount
			   + lookupRecordX[0].static_size * lookupCount);
  }

  private:
  USHORT	inputCount;		/* Total number of glyphs in input
					 * glyph sequence--includes the first
					 * glyph */
  USHORT	lookupCount;		/* Number of LookupRecords */
  USHORT	input[VAR];		/* Array of match inputs--start with
					 * second glyph */
  LookupRecord	lookupRecordX[VAR];	/* Array of LookupRecords--in
					 * design order */
  public:
  DEFINE_SIZE_ARRAY2 (4, input, lookupRecordX);
};

/* A set of alternative rules; apply() takes the first rule that matches. */
struct RuleSet
{
  inline bool closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const
  {
    TRACE_CLOSURE ();
    bool ret = false;
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
      ret = (this+rule[i]).closure (c, lookup_context) || ret;
    return ret;
  }

  inline bool apply (hb_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
  {
    TRACE_APPLY ();
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
    {
      if ((this+rule[i]).apply (c, lookup_context))
	return true;
    }
    return false;
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return rule.sanitize
(c, this);
  }

  private:
  OffsetArrayOf<Rule>
		rule;			/* Array of Rule tables
					 * ordered by preference */
  public:
  DEFINE_SIZE_ARRAY (2, rule);
};


/* Context substitution/positioning, format 1: rules keyed on exact glyphs. */
struct ContextFormat1
{
  friend struct Context;

  private:

  inline bool closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const
  {
    TRACE_CLOSURE ();

    const Coverage &cov = (this+coverage);

    /* intersects_glyph ignores its data pointer, hence NULL here. */
    struct ContextClosureLookupContext lookup_context = {
      {intersects_glyph, closure_func},
      NULL
    };

    bool ret = false;
    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      if (cov.intersects_coverage (c->glyphs, i)) {
	const RuleSet &rule_set = this+ruleSet[i];
	ret = rule_set.closure (c, lookup_context) || ret;
      }
    return ret;
  }

  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    unsigned int index = (this+coverage) (c->buffer->info[c->buffer->idx].codepoint);
    if (likely (index == NOT_COVERED))
      return false;

    const RuleSet &rule_set = this+ruleSet[index];
    struct ContextApplyLookupContext lookup_context = {
      {match_glyph, apply_func},
      NULL
    };
    return rule_set.apply (c, lookup_context);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return coverage.sanitize (c, this)
	&& ruleSet.sanitize (c, this);
  }

  private:
  USHORT	format;			/* Format identifier--format = 1 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  OffsetArrayOf<RuleSet>
		ruleSet;		/* Array of RuleSet tables
					 * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (6, ruleSet);
};


/* Context substitution/positioning, format 2: rules keyed on glyph classes. */
struct ContextFormat2
{
  friend struct Context;

  private:

  inline bool closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const
  {
    TRACE_CLOSURE ();
if (!(this+coverage).intersects (c->glyphs)) 655 return false; 656 657 const ClassDef &class_def = this+classDef; 658 659 struct ContextClosureLookupContext lookup_context = { 660 {intersects_class, closure_func}, 661 NULL 662 }; 663 664 bool ret = false; 665 unsigned int count = ruleSet.len; 666 for (unsigned int i = 0; i < count; i++) 667 if (class_def.intersects_class (c->glyphs, i)) { 668 const RuleSet &rule_set = this+ruleSet[i]; 669 ret = rule_set.closure (c, lookup_context) || ret; 670 } 671 return ret; 672 } 673 674 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const 675 { 676 TRACE_APPLY (); 677 unsigned int index = (this+coverage) (c->buffer->info[c->buffer->idx].codepoint); 678 if (likely (index == NOT_COVERED)) 679 return false; 680 681 const ClassDef &class_def = this+classDef; 682 index = class_def (c->buffer->info[c->buffer->idx].codepoint); 683 const RuleSet &rule_set = this+ruleSet[index]; 684 struct ContextApplyLookupContext lookup_context = { 685 {match_class, apply_func}, 686 &class_def 687 }; 688 return rule_set.apply (c, lookup_context); 689 } 690 691 inline bool sanitize (hb_sanitize_context_t *c) { 692 TRACE_SANITIZE (); 693 return coverage.sanitize (c, this) 694 && classDef.sanitize (c, this) 695 && ruleSet.sanitize (c, this); 696 } 697 698 private: 699 USHORT format; /* Format identifier--format = 2 */ 700 OffsetTo<Coverage> 701 coverage; /* Offset to Coverage table--from 702 * beginning of table */ 703 OffsetTo<ClassDef> 704 classDef; /* Offset to glyph ClassDef table--from 705 * beginning of table */ 706 OffsetArrayOf<RuleSet> 707 ruleSet; /* Array of RuleSet tables 708 * ordered by class */ 709 public: 710 DEFINE_SIZE_ARRAY (8, ruleSet); 711}; 712 713 714struct ContextFormat3 715{ 716 friend struct Context; 717 718 private: 719 720 inline bool closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const 721 { 722 TRACE_CLOSURE (); 723 if (!(this+coverage[0]).intersects (c->glyphs)) 724 return 
false; 725 726 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * glyphCount); 727 struct ContextClosureLookupContext lookup_context = { 728 {intersects_coverage, closure_func}, 729 this 730 }; 731 return context_closure_lookup (c, 732 glyphCount, (const USHORT *) (coverage + 1), 733 lookupCount, lookupRecord, 734 lookup_context); 735 } 736 737 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const 738 { 739 TRACE_APPLY (); 740 unsigned int index = (this+coverage[0]) (c->buffer->info[c->buffer->idx].codepoint); 741 if (likely (index == NOT_COVERED)) 742 return false; 743 744 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * glyphCount); 745 struct ContextApplyLookupContext lookup_context = { 746 {match_coverage, apply_func}, 747 this 748 }; 749 return context_apply_lookup (c, 750 glyphCount, (const USHORT *) (coverage + 1), 751 lookupCount, lookupRecord, 752 lookup_context); 753 } 754 755 inline bool sanitize (hb_sanitize_context_t *c) { 756 TRACE_SANITIZE (); 757 if (!c->check_struct (this)) return false; 758 unsigned int count = glyphCount; 759 if (!c->check_array (coverage, coverage[0].static_size, count)) return false; 760 for (unsigned int i = 0; i < count; i++) 761 if (!coverage[i].sanitize (c, this)) return false; 762 LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * count); 763 return c->check_array (lookupRecord, lookupRecord[0].static_size, lookupCount); 764 } 765 766 private: 767 USHORT format; /* Format identifier--format = 3 */ 768 USHORT glyphCount; /* Number of glyphs in the input glyph 769 * sequence */ 770 USHORT lookupCount; /* Number of LookupRecords */ 771 OffsetTo<Coverage> 772 coverage[VAR]; /* Array of offsets to Coverage 773 * table in glyph sequence order */ 774 LookupRecord lookupRecordX[VAR]; /* Array of LookupRecords--in 775 * design order */ 776 public: 777 
DEFINE_SIZE_ARRAY2 (6, coverage, lookupRecordX); 778}; 779 780struct Context 781{ 782 protected: 783 784 inline bool closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const 785 { 786 TRACE_CLOSURE (); 787 switch (u.format) { 788 case 1: return u.format1.closure (c, closure_func); 789 case 2: return u.format2.closure (c, closure_func); 790 case 3: return u.format3.closure (c, closure_func); 791 default:return false; 792 } 793 } 794 795 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const 796 { 797 TRACE_APPLY (); 798 switch (u.format) { 799 case 1: return u.format1.apply (c, apply_func); 800 case 2: return u.format2.apply (c, apply_func); 801 case 3: return u.format3.apply (c, apply_func); 802 default:return false; 803 } 804 } 805 806 inline bool sanitize (hb_sanitize_context_t *c) { 807 TRACE_SANITIZE (); 808 if (!u.format.sanitize (c)) return false; 809 switch (u.format) { 810 case 1: return u.format1.sanitize (c); 811 case 2: return u.format2.sanitize (c); 812 case 3: return u.format3.sanitize (c); 813 default:return true; 814 } 815 } 816 817 private: 818 union { 819 USHORT format; /* Format identifier */ 820 ContextFormat1 format1; 821 ContextFormat2 format2; 822 ContextFormat3 format3; 823 } u; 824}; 825 826 827/* Chaining Contextual lookups */ 828 829struct ChainContextClosureLookupContext 830{ 831 ContextClosureFuncs funcs; 832 const void *intersects_data[3]; 833}; 834 835struct ChainContextApplyLookupContext 836{ 837 ContextApplyFuncs funcs; 838 const void *match_data[3]; 839}; 840 841static inline bool chain_context_closure_lookup (hb_closure_context_t *c, 842 unsigned int backtrackCount, 843 const USHORT backtrack[], 844 unsigned int inputCount, /* Including the first glyph (not matched) */ 845 const USHORT input[], /* Array of input values--start with second glyph */ 846 unsigned int lookaheadCount, 847 const USHORT lookahead[], 848 unsigned int lookupCount, 849 const LookupRecord lookupRecord[], 850 
ChainContextClosureLookupContext &lookup_context)
{
  return intersects_array (c,
			   backtrackCount, backtrack,
			   lookup_context.funcs.intersects, lookup_context.intersects_data[0])
      && intersects_array (c,
			   inputCount ? inputCount - 1 : 0, input,
			   lookup_context.funcs.intersects, lookup_context.intersects_data[1])
      && intersects_array (c,
			   lookaheadCount, lookahead,
			   lookup_context.funcs.intersects, lookup_context.intersects_data[2])
      && closure_lookup (c,
			 lookupCount, lookupRecord,
			 lookup_context.funcs.closure);
}

/* Matches backtrack, input and lookahead sequences, then applies the
 * records inside a copy of the context clamped to the matched input span. */
static inline bool chain_context_apply_lookup (hb_apply_context_t *c,
					       unsigned int backtrackCount,
					       const USHORT backtrack[],
					       unsigned int inputCount, /* Including the first glyph (not matched) */
					       const USHORT input[], /* Array of input values--start with second glyph */
					       unsigned int lookaheadCount,
					       const USHORT lookahead[],
					       unsigned int lookupCount,
					       const LookupRecord lookupRecord[],
					       ChainContextApplyLookupContext &lookup_context)
{
  /* First guess */
  if (unlikely (c->buffer->backtrack_len () < backtrackCount ||
		c->buffer->idx + inputCount + lookaheadCount > c->buffer->len ||
		inputCount + lookaheadCount > c->context_length))
    return false;

  hb_apply_context_t new_context = *c;
  return match_backtrack (c,
			  backtrackCount, backtrack,
			  lookup_context.funcs.match, lookup_context.match_data[0])
      && match_input (c,
		      inputCount, input,
		      lookup_context.funcs.match, lookup_context.match_data[1],
		      &new_context.context_length)
      && match_lookahead (c,
			  lookaheadCount, lookahead,
			  lookup_context.funcs.match, lookup_context.match_data[2],
			  new_context.context_length)
      && apply_lookup (&new_context,
		       inputCount,
		       lookupCount, lookupRecord,
		       lookup_context.funcs.apply);
}

/* One chain rule: four variable-length arrays laid out back to back
 * (backtrack, headless input, lookahead, LookupRecords); only the first
 * has a compile-time offset, the rest are located with StructAfter. */
struct ChainRule
{
  friend struct ChainRuleSet;

  private:

  inline bool closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const
  {
    TRACE_CLOSURE ();
    const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
    const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return chain_context_closure_lookup (c,
					 backtrack.len, backtrack.array,
					 input.len, input.array,
					 lookahead.len, lookahead.array,
					 lookup.len, lookup.array,
					 lookup_context);
  }

  inline bool apply (hb_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
  {
    TRACE_APPLY ();
    const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
    const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return chain_context_apply_lookup (c,
				       backtrack.len, backtrack.array,
				       input.len, input.array,
				       lookahead.len, lookahead.array,
				       lookup.len, lookup.array,
				       lookup_context);
  }

  public:
  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    /* Each array must be validated before the next one can be located. */
    if (!backtrack.sanitize (c)) return false;
    HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
    if (!input.sanitize (c)) return false;
    ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
    if (!lookahead.sanitize (c)) return false;
    ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return lookup.sanitize (c);
  }

  private:
  ArrayOf<USHORT>
		backtrack;		/* Array of backtracking values
					 * (to be matched before the input
					 * sequence) */
  HeadlessArrayOf<USHORT>
		inputX;			/* Array of input values (start with
					 * second glyph) */
  ArrayOf<USHORT>
		lookaheadX;		/* Array of lookahead values (to be
					 * matched after the input sequence) */
  ArrayOf<LookupRecord>
		lookupX;		/* Array of LookupRecords--in
					 * design order) */
  public:
  DEFINE_SIZE_MIN (8);
};

/* A set of alternative chain rules; apply() takes the first that matches. */
struct ChainRuleSet
{
  inline bool closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const
  {
    TRACE_CLOSURE ();
    bool ret = false;
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
      ret = (this+rule[i]).closure (c, lookup_context) || ret;
    return ret;
  }

  inline bool apply (hb_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
  {
    TRACE_APPLY ();
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
    {
      if ((this+rule[i]).apply (c, lookup_context))
	return true;
    }

    return false;
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return rule.sanitize (c, this);
  }

  private:
  OffsetArrayOf<ChainRule>
		rule;			/* Array of ChainRule tables
					 * ordered by preference */
  public:
  DEFINE_SIZE_ARRAY (2, rule);
};

/* Chain context, format 1: rules keyed on exact glyphs. */
struct ChainContextFormat1
{
  friend struct ChainContext;

  private:

  inline bool closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const
  {
    TRACE_CLOSURE ();
    const Coverage &cov = (this+coverage);

    /* intersects_glyph ignores its data pointers, hence the NULLs. */
    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_glyph, closure_func},
      {NULL, NULL, NULL}
    };

    bool ret = false;
    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      if (cov.intersects_coverage (c->glyphs, i)) {
	const ChainRuleSet &rule_set = this+ruleSet[i];
	ret = rule_set.closure (c, lookup_context) || ret;
      }
    return ret;
  }

  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    unsigned int index = (this+coverage)
(c->buffer->info[c->buffer->idx].codepoint);
    if (likely (index == NOT_COVERED))
      return false;

    const ChainRuleSet &rule_set = this+ruleSet[index];
    struct ChainContextApplyLookupContext lookup_context = {
      {match_glyph, apply_func},
      {NULL, NULL, NULL}
    };
    return rule_set.apply (c, lookup_context);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return coverage.sanitize (c, this)
	&& ruleSet.sanitize (c, this);
  }

  private:
  USHORT	format;			/* Format identifier--format = 1 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  OffsetArrayOf<ChainRuleSet>
		ruleSet;		/* Array of ChainRuleSet tables
					 * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (6, ruleSet);
};

/* Chain context, format 2: rules keyed on glyph classes, with separate
 * ClassDefs for the backtrack, input and lookahead sequences. */
struct ChainContextFormat2
{
  friend struct ChainContext;

  private:

  inline bool closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const
  {
    TRACE_CLOSURE ();
    if (!(this+coverage).intersects (c->glyphs))
      return false;

    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_class, closure_func},
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def}
    };

    bool ret = false;
    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      if (input_class_def.intersects_class (c->glyphs, i)) {
	const ChainRuleSet &rule_set = this+ruleSet[i];
	ret = rule_set.closure (c, lookup_context) || ret;
      }
    return ret;
  }

  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    unsigned int index = (this+coverage) (c->buffer->info[c->buffer->idx].codepoint);
    if (likely (index == NOT_COVERED))
      return false;

    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    /* Rule sets are selected by the input class of the current glyph. */
    index = input_class_def (c->buffer->info[c->buffer->idx].codepoint);
    const ChainRuleSet &rule_set = this+ruleSet[index];
    struct ChainContextApplyLookupContext lookup_context = {
      {match_class, apply_func},
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def}
    };
    return rule_set.apply (c, lookup_context);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return coverage.sanitize (c, this)
	&& backtrackClassDef.sanitize (c, this)
	&& inputClassDef.sanitize (c, this)
	&& lookaheadClassDef.sanitize (c, this)
	&& ruleSet.sanitize (c, this);
  }

  private:
  USHORT	format;			/* Format identifier--format = 2 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  OffsetTo<ClassDef>
		backtrackClassDef;	/* Offset to glyph ClassDef table
					 * containing backtrack sequence
					 * data--from beginning of table */
  OffsetTo<ClassDef>
		inputClassDef;		/* Offset to glyph ClassDef
					 * table containing input sequence
					 * data--from beginning of table */
  OffsetTo<ClassDef>
		lookaheadClassDef;	/* Offset to glyph ClassDef table
					 * containing lookahead sequence
					 * data--from beginning of table */
  OffsetArrayOf<ChainRuleSet>
		ruleSet;		/* Array of ChainRuleSet tables
					 * ordered by class */
  public:
  DEFINE_SIZE_ARRAY (12, ruleSet);
};

/* Chain context, format 3: one inline rule keyed on Coverage sequences. */
struct ChainContextFormat3
{
  friend struct ChainContext;

  private:

  inline bool closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const
1157 { 1158 TRACE_CLOSURE (); 1159 /* TODO FILLME */ 1160 return false; 1161 } 1162 1163 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const 1164 { 1165 TRACE_APPLY (); 1166 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack); 1167 1168 unsigned int index = (this+input[0]) (c->buffer->info[c->buffer->idx].codepoint); 1169 if (likely (index == NOT_COVERED)) 1170 return false; 1171 1172 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input); 1173 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead); 1174 struct ChainContextApplyLookupContext lookup_context = { 1175 {match_coverage, apply_func}, 1176 {this, this, this} 1177 }; 1178 return chain_context_apply_lookup (c, 1179 backtrack.len, (const USHORT *) backtrack.array, 1180 input.len, (const USHORT *) input.array + 1, 1181 lookahead.len, (const USHORT *) lookahead.array, 1182 lookup.len, lookup.array, 1183 lookup_context); 1184 } 1185 1186 inline bool sanitize (hb_sanitize_context_t *c) { 1187 TRACE_SANITIZE (); 1188 if (!backtrack.sanitize (c, this)) return false; 1189 OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack); 1190 if (!input.sanitize (c, this)) return false; 1191 OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input); 1192 if (!lookahead.sanitize (c, this)) return false; 1193 ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead); 1194 return lookup.sanitize (c); 1195 } 1196 1197 private: 1198 USHORT format; /* Format identifier--format = 3 */ 1199 OffsetArrayOf<Coverage> 1200 backtrack; /* Array of coverage tables 1201 * in backtracking sequence, in glyph 1202 * sequence order */ 1203 OffsetArrayOf<Coverage> 1204 inputX ; /* Array of coverage 1205 * tables in input sequence, in glyph 1206 * sequence order */ 1207 OffsetArrayOf<Coverage> 1208 lookaheadX; /* Array of coverage tables 1209 * 
in lookahead sequence, in glyph 1210 * sequence order */ 1211 ArrayOf<LookupRecord> 1212 lookupX; /* Array of LookupRecords--in 1213 * design order) */ 1214 public: 1215 DEFINE_SIZE_MIN (10); 1216}; 1217 1218struct ChainContext 1219{ 1220 protected: 1221 1222 inline bool closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const 1223 { 1224 TRACE_CLOSURE (); 1225 switch (u.format) { 1226 case 1: return u.format1.closure (c, closure_func); 1227 case 2: return u.format2.closure (c, closure_func); 1228 case 3: return u.format3.closure (c, closure_func); 1229 default:return false; 1230 } 1231 } 1232 1233 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const 1234 { 1235 TRACE_APPLY (); 1236 switch (u.format) { 1237 case 1: return u.format1.apply (c, apply_func); 1238 case 2: return u.format2.apply (c, apply_func); 1239 case 3: return u.format3.apply (c, apply_func); 1240 default:return false; 1241 } 1242 } 1243 1244 inline bool sanitize (hb_sanitize_context_t *c) { 1245 TRACE_SANITIZE (); 1246 if (!u.format.sanitize (c)) return false; 1247 switch (u.format) { 1248 case 1: return u.format1.sanitize (c); 1249 case 2: return u.format2.sanitize (c); 1250 case 3: return u.format3.sanitize (c); 1251 default:return true; 1252 } 1253 } 1254 1255 private: 1256 union { 1257 USHORT format; /* Format identifier */ 1258 ChainContextFormat1 format1; 1259 ChainContextFormat2 format2; 1260 ChainContextFormat3 format3; 1261 } u; 1262}; 1263 1264 1265struct ExtensionFormat1 1266{ 1267 friend struct Extension; 1268 1269 protected: 1270 inline unsigned int get_type (void) const { return extensionLookupType; } 1271 inline unsigned int get_offset (void) const { return extensionOffset; } 1272 1273 inline bool sanitize (hb_sanitize_context_t *c) { 1274 TRACE_SANITIZE (); 1275 return c->check_struct (this); 1276 } 1277 1278 private: 1279 USHORT format; /* Format identifier. Set to 1. 
*/ 1280 USHORT extensionLookupType; /* Lookup type of subtable referenced 1281 * by ExtensionOffset (i.e. the 1282 * extension subtable). */ 1283 ULONG extensionOffset; /* Offset to the extension subtable, 1284 * of lookup type subtable. */ 1285 public: 1286 DEFINE_SIZE_STATIC (8); 1287}; 1288 1289struct Extension 1290{ 1291 inline unsigned int get_type (void) const 1292 { 1293 switch (u.format) { 1294 case 1: return u.format1.get_type (); 1295 default:return 0; 1296 } 1297 } 1298 inline unsigned int get_offset (void) const 1299 { 1300 switch (u.format) { 1301 case 1: return u.format1.get_offset (); 1302 default:return 0; 1303 } 1304 } 1305 1306 inline bool sanitize (hb_sanitize_context_t *c) { 1307 TRACE_SANITIZE (); 1308 if (!u.format.sanitize (c)) return false; 1309 switch (u.format) { 1310 case 1: return u.format1.sanitize (c); 1311 default:return true; 1312 } 1313 } 1314 1315 private: 1316 union { 1317 USHORT format; /* Format identifier */ 1318 ExtensionFormat1 format1; 1319 } u; 1320}; 1321 1322 1323/* 1324 * GSUB/GPOS Common 1325 */ 1326 1327struct GSUBGPOS 1328{ 1329 static const hb_tag_t GSUBTag = HB_OT_TAG_GSUB; 1330 static const hb_tag_t GPOSTag = HB_OT_TAG_GPOS; 1331 1332 inline unsigned int get_script_count (void) const 1333 { return (this+scriptList).len; } 1334 inline const Tag& get_script_tag (unsigned int i) const 1335 { return (this+scriptList).get_tag (i); } 1336 inline unsigned int get_script_tags (unsigned int start_offset, 1337 unsigned int *script_count /* IN/OUT */, 1338 hb_tag_t *script_tags /* OUT */) const 1339 { return (this+scriptList).get_tags (start_offset, script_count, script_tags); } 1340 inline const Script& get_script (unsigned int i) const 1341 { return (this+scriptList)[i]; } 1342 inline bool find_script_index (hb_tag_t tag, unsigned int *index) const 1343 { return (this+scriptList).find_index (tag, index); } 1344 1345 inline unsigned int get_feature_count (void) const 1346 { return (this+featureList).len; } 1347 inline const 
Tag& get_feature_tag (unsigned int i) const 1348 { return (this+featureList).get_tag (i); } 1349 inline unsigned int get_feature_tags (unsigned int start_offset, 1350 unsigned int *feature_count /* IN/OUT */, 1351 hb_tag_t *feature_tags /* OUT */) const 1352 { return (this+featureList).get_tags (start_offset, feature_count, feature_tags); } 1353 inline const Feature& get_feature (unsigned int i) const 1354 { return (this+featureList)[i]; } 1355 inline bool find_feature_index (hb_tag_t tag, unsigned int *index) const 1356 { return (this+featureList).find_index (tag, index); } 1357 1358 inline unsigned int get_lookup_count (void) const 1359 { return (this+lookupList).len; } 1360 inline const Lookup& get_lookup (unsigned int i) const 1361 { return (this+lookupList)[i]; } 1362 1363 inline bool sanitize (hb_sanitize_context_t *c) { 1364 TRACE_SANITIZE (); 1365 return version.sanitize (c) && likely (version.major == 1) 1366 && scriptList.sanitize (c, this) 1367 && featureList.sanitize (c, this) 1368 && lookupList.sanitize (c, this); 1369 } 1370 1371 protected: 1372 FixedVersion version; /* Version of the GSUB/GPOS table--initially set 1373 * to 0x00010000 */ 1374 OffsetTo<ScriptList> 1375 scriptList; /* ScriptList table */ 1376 OffsetTo<FeatureList> 1377 featureList; /* FeatureList table */ 1378 OffsetTo<LookupList> 1379 lookupList; /* LookupList table */ 1380 public: 1381 DEFINE_SIZE_STATIC (10); 1382}; 1383 1384 1385 1386#endif /* HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH */ 1387