hb-ot-layout-gsubgpos-private.hh revision 60da763dfac96a7931d6e6bdef8b9973bd5209ab
1/*
2 * Copyright © 2007,2008,2009,2010  Red Hat, Inc.
3 * Copyright © 2010,2012  Google, Inc.
4 *
5 *  This is part of HarfBuzz, a text shaping library.
6 *
7 * Permission is hereby granted, without written agreement and without
8 * license or royalty fees, to use, copy, modify, and distribute this
9 * software and its documentation for any purpose, provided that the
10 * above copyright notice and the following two paragraphs appear in
11 * all copies of this software.
12 *
13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
14 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
15 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
16 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
17 * DAMAGE.
18 *
19 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
20 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
21 * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
22 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
23 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
24 *
25 * Red Hat Author(s): Behdad Esfahbod
26 * Google Author(s): Behdad Esfahbod
27 */
28
29#ifndef HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH
30#define HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH
31
32#include "hb-buffer-private.hh"
33#include "hb-ot-layout-gdef-table.hh"
34
35
36
37/* lig_props() packs a unique ligature id (high nibble) together with the
38 * component number in the ligature (0 = base) in the low nibble. */
39static inline void
40set_lig_props (hb_glyph_info_t &info, unsigned int lig_id, unsigned int lig_comp)
41{
42  info.lig_props() = (lig_id << 4) | (lig_comp & 0x0F);
43}
44static inline unsigned int
45get_lig_id (hb_glyph_info_t &info)
46{
47  return info.lig_props() >> 4;
48}
49static inline unsigned int
50get_lig_comp (hb_glyph_info_t &info)
51{
52  return info.lig_props() & 0x0F;
53}
54
55static inline uint8_t allocate_lig_id (hb_buffer_t *buffer) {
56  uint8_t lig_id = buffer->next_serial () & 0x0F;
57  if (unlikely (!lig_id))
58    lig_id = allocate_lig_id (buffer); /* in case of overflow */
59  return lig_id;
60}
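
/* A minimal usage sketch of the packing above (not part of the original
 * source), assuming `info` is an hb_glyph_info_t and lig_props() is the
 * 8-bit field these helpers read and write:
 *
 *   uint8_t lig_id = allocate_lig_id (buffer);  // in 1..15, never 0
 *   set_lig_props (info, lig_id, 2);            // e.g. lig_id 3 gives 0x32
 *   get_lig_id (info);                          // == lig_id
 *   get_lig_comp (info);                        // == 2
 *
 * Both values are expected to fit in four bits, which is why
 * allocate_lig_id() masks the buffer serial down to 0x0F. */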
61
62
63
64#ifndef HB_DEBUG_CLOSURE
65#define HB_DEBUG_CLOSURE (HB_DEBUG+0)
66#endif
67
68#define TRACE_CLOSURE() \
69	hb_auto_trace_t<HB_DEBUG_CLOSURE> trace (&c->debug_depth, "CLOSURE", this, HB_FUNC, "");
70
71
72/* TODO Add TRACE_RETURN annotation for would_apply */
73
74
75struct hb_closure_context_t
76{
77  hb_face_t *face;
78  hb_set_t *glyphs;
79  unsigned int nesting_level_left;
80  unsigned int debug_depth;
81
82
83  hb_closure_context_t (hb_face_t *face_,
84			hb_set_t *glyphs_,
85		        unsigned int nesting_level_left_ = MAX_NESTING_LEVEL) :
86			  face (face_), glyphs (glyphs_),
87			  nesting_level_left (nesting_level_left_),
88			  debug_depth (0) {}
89};
90
91
92
93#ifndef HB_DEBUG_APPLY
94#define HB_DEBUG_APPLY (HB_DEBUG+0)
95#endif
96
97#define TRACE_APPLY() \
98	hb_auto_trace_t<HB_DEBUG_APPLY> trace (&c->debug_depth, "APPLY", this, HB_FUNC, "idx %d codepoint %u", c->buffer->idx, c->buffer->cur().codepoint);
99
100
101
102struct hb_apply_context_t
103{
104  hb_font_t *font;
105  hb_face_t *face;
106  hb_buffer_t *buffer;
107  hb_direction_t direction;
108  hb_mask_t lookup_mask;
109  unsigned int nesting_level_left;
110  unsigned int lookup_props;
111  unsigned int property; /* property of first glyph */
112  unsigned int debug_depth;
113  bool has_glyph_classes;
114
115
116  hb_apply_context_t (hb_font_t *font_,
117		      hb_face_t *face_,
118		      hb_buffer_t *buffer_,
119		      hb_mask_t lookup_mask_) :
120			font (font_), face (face_), buffer (buffer_),
121			direction (buffer_->props.direction),
122			lookup_mask (lookup_mask_),
123			nesting_level_left (MAX_NESTING_LEVEL),
124			lookup_props (0), property (0), debug_depth (0),
125			has_glyph_classes (hb_ot_layout_has_glyph_classes (face_)) {}
126
127  void set_lookup (const Lookup &l) {
128    lookup_props = l.get_props ();
129  }
130
131  struct mark_skipping_forward_iterator_t
132  {
133    inline mark_skipping_forward_iterator_t (hb_apply_context_t *c_,
134					     unsigned int start_index_,
135					     unsigned int num_items_,
136					     bool context_match = false)
137    {
138      c = c_;
139      idx = start_index_;
140      num_items = num_items_;
141      mask = context_match ? -1 : c->lookup_mask;
142      syllable = context_match ? 0 : c->buffer->cur().syllable ();
143      end = c->buffer->len;
144    }
145    inline bool has_no_chance (void) const
146    {
147      return unlikely (num_items && idx + num_items >= end);
148    }
149    inline void reject (void)
150    {
151      num_items++;
152    }
153    inline bool next (unsigned int *property_out,
154		      unsigned int lookup_props)
155    {
156      assert (num_items > 0);
157      do
158      {
159	if (has_no_chance ())
160	  return false;
161	idx++;
162      } while (_hb_ot_layout_skip_mark (c->face, &c->buffer->info[idx], lookup_props, property_out));
163      num_items--;
164      return (c->buffer->info[idx].mask & mask) && (!syllable || syllable == c->buffer->info[idx].syllable ());
165    }
166    inline bool next (unsigned int *property_out = NULL)
167    {
168      return next (property_out, c->lookup_props);
169    }
170
171    unsigned int idx;
172    private:
173    hb_apply_context_t *c;
174    unsigned int num_items;
175    hb_mask_t mask;
176    uint8_t syllable;
177    unsigned int end;
178  };
179
180  struct mark_skipping_backward_iterator_t
181  {
182    inline mark_skipping_backward_iterator_t (hb_apply_context_t *c_,
183					      unsigned int start_index_,
184					      unsigned int num_items_,
185					      hb_mask_t mask_ = 0,
186					      bool match_syllable_ = true)
187    {
188      c = c_;
189      idx = start_index_;
190      num_items = num_items_;
191      mask = mask_ ? mask_ : c->lookup_mask;
192      syllable = match_syllable_ ? c->buffer->cur().syllable () : 0;
193    }
194    inline bool has_no_chance (void) const
195    {
196      return unlikely (idx < num_items);
197    }
198    inline void reject (void)
199    {
200      num_items++;
201    }
202    inline bool prev (unsigned int *property_out,
203		      unsigned int lookup_props)
204    {
205      assert (num_items > 0);
206      do
207      {
208	if (has_no_chance ())
209	  return false;
210	idx--;
211      } while (_hb_ot_layout_skip_mark (c->face, &c->buffer->out_info[idx], lookup_props, property_out));
212      num_items--;
213      return (c->buffer->out_info[idx].mask & mask) && (!syllable || syllable == c->buffer->out_info[idx].syllable ());
214    }
215    inline bool prev (unsigned int *property_out = NULL)
216    {
217      return prev (property_out, c->lookup_props);
218    }
219
220    unsigned int idx;
221    private:
222    hb_apply_context_t *c;
223    unsigned int num_items;
224    hb_mask_t mask;
225    uint8_t syllable;
226  };
227
228  inline bool should_mark_skip_current_glyph (void) const
229  {
230    unsigned int property;
231    return _hb_ot_layout_skip_mark (face, &buffer->cur(), lookup_props, &property);
232  }
233
234  inline void set_klass_guess (unsigned int klass_guess) const
235  {
236    buffer->cur().props_cache() = has_glyph_classes ? 0 : klass_guess;
237  }
238
239  inline void output_glyph (hb_codepoint_t glyph_index,
240			    unsigned int klass_guess = 0) const
241  {
242    set_klass_guess (klass_guess);
243    buffer->output_glyph (glyph_index);
244  }
245  inline void replace_glyph (hb_codepoint_t glyph_index,
246			     unsigned int klass_guess = 0) const
247  {
248    set_klass_guess (klass_guess);
249    buffer->replace_glyph (glyph_index);
250  }
251  inline void replace_glyphs (unsigned int num_in,
252			      unsigned int num_out,
253			      hb_codepoint_t *glyph_data,
254			      unsigned int klass_guess = 0) const
255  {
256    set_klass_guess (klass_guess);
257    buffer->replace_glyphs (num_in, num_out, glyph_data);
258  }
259};
260
261
262
263typedef bool (*intersects_func_t) (hb_set_t *glyphs, const USHORT &value, const void *data);
264typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const USHORT &value, const void *data);
265typedef void (*closure_lookup_func_t) (hb_closure_context_t *c, unsigned int lookup_index);
266typedef bool (*apply_lookup_func_t) (hb_apply_context_t *c, unsigned int lookup_index);
267
268struct ContextClosureFuncs
269{
270  intersects_func_t intersects;
271  closure_lookup_func_t closure;
272};
273struct ContextApplyFuncs
274{
275  match_func_t match;
276  apply_lookup_func_t apply;
277};
278
279static inline bool intersects_glyph (hb_set_t *glyphs, const USHORT &value, const void *data HB_UNUSED)
280{
281  return glyphs->has (value);
282}
283static inline bool intersects_class (hb_set_t *glyphs, const USHORT &value, const void *data)
284{
285  const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
286  return class_def.intersects_class (glyphs, value);
287}
288static inline bool intersects_coverage (hb_set_t *glyphs, const USHORT &value, const void *data)
289{
290  const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
291  return (data+coverage).intersects (glyphs);
292}
293
294static inline bool intersects_array (hb_closure_context_t *c,
295				     unsigned int count,
296				     const USHORT values[],
297				     intersects_func_t intersects_func,
298				     const void *intersects_data)
299{
300  for (unsigned int i = 0; i < count; i++)
301    if (likely (!intersects_func (c->glyphs, values[i], intersects_data)))
302      return false;
303  return true;
304}
305
306
307static inline bool match_glyph (hb_codepoint_t glyph_id, const USHORT &value, const void *data HB_UNUSED)
308{
309  return glyph_id == value;
310}
311static inline bool match_class (hb_codepoint_t glyph_id, const USHORT &value, const void *data)
312{
313  const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
314  return class_def.get_class (glyph_id) == value;
315}
316static inline bool match_coverage (hb_codepoint_t glyph_id, const USHORT &value, const void *data)
317{
318  const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
319  return (data+coverage).get_coverage (glyph_id) != NOT_COVERED;
320}
321
322
323static inline bool match_input (hb_apply_context_t *c,
324				unsigned int count, /* Including the first glyph (not matched) */
325				const USHORT input[], /* Array of input values--start with second glyph */
326				match_func_t match_func,
327				const void *match_data,
328				unsigned int *end_offset = NULL)
329{
330  hb_apply_context_t::mark_skipping_forward_iterator_t skippy_iter (c, c->buffer->idx, count - 1);
331  if (skippy_iter.has_no_chance ())
332    return false;
333
334  for (unsigned int i = 1; i < count; i++)
335  {
336    if (!skippy_iter.next ())
337      return false;
338
339    if (likely (!match_func (c->buffer->info[skippy_iter.idx].codepoint, input[i - 1], match_data)))
340      return false;
341  }
342
343  if (end_offset)
344    *end_offset = skippy_iter.idx - c->buffer->idx + 1;
345
346  return true;
347}
348
349static inline bool match_backtrack (hb_apply_context_t *c,
350				    unsigned int count,
351				    const USHORT backtrack[],
352				    match_func_t match_func,
353				    const void *match_data)
354{
355  hb_apply_context_t::mark_skipping_backward_iterator_t skippy_iter (c, c->buffer->backtrack_len (), count, true);
356  if (skippy_iter.has_no_chance ())
357    return false;
358
359  for (unsigned int i = 0; i < count; i++)
360  {
361    if (!skippy_iter.prev ())
362      return false;
363
364    if (likely (!match_func (c->buffer->out_info[skippy_iter.idx].codepoint, backtrack[i], match_data)))
365      return false;
366  }
367
368  return true;
369}
370
371static inline bool match_lookahead (hb_apply_context_t *c,
372				    unsigned int count,
373				    const USHORT lookahead[],
374				    match_func_t match_func,
375				    const void *match_data,
376				    unsigned int offset)
377{
378  hb_apply_context_t::mark_skipping_forward_iterator_t skippy_iter (c, c->buffer->idx + offset - 1, count, true);
379  if (skippy_iter.has_no_chance ())
380    return false;
381
382  for (unsigned int i = 0; i < count; i++)
383  {
384    if (!skippy_iter.next ())
385      return false;
386
387    if (likely (!match_func (c->buffer->info[skippy_iter.idx].codepoint, lookahead[i], match_data)))
388      return false;
389  }
390
391  return true;
392}
393
394
395
396struct LookupRecord
397{
398  inline bool sanitize (hb_sanitize_context_t *c) {
399    TRACE_SANITIZE ();
400    return TRACE_RETURN (c->check_struct (this));
401  }
402
403  USHORT	sequenceIndex;		/* Index into current glyph
404					 * sequence--first glyph = 0 */
405  USHORT	lookupListIndex;	/* Lookup to apply to that
406					 * position--zero-based */
407  public:
408  DEFINE_SIZE_STATIC (4);
409};
410
411
412static inline void closure_lookup (hb_closure_context_t *c,
413				   unsigned int lookupCount,
414				   const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */
415				   closure_lookup_func_t closure_func)
416{
417  for (unsigned int i = 0; i < lookupCount; i++)
418    closure_func (c, lookupRecord[i].lookupListIndex);
419}
420
421static inline bool apply_lookup (hb_apply_context_t *c,
422				 unsigned int count, /* Including the first glyph */
423				 unsigned int lookupCount,
424				 const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */
425				 apply_lookup_func_t apply_func)
426{
427  unsigned int end = c->buffer->len;
428  if (unlikely (count == 0 || c->buffer->idx + count > end))
429    return false;
430
431  /* TODO We don't support lookupRecord arrays that are not increasing:
432   *      Should be easy for in_place ones at least. */
433
434  /* Note: If sublookup is reverse, it will underflow after the first loop
435   * and we jump out of it.  Not entirely disastrous.  So we don't check
436   * for reverse lookup here.
437   */
438  for (unsigned int i = 0; i < count; /* NOP */)
439  {
440    if (unlikely (c->buffer->idx == end))
441      return true;
442    while (c->should_mark_skip_current_glyph ())
443    {
444      /* No lookup applied for this index */
445      c->buffer->next_glyph ();
446      if (unlikely (c->buffer->idx == end))
447	return true;
448    }
449
450    if (lookupCount && i == lookupRecord->sequenceIndex)
451    {
452      unsigned int old_pos = c->buffer->idx;
453
454      /* Apply a lookup */
455      bool done = apply_func (c, lookupRecord->lookupListIndex);
456
457      lookupRecord++;
458      lookupCount--;
459      /* Err, this is wrong if the lookup jumped over some glyphs */
460      i += c->buffer->idx - old_pos;
461      if (unlikely (c->buffer->idx == end))
462	return true;
463
464      if (!done)
465	goto not_applied;
466    }
467    else
468    {
469    not_applied:
470      /* No lookup applied for this index */
471      c->buffer->next_glyph ();
472      i++;
473    }
474  }
475
476  return true;
477}
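
/* A hypothetical walk through the loop above, for count == 3 and two
 * records {sequenceIndex = 0, lookupListIndex = 4} and {sequenceIndex = 2,
 * lookupListIndex = 7} (made-up indices):
 *
 *   i == 0: matches the first record, so lookup 4 is applied at
 *           buffer->idx and i advances by the number of input glyphs the
 *           sublookup consumed (normally one);
 *   i == 1: no record targets this position, so the glyph is simply
 *           copied forward with next_glyph ();
 *   i == 2: matches the second record, so lookup 7 is applied there.
 *
 * This is also why the records have to be sorted by increasing
 * sequenceIndex, as the TODO above notes. */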
478
479
480
481/* Contextual lookups */
482
483struct ContextClosureLookupContext
484{
485  ContextClosureFuncs funcs;
486  const void *intersects_data;
487};
488
489struct ContextApplyLookupContext
490{
491  ContextApplyFuncs funcs;
492  const void *match_data;
493};
494
495static inline void context_closure_lookup (hb_closure_context_t *c,
496					   unsigned int inputCount, /* Including the first glyph (not matched) */
497					   const USHORT input[], /* Array of input values--start with second glyph */
498					   unsigned int lookupCount,
499					   const LookupRecord lookupRecord[],
500					   ContextClosureLookupContext &lookup_context)
501{
502  if (intersects_array (c,
503			inputCount ? inputCount - 1 : 0, input,
504			lookup_context.funcs.intersects, lookup_context.intersects_data))
505    closure_lookup (c,
506		    lookupCount, lookupRecord,
507		    lookup_context.funcs.closure);
508}
509
510
511static inline bool context_apply_lookup (hb_apply_context_t *c,
512					 unsigned int inputCount, /* Including the first glyph (not matched) */
513					 const USHORT input[], /* Array of input values--start with second glyph */
514					 unsigned int lookupCount,
515					 const LookupRecord lookupRecord[],
516					 ContextApplyLookupContext &lookup_context)
517{
518  return match_input (c,
519		      inputCount, input,
520		      lookup_context.funcs.match, lookup_context.match_data)
521      && apply_lookup (c,
522		       inputCount,
523		       lookupCount, lookupRecord,
524		       lookup_context.funcs.apply);
525}
526
527struct Rule
528{
529  friend struct RuleSet;
530
531  private:
532
533  inline void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const
534  {
535    TRACE_CLOSURE ();
536    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (input, input[0].static_size * (inputCount ? inputCount - 1 : 0));
537    context_closure_lookup (c,
538			    inputCount, input,
539			    lookupCount, lookupRecord,
540			    lookup_context);
541  }
542
543  inline bool apply (hb_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
544  {
545    TRACE_APPLY ();
546    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (input, input[0].static_size * (inputCount ? inputCount - 1 : 0));
547    return TRACE_RETURN (context_apply_lookup (c, inputCount, input, lookupCount, lookupRecord, lookup_context));
548  }
549
550  public:
551  inline bool sanitize (hb_sanitize_context_t *c) {
552    TRACE_SANITIZE ();
553    return inputCount.sanitize (c)
554	&& lookupCount.sanitize (c)
555	&& c->check_range (input,
556			   input[0].static_size * inputCount
557			   + lookupRecordX[0].static_size * lookupCount);
558  }
559
560  private:
561  USHORT	inputCount;		/* Total number of glyphs in input
562					 * glyph sequence--includes the first
563					 * glyph */
564  USHORT	lookupCount;		/* Number of LookupRecords */
565  USHORT	input[VAR];		/* Array of match inputs--start with
566					 * second glyph */
567  LookupRecord	lookupRecordX[VAR];	/* Array of LookupRecords--in
568					 * design order */
569  public:
570  DEFINE_SIZE_ARRAY2 (4, input, lookupRecordX);
571};
572
573struct RuleSet
574{
575  inline void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const
576  {
577    TRACE_CLOSURE ();
578    unsigned int num_rules = rule.len;
579    for (unsigned int i = 0; i < num_rules; i++)
580      (this+rule[i]).closure (c, lookup_context);
581  }
582
583  inline bool apply (hb_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
584  {
585    TRACE_APPLY ();
586    unsigned int num_rules = rule.len;
587    for (unsigned int i = 0; i < num_rules; i++)
588    {
589      if ((this+rule[i]).apply (c, lookup_context))
590        return TRACE_RETURN (true);
591    }
592    return TRACE_RETURN (false);
593  }
594
595  inline bool sanitize (hb_sanitize_context_t *c) {
596    TRACE_SANITIZE ();
597    return TRACE_RETURN (rule.sanitize (c, this));
598  }
599
600  private:
601  OffsetArrayOf<Rule>
602		rule;			/* Array of Rule tables
603					 * ordered by preference */
604  public:
605  DEFINE_SIZE_ARRAY (2, rule);
606};
607
608
609struct ContextFormat1
610{
611  friend struct Context;
612
613  private:
614
615  inline void closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const
616  {
617    TRACE_CLOSURE ();
618
619    const Coverage &cov = (this+coverage);
620
621    struct ContextClosureLookupContext lookup_context = {
622      {intersects_glyph, closure_func},
623      NULL
624    };
625
626    unsigned int count = ruleSet.len;
627    for (unsigned int i = 0; i < count; i++)
628      if (cov.intersects_coverage (c->glyphs, i)) {
629	const RuleSet &rule_set = this+ruleSet[i];
630	rule_set.closure (c, lookup_context);
631      }
632  }
633
634  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
635  {
636    TRACE_APPLY ();
637    unsigned int index = (this+coverage) (c->buffer->cur().codepoint);
638    if (likely (index == NOT_COVERED))
639      return TRACE_RETURN (false);
640
641    const RuleSet &rule_set = this+ruleSet[index];
642    struct ContextApplyLookupContext lookup_context = {
643      {match_glyph, apply_func},
644      NULL
645    };
646    return TRACE_RETURN (rule_set.apply (c, lookup_context));
647  }
648
649  inline bool sanitize (hb_sanitize_context_t *c) {
650    TRACE_SANITIZE ();
651    return TRACE_RETURN (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
652  }
653
654  private:
655  USHORT	format;			/* Format identifier--format = 1 */
656  OffsetTo<Coverage>
657		coverage;		/* Offset to Coverage table--from
658					 * beginning of table */
659  OffsetArrayOf<RuleSet>
660		ruleSet;		/* Array of RuleSet tables
661					 * ordered by Coverage Index */
662  public:
663  DEFINE_SIZE_ARRAY (6, ruleSet);
664};
665
666
667struct ContextFormat2
668{
669  friend struct Context;
670
671  private:
672
673  inline void closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const
674  {
675    TRACE_CLOSURE ();
676    if (!(this+coverage).intersects (c->glyphs))
677      return;
678
679    const ClassDef &class_def = this+classDef;
680
681    struct ContextClosureLookupContext lookup_context = {
682      {intersects_class, closure_func},
683      NULL
684    };
685
686    unsigned int count = ruleSet.len;
687    for (unsigned int i = 0; i < count; i++)
688      if (class_def.intersects_class (c->glyphs, i)) {
689	const RuleSet &rule_set = this+ruleSet[i];
690	rule_set.closure (c, lookup_context);
691      }
692  }
693
694  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
695  {
696    TRACE_APPLY ();
697    unsigned int index = (this+coverage) (c->buffer->cur().codepoint);
698    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
699
700    const ClassDef &class_def = this+classDef;
701    index = class_def (c->buffer->cur().codepoint);
702    const RuleSet &rule_set = this+ruleSet[index];
703    struct ContextApplyLookupContext lookup_context = {
704      {match_class, apply_func},
705      &class_def
706    };
707    return TRACE_RETURN (rule_set.apply (c, lookup_context));
708  }
709
710  inline bool sanitize (hb_sanitize_context_t *c) {
711    TRACE_SANITIZE ();
712    return TRACE_RETURN (coverage.sanitize (c, this) && classDef.sanitize (c, this) && ruleSet.sanitize (c, this));
713  }
714
715  private:
716  USHORT	format;			/* Format identifier--format = 2 */
717  OffsetTo<Coverage>
718		coverage;		/* Offset to Coverage table--from
719					 * beginning of table */
720  OffsetTo<ClassDef>
721		classDef;		/* Offset to glyph ClassDef table--from
722					 * beginning of table */
723  OffsetArrayOf<RuleSet>
724		ruleSet;		/* Array of RuleSet tables
725					 * ordered by class */
726  public:
727  DEFINE_SIZE_ARRAY (8, ruleSet);
728};
729
730
731struct ContextFormat3
732{
733  friend struct Context;
734
735  private:
736
737  inline void closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const
738  {
739    TRACE_CLOSURE ();
740    if (!(this+coverage[0]).intersects (c->glyphs))
741      return;
742
743    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * glyphCount);
744    struct ContextClosureLookupContext lookup_context = {
745      {intersects_coverage, closure_func},
746      this
747    };
748    context_closure_lookup (c,
749			    glyphCount, (const USHORT *) (coverage + 1),
750			    lookupCount, lookupRecord,
751			    lookup_context);
752  }
753
754  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
755  {
756    TRACE_APPLY ();
757    unsigned int index = (this+coverage[0]) (c->buffer->cur().codepoint);
758    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
759
760    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * glyphCount);
761    struct ContextApplyLookupContext lookup_context = {
762      {match_coverage, apply_func},
763      this
764    };
765    return TRACE_RETURN (context_apply_lookup (c, glyphCount, (const USHORT *) (coverage + 1), lookupCount, lookupRecord, lookup_context));
766  }
767
768  inline bool sanitize (hb_sanitize_context_t *c) {
769    TRACE_SANITIZE ();
770    if (!c->check_struct (this)) return TRACE_RETURN (false);
771    unsigned int count = glyphCount;
772    if (!c->check_array (coverage, coverage[0].static_size, count)) return TRACE_RETURN (false);
773    for (unsigned int i = 0; i < count; i++)
774      if (!coverage[i].sanitize (c, this)) return TRACE_RETURN (false);
775    LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * count);
776    return TRACE_RETURN (c->check_array (lookupRecord, lookupRecord[0].static_size, lookupCount));
777  }
778
779  private:
780  USHORT	format;			/* Format identifier--format = 3 */
781  USHORT	glyphCount;		/* Number of glyphs in the input glyph
782					 * sequence */
783  USHORT	lookupCount;		/* Number of LookupRecords */
784  OffsetTo<Coverage>
785		coverage[VAR];		/* Array of offsets to Coverage
786					 * table in glyph sequence order */
787  LookupRecord	lookupRecordX[VAR];	/* Array of LookupRecords--in
788					 * design order */
789  public:
790  DEFINE_SIZE_ARRAY2 (6, coverage, lookupRecordX);
791};
792
793struct Context
794{
795  protected:
796
797  inline void closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const
798  {
799    TRACE_CLOSURE ();
800    switch (u.format) {
801    case 1: u.format1.closure (c, closure_func); break;
802    case 2: u.format2.closure (c, closure_func); break;
803    case 3: u.format3.closure (c, closure_func); break;
804    default:                                     break;
805    }
806  }
807
808  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
809  {
810    TRACE_APPLY ();
811    switch (u.format) {
812    case 1: return TRACE_RETURN (u.format1.apply (c, apply_func));
813    case 2: return TRACE_RETURN (u.format2.apply (c, apply_func));
814    case 3: return TRACE_RETURN (u.format3.apply (c, apply_func));
815    default:return TRACE_RETURN (false);
816    }
817  }
818
819  inline bool sanitize (hb_sanitize_context_t *c) {
820    TRACE_SANITIZE ();
821    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
822    switch (u.format) {
823    case 1: return TRACE_RETURN (u.format1.sanitize (c));
824    case 2: return TRACE_RETURN (u.format2.sanitize (c));
825    case 3: return TRACE_RETURN (u.format3.sanitize (c));
826    default:return TRACE_RETURN (true);
827    }
828  }
829
830  private:
831  union {
832  USHORT		format;		/* Format identifier */
833  ContextFormat1	format1;
834  ContextFormat2	format2;
835  ContextFormat3	format3;
836  } u;
837};
838
839
840/* Chaining Contextual lookups */
841
842struct ChainContextClosureLookupContext
843{
844  ContextClosureFuncs funcs;
845  const void *intersects_data[3];
846};
847
848struct ChainContextApplyLookupContext
849{
850  ContextApplyFuncs funcs;
851  const void *match_data[3];
852};
853
854static inline void chain_context_closure_lookup (hb_closure_context_t *c,
855						 unsigned int backtrackCount,
856						 const USHORT backtrack[],
857						 unsigned int inputCount, /* Including the first glyph (not matched) */
858						 const USHORT input[], /* Array of input values--start with second glyph */
859						 unsigned int lookaheadCount,
860						 const USHORT lookahead[],
861						 unsigned int lookupCount,
862						 const LookupRecord lookupRecord[],
863						 ChainContextClosureLookupContext &lookup_context)
864{
865  if (intersects_array (c,
866			backtrackCount, backtrack,
867			lookup_context.funcs.intersects, lookup_context.intersects_data[0])
868   && intersects_array (c,
869			inputCount ? inputCount - 1 : 0, input,
870			lookup_context.funcs.intersects, lookup_context.intersects_data[1])
871  && intersects_array (c,
872		       lookaheadCount, lookahead,
873		       lookup_context.funcs.intersects, lookup_context.intersects_data[2]))
874    closure_lookup (c,
875		    lookupCount, lookupRecord,
876		    lookup_context.funcs.closure);
877}
878
879static inline bool chain_context_apply_lookup (hb_apply_context_t *c,
880					       unsigned int backtrackCount,
881					       const USHORT backtrack[],
882					       unsigned int inputCount, /* Including the first glyph (not matched) */
883					       const USHORT input[], /* Array of input values--start with second glyph */
884					       unsigned int lookaheadCount,
885					       const USHORT lookahead[],
886					       unsigned int lookupCount,
887					       const LookupRecord lookupRecord[],
888					       ChainContextApplyLookupContext &lookup_context)
889{
890  unsigned int lookahead_offset;
891  return match_input (c,
892		      inputCount, input,
893		      lookup_context.funcs.match, lookup_context.match_data[1],
894		      &lookahead_offset)
895      && match_backtrack (c,
896			  backtrackCount, backtrack,
897			  lookup_context.funcs.match, lookup_context.match_data[0])
898      && match_lookahead (c,
899			  lookaheadCount, lookahead,
900			  lookup_context.funcs.match, lookup_context.match_data[2],
901			  lookahead_offset)
902      && apply_lookup (c,
903		       inputCount,
904		       lookupCount, lookupRecord,
905		       lookup_context.funcs.apply);
906}
907
908struct ChainRule
909{
910  friend struct ChainRuleSet;
911
912  private:
913
914  inline void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const
915  {
916    TRACE_CLOSURE ();
917    const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
918    const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
919    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
920    chain_context_closure_lookup (c,
921				  backtrack.len, backtrack.array,
922				  input.len, input.array,
923				  lookahead.len, lookahead.array,
924				  lookup.len, lookup.array,
925				  lookup_context);
926  }
927
928  inline bool apply (hb_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
929  {
930    TRACE_APPLY ();
931    const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
932    const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
933    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
934    return TRACE_RETURN (chain_context_apply_lookup (c,
935						     backtrack.len, backtrack.array,
936						     input.len, input.array,
937						     lookahead.len, lookahead.array, lookup.len,
938						     lookup.array, lookup_context));
939  }
940
941  public:
942  inline bool sanitize (hb_sanitize_context_t *c) {
943    TRACE_SANITIZE ();
944    if (!backtrack.sanitize (c)) return TRACE_RETURN (false);
945    HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
946    if (!input.sanitize (c)) return TRACE_RETURN (false);
947    ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
948    if (!lookahead.sanitize (c)) return TRACE_RETURN (false);
949    ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
950    return TRACE_RETURN (lookup.sanitize (c));
951  }
952
953  private:
954  ArrayOf<USHORT>
955		backtrack;		/* Array of backtracking values
956					 * (to be matched before the input
957					 * sequence) */
958  HeadlessArrayOf<USHORT>
959		inputX;			/* Array of input values (start with
960					 * second glyph) */
961  ArrayOf<USHORT>
962		lookaheadX;		/* Array of lookahead values (to be
963					 * matched after the input sequence) */
964  ArrayOf<LookupRecord>
965		lookupX;		/* Array of LookupRecords--in
966					 * design order */
967  public:
968  DEFINE_SIZE_MIN (8);
969};
970
971struct ChainRuleSet
972{
973  inline void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const
974  {
975    TRACE_CLOSURE ();
976    unsigned int num_rules = rule.len;
977    for (unsigned int i = 0; i < num_rules; i++)
978      (this+rule[i]).closure (c, lookup_context);
979  }
980
981  inline bool apply (hb_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
982  {
983    TRACE_APPLY ();
984    unsigned int num_rules = rule.len;
985    for (unsigned int i = 0; i < num_rules; i++)
986      if ((this+rule[i]).apply (c, lookup_context))
987        return TRACE_RETURN (true);
988
989    return TRACE_RETURN (false);
990  }
991
992  inline bool sanitize (hb_sanitize_context_t *c) {
993    TRACE_SANITIZE ();
994    return TRACE_RETURN (rule.sanitize (c, this));
995  }
996
997  private:
998  OffsetArrayOf<ChainRule>
999		rule;			/* Array of ChainRule tables
1000					 * ordered by preference */
1001  public:
1002  DEFINE_SIZE_ARRAY (2, rule);
1003};
1004
1005struct ChainContextFormat1
1006{
1007  friend struct ChainContext;
1008
1009  private:
1010
1011  inline void closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const
1012  {
1013    TRACE_CLOSURE ();
1014    const Coverage &cov = (this+coverage);
1015
1016    struct ChainContextClosureLookupContext lookup_context = {
1017      {intersects_glyph, closure_func},
1018      {NULL, NULL, NULL}
1019    };
1020
1021    unsigned int count = ruleSet.len;
1022    for (unsigned int i = 0; i < count; i++)
1023      if (cov.intersects_coverage (c->glyphs, i)) {
1024	const ChainRuleSet &rule_set = this+ruleSet[i];
1025	rule_set.closure (c, lookup_context);
1026      }
1027  }
1028
1029  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
1030  {
1031    TRACE_APPLY ();
1032    unsigned int index = (this+coverage) (c->buffer->cur().codepoint);
1033    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
1034
1035    const ChainRuleSet &rule_set = this+ruleSet[index];
1036    struct ChainContextApplyLookupContext lookup_context = {
1037      {match_glyph, apply_func},
1038      {NULL, NULL, NULL}
1039    };
1040    return TRACE_RETURN (rule_set.apply (c, lookup_context));
1041  }
1042
1043  inline bool sanitize (hb_sanitize_context_t *c) {
1044    TRACE_SANITIZE ();
1045    return TRACE_RETURN (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
1046  }
1047
1048  private:
1049  USHORT	format;			/* Format identifier--format = 1 */
1050  OffsetTo<Coverage>
1051		coverage;		/* Offset to Coverage table--from
1052					 * beginning of table */
1053  OffsetArrayOf<ChainRuleSet>
1054		ruleSet;		/* Array of ChainRuleSet tables
1055					 * ordered by Coverage Index */
1056  public:
1057  DEFINE_SIZE_ARRAY (6, ruleSet);
1058};
1059
1060struct ChainContextFormat2
1061{
1062  friend struct ChainContext;
1063
1064  private:
1065
1066  inline void closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const
1067  {
1068    TRACE_CLOSURE ();
1069    if (!(this+coverage).intersects (c->glyphs))
1070      return;
1071
1072    const ClassDef &backtrack_class_def = this+backtrackClassDef;
1073    const ClassDef &input_class_def = this+inputClassDef;
1074    const ClassDef &lookahead_class_def = this+lookaheadClassDef;
1075
1076    struct ChainContextClosureLookupContext lookup_context = {
1077      {intersects_class, closure_func},
1078      {&backtrack_class_def,
1079       &input_class_def,
1080       &lookahead_class_def}
1081    };
1082
1083    unsigned int count = ruleSet.len;
1084    for (unsigned int i = 0; i < count; i++)
1085      if (input_class_def.intersects_class (c->glyphs, i)) {
1086	const ChainRuleSet &rule_set = this+ruleSet[i];
1087	rule_set.closure (c, lookup_context);
1088      }
1089  }
1090
1091  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
1092  {
1093    TRACE_APPLY ();
1094    unsigned int index = (this+coverage) (c->buffer->cur().codepoint);
1095    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
1096
1097    const ClassDef &backtrack_class_def = this+backtrackClassDef;
1098    const ClassDef &input_class_def = this+inputClassDef;
1099    const ClassDef &lookahead_class_def = this+lookaheadClassDef;
1100
1101    index = input_class_def (c->buffer->cur().codepoint);
1102    const ChainRuleSet &rule_set = this+ruleSet[index];
1103    struct ChainContextApplyLookupContext lookup_context = {
1104      {match_class, apply_func},
1105      {&backtrack_class_def,
1106       &input_class_def,
1107       &lookahead_class_def}
1108    };
1109    return TRACE_RETURN (rule_set.apply (c, lookup_context));
1110  }
1111
1112  inline bool sanitize (hb_sanitize_context_t *c) {
1113    TRACE_SANITIZE ();
1114    return TRACE_RETURN (coverage.sanitize (c, this) && backtrackClassDef.sanitize (c, this) &&
1115			 inputClassDef.sanitize (c, this) && lookaheadClassDef.sanitize (c, this) &&
1116			 ruleSet.sanitize (c, this));
1117  }
1118
1119  private:
1120  USHORT	format;			/* Format identifier--format = 2 */
1121  OffsetTo<Coverage>
1122		coverage;		/* Offset to Coverage table--from
1123					 * beginning of table */
1124  OffsetTo<ClassDef>
1125		backtrackClassDef;	/* Offset to glyph ClassDef table
1126					 * containing backtrack sequence
1127					 * data--from beginning of table */
1128  OffsetTo<ClassDef>
1129		inputClassDef;		/* Offset to glyph ClassDef
1130					 * table containing input sequence
1131					 * data--from beginning of table */
1132  OffsetTo<ClassDef>
1133		lookaheadClassDef;	/* Offset to glyph ClassDef table
1134					 * containing lookahead sequence
1135					 * data--from beginning of table */
1136  OffsetArrayOf<ChainRuleSet>
1137		ruleSet;		/* Array of ChainRuleSet tables
1138					 * ordered by class */
1139  public:
1140  DEFINE_SIZE_ARRAY (12, ruleSet);
1141};
1142
1143struct ChainContextFormat3
1144{
1145  friend struct ChainContext;
1146
1147  private:
1148
1149  inline void closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const
1150  {
1151    TRACE_CLOSURE ();
1152    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
1153
1154    if (!(this+input[0]).intersects (c->glyphs))
1155      return;
1156
1157    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
1158    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
1159    struct ChainContextClosureLookupContext lookup_context = {
1160      {intersects_coverage, closure_func},
1161      {this, this, this}
1162    };
1163    chain_context_closure_lookup (c,
1164				  backtrack.len, (const USHORT *) backtrack.array,
1165				  input.len, (const USHORT *) input.array + 1,
1166				  lookahead.len, (const USHORT *) lookahead.array,
1167				  lookup.len, lookup.array,
1168				  lookup_context);
1169  }
1170
1171  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
1172  {
1173    TRACE_APPLY ();
1174    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
1175
1176    unsigned int index = (this+input[0]) (c->buffer->cur().codepoint);
1177    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
1178
1179    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
1180    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
1181    struct ChainContextApplyLookupContext lookup_context = {
1182      {match_coverage, apply_func},
1183      {this, this, this}
1184    };
1185    return TRACE_RETURN (chain_context_apply_lookup (c,
1186						     backtrack.len, (const USHORT *) backtrack.array,
1187						     input.len, (const USHORT *) input.array + 1,
1188						     lookahead.len, (const USHORT *) lookahead.array,
1189						     lookup.len, lookup.array, lookup_context));
1190  }
1191
1192  inline bool sanitize (hb_sanitize_context_t *c) {
1193    TRACE_SANITIZE ();
1194    if (!backtrack.sanitize (c, this)) return TRACE_RETURN (false);
1195    OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
1196    if (!input.sanitize (c, this)) return TRACE_RETURN (false);
1197    OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
1198    if (!lookahead.sanitize (c, this)) return TRACE_RETURN (false);
1199    ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
1200    return TRACE_RETURN (lookup.sanitize (c));
1201  }
1202
1203  private:
1204  USHORT	format;			/* Format identifier--format = 3 */
1205  OffsetArrayOf<Coverage>
1206		backtrack;		/* Array of coverage tables
1207					 * in backtracking sequence, in glyph
1208					 * sequence order */
1209  OffsetArrayOf<Coverage>
1210		inputX;			/* Array of coverage
1211					 * tables in input sequence, in glyph
1212					 * sequence order */
1213  OffsetArrayOf<Coverage>
1214		lookaheadX;		/* Array of coverage tables
1215					 * in lookahead sequence, in glyph
1216					 * sequence order */
1217  ArrayOf<LookupRecord>
1218		lookupX;		/* Array of LookupRecords--in
1219					 * design order */
1220  public:
1221  DEFINE_SIZE_MIN (10);
1222};
1223
1224struct ChainContext
1225{
1226  protected:
1227
1228  inline void closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const
1229  {
1230    TRACE_CLOSURE ();
1231    switch (u.format) {
1232    case 1: u.format1.closure (c, closure_func); break;
1233    case 2: u.format2.closure (c, closure_func); break;
1234    case 3: u.format3.closure (c, closure_func); break;
1235    default:                                     break;
1236    }
1237  }
1238
1239  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
1240  {
1241    TRACE_APPLY ();
1242    switch (u.format) {
1243    case 1: return TRACE_RETURN (u.format1.apply (c, apply_func));
1244    case 2: return TRACE_RETURN (u.format2.apply (c, apply_func));
1245    case 3: return TRACE_RETURN (u.format3.apply (c, apply_func));
1246    default:return TRACE_RETURN (false);
1247    }
1248  }
1249
1250  inline bool sanitize (hb_sanitize_context_t *c) {
1251    TRACE_SANITIZE ();
1252    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
1253    switch (u.format) {
1254    case 1: return TRACE_RETURN (u.format1.sanitize (c));
1255    case 2: return TRACE_RETURN (u.format2.sanitize (c));
1256    case 3: return TRACE_RETURN (u.format3.sanitize (c));
1257    default:return TRACE_RETURN (true);
1258    }
1259  }
1260
1261  private:
1262  union {
1263  USHORT		format;	/* Format identifier */
1264  ChainContextFormat1	format1;
1265  ChainContextFormat2	format2;
1266  ChainContextFormat3	format3;
1267  } u;
1268};
1269
1270
1271struct ExtensionFormat1
1272{
1273  friend struct Extension;
1274
1275  protected:
1276  inline unsigned int get_type (void) const { return extensionLookupType; }
1277  inline unsigned int get_offset (void) const { return extensionOffset; }
1278
1279  inline bool sanitize (hb_sanitize_context_t *c) {
1280    TRACE_SANITIZE ();
1281    return TRACE_RETURN (c->check_struct (this));
1282  }
1283
1284  private:
1285  USHORT	format;			/* Format identifier. Set to 1. */
1286  USHORT	extensionLookupType;	/* Lookup type of subtable referenced
1287					 * by ExtensionOffset (i.e. the
1288					 * extension subtable). */
1289  ULONG		extensionOffset;	/* Offset to the extension subtable,
1290					 * of lookup type subtable. */
1291  public:
1292  DEFINE_SIZE_STATIC (8);
1293};
1294
1295struct Extension
1296{
1297  inline unsigned int get_type (void) const
1298  {
1299    switch (u.format) {
1300    case 1: return u.format1.get_type ();
1301    default:return 0;
1302    }
1303  }
1304  inline unsigned int get_offset (void) const
1305  {
1306    switch (u.format) {
1307    case 1: return u.format1.get_offset ();
1308    default:return 0;
1309    }
1310  }
1311
1312  inline bool sanitize (hb_sanitize_context_t *c) {
1313    TRACE_SANITIZE ();
1314    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
1315    switch (u.format) {
1316    case 1: return TRACE_RETURN (u.format1.sanitize (c));
1317    default:return TRACE_RETURN (true);
1318    }
1319  }
1320
1321  private:
1322  union {
1323  USHORT		format;		/* Format identifier */
1324  ExtensionFormat1	format1;
1325  } u;
1326};
1327
1328
1329/*
1330 * GSUB/GPOS Common
1331 */
1332
1333struct GSUBGPOS
1334{
1335  static const hb_tag_t GSUBTag	= HB_OT_TAG_GSUB;
1336  static const hb_tag_t GPOSTag	= HB_OT_TAG_GPOS;
1337
1338  inline unsigned int get_script_count (void) const
1339  { return (this+scriptList).len; }
1340  inline const Tag& get_script_tag (unsigned int i) const
1341  { return (this+scriptList).get_tag (i); }
1342  inline unsigned int get_script_tags (unsigned int start_offset,
1343				       unsigned int *script_count /* IN/OUT */,
1344				       hb_tag_t     *script_tags /* OUT */) const
1345  { return (this+scriptList).get_tags (start_offset, script_count, script_tags); }
1346  inline const Script& get_script (unsigned int i) const
1347  { return (this+scriptList)[i]; }
1348  inline bool find_script_index (hb_tag_t tag, unsigned int *index) const
1349  { return (this+scriptList).find_index (tag, index); }
1350
1351  inline unsigned int get_feature_count (void) const
1352  { return (this+featureList).len; }
1353  inline const Tag& get_feature_tag (unsigned int i) const
1354  { return (this+featureList).get_tag (i); }
1355  inline unsigned int get_feature_tags (unsigned int start_offset,
1356					unsigned int *feature_count /* IN/OUT */,
1357					hb_tag_t     *feature_tags /* OUT */) const
1358  { return (this+featureList).get_tags (start_offset, feature_count, feature_tags); }
1359  inline const Feature& get_feature (unsigned int i) const
1360  { return (this+featureList)[i]; }
1361  inline bool find_feature_index (hb_tag_t tag, unsigned int *index) const
1362  { return (this+featureList).find_index (tag, index); }
1363
1364  inline unsigned int get_lookup_count (void) const
1365  { return (this+lookupList).len; }
1366  inline const Lookup& get_lookup (unsigned int i) const
1367  { return (this+lookupList)[i]; }
1368
1369  inline bool sanitize (hb_sanitize_context_t *c) {
1370    TRACE_SANITIZE ();
1371    return TRACE_RETURN (version.sanitize (c) && likely (version.major == 1) &&
1372			 scriptList.sanitize (c, this) &&
1373			 featureList.sanitize (c, this) &&
1374			 lookupList.sanitize (c, this));
1375  }
1376
1377  protected:
1378  FixedVersion	version;	/* Version of the GSUB/GPOS table--initially set
1379				 * to 0x00010000 */
1380  OffsetTo<ScriptList>
1381		scriptList;  	/* ScriptList table */
1382  OffsetTo<FeatureList>
1383		featureList; 	/* FeatureList table */
1384  OffsetTo<LookupList>
1385		lookupList; 	/* LookupList table */
1386  public:
1387  DEFINE_SIZE_STATIC (10);
1388};
1389
1390
1391
1392#endif /* HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH */
1393