hb-ot-layout-gsubgpos-private.hh revision 03408ce73d003ed4e58e3f8472f9445e72b86bee
/*
 * Copyright © 2007,2008,2009,2010  Red Hat, Inc.
 * Copyright © 2010  Google, Inc.
 *
 *  This is part of HarfBuzz, a text shaping library.
 *
 * Permission is hereby granted, without written agreement and without
 * license or royalty fees, to use, copy, modify, and distribute this
 * software and its documentation for any purpose, provided that the
 * above copyright notice and the following two paragraphs appear in
 * all copies of this software.
 *
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 *
 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 *
 * Red Hat Author(s): Behdad Esfahbod
 * Google Author(s): Behdad Esfahbod
 */

#ifndef HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH
#define HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH

#include "hb-buffer-private.hh"
#include "hb-ot-layout-gdef-table.hh"


/* buffer var allocations */
#define lig_id() var2.u8[2] /* unique ligature id */
#define lig_comp() var2.u8[3] /* component number in the ligature (0 = base) */

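/* Returns a small, non-zero serial used to tag all the components of one
 * ligature.  The serial is re-drawn once if the 8-bit counter wraps around
 * to 0, presumably so that 0 stays available to mean "no ligature id". */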
static inline uint8_t allocate_lig_id (hb_buffer_t *buffer) {
  uint8_t lig_id = buffer->next_serial ();
  if (unlikely (!lig_id)) lig_id = buffer->next_serial (); /* in case of overflow */
  return lig_id;
}



#ifndef HB_DEBUG_APPLY
#define HB_DEBUG_APPLY (HB_DEBUG+0)
#endif

#define TRACE_APPLY() \
	hb_auto_trace_t<HB_DEBUG_APPLY> trace (&c->debug_depth, "APPLY", this, NULL, HB_FUNC);



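/* State threaded through every lookup application: the font/face and buffer
 * being worked on, the mask selecting which glyphs this lookup may touch,
 * the remaining context_length and nesting budget for nested lookups, and
 * the lookup_props (LookupFlag) used for mark filtering. */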
struct hb_apply_context_t
{
  unsigned int debug_depth;
  hb_font_t *font;
  hb_face_t *face;
  hb_buffer_t *buffer;
  hb_direction_t direction;
  hb_mask_t lookup_mask;
  unsigned int context_length;
  unsigned int nesting_level_left;
  unsigned int lookup_props;
  unsigned int property; /* property of first glyph */

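  /* Buffer cursors that transparently skip glyphs rejected by lookup_props
   * (e.g. marks, depending on LookupFlag): the forward iterator walks info[]
   * starting at the current position, the backward one walks the
   * already-output out_info[]. */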
  struct mark_skipping_forward_iterator_t
  {
    inline mark_skipping_forward_iterator_t (hb_apply_context_t *c_,
					     unsigned int start_index_,
					     unsigned int num_items_)
    {
      c = c_;
      idx = start_index_;
      num_items = num_items_;
      end = MIN (c->buffer->len, c->buffer->idx + c->context_length);
    }
    inline bool has_no_chance (void) const
    {
      return unlikely (num_items && idx + num_items >= end);
    }
    inline bool next (unsigned int *property_out,
		      unsigned int lookup_props)
    {
      assert (num_items > 0);
      do
      {
	if (has_no_chance ())
	  return false;
	idx++;
      } while (_hb_ot_layout_skip_mark (c->face, &c->buffer->info[idx], lookup_props, property_out));
      num_items--;
      return true;
    }
    inline bool next (unsigned int *property_out = NULL)
    {
      return next (property_out, c->lookup_props);
    }

    unsigned int idx;
    private:
    hb_apply_context_t *c;
    unsigned int num_items;
    unsigned int end;
  };

  struct mark_skipping_backward_iterator_t
  {
    inline mark_skipping_backward_iterator_t (hb_apply_context_t *c_,
					      unsigned int start_index_,
					      unsigned int num_items_)
    {
      c = c_;
      idx = start_index_;
      num_items = num_items_;
    }
    inline bool has_no_chance (void) const
    {
      return unlikely (idx < num_items);
    }
    inline bool prev (unsigned int *property_out,
		      unsigned int lookup_props)
    {
      assert (num_items > 0);
      do
      {
	if (has_no_chance ())
	  return false;
	idx--;
      } while (_hb_ot_layout_skip_mark (c->face, &c->buffer->out_info[idx], lookup_props, property_out));
      num_items--;
      return true;
    }
    inline bool prev (unsigned int *property_out = NULL)
    {
      return prev (property_out, c->lookup_props);
    }

    unsigned int idx;
    private:
    hb_apply_context_t *c;
    unsigned int num_items;
  };

  inline bool should_mark_skip_current_glyph (void) const
  {
    return _hb_ot_layout_skip_mark (face, &buffer->info[buffer->idx], lookup_props, NULL);
  }



  inline void replace_glyph (hb_codepoint_t glyph_index) const
  {
    clear_property ();
    buffer->replace_glyph (glyph_index);
  }
  inline void replace_glyphs_be16 (unsigned int num_in,
				   unsigned int num_out,
				   const uint16_t *glyph_data_be) const
  {
    clear_property ();
    buffer->replace_glyphs_be16 (num_in, num_out, glyph_data_be);
  }

  inline void guess_glyph_class (unsigned int klass)
  {
    /* XXX if ! has gdef */
    buffer->info[buffer->idx].props_cache() = klass;
  }

  private:
  inline void clear_property (void) const
  {
    /* XXX if has gdef */
    buffer->info[buffer->idx].props_cache() = 0;
  }
};



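/* Shared matching machinery for (Chain)Context lookups.  A match_func_t
 * decides whether one glyph matches one USHORT value; the three flavours
 * below compare against a glyph id, a ClassDef class, or a Coverage table
 * respectively.  apply_lookup_func_t is the callback used to recurse into a
 * nested lookup by index. */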
typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const USHORT &value, const void *data);
typedef bool (*apply_lookup_func_t) (hb_apply_context_t *c, unsigned int lookup_index);

struct ContextFuncs
{
  match_func_t match;
  apply_lookup_func_t apply;
};


static inline bool match_glyph (hb_codepoint_t glyph_id, const USHORT &value, const void *data HB_UNUSED)
{
  return glyph_id == value;
}

static inline bool match_class (hb_codepoint_t glyph_id, const USHORT &value, const void *data)
{
  const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
  return class_def.get_class (glyph_id) == value;
}

static inline bool match_coverage (hb_codepoint_t glyph_id, const USHORT &value, const void *data)
{
  const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
  return (data+coverage) (glyph_id) != NOT_COVERED;
}


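/* Sequence matchers.  match_input checks the count-1 glyphs following the
 * current one (skipping marks) and reports the length of the matched span
 * through context_length_out; match_backtrack walks the already-output
 * glyphs in out_info backwards; match_lookahead starts `offset' glyphs past
 * the current position.  All three bail out early via has_no_chance() when
 * not enough glyphs are left. */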
static inline bool match_input (hb_apply_context_t *c,
				unsigned int count, /* Including the first glyph (not matched) */
				const USHORT input[], /* Array of input values--start with second glyph */
				match_func_t match_func,
				const void *match_data,
				unsigned int *context_length_out)
{
  hb_apply_context_t::mark_skipping_forward_iterator_t skippy_iter (c, c->buffer->idx, count - 1);
  if (skippy_iter.has_no_chance ())
    return false;

  for (unsigned int i = 1; i < count; i++)
  {
    if (!skippy_iter.next ())
      return false;

    if (likely (!match_func (c->buffer->info[skippy_iter.idx].codepoint, input[i - 1], match_data)))
      return false;
  }

  *context_length_out = skippy_iter.idx - c->buffer->idx + 1;

  return true;
}

static inline bool match_backtrack (hb_apply_context_t *c,
				    unsigned int count,
				    const USHORT backtrack[],
				    match_func_t match_func,
				    const void *match_data)
{
  hb_apply_context_t::mark_skipping_backward_iterator_t skippy_iter (c, c->buffer->backtrack_len (), count);
  if (skippy_iter.has_no_chance ())
    return false;

  for (unsigned int i = 0; i < count; i++)
  {
    if (!skippy_iter.prev ())
      return false;

    if (likely (!match_func (c->buffer->out_info[skippy_iter.idx].codepoint, backtrack[i], match_data)))
      return false;
  }

  return true;
}

static inline bool match_lookahead (hb_apply_context_t *c,
				    unsigned int count,
				    const USHORT lookahead[],
				    match_func_t match_func,
				    const void *match_data,
				    unsigned int offset)
{
  hb_apply_context_t::mark_skipping_forward_iterator_t skippy_iter (c, c->buffer->idx + offset - 1, count);
  if (skippy_iter.has_no_chance ())
    return false;

  for (unsigned int i = 0; i < count; i++)
  {
    if (!skippy_iter.next ())
      return false;

    if (likely (!match_func (c->buffer->info[skippy_iter.idx].codepoint, lookahead[i], match_data)))
      return false;
  }

  return true;
}



struct LookupRecord
{
  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return c->check_struct (this);
  }

  USHORT	sequenceIndex;		/* Index into current glyph
					 * sequence--first glyph = 0 */
  USHORT	lookupListIndex;	/* Lookup to apply to that
					 * position--zero-based */
  public:
  DEFINE_SIZE_STATIC (4);
};



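/* Applies the LookupRecords to an already-matched run of `count' glyphs:
 * positions with no record are simply copied to the output, and at each
 * recorded sequenceIndex the nested lookup is invoked through apply_func.
 * As the TODO below notes, records whose sequenceIndex values are not in
 * increasing order are not supported. */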
static inline bool apply_lookup (hb_apply_context_t *c,
				 unsigned int count, /* Including the first glyph */
				 unsigned int lookupCount,
				 const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */
				 apply_lookup_func_t apply_func)
{
  unsigned int end = MIN (c->buffer->len, c->buffer->idx + c->context_length);
  if (unlikely (count == 0 || c->buffer->idx + count > end))
    return false;

  /* TODO We don't support lookupRecord arrays that are not increasing:
   *      Should be easy for in_place ones at least. */

  /* Note: If sublookup is reverse, it will underflow after the first loop
   * and we jump out of it.  Not entirely disastrous.  So we don't check
   * for reverse lookup here.
   */
  for (unsigned int i = 0; i < count; /* NOP */)
  {
    if (unlikely (c->buffer->idx == end))
      return true;
    while (c->should_mark_skip_current_glyph ())
    {
      /* No lookup applied for this index */
      c->buffer->next_glyph ();
      if (unlikely (c->buffer->idx == end))
	return true;
    }

    if (lookupCount && i == lookupRecord->sequenceIndex)
    {
      unsigned int old_pos = c->buffer->idx;

      /* Apply a lookup */
      bool done = apply_func (c, lookupRecord->lookupListIndex);

      lookupRecord++;
      lookupCount--;
      /* Err, this is wrong if the lookup jumped over some glyphs */
      i += c->buffer->idx - old_pos;
      if (unlikely (c->buffer->idx == end))
	return true;

      if (!done)
	goto not_applied;
    }
    else
    {
    not_applied:
      /* No lookup applied for this index */
      c->buffer->next_glyph ();
      i++;
    }
  }

  return true;
}



/* Contextual lookups */

struct ContextLookupContext
{
  ContextFuncs funcs;
  const void *match_data;
};

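/* A context rule is applied in two steps: match_input first verifies that
 * the glyphs at the current position match the rule, then apply_lookup runs
 * the nested lookups on a copy of the context whose context_length has been
 * clamped to the matched span. */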
static inline bool context_lookup (hb_apply_context_t *c,
				   unsigned int inputCount, /* Including the first glyph (not matched) */
				   const USHORT input[], /* Array of input values--start with second glyph */
				   unsigned int lookupCount,
				   const LookupRecord lookupRecord[],
				   ContextLookupContext &lookup_context)
{
  hb_apply_context_t new_context = *c;
  return match_input (c,
		      inputCount, input,
		      lookup_context.funcs.match, lookup_context.match_data,
		      &new_context.context_length)
      && apply_lookup (&new_context,
		       inputCount,
		       lookupCount, lookupRecord,
		       lookup_context.funcs.apply);
}

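/* Rule and RuleSet back ContextFormat1 and ContextFormat2.  A Rule stores
 * inputCount-1 match values immediately followed by lookupCount
 * LookupRecords; since input[] is variable-length, the record array is
 * located with StructAtOffset rather than through a named member. */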
struct Rule
{
  friend struct RuleSet;

  private:
  inline bool apply (hb_apply_context_t *c, ContextLookupContext &lookup_context) const
  {
    TRACE_APPLY ();
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (input, input[0].static_size * (inputCount ? inputCount - 1 : 0));
    return context_lookup (c,
			   inputCount, input,
			   lookupCount, lookupRecord,
			   lookup_context);
  }

  public:
  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return inputCount.sanitize (c)
	&& lookupCount.sanitize (c)
	&& c->check_range (input,
				 input[0].static_size * inputCount
				 + lookupRecordX[0].static_size * lookupCount);
  }

  private:
  USHORT	inputCount;		/* Total number of glyphs in input
					 * glyph sequence--includes the first
					 * glyph */
  USHORT	lookupCount;		/* Number of LookupRecords */
  USHORT	input[VAR];		/* Array of match inputs--start with
					 * second glyph */
  LookupRecord	lookupRecordX[VAR];	/* Array of LookupRecords--in
					 * design order */
  public:
  DEFINE_SIZE_ARRAY2 (4, input, lookupRecordX);
};

struct RuleSet
{
  inline bool apply (hb_apply_context_t *c, ContextLookupContext &lookup_context) const
  {
    TRACE_APPLY ();
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
    {
      if ((this+rule[i]).apply (c, lookup_context))
        return true;
    }

    return false;
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return rule.sanitize (c, this);
  }

  private:
  OffsetArrayOf<Rule>
		rule;			/* Array of Rule tables
					 * ordered by preference */
  public:
  DEFINE_SIZE_ARRAY (2, rule);
};


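/* The three Context subtable formats differ only in how positions are
 * matched: Format 1 matches glyph ids directly, Format 2 matches glyph
 * classes through a ClassDef, and Format 3 matches each position against
 * its own Coverage table. */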
struct ContextFormat1
{
  friend struct Context;

  private:
  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    unsigned int index = (this+coverage) (c->buffer->info[c->buffer->idx].codepoint);
    if (likely (index == NOT_COVERED))
      return false;

    const RuleSet &rule_set = this+ruleSet[index];
    struct ContextLookupContext lookup_context = {
      {match_glyph, apply_func},
      NULL
    };
    return rule_set.apply (c, lookup_context);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return coverage.sanitize (c, this)
	&& ruleSet.sanitize (c, this);
  }

  private:
  USHORT	format;			/* Format identifier--format = 1 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  OffsetArrayOf<RuleSet>
		ruleSet;		/* Array of RuleSet tables
					 * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (6, ruleSet);
};


struct ContextFormat2
{
  friend struct Context;

  private:
  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    unsigned int index = (this+coverage) (c->buffer->info[c->buffer->idx].codepoint);
    if (likely (index == NOT_COVERED))
      return false;

    const ClassDef &class_def = this+classDef;
    index = class_def (c->buffer->info[c->buffer->idx].codepoint);
    const RuleSet &rule_set = this+ruleSet[index];
    struct ContextLookupContext lookup_context = {
      {match_class, apply_func},
      &class_def
    };
    return rule_set.apply (c, lookup_context);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return coverage.sanitize (c, this)
	&& classDef.sanitize (c, this)
	&& ruleSet.sanitize (c, this);
  }

  private:
  USHORT	format;			/* Format identifier--format = 2 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  OffsetTo<ClassDef>
		classDef;		/* Offset to glyph ClassDef table--from
					 * beginning of table */
  OffsetArrayOf<RuleSet>
		ruleSet;		/* Array of RuleSet tables
					 * ordered by class */
  public:
  DEFINE_SIZE_ARRAY (8, ruleSet);
};


struct ContextFormat3
{
  friend struct Context;

  private:
  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    unsigned int index = (this+coverage[0]) (c->buffer->info[c->buffer->idx].codepoint);
    if (likely (index == NOT_COVERED))
      return false;

    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * glyphCount);
    struct ContextLookupContext lookup_context = {
      {match_coverage, apply_func},
      this
    };
    return context_lookup (c,
			   glyphCount, (const USHORT *) (coverage + 1),
			   lookupCount, lookupRecord,
			   lookup_context);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    if (!c->check_struct (this)) return false;
    unsigned int count = glyphCount;
    if (!c->check_array (coverage, coverage[0].static_size, count)) return false;
    for (unsigned int i = 0; i < count; i++)
      if (!coverage[i].sanitize (c, this)) return false;
    LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * count);
    return c->check_array (lookupRecord, lookupRecord[0].static_size, lookupCount);
  }

  private:
  USHORT	format;			/* Format identifier--format = 3 */
  USHORT	glyphCount;		/* Number of glyphs in the input glyph
					 * sequence */
  USHORT	lookupCount;		/* Number of LookupRecords */
  OffsetTo<Coverage>
		coverage[VAR];		/* Array of offsets to Coverage
					 * table in glyph sequence order */
  LookupRecord	lookupRecordX[VAR];	/* Array of LookupRecords--in
					 * design order */
  public:
  DEFINE_SIZE_ARRAY2 (6, coverage, lookupRecordX);
};

struct Context
{
  protected:
  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    switch (u.format) {
    case 1: return u.format1.apply (c, apply_func);
    case 2: return u.format2.apply (c, apply_func);
    case 3: return u.format3.apply (c, apply_func);
    default:return false;
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    if (!u.format.sanitize (c)) return false;
    switch (u.format) {
    case 1: return u.format1.sanitize (c);
    case 2: return u.format2.sanitize (c);
    case 3: return u.format3.sanitize (c);
    default:return true;
    }
  }

  private:
  union {
  USHORT		format;		/* Format identifier */
  ContextFormat1	format1;
  ContextFormat2	format2;
  ContextFormat3	format3;
  } u;
};


/* Chaining Contextual lookups */

struct ChainContextLookupContext
{
  ContextFuncs funcs;
  const void *match_data[3];
};

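/* Like context_lookup, but the rule additionally has to match backtrack
 * glyphs in the already-output buffer and lookahead glyphs beyond the input
 * sequence.  The cheap length check up front (the "first guess") rejects
 * rules that cannot possibly fit before any per-glyph matching is done. */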
static inline bool chain_context_lookup (hb_apply_context_t *c,
					 unsigned int backtrackCount,
					 const USHORT backtrack[],
					 unsigned int inputCount, /* Including the first glyph (not matched) */
					 const USHORT input[], /* Array of input values--start with second glyph */
					 unsigned int lookaheadCount,
					 const USHORT lookahead[],
					 unsigned int lookupCount,
					 const LookupRecord lookupRecord[],
					 ChainContextLookupContext &lookup_context)
{
  /* First guess */
  if (unlikely (c->buffer->backtrack_len () < backtrackCount ||
		c->buffer->idx + inputCount + lookaheadCount > c->buffer->len ||
		inputCount + lookaheadCount > c->context_length))
    return false;

  hb_apply_context_t new_context = *c;
  return match_backtrack (c,
			  backtrackCount, backtrack,
			  lookup_context.funcs.match, lookup_context.match_data[0])
      && match_input (c,
		      inputCount, input,
		      lookup_context.funcs.match, lookup_context.match_data[1],
		      &new_context.context_length)
      && match_lookahead (c,
			  lookaheadCount, lookahead,
			  lookup_context.funcs.match, lookup_context.match_data[2],
			  new_context.context_length)
      && apply_lookup (&new_context,
		       inputCount,
		       lookupCount, lookupRecord,
		       lookup_context.funcs.apply);
}

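/* A ChainRule is four variable-length arrays laid out back to back:
 * backtrack, input (headless: the first glyph is not stored), lookahead,
 * and the LookupRecords.  Only the first array is at a fixed offset, so the
 * others are reached with StructAfter in both apply() and sanitize(). */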
struct ChainRule
{
  friend struct ChainRuleSet;

  private:
  inline bool apply (hb_apply_context_t *c, ChainContextLookupContext &lookup_context) const
  {
    TRACE_APPLY ();
    const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
    const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return chain_context_lookup (c,
				 backtrack.len, backtrack.array,
				 input.len, input.array,
				 lookahead.len, lookahead.array,
				 lookup.len, lookup.array,
				 lookup_context);
  }

  public:
  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    if (!backtrack.sanitize (c)) return false;
    HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
    if (!input.sanitize (c)) return false;
    ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
    if (!lookahead.sanitize (c)) return false;
    ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return lookup.sanitize (c);
  }

  private:
  ArrayOf<USHORT>
		backtrack;		/* Array of backtracking values
					 * (to be matched before the input
					 * sequence) */
  HeadlessArrayOf<USHORT>
		inputX;			/* Array of input values (start with
					 * second glyph) */
  ArrayOf<USHORT>
		lookaheadX;		/* Array of lookahead values (to be
					 * matched after the input sequence) */
  ArrayOf<LookupRecord>
		lookupX;		/* Array of LookupRecords--in
					 * design order */
  public:
  DEFINE_SIZE_MIN (8);
};

struct ChainRuleSet
{
  inline bool apply (hb_apply_context_t *c, ChainContextLookupContext &lookup_context) const
  {
    TRACE_APPLY ();
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
    {
      if ((this+rule[i]).apply (c, lookup_context))
        return true;
    }

    return false;
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return rule.sanitize (c, this);
  }

  private:
  OffsetArrayOf<ChainRule>
		rule;			/* Array of ChainRule tables
					 * ordered by preference */
  public:
  DEFINE_SIZE_ARRAY (2, rule);
};

struct ChainContextFormat1
{
  friend struct ChainContext;

  private:
  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    unsigned int index = (this+coverage) (c->buffer->info[c->buffer->idx].codepoint);
    if (likely (index == NOT_COVERED))
      return false;

    const ChainRuleSet &rule_set = this+ruleSet[index];
    struct ChainContextLookupContext lookup_context = {
      {match_glyph, apply_func},
      {NULL, NULL, NULL}
    };
    return rule_set.apply (c, lookup_context);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return coverage.sanitize (c, this)
	&& ruleSet.sanitize (c, this);
  }

  private:
  USHORT	format;			/* Format identifier--format = 1 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  OffsetArrayOf<ChainRuleSet>
		ruleSet;		/* Array of ChainRuleSet tables
					 * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (6, ruleSet);
};

struct ChainContextFormat2
{
  friend struct ChainContext;

  private:
  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    unsigned int index = (this+coverage) (c->buffer->info[c->buffer->idx].codepoint);
    if (likely (index == NOT_COVERED))
      return false;

    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    index = input_class_def (c->buffer->info[c->buffer->idx].codepoint);
    const ChainRuleSet &rule_set = this+ruleSet[index];
    struct ChainContextLookupContext lookup_context = {
      {match_class, apply_func},
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def}
    };
    return rule_set.apply (c, lookup_context);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return coverage.sanitize (c, this)
	&& backtrackClassDef.sanitize (c, this)
	&& inputClassDef.sanitize (c, this)
	&& lookaheadClassDef.sanitize (c, this)
	&& ruleSet.sanitize (c, this);
  }

  private:
  USHORT	format;			/* Format identifier--format = 2 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  OffsetTo<ClassDef>
		backtrackClassDef;	/* Offset to glyph ClassDef table
					 * containing backtrack sequence
					 * data--from beginning of table */
  OffsetTo<ClassDef>
		inputClassDef;		/* Offset to glyph ClassDef
					 * table containing input sequence
					 * data--from beginning of table */
  OffsetTo<ClassDef>
		lookaheadClassDef;	/* Offset to glyph ClassDef table
					 * containing lookahead sequence
					 * data--from beginning of table */
  OffsetArrayOf<ChainRuleSet>
		ruleSet;		/* Array of ChainRuleSet tables
					 * ordered by class */
  public:
  DEFINE_SIZE_ARRAY (12, ruleSet);
};

struct ChainContextFormat3
{
  friend struct ChainContext;

  private:

  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);

    unsigned int index = (this+input[0]) (c->buffer->info[c->buffer->idx].codepoint);
    if (likely (index == NOT_COVERED))
      return false;

    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    struct ChainContextLookupContext lookup_context = {
      {match_coverage, apply_func},
      {this, this, this}
    };
    return chain_context_lookup (c,
				 backtrack.len, (const USHORT *) backtrack.array,
				 input.len, (const USHORT *) input.array + 1,
				 lookahead.len, (const USHORT *) lookahead.array,
				 lookup.len, lookup.array,
				 lookup_context);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    if (!backtrack.sanitize (c, this)) return false;
    OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
    if (!input.sanitize (c, this)) return false;
    OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
    if (!lookahead.sanitize (c, this)) return false;
    ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return lookup.sanitize (c);
  }

  private:
  USHORT	format;			/* Format identifier--format = 3 */
  OffsetArrayOf<Coverage>
		backtrack;		/* Array of coverage tables
					 * in backtracking sequence, in glyph
					 * sequence order */
  OffsetArrayOf<Coverage>
		inputX;			/* Array of coverage
					 * tables in input sequence, in glyph
					 * sequence order */
  OffsetArrayOf<Coverage>
		lookaheadX;		/* Array of coverage tables
					 * in lookahead sequence, in glyph
					 * sequence order */
  ArrayOf<LookupRecord>
		lookupX;		/* Array of LookupRecords--in
					 * design order */
  public:
  DEFINE_SIZE_MIN (10);
};

struct ChainContext
{
  protected:
  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    switch (u.format) {
    case 1: return u.format1.apply (c, apply_func);
    case 2: return u.format2.apply (c, apply_func);
    case 3: return u.format3.apply (c, apply_func);
    default:return false;
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    if (!u.format.sanitize (c)) return false;
    switch (u.format) {
    case 1: return u.format1.sanitize (c);
    case 2: return u.format2.sanitize (c);
    case 3: return u.format3.sanitize (c);
    default:return true;
    }
  }

  private:
  union {
  USHORT		format;	/* Format identifier */
  ChainContextFormat1	format1;
  ChainContextFormat2	format2;
  ChainContextFormat3	format3;
  } u;
};


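/* Extension subtables (GSUB lookup type 7 / GPOS lookup type 9) wrap a
 * subtable of another lookup type behind a 32-bit offset, so that subtables
 * can be placed beyond the reach of the usual 16-bit offsets.  The GSUB/GPOS
 * code uses get_type()/get_offset() to locate and dispatch to the wrapped
 * subtable. */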
struct ExtensionFormat1
{
  friend struct Extension;

  protected:
  inline unsigned int get_type (void) const { return extensionLookupType; }
  inline unsigned int get_offset (void) const { return extensionOffset; }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return c->check_struct (this);
  }

  private:
  USHORT	format;			/* Format identifier. Set to 1. */
  USHORT	extensionLookupType;	/* Lookup type of subtable referenced
					 * by ExtensionOffset (i.e. the
					 * extension subtable). */
  ULONG		extensionOffset;	/* Offset to the extension subtable,
					 * of lookup type subtable. */
  public:
  DEFINE_SIZE_STATIC (8);
};

struct Extension
{
  inline unsigned int get_type (void) const
  {
    switch (u.format) {
    case 1: return u.format1.get_type ();
    default:return 0;
    }
  }
  inline unsigned int get_offset (void) const
  {
    switch (u.format) {
    case 1: return u.format1.get_offset ();
    default:return 0;
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    if (!u.format.sanitize (c)) return false;
    switch (u.format) {
    case 1: return u.format1.sanitize (c);
    default:return true;
    }
  }

  private:
  union {
  USHORT		format;		/* Format identifier */
  ExtensionFormat1	format1;
  } u;
};


/*
 * GSUB/GPOS Common
 */

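/* Common top-level header shared by the GSUB and GPOS tables: a version
 * number followed by offsets to the ScriptList, FeatureList and LookupList.
 * The accessors below are the thin wrappers the rest of hb-ot-layout uses
 * to enumerate scripts, features and lookups. */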
struct GSUBGPOS
{
  static const hb_tag_t GSUBTag	= HB_OT_TAG_GSUB;
  static const hb_tag_t GPOSTag	= HB_OT_TAG_GPOS;

  inline unsigned int get_script_count (void) const
  { return (this+scriptList).len; }
  inline const Tag& get_script_tag (unsigned int i) const
  { return (this+scriptList).get_tag (i); }
  inline unsigned int get_script_tags (unsigned int start_offset,
				       unsigned int *script_count /* IN/OUT */,
				       hb_tag_t     *script_tags /* OUT */) const
  { return (this+scriptList).get_tags (start_offset, script_count, script_tags); }
  inline const Script& get_script (unsigned int i) const
  { return (this+scriptList)[i]; }
  inline bool find_script_index (hb_tag_t tag, unsigned int *index) const
  { return (this+scriptList).find_index (tag, index); }

  inline unsigned int get_feature_count (void) const
  { return (this+featureList).len; }
  inline const Tag& get_feature_tag (unsigned int i) const
  { return (this+featureList).get_tag (i); }
  inline unsigned int get_feature_tags (unsigned int start_offset,
					unsigned int *feature_count /* IN/OUT */,
					hb_tag_t     *feature_tags /* OUT */) const
  { return (this+featureList).get_tags (start_offset, feature_count, feature_tags); }
  inline const Feature& get_feature (unsigned int i) const
  { return (this+featureList)[i]; }
  inline bool find_feature_index (hb_tag_t tag, unsigned int *index) const
  { return (this+featureList).find_index (tag, index); }

  inline unsigned int get_lookup_count (void) const
  { return (this+lookupList).len; }
  inline const Lookup& get_lookup (unsigned int i) const
  { return (this+lookupList)[i]; }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return version.sanitize (c) && likely (version.major == 1)
	&& scriptList.sanitize (c, this)
	&& featureList.sanitize (c, this)
	&& lookupList.sanitize (c, this);
  }

  protected:
  FixedVersion	version;	/* Version of the GSUB/GPOS table--initially set
				 * to 0x00010000 */
  OffsetTo<ScriptList>
		scriptList;	/* ScriptList table */
  OffsetTo<FeatureList>
		featureList;	/* FeatureList table */
  OffsetTo<LookupList>
		lookupList;	/* LookupList table */
  public:
  DEFINE_SIZE_STATIC (10);
};



#endif /* HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH */