/* hb-ot-layout-gsubgpos-private.hh revision 7d479900cd11bc88148cd601ee43bc5492ce5843 */
/*
 * Copyright © 2007,2008,2009,2010  Red Hat, Inc.
 * Copyright © 2010  Google, Inc.
 *
 *  This is part of HarfBuzz, a text shaping library.
 *
 * Permission is hereby granted, without written agreement and without
 * license or royalty fees, to use, copy, modify, and distribute this
 * software and its documentation for any purpose, provided that the
 * above copyright notice and the following two paragraphs appear in
 * all copies of this software.
 *
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 *
 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 *
 * Red Hat Author(s): Behdad Esfahbod
 * Google Author(s): Behdad Esfahbod
 */

#ifndef HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH
#define HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH

#include "hb-buffer-private.hh"
#include "hb-ot-layout-gdef-table.hh"


/* buffer var allocations */
#define lig_id() var2.u8[2] /* unique ligature id */
#define lig_comp() var2.u8[3] /* component number in the ligature (0 = base) */

static inline uint8_t allocate_lig_id (hb_buffer_t *buffer) {
  uint8_t lig_id = buffer->next_serial ();
  if (unlikely (!lig_id)) lig_id = buffer->next_serial (); /* in case of overflow */
  return lig_id;
}
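/* Illustrative sketch (not part of this header, details are hypothetical): how
 * a ligature substitution would typically use the two buffer vars above.  A
 * fresh lig_id is stamped on the resulting ligature glyph and on any marks
 * that belonged to its components, so later positioning can attach each mark
 * to the right component (lig_comp() == 0 means "the ligature/base itself").
 *
 *   uint8_t id = allocate_lig_id (buffer);
 *   buffer->info[buffer->idx].lig_id ()   = id;  // the new ligature glyph
 *   buffer->info[buffer->idx].lig_comp () = 0;   // 0 = base, not a component
 *   // Marks skipped while forming the ligature get the same id plus the
 *   // number of the component they follow:
 *   //   mark_info.lig_id ()   = id;
 *   //   mark_info.lig_comp () = component_number;
 */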



#ifndef HB_DEBUG_APPLY
#define HB_DEBUG_APPLY (HB_DEBUG+0)
#endif

#define TRACE_APPLY() \
	hb_auto_trace_t<HB_DEBUG_APPLY> trace (&c->debug_depth, "APPLY", this, NULL, HB_FUNC);


struct hb_apply_context_t
{
  unsigned int debug_depth;
  hb_font_t *font;
  hb_face_t *face;
  hb_buffer_t *buffer;
  hb_direction_t direction;
  hb_mask_t lookup_mask;
  unsigned int context_length;
  unsigned int nesting_level_left;
  unsigned int lookup_props;
  unsigned int property; /* property of first glyph */

  struct mark_skipping_forward_iterator_t
  {
    inline mark_skipping_forward_iterator_t (hb_apply_context_t *c_,
					     unsigned int start_index_,
					     unsigned int num_items_)
    {
      c = c_;
      idx = start_index_;
      num_items = num_items_;
      end = MIN (c->buffer->len, c->buffer->idx + c->context_length);
    }
    inline bool has_no_chance (void) const
    {
      return unlikely (num_items && idx + num_items >= end);
    }
    inline bool next (unsigned int *property_out,
		      unsigned int lookup_props)
    {
      assert (num_items > 0);
      do
      {
	if (has_no_chance ())
	  return false;
	idx++;
      } while (_hb_ot_layout_skip_mark (c->face, &c->buffer->info[idx], lookup_props, property_out));
      num_items--;
      return true;
    }
    inline bool next (unsigned int *property_out = NULL)
    {
      return next (property_out, c->lookup_props);
    }

    unsigned int idx;
    private:
    hb_apply_context_t *c;
    unsigned int num_items;
    unsigned int end;
  };

  struct mark_skipping_backward_iterator_t
  {
    inline mark_skipping_backward_iterator_t (hb_apply_context_t *c_,
					      unsigned int start_index_,
					      unsigned int num_items_)
    {
      c = c_;
      idx = start_index_;
      num_items = num_items_;
    }
    inline bool has_no_chance (void) const
    {
      return unlikely (idx < num_items);
    }
    inline bool prev (unsigned int *property_out,
		      unsigned int lookup_props)
    {
      assert (num_items > 0);
      do
      {
	if (has_no_chance ())
	  return false;
	idx--;
      } while (_hb_ot_layout_skip_mark (c->face, &c->buffer->out_info[idx], lookup_props, property_out));
      num_items--;
      return true;
    }
    inline bool prev (unsigned int *property_out = NULL)
    {
      return prev (property_out, c->lookup_props);
    }

    unsigned int idx;
    private:
    hb_apply_context_t *c;
    unsigned int num_items;
  };

  inline bool should_mark_skip_current_glyph (void) const
  {
    return _hb_ot_layout_skip_mark (face, &buffer->info[buffer->idx], lookup_props, NULL);
  }



  inline void replace_glyph (hb_codepoint_t glyph_index) const
  {
    clear_property ();
    buffer->replace_glyph (glyph_index);
  }
  inline void replace_glyphs_be16 (unsigned int num_in,
				   unsigned int num_out,
				   const uint16_t *glyph_data_be) const
  {
    clear_property ();
    buffer->replace_glyphs_be16 (num_in, num_out, glyph_data_be);
  }

  inline void guess_glyph_class (unsigned int klass)
  {
    /* XXX if ! has gdef */
    buffer->info[buffer->idx].props_cache() = klass;
  }

  private:
  inline void clear_property (void) const
  {
    /* XXX if has gdef */
    buffer->info[buffer->idx].props_cache() = 0;
  }
};
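/* Illustrative sketch (not part of this header, names are hypothetical): the
 * two iterators above are how the matching code walks the buffer while
 * ignoring glyphs that lookup_props says to skip (e.g. marks under an
 * IgnoreMarks lookup flag).  A typical forward scan looks like:
 *
 *   static inline bool
 *   collect_unskipped (hb_apply_context_t *c, unsigned int count,
 *                      unsigned int match_positions[])
 *   {
 *     hb_apply_context_t::mark_skipping_forward_iterator_t skippy_iter
 *       (c, c->buffer->idx, count);
 *     if (skippy_iter.has_no_chance ()) return false;  // not enough glyphs left
 *     for (unsigned int i = 0; i < count; i++) {
 *       if (!skippy_iter.next ()) return false;        // ran past the context
 *       match_positions[i] = skippy_iter.idx;          // index into buffer->info
 *     }
 *     return true;
 *   }
 *
 * The backward variant walks buffer->out_info (already-output glyphs) the
 * same way, via prev ().
 */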



typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const USHORT &value, const void *data);
typedef bool (*apply_lookup_func_t) (hb_apply_context_t *c, unsigned int lookup_index);

struct ContextFuncs
{
  match_func_t match;
  apply_lookup_func_t apply;
};


static inline bool match_glyph (hb_codepoint_t glyph_id, const USHORT &value, const void *data HB_UNUSED)
{
  return glyph_id == value;
}

static inline bool match_class (hb_codepoint_t glyph_id, const USHORT &value, const void *data)
{
  const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
  return class_def.get_class (glyph_id) == value;
}

static inline bool match_coverage (hb_codepoint_t glyph_id, const USHORT &value, const void *data)
{
  const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
  return (data+coverage) (glyph_id) != NOT_COVERED;
}


static inline bool match_input (hb_apply_context_t *c,
				unsigned int count, /* Including the first glyph (not matched) */
				const USHORT input[], /* Array of input values--start with second glyph */
				match_func_t match_func,
				const void *match_data,
				unsigned int *context_length_out)
{
  hb_apply_context_t::mark_skipping_forward_iterator_t skippy_iter (c, c->buffer->idx, count - 1);
  if (skippy_iter.has_no_chance ())
    return false;

  for (unsigned int i = 1; i < count; i++)
  {
    if (!skippy_iter.next ())
      return false;

    if (likely (!match_func (c->buffer->info[skippy_iter.idx].codepoint, input[i - 1], match_data)))
      return false;
  }

  *context_length_out = skippy_iter.idx - c->buffer->idx + 1;

  return true;
}
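/* Worked example (hypothetical data, for documentation only): to match a
 * three-glyph input sequence starting at buffer->idx, a caller passes
 * count = 3 and an input[] holding the 2nd and 3rd values -- the first glyph
 * is the one the Coverage table already matched, so only the remaining
 * count - 1 values are checked here.  With raw glyph-id matching this is:
 *
 *   unsigned int context_length;
 *   if (match_input (c, inputCount, input,      // input[] as read from the font
 *                    match_glyph, NULL,         // compare raw glyph ids
 *                    &context_length))
 *     ... // matched; context_length spans the matched stretch from
 *         // buffer->idx, including any skipped marks
 */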

static inline bool match_backtrack (hb_apply_context_t *c,
				    unsigned int count,
				    const USHORT backtrack[],
				    match_func_t match_func,
				    const void *match_data)
{
  hb_apply_context_t::mark_skipping_backward_iterator_t skippy_iter (c, c->buffer->backtrack_len (), count);
  if (skippy_iter.has_no_chance ())
    return false;

  for (unsigned int i = 0; i < count; i++)
  {
    if (!skippy_iter.prev ())
      return false;

    if (likely (!match_func (c->buffer->out_info[skippy_iter.idx].codepoint, backtrack[i], match_data)))
      return false;
  }

  return true;
}

static inline bool match_lookahead (hb_apply_context_t *c,
				    unsigned int count,
				    const USHORT lookahead[],
				    match_func_t match_func,
				    const void *match_data,
				    unsigned int offset)
{
  hb_apply_context_t::mark_skipping_forward_iterator_t skippy_iter (c, c->buffer->idx + offset - 1, count);
  if (skippy_iter.has_no_chance ())
    return false;

  for (unsigned int i = 0; i < count; i++)
  {
    if (!skippy_iter.next ())
      return false;

    if (likely (!match_func (c->buffer->info[skippy_iter.idx].codepoint, lookahead[i], match_data)))
      return false;
  }

  return true;
}


struct LookupRecord
{
  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return c->check_struct (this);
  }

  USHORT	sequenceIndex;		/* Index into current glyph
					 * sequence--first glyph = 0 */
  USHORT	lookupListIndex;	/* Lookup to apply to that
					 * position--zero-based */
  public:
  DEFINE_SIZE_STATIC (4);
};


static inline bool apply_lookup (hb_apply_context_t *c,
				 unsigned int count, /* Including the first glyph */
				 unsigned int lookupCount,
				 const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */
				 apply_lookup_func_t apply_func)
{
  unsigned int end = MIN (c->buffer->len, c->buffer->idx + c->context_length);
  if (unlikely (count == 0 || c->buffer->idx + count > end))
    return false;

  /* TODO We don't support lookupRecord arrays that are not increasing:
   *      Should be easy for in_place ones at least. */

  /* Note: If sublookup is reverse, it will underflow after the first loop
   * and we jump out of it.  Not entirely disastrous.  So we don't check
   * for reverse lookup here.
   */
  for (unsigned int i = 0; i < count; /* NOP */)
  {
    while (c->should_mark_skip_current_glyph ())
    {
      if (unlikely (c->buffer->idx == end))
	return true;
      /* No lookup applied for this index */
      c->buffer->next_glyph ();
    }

    if (lookupCount && i == lookupRecord->sequenceIndex)
    {
      unsigned int old_pos = c->buffer->idx;

      /* Apply a lookup */
      bool done = apply_func (c, lookupRecord->lookupListIndex);

      lookupRecord++;
      lookupCount--;
      /* Err, this is wrong if the lookup jumped over some glyphs */
      i += c->buffer->idx - old_pos;
      if (unlikely (c->buffer->idx == end))
	return true;

      if (!done)
	goto not_applied;
    }
    else
    {
    not_applied:
      /* No lookup applied for this index */
      c->buffer->next_glyph ();
      i++;
    }
  }

  return true;
}
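/* Worked example (hypothetical numbers, for documentation only): suppose a
 * context rule matched a 4-glyph input sequence and carries the LookupRecords
 *
 *   { sequenceIndex = 1, lookupListIndex = 5 }
 *   { sequenceIndex = 3, lookupListIndex = 9 }
 *
 * apply_lookup () then walks the matched stretch position by position
 * (i = 0..3, skipping ignorable marks): positions 0 and 2 are copied to the
 * output untouched, lookup 5 is applied when i reaches 1, and lookup 9 when
 * i reaches 3.  Records must therefore be sorted by increasing sequenceIndex,
 * as the TODO above notes.
 */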



/* Contextual lookups */

struct ContextLookupContext
{
  ContextFuncs funcs;
  const void *match_data;
};

static inline bool context_lookup (hb_apply_context_t *c,
				   unsigned int inputCount, /* Including the first glyph (not matched) */
				   const USHORT input[], /* Array of input values--start with second glyph */
				   unsigned int lookupCount,
				   const LookupRecord lookupRecord[],
				   ContextLookupContext &lookup_context)
{
  hb_apply_context_t new_context = *c;
  return match_input (c,
		      inputCount, input,
		      lookup_context.funcs.match, lookup_context.match_data,
		      &new_context.context_length)
      && apply_lookup (&new_context,
		       inputCount,
		       lookupCount, lookupRecord,
		       lookup_context.funcs.apply);
}

struct Rule
{
  friend struct RuleSet;

  private:
  inline bool apply (hb_apply_context_t *c, ContextLookupContext &lookup_context) const
  {
    TRACE_APPLY ();
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (input, input[0].static_size * (inputCount ? inputCount - 1 : 0));
    return context_lookup (c,
			   inputCount, input,
			   lookupCount, lookupRecord,
			   lookup_context);
  }

  public:
  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return inputCount.sanitize (c)
	&& lookupCount.sanitize (c)
	&& c->check_range (input,
				 input[0].static_size * inputCount
				 + lookupRecordX[0].static_size * lookupCount);
  }

  private:
  USHORT	inputCount;		/* Total number of glyphs in input
					 * glyph sequence--includes the first
					 * glyph */
  USHORT	lookupCount;		/* Number of LookupRecords */
  USHORT	input[VAR];		/* Array of match inputs--start with
					 * second glyph */
  LookupRecord	lookupRecordX[VAR];	/* Array of LookupRecords--in
					 * design order */
  public:
  DEFINE_SIZE_ARRAY2 (4, input, lookupRecordX);
};
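/* Wire layout of a Rule, for reference: inputCount and lookupCount are
 * USHORTs, followed by (inputCount - 1) USHORT input values and then
 * lookupCount 4-byte LookupRecords.  That is why apply () above recovers the
 * LookupRecord array with StructAtOffset at
 * input[0].static_size * (inputCount - 1) past `input': the first glyph of
 * the sequence is not stored, it is implied by the Coverage match that
 * selected this RuleSet.
 */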

struct RuleSet
{
  inline bool apply (hb_apply_context_t *c, ContextLookupContext &lookup_context) const
  {
    TRACE_APPLY ();
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
    {
      if ((this+rule[i]).apply (c, lookup_context))
        return true;
    }

    return false;
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return rule.sanitize (c, this);
  }

  private:
  OffsetArrayOf<Rule>
		rule;			/* Array of Rule tables
					 * ordered by preference */
  public:
  DEFINE_SIZE_ARRAY (2, rule);
};


struct ContextFormat1
{
  friend struct Context;

  private:
  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    unsigned int index = (this+coverage) (c->buffer->info[c->buffer->idx].codepoint);
    if (likely (index == NOT_COVERED))
      return false;

    const RuleSet &rule_set = this+ruleSet[index];
    struct ContextLookupContext lookup_context = {
      {match_glyph, apply_func},
      NULL
    };
    return rule_set.apply (c, lookup_context);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return coverage.sanitize (c, this)
	&& ruleSet.sanitize (c, this);
  }

  private:
  USHORT	format;			/* Format identifier--format = 1 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  OffsetArrayOf<RuleSet>
		ruleSet;		/* Array of RuleSet tables
					 * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (6, ruleSet);
};


struct ContextFormat2
{
  friend struct Context;

  private:
  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    unsigned int index = (this+coverage) (c->buffer->info[c->buffer->idx].codepoint);
    if (likely (index == NOT_COVERED))
      return false;

    const ClassDef &class_def = this+classDef;
    index = class_def (c->buffer->info[c->buffer->idx].codepoint);
    const RuleSet &rule_set = this+ruleSet[index];
    struct ContextLookupContext lookup_context = {
      {match_class, apply_func},
      &class_def
    };
    return rule_set.apply (c, lookup_context);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return coverage.sanitize (c, this)
	&& classDef.sanitize (c, this)
	&& ruleSet.sanitize (c, this);
  }

  private:
  USHORT	format;			/* Format identifier--format = 2 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  OffsetTo<ClassDef>
		classDef;		/* Offset to glyph ClassDef table--from
					 * beginning of table */
  OffsetArrayOf<RuleSet>
		ruleSet;		/* Array of RuleSet tables
					 * ordered by class */
  public:
  DEFINE_SIZE_ARRAY (8, ruleSet);
};


struct ContextFormat3
{
  friend struct Context;

  private:
  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    unsigned int index = (this+coverage[0]) (c->buffer->info[c->buffer->idx].codepoint);
    if (likely (index == NOT_COVERED))
      return false;

    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * glyphCount);
    struct ContextLookupContext lookup_context = {
      {match_coverage, apply_func},
      this
    };
    return context_lookup (c,
			   glyphCount, (const USHORT *) (coverage + 1),
			   lookupCount, lookupRecord,
			   lookup_context);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    if (!c->check_struct (this)) return false;
    unsigned int count = glyphCount;
    if (!c->check_array (coverage, coverage[0].static_size, count)) return false;
    for (unsigned int i = 0; i < count; i++)
      if (!coverage[i].sanitize (c, this)) return false;
    LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * count);
    return c->check_array (lookupRecord, lookupRecord[0].static_size, lookupCount);
  }

  private:
  USHORT	format;			/* Format identifier--format = 3 */
  USHORT	glyphCount;		/* Number of glyphs in the input glyph
					 * sequence */
  USHORT	lookupCount;		/* Number of LookupRecords */
  OffsetTo<Coverage>
		coverage[VAR];		/* Array of offsets to Coverage
					 * table in glyph sequence order */
  LookupRecord	lookupRecordX[VAR];	/* Array of LookupRecords--in
					 * design order */
  public:
  DEFINE_SIZE_ARRAY2 (6, coverage, lookupRecordX);
};

struct Context
{
  protected:
  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    switch (u.format) {
    case 1: return u.format1.apply (c, apply_func);
    case 2: return u.format2.apply (c, apply_func);
    case 3: return u.format3.apply (c, apply_func);
    default:return false;
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    if (!u.format.sanitize (c)) return false;
    switch (u.format) {
    case 1: return u.format1.sanitize (c);
    case 2: return u.format2.sanitize (c);
    case 3: return u.format3.sanitize (c);
    default:return true;
    }
  }

  private:
  union {
  USHORT		format;		/* Format identifier */
  ContextFormat1	format1;
  ContextFormat2	format2;
  ContextFormat3	format3;
  } u;
};


/* Chaining Contextual lookups */

struct ChainContextLookupContext
{
  ContextFuncs funcs;
  const void *match_data[3];
};

static inline bool chain_context_lookup (hb_apply_context_t *c,
					 unsigned int backtrackCount,
					 const USHORT backtrack[],
					 unsigned int inputCount, /* Including the first glyph (not matched) */
					 const USHORT input[], /* Array of input values--start with second glyph */
					 unsigned int lookaheadCount,
					 const USHORT lookahead[],
					 unsigned int lookupCount,
					 const LookupRecord lookupRecord[],
					 ChainContextLookupContext &lookup_context)
{
  /* First guess */
  if (unlikely (c->buffer->backtrack_len () < backtrackCount ||
		c->buffer->idx + inputCount + lookaheadCount > c->buffer->len ||
		inputCount + lookaheadCount > c->context_length))
    return false;

  hb_apply_context_t new_context = *c;
  return match_backtrack (c,
			  backtrackCount, backtrack,
			  lookup_context.funcs.match, lookup_context.match_data[0])
      && match_input (c,
		      inputCount, input,
		      lookup_context.funcs.match, lookup_context.match_data[1],
		      &new_context.context_length)
      && match_lookahead (c,
			  lookaheadCount, lookahead,
			  lookup_context.funcs.match, lookup_context.match_data[2],
			  new_context.context_length)
      && apply_lookup (&new_context,
		       inputCount,
		       lookupCount, lookupRecord,
		       lookup_context.funcs.apply);
}
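/* Worked example (hypothetical values, for documentation only): a rule of the
 * shape "act on <i> when preceded by <f> and followed by <l>" reaches this
 * function as
 *
 *   backtrackCount = 1, backtrack[] = { value matching f }
 *   inputCount     = 1  (just the covered glyph, so input[] is empty)
 *   lookaheadCount = 1, lookahead[] = { value matching l }
 *
 * The cheap "first guess" above rejects the rule early when the buffer cannot
 * possibly hold backtrack + input + lookahead; the precise per-glyph checks
 * (which also skip ignorable marks) are done by match_backtrack, match_input
 * and match_lookahead against match_data[0], [1] and [2] respectively, and
 * only if all three succeed are the nested lookups run via apply_lookup on
 * the copied context.
 */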

struct ChainRule
{
  friend struct ChainRuleSet;

  private:
  inline bool apply (hb_apply_context_t *c, ChainContextLookupContext &lookup_context) const
  {
    TRACE_APPLY ();
    const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
    const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return chain_context_lookup (c,
				 backtrack.len, backtrack.array,
				 input.len, input.array,
				 lookahead.len, lookahead.array,
				 lookup.len, lookup.array,
				 lookup_context);
  }

  public:
  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    if (!backtrack.sanitize (c)) return false;
    HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
    if (!input.sanitize (c)) return false;
    ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
    if (!lookahead.sanitize (c)) return false;
    ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return lookup.sanitize (c);
  }

  private:
  ArrayOf<USHORT>
		backtrack;		/* Array of backtracking values
					 * (to be matched before the input
					 * sequence) */
  HeadlessArrayOf<USHORT>
		inputX;			/* Array of input values (start with
					 * second glyph) */
  ArrayOf<USHORT>
		lookaheadX;		/* Array of lookahead values (to be
					 * matched after the input sequence) */
  ArrayOf<LookupRecord>
		lookupX;		/* Array of LookupRecords--in
					 * design order */
  public:
  DEFINE_SIZE_MIN (8);
};

struct ChainRuleSet
{
  inline bool apply (hb_apply_context_t *c, ChainContextLookupContext &lookup_context) const
  {
    TRACE_APPLY ();
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
    {
      if ((this+rule[i]).apply (c, lookup_context))
        return true;
    }

    return false;
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return rule.sanitize (c, this);
  }

  private:
  OffsetArrayOf<ChainRule>
		rule;			/* Array of ChainRule tables
					 * ordered by preference */
  public:
  DEFINE_SIZE_ARRAY (2, rule);
};

struct ChainContextFormat1
{
  friend struct ChainContext;

  private:
  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    unsigned int index = (this+coverage) (c->buffer->info[c->buffer->idx].codepoint);
    if (likely (index == NOT_COVERED))
      return false;

    const ChainRuleSet &rule_set = this+ruleSet[index];
    struct ChainContextLookupContext lookup_context = {
      {match_glyph, apply_func},
      {NULL, NULL, NULL}
    };
    return rule_set.apply (c, lookup_context);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return coverage.sanitize (c, this)
	&& ruleSet.sanitize (c, this);
  }

  private:
  USHORT	format;			/* Format identifier--format = 1 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  OffsetArrayOf<ChainRuleSet>
		ruleSet;		/* Array of ChainRuleSet tables
					 * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (6, ruleSet);
};

struct ChainContextFormat2
{
  friend struct ChainContext;

  private:
  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    unsigned int index = (this+coverage) (c->buffer->info[c->buffer->idx].codepoint);
    if (likely (index == NOT_COVERED))
      return false;

    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    index = input_class_def (c->buffer->info[c->buffer->idx].codepoint);
    const ChainRuleSet &rule_set = this+ruleSet[index];
    struct ChainContextLookupContext lookup_context = {
      {match_class, apply_func},
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def}
    };
    return rule_set.apply (c, lookup_context);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return coverage.sanitize (c, this)
	&& backtrackClassDef.sanitize (c, this)
	&& inputClassDef.sanitize (c, this)
	&& lookaheadClassDef.sanitize (c, this)
	&& ruleSet.sanitize (c, this);
  }

  private:
  USHORT	format;			/* Format identifier--format = 2 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  OffsetTo<ClassDef>
		backtrackClassDef;	/* Offset to glyph ClassDef table
					 * containing backtrack sequence
					 * data--from beginning of table */
  OffsetTo<ClassDef>
		inputClassDef;		/* Offset to glyph ClassDef
					 * table containing input sequence
					 * data--from beginning of table */
  OffsetTo<ClassDef>
		lookaheadClassDef;	/* Offset to glyph ClassDef table
					 * containing lookahead sequence
					 * data--from beginning of table */
  OffsetArrayOf<ChainRuleSet>
		ruleSet;		/* Array of ChainRuleSet tables
					 * ordered by class */
  public:
  DEFINE_SIZE_ARRAY (12, ruleSet);
};

struct ChainContextFormat3
{
  friend struct ChainContext;

  private:

  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);

    unsigned int index = (this+input[0]) (c->buffer->info[c->buffer->idx].codepoint);
    if (likely (index == NOT_COVERED))
      return false;

    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    struct ChainContextLookupContext lookup_context = {
      {match_coverage, apply_func},
      {this, this, this}
    };
    return chain_context_lookup (c,
				 backtrack.len, (const USHORT *) backtrack.array,
				 input.len, (const USHORT *) input.array + 1,
				 lookahead.len, (const USHORT *) lookahead.array,
				 lookup.len, lookup.array,
				 lookup_context);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    if (!backtrack.sanitize (c, this)) return false;
    OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
    if (!input.sanitize (c, this)) return false;
    OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
    if (!lookahead.sanitize (c, this)) return false;
    ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return lookup.sanitize (c);
  }

  private:
  USHORT	format;			/* Format identifier--format = 3 */
  OffsetArrayOf<Coverage>
		backtrack;		/* Array of coverage tables
					 * in backtracking sequence, in glyph
					 * sequence order */
  OffsetArrayOf<Coverage>
		inputX;			/* Array of coverage
					 * tables in input sequence, in glyph
					 * sequence order */
  OffsetArrayOf<Coverage>
		lookaheadX;		/* Array of coverage tables
					 * in lookahead sequence, in glyph
					 * sequence order */
  ArrayOf<LookupRecord>
		lookupX;		/* Array of LookupRecords--in
					 * design order */
  public:
  DEFINE_SIZE_MIN (10);
};

struct ChainContext
{
  protected:
  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    switch (u.format) {
    case 1: return u.format1.apply (c, apply_func);
    case 2: return u.format2.apply (c, apply_func);
    case 3: return u.format3.apply (c, apply_func);
    default:return false;
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    if (!u.format.sanitize (c)) return false;
    switch (u.format) {
    case 1: return u.format1.sanitize (c);
    case 2: return u.format2.sanitize (c);
    case 3: return u.format3.sanitize (c);
    default:return true;
    }
  }

  private:
  union {
  USHORT		format;	/* Format identifier */
  ChainContextFormat1	format1;
  ChainContextFormat2	format2;
  ChainContextFormat3	format3;
  } u;
};


struct ExtensionFormat1
{
  friend struct Extension;

  protected:
  inline unsigned int get_type (void) const { return extensionLookupType; }
  inline unsigned int get_offset (void) const { return extensionOffset; }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return c->check_struct (this);
  }

  private:
  USHORT	format;			/* Format identifier. Set to 1. */
  USHORT	extensionLookupType;	/* Lookup type of subtable referenced
					 * by ExtensionOffset (i.e. the
					 * extension subtable). */
  ULONG		extensionOffset;	/* Offset to the extension subtable,
					 * of lookup type subtable. */
  public:
  DEFINE_SIZE_STATIC (8);
};

struct Extension
{
  inline unsigned int get_type (void) const
  {
    switch (u.format) {
    case 1: return u.format1.get_type ();
    default:return 0;
    }
  }
  inline unsigned int get_offset (void) const
  {
    switch (u.format) {
    case 1: return u.format1.get_offset ();
    default:return 0;
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    if (!u.format.sanitize (c)) return false;
    switch (u.format) {
    case 1: return u.format1.sanitize (c);
    default:return true;
    }
  }

  private:
  union {
  USHORT		format;		/* Format identifier */
  ExtensionFormat1	format1;
  } u;
};
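/* Illustrative sketch (hypothetical, not part of this header): the GSUB/GPOS
 * sides are expected to wrap Extension with something along these lines,
 * using get_offset () to reach the wrapped subtable and get_type () to know
 * how to interpret it:
 *
 *   struct ExtensionSubst : Extension
 *   {
 *     inline const LookupSubTable& get_subtable (void) const
 *     {
 *       unsigned int offset = get_offset ();
 *       if (unlikely (!offset)) return Null(LookupSubTable);
 *       return StructAtOffset<LookupSubTable> (this, offset);
 *     }
 *   };
 *
 * LookupSubTable here stands for whatever union of subtable formats the
 * concrete table defines; the point is only that extensionOffset is relative
 * to the start of the ExtensionFormat1 subtable itself.
 */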


/*
 * GSUB/GPOS Common
 */

struct GSUBGPOS
{
  static const hb_tag_t GSUBTag	= HB_OT_TAG_GSUB;
  static const hb_tag_t GPOSTag	= HB_OT_TAG_GPOS;

  inline unsigned int get_script_count (void) const
  { return (this+scriptList).len; }
  inline const Tag& get_script_tag (unsigned int i) const
  { return (this+scriptList).get_tag (i); }
  inline unsigned int get_script_tags (unsigned int start_offset,
				       unsigned int *script_count /* IN/OUT */,
				       hb_tag_t     *script_tags /* OUT */) const
  { return (this+scriptList).get_tags (start_offset, script_count, script_tags); }
  inline const Script& get_script (unsigned int i) const
  { return (this+scriptList)[i]; }
  inline bool find_script_index (hb_tag_t tag, unsigned int *index) const
  { return (this+scriptList).find_index (tag, index); }

  inline unsigned int get_feature_count (void) const
  { return (this+featureList).len; }
  inline const Tag& get_feature_tag (unsigned int i) const
  { return (this+featureList).get_tag (i); }
  inline unsigned int get_feature_tags (unsigned int start_offset,
					unsigned int *feature_count /* IN/OUT */,
					hb_tag_t     *feature_tags /* OUT */) const
  { return (this+featureList).get_tags (start_offset, feature_count, feature_tags); }
  inline const Feature& get_feature (unsigned int i) const
  { return (this+featureList)[i]; }
  inline bool find_feature_index (hb_tag_t tag, unsigned int *index) const
  { return (this+featureList).find_index (tag, index); }

  inline unsigned int get_lookup_count (void) const
  { return (this+lookupList).len; }
  inline const Lookup& get_lookup (unsigned int i) const
  { return (this+lookupList)[i]; }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return version.sanitize (c) && likely (version.major == 1)
	&& scriptList.sanitize (c, this)
	&& featureList.sanitize (c, this)
	&& lookupList.sanitize (c, this);
  }

  protected:
  FixedVersion	version;	/* Version of the GSUB/GPOS table--initially set
				 * to 0x00010000 */
  OffsetTo<ScriptList>
		scriptList;  	/* ScriptList table */
  OffsetTo<FeatureList>
		featureList; 	/* FeatureList table */
  OffsetTo<LookupList>
		lookupList; 	/* LookupList table */
  public:
  DEFINE_SIZE_STATIC (10);
};
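/* Illustrative sketch (hypothetical, not part of this header): enumerating
 * the scripts of a table through the accessors above, assuming `g' already
 * refers to sanitized GSUB or GPOS data:
 *
 *   unsigned int count = g.get_script_count ();
 *   for (unsigned int i = 0; i < count; i++)
 *   {
 *     hb_tag_t tag = g.get_script_tag (i);
 *     unsigned int script_index;
 *     if (g.find_script_index (tag, &script_index))
 *       process_script (g.get_script (script_index));   // hypothetical helper
 *   }
 *
 * The batched get_script_tags () / get_feature_tags () variants take a
 * start_offset and an IN/OUT count so callers can fill fixed-size arrays in
 * chunks, mirroring the public hb_ot_layout API style.
 */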



#endif /* HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH */
