hb-ot-layout-gsubgpos-private.hh revision 41ae674f6871f43d0a6e4ca67a747074d63ae576
1/*
2 * Copyright © 2007,2008,2009,2010  Red Hat, Inc.
3 * Copyright © 2010  Google, Inc.
4 *
5 *  This is part of HarfBuzz, a text shaping library.
6 *
7 * Permission is hereby granted, without written agreement and without
8 * license or royalty fees, to use, copy, modify, and distribute this
9 * software and its documentation for any purpose, provided that the
10 * above copyright notice and the following two paragraphs appear in
11 * all copies of this software.
12 *
13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
14 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
15 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
16 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
17 * DAMAGE.
18 *
19 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
20 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
21 * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
22 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
23 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
24 *
25 * Red Hat Author(s): Behdad Esfahbod
26 * Google Author(s): Behdad Esfahbod
27 */
28
29#ifndef HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH
30#define HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH
31
32#include "hb-buffer-private.hh"
33#include "hb-ot-layout-gdef-table.hh"
34
35
/* buffer var allocations: scratch bytes in each glyph's var2 slot,
 * claimed here for tracking ligature formation across lookups. */
#define lig_id() var2.u8[2] /* unique ligature id */
#define lig_comp() var2.u8[3] /* component number in the ligature (0 = base) */
39
40static inline uint8_t allocate_lig_id (hb_buffer_t *buffer) {
41  uint8_t lig_id = buffer->next_serial ();
42  if (unlikely (!lig_id)) lig_id = buffer->next_serial (); /* in case of overflow */
43  return lig_id;
44}
45
46
47
/* Debug level for lookup application; defaults to the global HB_DEBUG. */
#ifndef HB_DEBUG_APPLY
#define HB_DEBUG_APPLY (HB_DEBUG+0)
#endif

/* Scoped trace entry for apply() methods; expects a hb_apply_context_t *c
 * named `c` and a `this` pointer in scope. */
#define TRACE_APPLY() \
	hb_auto_trace_t<HB_DEBUG_APPLY> trace (&c->debug_depth, "APPLY", this, NULL, HB_FUNC);
54
55
56
/* State shared by all lookups while being applied to a buffer.
 * Carries the font/face/buffer being shaped plus the parameters of the
 * lookup currently running: its mask, the remaining context window,
 * the nesting budget for recursive lookups, and the lookup_props used
 * for mark filtering. */
struct hb_apply_context_t
{
  unsigned int debug_depth;
  hb_font_t *font;
  hb_face_t *face;
  hb_buffer_t *buffer;
  hb_direction_t direction;
  hb_mask_t lookup_mask;
  unsigned int context_length; /* forward window: how far past buffer->idx matching may reach */
  unsigned int nesting_level_left; /* recursion budget left for nested lookups */
  unsigned int lookup_props;
  unsigned int property; /* property of first glyph */


  /* Top-level constructor: set up for applying lookup `l` to `buffer_`. */
  hb_apply_context_t (hb_font_t *font_,
		      hb_face_t *face_,
		      hb_buffer_t *buffer_,
		      hb_mask_t lookup_mask_,
		      const Lookup &l,
		      unsigned int context_length_ = NO_CONTEXT,
		      unsigned int nesting_level_left_ = MAX_NESTING_LEVEL) :
			font (font_), face (face_), buffer (buffer_),
			direction (buffer_->props.direction),
			lookup_mask (lookup_mask_),
			context_length (context_length_),
			nesting_level_left (nesting_level_left_),
			lookup_props (l.get_props ()),
			property (0) {}

  /* Nested-lookup constructor: clone the parent context, consume one
   * nesting level, and switch to the new lookup's props. */
  hb_apply_context_t (const hb_apply_context_t &c, const Lookup &l) {
    *this = c;
    nesting_level_left--;
    lookup_props = l.get_props ();
  }

  /* Walks forward over buffer->info starting at start_index_,
   * transparently skipping glyphs that _hb_ot_layout_skip_mark()
   * rejects under the active lookup_props, until num_items_ glyphs
   * have been yielded or the window runs out. */
  struct mark_skipping_forward_iterator_t
  {
    inline mark_skipping_forward_iterator_t (hb_apply_context_t *c_,
					     unsigned int start_index_,
					     unsigned int num_items_)
    {
      c = c_;
      idx = start_index_;
      num_items = num_items_;
      /* Scan window is bounded by both buffer length and context_length. */
      end = MIN (c->buffer->len, c->buffer->idx + c->context_length);
    }
    /* True when fewer than num_items glyphs remain before `end`. */
    inline bool has_no_chance (void) const
    {
      return unlikely (num_items && idx + num_items >= end);
    }
    /* Advance to the next non-skipped glyph.  Returns false if the
     * window is exhausted; on success *property_out receives the
     * glyph's property as computed by _hb_ot_layout_skip_mark(). */
    inline bool next (unsigned int *property_out,
		      unsigned int lookup_props)
    {
      assert (num_items > 0);
      do
      {
	if (has_no_chance ())
	  return false;
	idx++;
      } while (_hb_ot_layout_skip_mark (c->face, &c->buffer->info[idx], lookup_props, property_out));
      num_items--;
      return true;
    }
    inline bool next (unsigned int *property_out = NULL)
    {
      return next (property_out, c->lookup_props);
    }

    unsigned int idx;
    private:
    hb_apply_context_t *c;
    unsigned int num_items;
    unsigned int end;
  };

  /* Mirror image of the forward iterator: walks backward over the
   * already-output glyphs in buffer->out_info. */
  struct mark_skipping_backward_iterator_t
  {
    inline mark_skipping_backward_iterator_t (hb_apply_context_t *c_,
					      unsigned int start_index_,
					      unsigned int num_items_)
    {
      c = c_;
      idx = start_index_;
      num_items = num_items_;
    }
    /* True when fewer than num_items glyphs remain before index 0. */
    inline bool has_no_chance (void) const
    {
      return unlikely (idx < num_items);
    }
    /* Step back to the previous non-skipped glyph; see next() above. */
    inline bool prev (unsigned int *property_out,
		      unsigned int lookup_props)
    {
      assert (num_items > 0);
      do
      {
	if (has_no_chance ())
	  return false;
	idx--;
      } while (_hb_ot_layout_skip_mark (c->face, &c->buffer->out_info[idx], lookup_props, property_out));
      num_items--;
      return true;
    }
    inline bool prev (unsigned int *property_out = NULL)
    {
      return prev (property_out, c->lookup_props);
    }

    unsigned int idx;
    private:
    hb_apply_context_t *c;
    unsigned int num_items;
  };

  /* Whether the glyph at the current buffer position is to be skipped
   * under the active lookup_props. */
  inline bool should_mark_skip_current_glyph (void) const
  {
    return _hb_ot_layout_skip_mark (face, &buffer->info[buffer->idx], lookup_props, NULL);
  }



  /* Replace the current glyph, invalidating its cached properties. */
  inline void replace_glyph (hb_codepoint_t glyph_index) const
  {
    clear_property ();
    buffer->replace_glyph (glyph_index);
  }
  /* Replace num_in glyphs with num_out glyphs given as big-endian
   * 16-bit codes, invalidating the cached properties first. */
  inline void replace_glyphs_be16 (unsigned int num_in,
				   unsigned int num_out,
				   const uint16_t *glyph_data_be) const
  {
    clear_property ();
    buffer->replace_glyphs_be16 (num_in, num_out, glyph_data_be);
  }

  /* Force the cached glyph class of the current glyph to `klass`. */
  inline void guess_glyph_class (unsigned int klass)
  {
    /* XXX if ! has gdef */
    buffer->info[buffer->idx].props_cache() = klass;
  }

  private:
  /* Reset the cached properties of the current glyph so they get
   * recomputed after a substitution. */
  inline void clear_property (void) const
  {
    /* XXX if has gdef */
    buffer->info[buffer->idx].props_cache() = 0;
  }
};
203
204
205
/* A match_func_t decides whether glyph_id satisfies `value`; how `value`
 * is interpreted (glyph id, class number, or coverage offset) is fixed
 * by the paired `data` pointer. */
typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const USHORT &value, const void *data);
/* Recursively applies lookup number `lookup_index` in context `c`. */
typedef bool (*apply_lookup_func_t) (hb_apply_context_t *c, unsigned int lookup_index);

/* Callback pair that parameterizes the (Chain)Context machinery. */
struct ContextFuncs
{
  match_func_t match;
  apply_lookup_func_t apply;
};
214
215
/* Format 1 matcher: `value` is a bare glyph id; `data` is unused. */
static inline bool match_glyph (hb_codepoint_t glyph_id, const USHORT &value, const void *data HB_UNUSED)
{
  return glyph_id == value;
}
220
221static inline bool match_class (hb_codepoint_t glyph_id, const USHORT &value, const void *data)
222{
223  const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
224  return class_def.get_class (glyph_id) == value;
225}
226
/* Format 3 matcher: `value` is really an offset to a Coverage table,
 * relative to `data` (the enclosing subtable); the glyph matches if it
 * is covered. */
static inline bool match_coverage (hb_codepoint_t glyph_id, const USHORT &value, const void *data)
{
  const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
  return (data+coverage) (glyph_id) != NOT_COVERED;
}
232
233
/* Match the input sequence (glyphs after the current one) against
 * `input`, skipping ignorable marks.  On success, writes the number of
 * buffer positions the whole match spans (including skipped glyphs)
 * into *context_length_out. */
static inline bool match_input (hb_apply_context_t *c,
				unsigned int count, /* Including the first glyph (not matched) */
				const USHORT input[], /* Array of input values--start with second glyph */
				match_func_t match_func,
				const void *match_data,
				unsigned int *context_length_out)
{
  hb_apply_context_t::mark_skipping_forward_iterator_t skippy_iter (c, c->buffer->idx, count - 1);
  if (skippy_iter.has_no_chance ())
    return false;

  /* First glyph is already matched by the caller; check the rest. */
  for (unsigned int i = 1; i < count; i++)
  {
    if (!skippy_iter.next ())
      return false;

    if (likely (!match_func (c->buffer->info[skippy_iter.idx].codepoint, input[i - 1], match_data)))
      return false;
  }

  /* Span from the current position through the last matched glyph. */
  *context_length_out = skippy_iter.idx - c->buffer->idx + 1;

  return true;
}
258
/* Match `count` backtrack glyphs against the already-output glyphs,
 * walking backward from the end of out_info and skipping ignorable
 * marks. */
static inline bool match_backtrack (hb_apply_context_t *c,
				    unsigned int count,
				    const USHORT backtrack[],
				    match_func_t match_func,
				    const void *match_data)
{
  hb_apply_context_t::mark_skipping_backward_iterator_t skippy_iter (c, c->buffer->backtrack_len (), count);
  if (skippy_iter.has_no_chance ())
    return false;

  for (unsigned int i = 0; i < count; i++)
  {
    if (!skippy_iter.prev ())
      return false;

    if (likely (!match_func (c->buffer->out_info[skippy_iter.idx].codepoint, backtrack[i], match_data)))
      return false;
  }

  return true;
}
280
/* Match `count` lookahead glyphs starting `offset` positions past the
 * current glyph, walking forward and skipping ignorable marks. */
static inline bool match_lookahead (hb_apply_context_t *c,
				    unsigned int count,
				    const USHORT lookahead[],
				    match_func_t match_func,
				    const void *match_data,
				    unsigned int offset)
{
  /* -1 because the iterator pre-increments before testing. */
  hb_apply_context_t::mark_skipping_forward_iterator_t skippy_iter (c, c->buffer->idx + offset - 1, count);
  if (skippy_iter.has_no_chance ())
    return false;

  for (unsigned int i = 0; i < count; i++)
  {
    if (!skippy_iter.next ())
      return false;

    if (likely (!match_func (c->buffer->info[skippy_iter.idx].codepoint, lookahead[i], match_data)))
      return false;
  }

  return true;
}
303
304
305
/* One (sequence position, lookup index) pair: apply the given lookup at
 * the given position within the matched input sequence. */
struct LookupRecord
{
  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return c->check_struct (this);
  }

  USHORT	sequenceIndex;		/* Index into current glyph
					 * sequence--first glyph = 0 */
  USHORT	lookupListIndex;	/* Lookup to apply to that
					 * position--zero--based */
  public:
  DEFINE_SIZE_STATIC (4);
};
320
321
322
/* Walk over the `count` matched glyphs, applying each LookupRecord at
 * its recorded sequence index and copying the untouched glyphs through.
 * Assumes the records are sorted by increasing sequenceIndex (see TODO
 * below).  Returns false only when the match window is invalid. */
static inline bool apply_lookup (hb_apply_context_t *c,
				 unsigned int count, /* Including the first glyph */
				 unsigned int lookupCount,
				 const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */
				 apply_lookup_func_t apply_func)
{
  unsigned int end = MIN (c->buffer->len, c->buffer->idx + c->context_length);
  if (unlikely (count == 0 || c->buffer->idx + count > end))
    return false;

  /* TODO We don't support lookupRecord arrays that are not increasing:
   *      Should be easy for in_place ones at least. */

  /* Note: If sublookup is reverse, it will underflow after the first loop
   * and we jump out of it.  Not entirely disastrous.  So we don't check
   * for reverse lookup here.
   */
  for (unsigned int i = 0; i < count; /* NOP */)
  {
    if (unlikely (c->buffer->idx == end))
      return true;
    /* Copy through glyphs the current lookup is set to ignore. */
    while (c->should_mark_skip_current_glyph ())
    {
      /* No lookup applied for this index */
      c->buffer->next_glyph ();
      if (unlikely (c->buffer->idx == end))
	return true;
    }

    if (lookupCount && i == lookupRecord->sequenceIndex)
    {
      unsigned int old_pos = c->buffer->idx;

      /* Apply a lookup */
      bool done = apply_func (c, lookupRecord->lookupListIndex);

      lookupRecord++;
      lookupCount--;
      /* Err, this is wrong if the lookup jumped over some glyphs */
      i += c->buffer->idx - old_pos;
      if (unlikely (c->buffer->idx == end))
	return true;

      if (!done)
	goto not_applied;
    }
    else
    {
    not_applied:
      /* No lookup applied for this index */
      c->buffer->next_glyph ();
      i++;
    }
  }

  return true;
}
380
381
382
383/* Contextual lookups */
384
/* Matching/apply callbacks plus the single data pointer that the
 * match function interprets (NULL, a ClassDef, or the subtable base). */
struct ContextLookupContext
{
  ContextFuncs funcs;
  const void *match_data;
};
390
/* Core of all Context subtable formats: match the input sequence, then
 * apply the LookupRecords.  apply_lookup runs in a copy of the context
 * whose context_length was narrowed by match_input to the matched span. */
static inline bool context_lookup (hb_apply_context_t *c,
				   unsigned int inputCount, /* Including the first glyph (not matched) */
				   const USHORT input[], /* Array of input values--start with second glyph */
				   unsigned int lookupCount,
				   const LookupRecord lookupRecord[],
				   ContextLookupContext &lookup_context)
{
  hb_apply_context_t new_context = *c;
  return match_input (c,
		      inputCount, input,
		      lookup_context.funcs.match, lookup_context.match_data,
		      &new_context.context_length)
      && apply_lookup (&new_context,
		       inputCount,
		       lookupCount, lookupRecord,
		       lookup_context.funcs.apply);
}
408
/* One context rule: an input sequence followed (in the binary layout)
 * by its LookupRecords.  The records live immediately after the input
 * array, hence the StructAtOffset arithmetic in apply(). */
struct Rule
{
  friend struct RuleSet;

  private:
  inline bool apply (hb_apply_context_t *c, ContextLookupContext &lookup_context) const
  {
    TRACE_APPLY ();
    /* LookupRecords start right after the inputCount-1 input values. */
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (input, input[0].static_size * (inputCount ? inputCount - 1 : 0));
    return context_lookup (c,
			   inputCount, input,
			   lookupCount, lookupRecord,
			   lookup_context);
  }

  public:
  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return inputCount.sanitize (c)
	&& lookupCount.sanitize (c)
	&& c->check_range (input,
				 input[0].static_size * inputCount
				 + lookupRecordX[0].static_size * lookupCount);
  }

  private:
  USHORT	inputCount;		/* Total number of glyphs in input
					 * glyph sequence--includes the  first
					 * glyph */
  USHORT	lookupCount;		/* Number of LookupRecords */
  USHORT	input[VAR];		/* Array of match inputs--start with
					 * second glyph */
  LookupRecord	lookupRecordX[VAR];	/* Array of LookupRecords--in
					 * design order */
  public:
  DEFINE_SIZE_ARRAY2 (4, input, lookupRecordX);
};
446
447struct RuleSet
448{
449  inline bool apply (hb_apply_context_t *c, ContextLookupContext &lookup_context) const
450  {
451    TRACE_APPLY ();
452    unsigned int num_rules = rule.len;
453    for (unsigned int i = 0; i < num_rules; i++)
454    {
455      if ((this+rule[i]).apply (c, lookup_context))
456        return true;
457    }
458
459    return false;
460  }
461
462  inline bool sanitize (hb_sanitize_context_t *c) {
463    TRACE_SANITIZE ();
464    return rule.sanitize (c, this);
465  }
466
467  private:
468  OffsetArrayOf<Rule>
469		rule;			/* Array of Rule tables
470					 * ordered by preference */
471  public:
472  DEFINE_SIZE_ARRAY (2, rule);
473};
474
475
/* Context subtable, format 1: rules keyed by exact glyph ids.  The
 * first glyph selects a RuleSet via the Coverage table. */
struct ContextFormat1
{
  friend struct Context;

  private:
  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    unsigned int index = (this+coverage) (c->buffer->info[c->buffer->idx].codepoint);
    if (likely (index == NOT_COVERED))
      return false;

    const RuleSet &rule_set = this+ruleSet[index];
    struct ContextLookupContext lookup_context = {
      {match_glyph, apply_func},
      NULL /* match_glyph needs no extra data */
    };
    return rule_set.apply (c, lookup_context);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return coverage.sanitize (c, this)
	&& ruleSet.sanitize (c, this);
  }

  private:
  USHORT	format;			/* Format identifier--format = 1 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  OffsetArrayOf<RuleSet>
		ruleSet;		/* Array of RuleSet tables
					 * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (6, ruleSet);
};
513
514
/* Context subtable, format 2: rules keyed by glyph classes.  Coverage
 * gates entry; the first glyph's class then selects the RuleSet. */
struct ContextFormat2
{
  friend struct Context;

  private:
  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    unsigned int index = (this+coverage) (c->buffer->info[c->buffer->idx].codepoint);
    if (likely (index == NOT_COVERED))
      return false;

    const ClassDef &class_def = this+classDef;
    /* Reuse `index` for the class number that picks the rule set. */
    index = class_def (c->buffer->info[c->buffer->idx].codepoint);
    const RuleSet &rule_set = this+ruleSet[index];
    struct ContextLookupContext lookup_context = {
      {match_class, apply_func},
      &class_def
    };
    return rule_set.apply (c, lookup_context);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return coverage.sanitize (c, this)
        && classDef.sanitize (c, this)
	&& ruleSet.sanitize (c, this);
  }

  private:
  USHORT	format;			/* Format identifier--format = 2 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  OffsetTo<ClassDef>
		classDef;		/* Offset to glyph ClassDef table--from
					 * beginning of table */
  OffsetArrayOf<RuleSet>
		ruleSet;		/* Array of RuleSet tables
					 * ordered by class */
  public:
  DEFINE_SIZE_ARRAY (8, ruleSet);
};
558
559
/* Context subtable, format 3: one Coverage table per input position.
 * The LookupRecords follow the coverage offsets in the binary layout. */
struct ContextFormat3
{
  friend struct Context;

  private:
  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    unsigned int index = (this+coverage[0]) (c->buffer->info[c->buffer->idx].codepoint);
    if (likely (index == NOT_COVERED))
      return false;

    /* LookupRecords start right after the glyphCount coverage offsets. */
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * glyphCount);
    struct ContextLookupContext lookup_context = {
      {match_coverage, apply_func},
      this /* base for resolving the per-position coverage offsets */
    };
    /* coverage + 1: match_input starts at the second input position. */
    return context_lookup (c,
			   glyphCount, (const USHORT *) (coverage + 1),
			   lookupCount, lookupRecord,
			   lookup_context);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    if (!c->check_struct (this)) return false;
    unsigned int count = glyphCount;
    if (!c->check_array (coverage, coverage[0].static_size, count)) return false;
    for (unsigned int i = 0; i < count; i++)
      if (!coverage[i].sanitize (c, this)) return false;
    LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * count);
    return c->check_array (lookupRecord, lookupRecord[0].static_size, lookupCount);
  }

  private:
  USHORT	format;			/* Format identifier--format = 3 */
  USHORT	glyphCount;		/* Number of glyphs in the input glyph
					 * sequence */
  USHORT	lookupCount;		/* Number of LookupRecords */
  OffsetTo<Coverage>
		coverage[VAR];		/* Array of offsets to Coverage
					 * table in glyph sequence order */
  LookupRecord	lookupRecordX[VAR];	/* Array of LookupRecords--in
					 * design order */
  public:
  DEFINE_SIZE_ARRAY2 (6, coverage, lookupRecordX);
};
607
/* Dispatcher over the three Context subtable formats; the leading
 * USHORT of the union discriminates. */
struct Context
{
  protected:
  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    switch (u.format) {
    case 1: return u.format1.apply (c, apply_func);
    case 2: return u.format2.apply (c, apply_func);
    case 3: return u.format3.apply (c, apply_func);
    default:return false;
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    if (!u.format.sanitize (c)) return false;
    switch (u.format) {
    case 1: return u.format1.sanitize (c);
    case 2: return u.format2.sanitize (c);
    case 3: return u.format3.sanitize (c);
    default:return true; /* unknown formats are ignored, not rejected */
    }
  }

  private:
  union {
  USHORT		format;		/* Format identifier */
  ContextFormat1	format1;
  ContextFormat2	format2;
  ContextFormat3	format3;
  } u;
};
641
642
643/* Chaining Contextual lookups */
644
/* Like ContextLookupContext, but with one match-data pointer per
 * sequence part: [0]=backtrack, [1]=input, [2]=lookahead. */
struct ChainContextLookupContext
{
  ContextFuncs funcs;
  const void *match_data[3];
};
650
/* Core of all ChainContext subtable formats: match backtrack, input,
 * and lookahead sequences, then apply the LookupRecords over the
 * matched input span. */
static inline bool chain_context_lookup (hb_apply_context_t *c,
					 unsigned int backtrackCount,
					 const USHORT backtrack[],
					 unsigned int inputCount, /* Including the first glyph (not matched) */
					 const USHORT input[], /* Array of input values--start with second glyph */
					 unsigned int lookaheadCount,
					 const USHORT lookahead[],
					 unsigned int lookupCount,
					 const LookupRecord lookupRecord[],
					 ChainContextLookupContext &lookup_context)
{
  /* First guess: cheap reject when the buffer cannot possibly hold the
   * requested backtrack/input/lookahead spans. */
  if (unlikely (c->buffer->backtrack_len () < backtrackCount ||
		c->buffer->idx + inputCount + lookaheadCount > c->buffer->len ||
		inputCount + lookaheadCount > c->context_length))
    return false;

  hb_apply_context_t new_context = *c;
  return match_backtrack (c,
			  backtrackCount, backtrack,
			  lookup_context.funcs.match, lookup_context.match_data[0])
      && match_input (c,
		      inputCount, input,
		      lookup_context.funcs.match, lookup_context.match_data[1],
		      &new_context.context_length)
      && match_lookahead (c,
			  lookaheadCount, lookahead,
			  lookup_context.funcs.match, lookup_context.match_data[2],
			  new_context.context_length)
      && apply_lookup (&new_context,
		       inputCount,
		       lookupCount, lookupRecord,
		       lookup_context.funcs.apply);
}
685
/* One chaining context rule.  The four variable-length arrays
 * (backtrack, input, lookahead, lookup records) are laid out back to
 * back; only the first has a fixed offset, the rest are located via
 * StructAfter. */
struct ChainRule
{
  friend struct ChainRuleSet;

  private:
  inline bool apply (hb_apply_context_t *c, ChainContextLookupContext &lookup_context) const
  {
    TRACE_APPLY ();
    /* Walk the packed arrays: each one starts where the previous ends. */
    const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
    const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return chain_context_lookup (c,
				 backtrack.len, backtrack.array,
				 input.len, input.array,
				 lookahead.len, lookahead.array,
				 lookup.len, lookup.array,
				 lookup_context);
  }

  public:
  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    /* Sanitize each array in order; each must be valid before the next
     * one's position can be trusted. */
    if (!backtrack.sanitize (c)) return false;
    HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
    if (!input.sanitize (c)) return false;
    ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
    if (!lookahead.sanitize (c)) return false;
    ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return lookup.sanitize (c);
  }

  private:
  ArrayOf<USHORT>
		backtrack;		/* Array of backtracking values
					 * (to be matched before the input
					 * sequence) */
  HeadlessArrayOf<USHORT>
		inputX;			/* Array of input values (start with
					 * second glyph) */
  ArrayOf<USHORT>
		lookaheadX;		/* Array of lookahead values's (to be
					 * matched after the input sequence) */
  ArrayOf<LookupRecord>
		lookupX;		/* Array of LookupRecords--in
					 * design order) */
  public:
  DEFINE_SIZE_MIN (8);
};
734
735struct ChainRuleSet
736{
737  inline bool apply (hb_apply_context_t *c, ChainContextLookupContext &lookup_context) const
738  {
739    TRACE_APPLY ();
740    unsigned int num_rules = rule.len;
741    for (unsigned int i = 0; i < num_rules; i++)
742    {
743      if ((this+rule[i]).apply (c, lookup_context))
744        return true;
745    }
746
747    return false;
748  }
749
750  inline bool sanitize (hb_sanitize_context_t *c) {
751    TRACE_SANITIZE ();
752    return rule.sanitize (c, this);
753  }
754
755  private:
756  OffsetArrayOf<ChainRule>
757		rule;			/* Array of ChainRule tables
758					 * ordered by preference */
759  public:
760  DEFINE_SIZE_ARRAY (2, rule);
761};
762
/* ChainContext subtable, format 1: rules keyed by exact glyph ids. */
struct ChainContextFormat1
{
  friend struct ChainContext;

  private:
  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    unsigned int index = (this+coverage) (c->buffer->info[c->buffer->idx].codepoint);
    if (likely (index == NOT_COVERED))
      return false;

    const ChainRuleSet &rule_set = this+ruleSet[index];
    struct ChainContextLookupContext lookup_context = {
      {match_glyph, apply_func},
      {NULL, NULL, NULL} /* match_glyph needs no extra data */
    };
    return rule_set.apply (c, lookup_context);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return coverage.sanitize (c, this)
	&& ruleSet.sanitize (c, this);
  }

  private:
  USHORT	format;			/* Format identifier--format = 1 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  OffsetArrayOf<ChainRuleSet>
		ruleSet;		/* Array of ChainRuleSet tables
					 * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (6, ruleSet);
};
800
/* ChainContext subtable, format 2: rules keyed by glyph classes, with
 * a separate ClassDef for each of backtrack/input/lookahead. */
struct ChainContextFormat2
{
  friend struct ChainContext;

  private:
  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    unsigned int index = (this+coverage) (c->buffer->info[c->buffer->idx].codepoint);
    if (likely (index == NOT_COVERED))
      return false;

    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    /* Reuse `index` for the input class that picks the rule set. */
    index = input_class_def (c->buffer->info[c->buffer->idx].codepoint);
    const ChainRuleSet &rule_set = this+ruleSet[index];
    struct ChainContextLookupContext lookup_context = {
      {match_class, apply_func},
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def}
    };
    return rule_set.apply (c, lookup_context);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return coverage.sanitize (c, this)
	&& backtrackClassDef.sanitize (c, this)
	&& inputClassDef.sanitize (c, this)
	&& lookaheadClassDef.sanitize (c, this)
	&& ruleSet.sanitize (c, this);
  }

  private:
  USHORT	format;			/* Format identifier--format = 2 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  OffsetTo<ClassDef>
		backtrackClassDef;	/* Offset to glyph ClassDef table
					 * containing backtrack sequence
					 * data--from beginning of table */
  OffsetTo<ClassDef>
		inputClassDef;		/* Offset to glyph ClassDef
					 * table containing input sequence
					 * data--from beginning of table */
  OffsetTo<ClassDef>
		lookaheadClassDef;	/* Offset to glyph ClassDef table
					 * containing lookahead sequence
					 * data--from beginning of table */
  OffsetArrayOf<ChainRuleSet>
		ruleSet;		/* Array of ChainRuleSet tables
					 * ordered by class */
  public:
  DEFINE_SIZE_ARRAY (12, ruleSet);
};
860
/* ChainContext subtable, format 3: one Coverage table per position in
 * each of the backtrack/input/lookahead sequences.  The four
 * variable-length arrays are packed back to back (see StructAfter). */
struct ChainContextFormat3
{
  friend struct ChainContext;

  private:

  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);

    /* Entry test: current glyph must be in the first input coverage. */
    unsigned int index = (this+input[0]) (c->buffer->info[c->buffer->idx].codepoint);
    if (likely (index == NOT_COVERED))
      return false;

    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    struct ChainContextLookupContext lookup_context = {
      {match_coverage, apply_func},
      {this, this, this} /* all coverage offsets are relative to this table */
    };
    /* input.array + 1: match_input starts at the second input position. */
    return chain_context_lookup (c,
				 backtrack.len, (const USHORT *) backtrack.array,
				 input.len, (const USHORT *) input.array + 1,
				 lookahead.len, (const USHORT *) lookahead.array,
				 lookup.len, lookup.array,
				 lookup_context);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    /* Sanitize each packed array in order, as in ChainRule. */
    if (!backtrack.sanitize (c, this)) return false;
    OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
    if (!input.sanitize (c, this)) return false;
    OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
    if (!lookahead.sanitize (c, this)) return false;
    ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return lookup.sanitize (c);
  }

  private:
  USHORT	format;			/* Format identifier--format = 3 */
  OffsetArrayOf<Coverage>
		backtrack;		/* Array of coverage tables
					 * in backtracking sequence, in  glyph
					 * sequence order */
  OffsetArrayOf<Coverage>
		inputX		;	/* Array of coverage
					 * tables in input sequence, in glyph
					 * sequence order */
  OffsetArrayOf<Coverage>
		lookaheadX;		/* Array of coverage tables
					 * in lookahead sequence, in glyph
					 * sequence order */
  ArrayOf<LookupRecord>
		lookupX;		/* Array of LookupRecords--in
					 * design order) */
  public:
  DEFINE_SIZE_MIN (10);
};
921
/* Dispatcher over the three ChainContext subtable formats; the leading
 * USHORT of the union discriminates. */
struct ChainContext
{
  protected:
  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    switch (u.format) {
    case 1: return u.format1.apply (c, apply_func);
    case 2: return u.format2.apply (c, apply_func);
    case 3: return u.format3.apply (c, apply_func);
    default:return false;
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    if (!u.format.sanitize (c)) return false;
    switch (u.format) {
    case 1: return u.format1.sanitize (c);
    case 2: return u.format2.sanitize (c);
    case 3: return u.format3.sanitize (c);
    default:return true; /* unknown formats are ignored, not rejected */
    }
  }

  private:
  union {
  USHORT		format;	/* Format identifier */
  ChainContextFormat1	format1;
  ChainContextFormat2	format2;
  ChainContextFormat3	format3;
  } u;
};
955
956
/* Extension subtable, format 1: a level of indirection giving a 32-bit
 * offset to the real subtable of type extensionLookupType. */
struct ExtensionFormat1
{
  friend struct Extension;

  protected:
  inline unsigned int get_type (void) const { return extensionLookupType; }
  inline unsigned int get_offset (void) const { return extensionOffset; }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    /* Only the fixed header is checked here; the referenced subtable is
     * sanitized by whoever resolves get_offset(). */
    return c->check_struct (this);
  }

  private:
  USHORT	format;			/* Format identifier. Set to 1. */
  USHORT	extensionLookupType;	/* Lookup type of subtable referenced
					 * by ExtensionOffset (i.e. the
					 * extension subtable). */
  ULONG		extensionOffset;	/* Offset to the extension subtable,
					 * of lookup type subtable. */
  public:
  DEFINE_SIZE_STATIC (8);
};
980
/* Dispatcher over Extension subtable formats (only format 1 exists);
 * unknown formats report type/offset 0. */
struct Extension
{
  inline unsigned int get_type (void) const
  {
    switch (u.format) {
    case 1: return u.format1.get_type ();
    default:return 0;
    }
  }
  inline unsigned int get_offset (void) const
  {
    switch (u.format) {
    case 1: return u.format1.get_offset ();
    default:return 0;
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    if (!u.format.sanitize (c)) return false;
    switch (u.format) {
    case 1: return u.format1.sanitize (c);
    default:return true; /* unknown formats are ignored, not rejected */
    }
  }

  private:
  union {
  USHORT		format;		/* Format identifier */
  ExtensionFormat1	format1;
  } u;
};
1013
1014
1015/*
1016 * GSUB/GPOS Common
1017 */
1018
/* Common header shared by the GSUB and GPOS tables: a version field
 * plus offsets to the script, feature, and lookup lists.  All accessors
 * simply forward to the respective list table. */
struct GSUBGPOS
{
  static const hb_tag_t GSUBTag	= HB_OT_TAG_GSUB;
  static const hb_tag_t GPOSTag	= HB_OT_TAG_GPOS;

  inline unsigned int get_script_count (void) const
  { return (this+scriptList).len; }
  inline const Tag& get_script_tag (unsigned int i) const
  { return (this+scriptList).get_tag (i); }
  inline unsigned int get_script_tags (unsigned int start_offset,
				       unsigned int *script_count /* IN/OUT */,
				       hb_tag_t     *script_tags /* OUT */) const
  { return (this+scriptList).get_tags (start_offset, script_count, script_tags); }
  inline const Script& get_script (unsigned int i) const
  { return (this+scriptList)[i]; }
  inline bool find_script_index (hb_tag_t tag, unsigned int *index) const
  { return (this+scriptList).find_index (tag, index); }

  inline unsigned int get_feature_count (void) const
  { return (this+featureList).len; }
  inline const Tag& get_feature_tag (unsigned int i) const
  { return (this+featureList).get_tag (i); }
  inline unsigned int get_feature_tags (unsigned int start_offset,
					unsigned int *feature_count /* IN/OUT */,
					hb_tag_t     *feature_tags /* OUT */) const
  { return (this+featureList).get_tags (start_offset, feature_count, feature_tags); }
  inline const Feature& get_feature (unsigned int i) const
  { return (this+featureList)[i]; }
  inline bool find_feature_index (hb_tag_t tag, unsigned int *index) const
  { return (this+featureList).find_index (tag, index); }

  inline unsigned int get_lookup_count (void) const
  { return (this+lookupList).len; }
  inline const Lookup& get_lookup (unsigned int i) const
  { return (this+lookupList)[i]; }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    /* Only major version 1 is understood. */
    return version.sanitize (c) && likely (version.major == 1)
	&& scriptList.sanitize (c, this)
	&& featureList.sanitize (c, this)
	&& lookupList.sanitize (c, this);
  }

  protected:
  FixedVersion	version;	/* Version of the GSUB/GPOS table--initially set
				 * to 0x00010000 */
  OffsetTo<ScriptList>
		scriptList;  	/* ScriptList table */
  OffsetTo<FeatureList>
		featureList; 	/* FeatureList table */
  OffsetTo<LookupList>
		lookupList; 	/* LookupList table */
  public:
  DEFINE_SIZE_STATIC (10);
};
1075
1076
1077
1078#endif /* HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH */
1079