/* hb-ot-layout-gsubgpos-private.hh revision 7e7007a1c9bf2c07a8369752126ece8fa6164248 */
1/*
2 * Copyright (C) 2007,2008,2009,2010  Red Hat, Inc.
3 *
4 *  This is part of HarfBuzz, a text shaping library.
5 *
6 * Permission is hereby granted, without written agreement and without
7 * license or royalty fees, to use, copy, modify, and distribute this
8 * software and its documentation for any purpose, provided that the
9 * above copyright notice and the following two paragraphs appear in
10 * all copies of this software.
11 *
12 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
13 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
14 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
15 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
16 * DAMAGE.
17 *
18 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
19 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
20 * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
21 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
22 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
23 *
24 * Red Hat Author(s): Behdad Esfahbod
25 */
26
27#ifndef HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH
28#define HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH
29
30#include "hb-buffer-private.hh"
31#include "hb-ot-layout-gdef-private.hh"
32
33
34#ifndef HB_DEBUG_APPLY
35#define HB_DEBUG_APPLY HB_DEBUG+0
36#endif
37
38#define TRACE_APPLY() \
39	hb_trace_t<HB_DEBUG_APPLY> trace (&c->debug_depth, "APPLY", HB_FUNC, this); \
40
41
/* State threaded through every apply() call while executing a lookup
 * against a buffer. */
struct hb_apply_context_t
{
  unsigned int debug_depth;		/* Current nesting depth for TRACE_APPLY output */
  hb_ot_layout_context_t *layout;	/* Face/layout context (used for mark skipping) */
  hb_buffer_t *buffer;			/* Buffer being substituted/positioned */
  unsigned int context_length;		/* Glyphs remaining in the enclosing context match */
  unsigned int nesting_level_left;	/* Remaining recursion budget for nested lookups */
  unsigned int lookup_flag;		/* LookupFlag of the lookup being applied */
  unsigned int property; /* property of first glyph (TODO remove) */
};
52
53
54
/* Predicate deciding whether glyph_id matches one match value; `data` carries
 * per-matcher context (NULL, a ClassDef, or a base for coverage offsets). */
typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const USHORT &value, const void *data);
/* Callback used to recurse into a nested lookup by its lookup-list index. */
typedef bool (*apply_lookup_func_t) (hb_apply_context_t *c, unsigned int lookup_index);

/* Callback pair that parameterizes the generic (chain) context machinery. */
struct ContextFuncs
{
  match_func_t match;
  apply_lookup_func_t apply;
};
63
64
65static inline bool match_glyph (hb_codepoint_t glyph_id, const USHORT &value, const void *data HB_UNUSED)
66{
67  return glyph_id == value;
68}
69
70static inline bool match_class (hb_codepoint_t glyph_id, const USHORT &value, const void *data)
71{
72  const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
73  return class_def.get_class (glyph_id) == value;
74}
75
/* Coverage matcher for ContextFormat3/ChainContextFormat3: the match value is
 * reinterpreted as an OffsetTo<Coverage> relative to `data` (the subtable
 * start), and the glyph matches if it appears in that Coverage table. */
static inline bool match_coverage (hb_codepoint_t glyph_id, const USHORT &value, const void *data)
{
  const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
  return (data+coverage) (glyph_id) != NOT_COVERED;
}
81
82
/* Match count-1 glyphs after the current position against input[], skipping
 * glyphs that lookup_flag says to ignore.  On success writes to
 * *context_length_out the number of buffer positions the match spanned
 * (including skipped glyphs).  Returns false on any mismatch or if the
 * buffer/context runs out. */
static inline bool match_input (hb_apply_context_t *c,
				unsigned int count, /* Including the first glyph (not matched) */
				const USHORT input[], /* Array of input values--start with second glyph */
				match_func_t match_func,
				const void *match_data,
				unsigned int *context_length_out)
{
  unsigned int i, j;
  /* Limit the scan to the enclosing context length, if any. */
  unsigned int end = MIN (c->buffer->in_length, c->buffer->in_pos + c->context_length);
  if (unlikely (c->buffer->in_pos + count > end))
    return false;

  for (i = 1, j = c->buffer->in_pos + 1; i < count; i++, j++)
  {
    while (_hb_ot_layout_skip_mark (c->layout->face, &c->buffer->info[j], c->lookup_flag, NULL))
    {
      /* Not enough glyphs left for the remaining count-i positions. */
      if (unlikely (j + count - i == end))
	return false;
      j++;
    }

    if (likely (!match_func (c->buffer->info[j].codepoint, input[i - 1], match_data)))
      return false;
  }

  *context_length_out = j - c->buffer->in_pos;

  return true;
}
112
/* Match `count` values of backtrack[] against already-output glyphs, walking
 * the out-string backwards from its end and skipping ignorable glyphs.
 * backtrack[0] corresponds to the glyph immediately before the current one. */
static inline bool match_backtrack (hb_apply_context_t *c,
				    unsigned int count,
				    const USHORT backtrack[],
				    match_func_t match_func,
				    const void *match_data)
{
  if (unlikely (c->buffer->out_length < count))
    return false;

  for (unsigned int i = 0, j = c->buffer->out_length - 1; i < count; i++, j--)
  {
    while (_hb_ot_layout_skip_mark (c->layout->face, &c->buffer->out_string[j], c->lookup_flag, NULL))
    {
      /* Ran out of non-ignored glyphs before matching all count positions. */
      if (unlikely (j + 1 == count - i))
	return false;
      j--;
    }

    if (likely (!match_func (c->buffer->out_string[j].codepoint, backtrack[i], match_data)))
      return false;
  }

  return true;
}
137
/* Match `count` values of lookahead[] against glyphs starting `offset`
 * positions after the current one (offset is the length of the already
 * matched input sequence), skipping ignorable glyphs. */
static inline bool match_lookahead (hb_apply_context_t *c,
				    unsigned int count,
				    const USHORT lookahead[],
				    match_func_t match_func,
				    const void *match_data,
				    unsigned int offset)
{
  unsigned int i, j;
  /* Limit the scan to the enclosing context length, if any. */
  unsigned int end = MIN (c->buffer->in_length, c->buffer->in_pos + c->context_length);
  if (unlikely (c->buffer->in_pos + offset + count > end))
    return false;

  for (i = 0, j = c->buffer->in_pos + offset; i < count; i++, j++)
  {
    while (_hb_ot_layout_skip_mark (c->layout->face, &c->buffer->info[j], c->lookup_flag, NULL))
    {
      /* Not enough glyphs left for the remaining count-i positions. */
      if (unlikely (j + count - i == end))
	return false;
      j++;
    }

    if (likely (!match_func (c->buffer->info[j].codepoint, lookahead[i], match_data)))
      return false;
  }

  return true;
}
165
166
/* One (sequence position, lookup) pair stored in contextual subtables:
 * "apply lookup lookupListIndex at matched-sequence position sequenceIndex". */
struct LookupRecord
{
  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return c->check_struct (this);
  }

  USHORT	sequenceIndex;		/* Index into current glyph
					 * sequence--first glyph = 0 */
  USHORT	lookupListIndex;	/* Lookup to apply to that
					 * position--zero--based */
  public:
  DEFINE_SIZE_STATIC (4);
};
181
/* Walk the `count` matched glyphs starting at the current buffer position,
 * applying each LookupRecord whose sequenceIndex equals the current input
 * index i; positions without a record (or where the lookup fails) are copied
 * through unchanged.  Returns false only if the initial bounds check fails. */
static inline bool apply_lookup (hb_apply_context_t *c,
				 unsigned int count, /* Including the first glyph */
				 unsigned int lookupCount,
				 const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */
				 apply_lookup_func_t apply_func)
{
  unsigned int end = MIN (c->buffer->in_length, c->buffer->in_pos + c->context_length);
  if (unlikely (c->buffer->in_pos + count > end))
    return false;

  /* TODO We don't support lookupRecord arrays that are not increasing:
   *      Should be easy for in_place ones at least. */

  /* Note: If sublookup is reverse, i will underflow after the first loop
   * and we jump out of it.  Not entirely disastrous.  So we don't check
   * for reverse lookup here.
   */
  for (unsigned int i = 0; i < count; /* NOP */)
  {
    /* Copy ignorable glyphs through without consuming an input index. */
    while (_hb_ot_layout_skip_mark (c->layout->face, &c->buffer->info[c->buffer->in_pos], c->lookup_flag, NULL))
    {
      if (unlikely (c->buffer->in_pos == end))
	return true;
      /* No lookup applied for this index */
      c->buffer->next_glyph ();
    }

    if (lookupCount && i == lookupRecord->sequenceIndex)
    {
      unsigned int old_pos = c->buffer->in_pos;

      /* Apply a lookup */
      bool done = apply_func (c, lookupRecord->lookupListIndex);

      lookupRecord++;
      lookupCount--;
      /* Err, this is wrong if the lookup jumped over some glyphs */
      i += c->buffer->in_pos - old_pos;
      if (unlikely (c->buffer->in_pos == end))
	return true;

      if (!done)
	goto not_applied;
    }
    else
    {
    not_applied:
      /* No lookup applied for this index */
      c->buffer->next_glyph ();
      i++;
    }
  }

  return true;
}
237
238
239/* Contextual lookups */
240
/* Bundles the match/apply callbacks with the single data pointer passed to
 * the match function (NULL, a ClassDef, or a subtable base, per format). */
struct ContextLookupContext
{
  ContextFuncs funcs;
  const void *match_data;
};
246
/* Core of all three Context formats: match the input sequence at the current
 * position, then apply the lookup records over the matched span.  A copy of
 * the context is used so that the narrowed context_length from match_input
 * only governs the nested lookups, not the caller's context. */
static inline bool context_lookup (hb_apply_context_t *c,
				   unsigned int inputCount, /* Including the first glyph (not matched) */
				   const USHORT input[], /* Array of input values--start with second glyph */
				   unsigned int lookupCount,
				   const LookupRecord lookupRecord[],
				   ContextLookupContext &lookup_context)
{
  hb_apply_context_t new_context = *c;
  return match_input (c,
		      inputCount, input,
		      lookup_context.funcs.match, lookup_context.match_data,
		      &new_context.context_length)
      && apply_lookup (&new_context,
		       inputCount,
		       lookupCount, lookupRecord,
		       lookup_context.funcs.apply);
}
264
/* One contextual rule: an input sequence (first glyph implicit) followed
 * in memory by its LookupRecords. */
struct Rule
{
  friend struct RuleSet;

  private:
  inline bool apply (hb_apply_context_t *c, ContextLookupContext &lookup_context) const
  {
    TRACE_APPLY ();
    /* lookupRecordX follows the inputCount-1 input values; compute its
     * address manually since both arrays are variable-length. */
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (input, input[0].static_size * (inputCount ? inputCount - 1 : 0));
    return context_lookup (c,
			   inputCount, input,
			   lookupCount, lookupRecord,
			   lookup_context);
  }

  public:
  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return inputCount.sanitize (c)
	&& lookupCount.sanitize (c)
	&& c->check_range (input,
				 input[0].static_size * inputCount
				 + lookupRecordX[0].static_size * lookupCount);
  }

  private:
  USHORT	inputCount;		/* Total number of glyphs in input
					 * glyph sequence--includes the  first
					 * glyph */
  USHORT	lookupCount;		/* Number of LookupRecords */
  USHORT	input[VAR];		/* Array of match inputs--start with
					 * second glyph */
  LookupRecord	lookupRecordX[VAR];	/* Array of LookupRecords--in
					 * design order */
  public:
  DEFINE_SIZE_ARRAY2 (4, input, lookupRecordX);
};
302
303struct RuleSet
304{
305  inline bool apply (hb_apply_context_t *c, ContextLookupContext &lookup_context) const
306  {
307    TRACE_APPLY ();
308    unsigned int num_rules = rule.len;
309    for (unsigned int i = 0; i < num_rules; i++)
310    {
311      if ((this+rule[i]).apply (c, lookup_context))
312        return true;
313    }
314
315    return false;
316  }
317
318  inline bool sanitize (hb_sanitize_context_t *c) {
319    TRACE_SANITIZE ();
320    return rule.sanitize (c, this);
321  }
322
323  private:
324  OffsetArrayOf<Rule>
325		rule;			/* Array of Rule tables
326					 * ordered by preference */
327  public:
328  DEFINE_SIZE_ARRAY (2, rule);
329};
330
331
/* Context substitution format 1: rules keyed by glyph id. */
struct ContextFormat1
{
  friend struct Context;

  private:
  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    /* Coverage index of the current glyph selects the RuleSet. */
    unsigned int index = (this+coverage) (c->buffer->info[c->buffer->in_pos].codepoint);
    if (likely (index == NOT_COVERED))
      return false;

    const RuleSet &rule_set = this+ruleSet[index];
    struct ContextLookupContext lookup_context = {
      {match_glyph, apply_func},
      NULL	/* match_glyph needs no extra data */
    };
    return rule_set.apply (c, lookup_context);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return coverage.sanitize (c, this)
	&& ruleSet.sanitize (c, this);
  }

  private:
  USHORT	format;			/* Format identifier--format = 1 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  OffsetArrayOf<RuleSet>
		ruleSet;		/* Array of RuleSet tables
					 * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (6, ruleSet);
};
369
370
/* Context substitution format 2: rules keyed by glyph class. */
struct ContextFormat2
{
  friend struct Context;

  private:
  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    /* Coverage only gates applicability ... */
    unsigned int index = (this+coverage) (c->buffer->info[c->buffer->in_pos].codepoint);
    if (likely (index == NOT_COVERED))
      return false;

    const ClassDef &class_def = this+classDef;
    /* ... the glyph's class selects the RuleSet. */
    index = class_def (c->buffer->info[c->buffer->in_pos].codepoint);
    const RuleSet &rule_set = this+ruleSet[index];
    /* LONGTERMTODO: Old code fetches glyph classes at most once and caches
     * them across subrule lookups.  Not sure it's worth it.
     */
    struct ContextLookupContext lookup_context = {
      {match_class, apply_func},
      &class_def	/* match_class resolves input values against this */
    };
    return rule_set.apply (c, lookup_context);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return coverage.sanitize (c, this)
        && classDef.sanitize (c, this)
	&& ruleSet.sanitize (c, this);
  }

  private:
  USHORT	format;			/* Format identifier--format = 2 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  OffsetTo<ClassDef>
		classDef;		/* Offset to glyph ClassDef table--from
					 * beginning of table */
  OffsetArrayOf<RuleSet>
		ruleSet;		/* Array of RuleSet tables
					 * ordered by class */
  public:
  DEFINE_SIZE_ARRAY (8, ruleSet);
};
417
418
/* Context substitution format 3: one rule, each input position matched by
 * its own Coverage table. */
struct ContextFormat3
{
  friend struct Context;

  private:
  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    /* Quick reject: current glyph must be in the first coverage table. */
    unsigned int index = (this+coverage[0]) (c->buffer->info[c->buffer->in_pos].codepoint);
    if (likely (index == NOT_COVERED))
      return false;

    /* LookupRecords follow the glyphCount coverage offsets. */
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * glyphCount);
    struct ContextLookupContext lookup_context = {
      {match_coverage, apply_func},
      this	/* coverage offsets are relative to this subtable */
    };
    /* Skip coverage[0]: match_input starts at the second glyph. */
    return context_lookup (c,
			   glyphCount, (const USHORT *) (coverage + 1),
			   lookupCount, lookupRecord,
			   lookup_context);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    if (!c->check_struct (this)) return false;
    unsigned int count = glyphCount;
    if (!c->check_array (coverage, coverage[0].static_size, count)) return false;
    for (unsigned int i = 0; i < count; i++)
      if (!coverage[i].sanitize (c, this)) return false;
    LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * count);
    return c->check_array (lookupRecord, lookupRecord[0].static_size, lookupCount);
  }

  private:
  USHORT	format;			/* Format identifier--format = 3 */
  USHORT	glyphCount;		/* Number of glyphs in the input glyph
					 * sequence */
  USHORT	lookupCount;		/* Number of LookupRecords */
  OffsetTo<Coverage>
		coverage[VAR];		/* Array of offsets to Coverage
					 * table in glyph sequence order */
  LookupRecord	lookupRecordX[VAR];	/* Array of LookupRecords--in
					 * design order */
  public:
  DEFINE_SIZE_ARRAY2 (6, coverage, lookupRecordX);
};
466
467struct Context
468{
469  protected:
470  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
471  {
472    TRACE_APPLY ();
473    switch (u.format) {
474    case 1: return u.format1.apply (c, apply_func);
475    case 2: return u.format2.apply (c, apply_func);
476    case 3: return u.format3.apply (c, apply_func);
477    default:return false;
478    }
479  }
480
481  inline bool sanitize (hb_sanitize_context_t *c) {
482    TRACE_SANITIZE ();
483    if (!u.format.sanitize (c)) return false;
484    switch (u.format) {
485    case 1: return u.format1.sanitize (c);
486    case 2: return u.format2.sanitize (c);
487    case 3: return u.format3.sanitize (c);
488    default:return true;
489    }
490  }
491
492  private:
493  union {
494  USHORT		format;		/* Format identifier */
495  ContextFormat1	format1;
496  ContextFormat2	format2;
497  ContextFormat3	format3;
498  } u;
499};
500
501
502/* Chaining Contextual lookups */
503
/* Like ContextLookupContext, but with separate match data for the
 * backtrack [0], input [1] and lookahead [2] sequences. */
struct ChainContextLookupContext
{
  ContextFuncs funcs;
  const void *match_data[3];
};
509
/* Core of all three ChainContext formats: match backtrack, input and
 * lookahead sequences around the current position, then apply the lookup
 * records over the matched input span. */
static inline bool chain_context_lookup (hb_apply_context_t *c,
					 unsigned int backtrackCount,
					 const USHORT backtrack[],
					 unsigned int inputCount, /* Including the first glyph (not matched) */
					 const USHORT input[], /* Array of input values--start with second glyph */
					 unsigned int lookaheadCount,
					 const USHORT lookahead[],
					 unsigned int lookupCount,
					 const LookupRecord lookupRecord[],
					 ChainContextLookupContext &lookup_context)
{
  /* First guess */
  if (unlikely (c->buffer->out_length < backtrackCount ||
		c->buffer->in_pos + inputCount + lookaheadCount > c->buffer->in_length ||
		inputCount + lookaheadCount > c->context_length))
    return false;

  /* Copy the context so match_input's narrowed context_length only governs
   * the lookahead match and the nested lookups, not the caller. */
  hb_apply_context_t new_context = *c;
  return match_backtrack (c,
			  backtrackCount, backtrack,
			  lookup_context.funcs.match, lookup_context.match_data[0])
      && match_input (c,
		      inputCount, input,
		      lookup_context.funcs.match, lookup_context.match_data[1],
		      &new_context.context_length)
      && match_lookahead (c,
			  lookaheadCount, lookahead,
			  lookup_context.funcs.match, lookup_context.match_data[2],
			  new_context.context_length)
      && apply_lookup (&new_context,
		       inputCount,
		       lookupCount, lookupRecord,
		       lookup_context.funcs.apply);
}
544
545struct ChainRule
546{
547  friend struct ChainRuleSet;
548
549  private:
550  inline bool apply (hb_apply_context_t *c, ChainContextLookupContext &lookup_context) const
551  {
552    TRACE_APPLY ();
553    const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
554    const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
555    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
556    return chain_context_lookup (c,
557				 backtrack.len, backtrack.array,
558				 input.len, input.array,
559				 lookahead.len, lookahead.array,
560				 lookup.len, lookup.array,
561				 lookup_context);
562    return false;
563  }
564
565  public:
566  inline bool sanitize (hb_sanitize_context_t *c) {
567    TRACE_SANITIZE ();
568    if (!backtrack.sanitize (c)) return false;
569    HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
570    if (!input.sanitize (c)) return false;
571    ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
572    if (!lookahead.sanitize (c)) return false;
573    ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
574    return lookup.sanitize (c);
575  }
576
577  private:
578  ArrayOf<USHORT>
579		backtrack;		/* Array of backtracking values
580					 * (to be matched before the input
581					 * sequence) */
582  HeadlessArrayOf<USHORT>
583		inputX;			/* Array of input values (start with
584					 * second glyph) */
585  ArrayOf<USHORT>
586		lookaheadX;		/* Array of lookahead values's (to be
587					 * matched after the input sequence) */
588  ArrayOf<LookupRecord>
589		lookupX;		/* Array of LookupRecords--in
590					 * design order) */
591  public:
592  DEFINE_SIZE_MIN (8);
593};
594
595struct ChainRuleSet
596{
597  inline bool apply (hb_apply_context_t *c, ChainContextLookupContext &lookup_context) const
598  {
599    TRACE_APPLY ();
600    unsigned int num_rules = rule.len;
601    for (unsigned int i = 0; i < num_rules; i++)
602    {
603      if ((this+rule[i]).apply (c, lookup_context))
604        return true;
605    }
606
607    return false;
608  }
609
610  inline bool sanitize (hb_sanitize_context_t *c) {
611    TRACE_SANITIZE ();
612    return rule.sanitize (c, this);
613  }
614
615  private:
616  OffsetArrayOf<ChainRule>
617		rule;			/* Array of ChainRule tables
618					 * ordered by preference */
619  public:
620  DEFINE_SIZE_ARRAY (2, rule);
621};
622
/* Chaining context format 1: rules keyed by glyph id. */
struct ChainContextFormat1
{
  friend struct ChainContext;

  private:
  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    /* Coverage index of the current glyph selects the ChainRuleSet. */
    unsigned int index = (this+coverage) (c->buffer->info[c->buffer->in_pos].codepoint);
    if (likely (index == NOT_COVERED))
      return false;

    const ChainRuleSet &rule_set = this+ruleSet[index];
    struct ChainContextLookupContext lookup_context = {
      {match_glyph, apply_func},
      {NULL, NULL, NULL}	/* match_glyph needs no extra data */
    };
    return rule_set.apply (c, lookup_context);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return coverage.sanitize (c, this)
	&& ruleSet.sanitize (c, this);
  }

  private:
  USHORT	format;			/* Format identifier--format = 1 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  OffsetArrayOf<ChainRuleSet>
		ruleSet;		/* Array of ChainRuleSet tables
					 * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (6, ruleSet);
};
660
/* Chaining context format 2: rules keyed by glyph class, with separate
 * ClassDef tables for the backtrack, input and lookahead sequences. */
struct ChainContextFormat2
{
  friend struct ChainContext;

  private:
  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  {
    TRACE_APPLY ();
    /* Coverage only gates applicability ... */
    unsigned int index = (this+coverage) (c->buffer->info[c->buffer->in_pos].codepoint);
    if (likely (index == NOT_COVERED))
      return false;

    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    /* ... the glyph's input class selects the ChainRuleSet. */
    index = input_class_def (c->buffer->info[c->buffer->in_pos].codepoint);
    const ChainRuleSet &rule_set = this+ruleSet[index];
    /* LONGTERMTODO: Old code fetches glyph classes at most once and caches
     * them across subrule lookups.  Not sure it's worth it.
     */
    struct ChainContextLookupContext lookup_context = {
      {match_class, apply_func},
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def}
    };
    return rule_set.apply (c, lookup_context);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return coverage.sanitize (c, this)
	&& backtrackClassDef.sanitize (c, this)
	&& inputClassDef.sanitize (c, this)
	&& lookaheadClassDef.sanitize (c, this)
	&& ruleSet.sanitize (c, this);
  }

  private:
  USHORT	format;			/* Format identifier--format = 2 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  OffsetTo<ClassDef>
		backtrackClassDef;	/* Offset to glyph ClassDef table
					 * containing backtrack sequence
					 * data--from beginning of table */
  OffsetTo<ClassDef>
		inputClassDef;		/* Offset to glyph ClassDef
					 * table containing input sequence
					 * data--from beginning of table */
  OffsetTo<ClassDef>
		lookaheadClassDef;	/* Offset to glyph ClassDef table
					 * containing lookahead sequence
					 * data--from beginning of table */
  OffsetArrayOf<ChainRuleSet>
		ruleSet;		/* Array of ChainRuleSet tables
					 * ordered by class */
  public:
  DEFINE_SIZE_ARRAY (12, ruleSet);
};
723
724struct ChainContextFormat3
725{
726  friend struct ChainContext;
727
728  private:
729
730  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
731  {
732    TRACE_APPLY ();
733    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
734
735    unsigned int index = (this+input[0]) (c->buffer->info[c->buffer->in_pos].codepoint);
736    if (likely (index == NOT_COVERED))
737      return false;
738
739    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
740    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
741    struct ChainContextLookupContext lookup_context = {
742      {match_coverage, apply_func},
743      {this, this, this}
744    };
745    return chain_context_lookup (c,
746				 backtrack.len, (const USHORT *) backtrack.array,
747				 input.len, (const USHORT *) input.array + 1,
748				 lookahead.len, (const USHORT *) lookahead.array,
749				 lookup.len, lookup.array,
750				 lookup_context);
751    return false;
752  }
753
754  inline bool sanitize (hb_sanitize_context_t *c) {
755    TRACE_SANITIZE ();
756    if (!backtrack.sanitize (c, this)) return false;
757    OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
758    if (!input.sanitize (c, this)) return false;
759    OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
760    if (!lookahead.sanitize (c, this)) return false;
761    ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
762    return lookup.sanitize (c);
763  }
764
765  private:
766  USHORT	format;			/* Format identifier--format = 3 */
767  OffsetArrayOf<Coverage>
768		backtrack;		/* Array of coverage tables
769					 * in backtracking sequence, in  glyph
770					 * sequence order */
771  OffsetArrayOf<Coverage>
772		inputX		;	/* Array of coverage
773					 * tables in input sequence, in glyph
774					 * sequence order */
775  OffsetArrayOf<Coverage>
776		lookaheadX;		/* Array of coverage tables
777					 * in lookahead sequence, in glyph
778					 * sequence order */
779  ArrayOf<LookupRecord>
780		lookupX;		/* Array of LookupRecords--in
781					 * design order) */
782  public:
783  DEFINE_SIZE_MIN (10);
784};
785
786struct ChainContext
787{
788  protected:
789  inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
790  {
791    TRACE_APPLY ();
792    switch (u.format) {
793    case 1: return u.format1.apply (c, apply_func);
794    case 2: return u.format2.apply (c, apply_func);
795    case 3: return u.format3.apply (c, apply_func);
796    default:return false;
797    }
798  }
799
800  inline bool sanitize (hb_sanitize_context_t *c) {
801    TRACE_SANITIZE ();
802    if (!u.format.sanitize (c)) return false;
803    switch (u.format) {
804    case 1: return u.format1.sanitize (c);
805    case 2: return u.format2.sanitize (c);
806    case 3: return u.format3.sanitize (c);
807    default:return true;
808    }
809  }
810
811  private:
812  union {
813  USHORT		format;	/* Format identifier */
814  ChainContextFormat1	format1;
815  ChainContextFormat2	format2;
816  ChainContextFormat3	format3;
817  } u;
818};
819
820
/* Extension subtable format 1: a fixed-size wrapper holding the real lookup
 * type and a 32-bit offset to the actual subtable. */
struct ExtensionFormat1
{
  friend struct Extension;

  protected:
  inline unsigned int get_type (void) const { return extensionLookupType; }
  inline unsigned int get_offset (void) const { return extensionOffset; }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    /* Fixed-size struct; the wrapped subtable is sanitized by the caller. */
    return c->check_struct (this);
  }

  private:
  USHORT	format;			/* Format identifier. Set to 1. */
  USHORT	extensionLookupType;	/* Lookup type of subtable referenced
					 * by ExtensionOffset (i.e. the
					 * extension subtable). */
  ULONG		extensionOffset;	/* Offset to the extension subtable,
					 * of lookup type subtable. */
  public:
  DEFINE_SIZE_STATIC (8);
};
844
845struct Extension
846{
847  inline unsigned int get_type (void) const
848  {
849    switch (u.format) {
850    case 1: return u.format1.get_type ();
851    default:return 0;
852    }
853  }
854  inline unsigned int get_offset (void) const
855  {
856    switch (u.format) {
857    case 1: return u.format1.get_offset ();
858    default:return 0;
859    }
860  }
861
862  inline bool sanitize (hb_sanitize_context_t *c) {
863    TRACE_SANITIZE ();
864    if (!u.format.sanitize (c)) return false;
865    switch (u.format) {
866    case 1: return u.format1.sanitize (c);
867    default:return true;
868    }
869  }
870
871  private:
872  union {
873  USHORT		format;		/* Format identifier */
874  ExtensionFormat1	format1;
875  } u;
876};
877
878
879/*
880 * GSUB/GPOS Common
881 */
882
/* Shared top-level layout of the GSUB and GPOS tables: a version followed by
 * offsets to the ScriptList, FeatureList and LookupList.  Accessors below
 * simply delegate to those three subtables. */
struct GSUBGPOS
{
  static const hb_tag_t GSUBTag	= HB_OT_TAG_GSUB;
  static const hb_tag_t GPOSTag	= HB_OT_TAG_GPOS;

  /* Script accessors (delegate to scriptList). */
  inline unsigned int get_script_count (void) const
  { return (this+scriptList).len; }
  inline const Tag& get_script_tag (unsigned int i) const
  { return (this+scriptList).get_tag (i); }
  inline unsigned int get_script_tags (unsigned int start_offset,
				       unsigned int *script_count /* IN/OUT */,
				       hb_tag_t     *script_tags /* OUT */) const
  { return (this+scriptList).get_tags (start_offset, script_count, script_tags); }
  inline const Script& get_script (unsigned int i) const
  { return (this+scriptList)[i]; }
  inline bool find_script_index (hb_tag_t tag, unsigned int *index) const
  { return (this+scriptList).find_index (tag, index); }

  /* Feature accessors (delegate to featureList). */
  inline unsigned int get_feature_count (void) const
  { return (this+featureList).len; }
  inline const Tag& get_feature_tag (unsigned int i) const
  { return (this+featureList).get_tag (i); }
  inline unsigned int get_feature_tags (unsigned int start_offset,
					unsigned int *feature_count /* IN/OUT */,
					hb_tag_t     *feature_tags /* OUT */) const
  { return (this+featureList).get_tags (start_offset, feature_count, feature_tags); }
  inline const Feature& get_feature (unsigned int i) const
  { return (this+featureList)[i]; }
  inline bool find_feature_index (hb_tag_t tag, unsigned int *index) const
  { return (this+featureList).find_index (tag, index); }

  /* Lookup accessors (delegate to lookupList). */
  inline unsigned int get_lookup_count (void) const
  { return (this+lookupList).len; }
  inline const Lookup& get_lookup (unsigned int i) const
  { return (this+lookupList)[i]; }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    /* Only major version 1 is understood. */
    return version.sanitize (c) && likely (version.major == 1)
	&& scriptList.sanitize (c, this)
	&& featureList.sanitize (c, this)
	&& lookupList.sanitize (c, this);
  }

  protected:
  FixedVersion	version;	/* Version of the GSUB/GPOS table--initially set
				 * to 0x00010000 */
  OffsetTo<ScriptList>
		scriptList;  	/* ScriptList table */
  OffsetTo<FeatureList>
		featureList; 	/* FeatureList table */
  OffsetTo<LookupList>
		lookupList; 	/* LookupList table */
  public:
  DEFINE_SIZE_STATIC (10);
};
939
940
941#endif /* HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH */
942