hb-ot-layout-gsubgpos-private.hh revision ed2e13594479c6ed7909401509962ea2f03f9a6e
1/*
2 * Copyright © 2007,2008,2009,2010  Red Hat, Inc.
3 * Copyright © 2010,2012  Google, Inc.
4 *
5 *  This is part of HarfBuzz, a text shaping library.
6 *
7 * Permission is hereby granted, without written agreement and without
8 * license or royalty fees, to use, copy, modify, and distribute this
9 * software and its documentation for any purpose, provided that the
10 * above copyright notice and the following two paragraphs appear in
11 * all copies of this software.
12 *
13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
14 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
15 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
16 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
17 * DAMAGE.
18 *
19 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
20 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
21 * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
22 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
23 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
24 *
25 * Red Hat Author(s): Behdad Esfahbod
26 * Google Author(s): Behdad Esfahbod
27 */
28
29#ifndef HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH
30#define HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH
31
32#include "hb-buffer-private.hh"
33#include "hb-ot-layout-gdef-table.hh"
34#include "hb-set-private.hh"
35
36
37namespace OT {
38
39
40
41#define TRACE_PROCESS(this) \
42	hb_auto_trace_t<context_t::max_debug_depth, typename context_t::return_t> trace \
43	(&c->debug_depth, c->get_name (), this, HB_FUNC, \
44	 "");
45
46
47#ifndef HB_DEBUG_CLOSURE
48#define HB_DEBUG_CLOSURE (HB_DEBUG+0)
49#endif
50
51#define TRACE_CLOSURE(this) \
52	hb_auto_trace_t<HB_DEBUG_CLOSURE, void_t> trace \
53	(&c->debug_depth, "CLOSURE", this, HB_FUNC, \
54	 "");
55
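/* Context for computing the "closure" of a glyph set: the set of glyphs that
 * the substitution lookups can produce from glyphs already in the set. */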
56struct hb_closure_context_t
57{
58  inline const char *get_name (void) { return "CLOSURE"; }
59  static const unsigned int max_debug_depth = HB_DEBUG_CLOSURE;
60  typedef void_t return_t;
61  typedef return_t (*recurse_func_t) (hb_closure_context_t *c, unsigned int lookup_index);
62  template <typename T>
63  inline return_t process (const T &obj) { obj.closure (this); return VOID; }
64  static return_t default_return_value (void) { return VOID; }
65  bool stop_sublookup_iteration (const return_t r) const { return false; }
66  return_t recurse (unsigned int lookup_index)
67  {
68    if (unlikely (nesting_level_left == 0))
69      return default_return_value ();
70
71    nesting_level_left--;
72    recurse_func (this, lookup_index);
73    nesting_level_left++;
74    return default_return_value ();
75  }
76
77  hb_face_t *face;
78  hb_set_t *glyphs;
79  recurse_func_t recurse_func;
80  unsigned int nesting_level_left;
81  unsigned int debug_depth;
82
83  hb_closure_context_t (hb_face_t *face_,
84			hb_set_t *glyphs_,
85			recurse_func_t recurse_func_,
86		        unsigned int nesting_level_left_ = MAX_NESTING_LEVEL) :
87			  face (face_),
88			  glyphs (glyphs_),
89			  recurse_func (recurse_func_),
90			  nesting_level_left (nesting_level_left_),
91			  debug_depth (0) {}
92};
93
94
95
96#ifndef HB_DEBUG_WOULD_APPLY
97#define HB_DEBUG_WOULD_APPLY (HB_DEBUG+0)
98#endif
99
100#define TRACE_WOULD_APPLY(this) \
101	hb_auto_trace_t<HB_DEBUG_WOULD_APPLY, bool> trace \
102	(&c->debug_depth, "WOULD_APPLY", this, HB_FUNC, \
103	 "%d glyphs", c->len);
104
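/* Context for asking whether a lookup would apply to a given glyph sequence,
 * without running it against a buffer. */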
105struct hb_would_apply_context_t
106{
107  inline const char *get_name (void) { return "WOULD_APPLY"; }
108  static const unsigned int max_debug_depth = HB_DEBUG_WOULD_APPLY;
109  typedef bool return_t;
110  template <typename T>
111  inline return_t process (const T &obj) { return obj.would_apply (this); }
112  static return_t default_return_value (void) { return false; }
113  bool stop_sublookup_iteration (const return_t r) const { return r; }
114  return_t recurse (unsigned int lookup_index) { return true; }
115
116  hb_face_t *face;
117  const hb_codepoint_t *glyphs;
118  unsigned int len;
119  bool zero_context;
120  unsigned int debug_depth;
121
122  hb_would_apply_context_t (hb_face_t *face_,
123			    const hb_codepoint_t *glyphs_,
124			    unsigned int len_,
125			    bool zero_context_) :
126			      face (face_),
127			      glyphs (glyphs_),
128			      len (len_),
129			      zero_context (zero_context_),
130			      debug_depth (0) {}
131};
132
133
134
135#ifndef HB_DEBUG_COLLECT_GLYPHS
136#define HB_DEBUG_COLLECT_GLYPHS (HB_DEBUG+0)
137#endif
138
139#define TRACE_COLLECT_GLYPHS(this) \
140	hb_auto_trace_t<HB_DEBUG_COLLECT_GLYPHS, void_t> trace \
141	(&c->debug_depth, "COLLECT_GLYPHS", this, HB_FUNC, \
142	 "");
143
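/* Context for collecting the glyphs a lookup may match (before/input/after)
 * and the glyphs it may produce (output). */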
144struct hb_collect_glyphs_context_t
145{
146  inline const char *get_name (void) { return "COLLECT_GLYPHS"; }
147  static const unsigned int max_debug_depth = HB_DEBUG_COLLECT_GLYPHS;
148  typedef void_t return_t;
149  template <typename T>
150  inline return_t process (const T &obj) { obj.collect_glyphs (this); return VOID; }
151  static return_t default_return_value (void) { return VOID; }
152  bool stop_iteration (const return_t r) const { return false; }
153  return_t recurse (unsigned int lookup_index)
154  {
155#if 0
156    /* XXX */
157#endif
158    return default_return_value ();
159  }
160
161  hb_face_t *face;
162  hb_set_t &before;
163  hb_set_t &input;
164  hb_set_t &after;
165  hb_set_t &output;
166  unsigned int debug_depth;
167
168  hb_collect_glyphs_context_t (hb_face_t *face_,
169			       hb_set_t  *glyphs_before, /* OUT. May be NULL */
170			       hb_set_t  *glyphs_input,  /* OUT. May be NULL */
171			       hb_set_t  *glyphs_after,  /* OUT. May be NULL */
172			       hb_set_t  *glyphs_output  /* OUT. May be NULL */) :
173			      face (face_),
174			      before (glyphs_before ? *glyphs_before : *hb_set_get_empty ()),
175			      input  (glyphs_input  ? *glyphs_input  : *hb_set_get_empty ()),
176			      after  (glyphs_after  ? *glyphs_after  : *hb_set_get_empty ()),
177			      output (glyphs_output ? *glyphs_output : *hb_set_get_empty ()),
178			      debug_depth (0) {}
179};
180
181
182
183struct hb_get_coverage_context_t
184{
185  inline const char *get_name (void) { return "GET_COVERAGE"; }
186  static const unsigned int max_debug_depth = 0;
187  typedef const Coverage &return_t;
188  template <typename T>
189  inline return_t process (const T &obj) { return obj.get_coverage (); }
190  static return_t default_return_value (void) { return Null(Coverage); }
191  bool stop_sublookup_iteration (const return_t r) const { return true; /* Unused */ }
192  return_t recurse (unsigned int lookup_index)
193  { return default_return_value (); }
194
195  hb_get_coverage_context_t (void) :
196			    debug_depth (0) {}
197
198  unsigned int debug_depth;
199};
200
201
202
203#ifndef HB_DEBUG_APPLY
204#define HB_DEBUG_APPLY (HB_DEBUG+0)
205#endif
206
207#define TRACE_APPLY(this) \
208	hb_auto_trace_t<HB_DEBUG_APPLY, bool> trace \
209	(&c->debug_depth, "APPLY", this, HB_FUNC, \
210	 "idx %d codepoint %u", c->buffer->idx, c->buffer->cur().codepoint);
211
212struct hb_apply_context_t
213{
214  inline const char *get_name (void) { return "APPLY"; }
215  static const unsigned int max_debug_depth = HB_DEBUG_APPLY;
216  typedef bool return_t;
217  typedef return_t (*recurse_func_t) (hb_apply_context_t *c, unsigned int lookup_index);
218  template <typename T>
219  inline return_t process (const T &obj) { return obj.apply (this); }
220  static return_t default_return_value (void) { return false; }
221  bool stop_sublookup_iteration (const return_t r) const { return r; }
222  return_t recurse (unsigned int lookup_index)
223  {
224    if (unlikely (nesting_level_left == 0 || !recurse_func))
225      return default_return_value ();
226
227    /* TODO Reuse context. */
228    hb_apply_context_t new_c (*this);
229    new_c.nesting_level_left--;
230    return recurse_func (&new_c, lookup_index);
231  }
232
233  hb_font_t *font;
234  hb_face_t *face;
235  hb_buffer_t *buffer;
236  hb_direction_t direction;
237  hb_mask_t lookup_mask;
238  recurse_func_t recurse_func;
239  unsigned int nesting_level_left;
240  unsigned int lookup_props;
241  unsigned int property; /* property of first glyph */
242  const GDEF &gdef;
243  bool has_glyph_classes;
244  unsigned int debug_depth;
245
246
247  hb_apply_context_t (hb_font_t *font_,
248		      hb_buffer_t *buffer_,
249		      hb_mask_t lookup_mask_) :
250			font (font_), face (font->face), buffer (buffer_),
251			direction (buffer_->props.direction),
252			lookup_mask (lookup_mask_),
253			recurse_func (NULL),
254			nesting_level_left (MAX_NESTING_LEVEL),
255			lookup_props (0), property (0),
256			gdef (*hb_ot_layout_from_face (face)->gdef),
257			has_glyph_classes (gdef.has_glyph_classes ()),
258			debug_depth (0) {}
259
260  void set_recurse_func (recurse_func_t func) { recurse_func = func; }
261  void set_lookup_props (unsigned int lookup_props_) { lookup_props = lookup_props_; }
262  void set_lookup (const Lookup &l) { lookup_props = l.get_props (); }
263
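  /* The two skipping iterators below walk the buffer forward / backward from a
   * start index, transparently skipping glyphs that should_skip_mark() says this
   * lookup ignores (per lookup_props), while also requiring lookup_mask and,
   * optionally, the syllable of the current glyph to match. */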
264  struct mark_skipping_forward_iterator_t
265  {
266    inline mark_skipping_forward_iterator_t (hb_apply_context_t *c_,
267					     unsigned int start_index_,
268					     unsigned int num_items_,
269					     bool context_match = false)
270    {
271      c = c_;
272      idx = start_index_;
273      num_items = num_items_;
274      mask = context_match ? -1 : c->lookup_mask;
275      syllable = context_match ? 0 : c->buffer->cur().syllable ();
276      end = c->buffer->len;
277    }
278    inline bool has_no_chance (void) const
279    {
280      return unlikely (num_items && idx + num_items >= end);
281    }
282    inline void reject (void)
283    {
284      num_items++;
285    }
286    inline bool next (unsigned int *property_out,
287		      unsigned int  lookup_props)
288    {
289      assert (num_items > 0);
290      do
291      {
292	if (has_no_chance ())
293	  return false;
294	idx++;
295      } while (c->should_skip_mark (&c->buffer->info[idx], lookup_props, property_out));
296      num_items--;
297      return (c->buffer->info[idx].mask & mask) && (!syllable || syllable == c->buffer->info[idx].syllable ());
298    }
299    inline bool next (unsigned int *property_out = NULL)
300    {
301      return next (property_out, c->lookup_props);
302    }
303
304    unsigned int idx;
305    protected:
306    hb_apply_context_t *c;
307    unsigned int num_items;
308    hb_mask_t mask;
309    uint8_t syllable;
310    unsigned int end;
311  };
312
313  struct mark_skipping_backward_iterator_t
314  {
315    inline mark_skipping_backward_iterator_t (hb_apply_context_t *c_,
316					      unsigned int start_index_,
317					      unsigned int num_items_,
318					      hb_mask_t mask_ = 0,
319					      bool match_syllable_ = true)
320    {
321      c = c_;
322      idx = start_index_;
323      num_items = num_items_;
324      mask = mask_ ? mask_ : c->lookup_mask;
325      syllable = match_syllable_ ? c->buffer->cur().syllable () : 0;
326    }
327    inline bool has_no_chance (void) const
328    {
329      return unlikely (idx < num_items);
330    }
331    inline void reject (void)
332    {
333      num_items++;
334    }
335    inline bool prev (unsigned int *property_out,
336		      unsigned int  lookup_props)
337    {
338      assert (num_items > 0);
339      do
340      {
341	if (has_no_chance ())
342	  return false;
343	idx--;
344      } while (c->should_skip_mark (&c->buffer->out_info[idx], lookup_props, property_out));
345      num_items--;
346      return (c->buffer->out_info[idx].mask & mask) && (!syllable || syllable == c->buffer->out_info[idx].syllable ());
347    }
348    inline bool prev (unsigned int *property_out = NULL)
349    {
350      return prev (property_out, c->lookup_props);
351    }
352
353    unsigned int idx;
354    protected:
355    hb_apply_context_t *c;
356    unsigned int num_items;
357    hb_mask_t mask;
358    uint8_t syllable;
359  };
360
361  inline bool
362  match_properties_mark (hb_codepoint_t  glyph,
363			 unsigned int    glyph_props,
364			 unsigned int    lookup_props) const
365  {
366    /* If using mark filtering sets, the high short of
367     * lookup_props has the set index.
368     */
369    if (lookup_props & LookupFlag::UseMarkFilteringSet)
370      return gdef.mark_set_covers (lookup_props >> 16, glyph);
371
372    /* The second byte of lookup_props has the meaning
373     * "ignore marks of attachment type different than
374     * the attachment type specified."
375     */
376    if (lookup_props & LookupFlag::MarkAttachmentType)
377      return (lookup_props & LookupFlag::MarkAttachmentType) == (glyph_props & LookupFlag::MarkAttachmentType);
378
379    return true;
380  }
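  /* For illustration, assuming the standard OpenType LookupFlag encoding:
   * lookup_props == ((5u << 16) | LookupFlag::UseMarkFilteringSet) asks GDEF
   * whether mark-filtering set 5 covers the glyph, whereas a lookup_props whose
   * MarkAttachmentType byte is 0x0200 only accepts marks whose glyph_props
   * carry that same value in the corresponding byte. */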
381
382  inline bool
383  match_properties (hb_codepoint_t  glyph,
384		    unsigned int    glyph_props,
385		    unsigned int    lookup_props) const
386  {
387    /* Not covered if, for example, the glyph class is ligature and
388     * lookup_props includes LookupFlag::IgnoreLigatures
389     */
390    if (glyph_props & lookup_props & LookupFlag::IgnoreFlags)
391      return false;
392
393    if (unlikely (glyph_props & HB_OT_LAYOUT_GLYPH_PROPS_MARK))
394      return match_properties_mark (glyph, glyph_props, lookup_props);
395
396    return true;
397  }
398
399  inline bool
400  check_glyph_property (hb_glyph_info_t *info,
401			unsigned int  lookup_props,
402			unsigned int *property_out) const
403  {
404    unsigned int property;
405
406    property = info->glyph_props();
407    *property_out = property;
408
409    return match_properties (info->codepoint, property, lookup_props);
410  }
411
412  inline bool
413  should_skip_mark (hb_glyph_info_t *info,
414		   unsigned int  lookup_props,
415		   unsigned int *property_out) const
416  {
417    unsigned int property;
418
419    property = info->glyph_props();
420    if (property_out)
421      *property_out = property;
422
423    /* If it's a mark, skip it if we don't accept it. */
424    if (unlikely (property & HB_OT_LAYOUT_GLYPH_PROPS_MARK))
425      return !match_properties (info->codepoint, property, lookup_props);
426
427    /* If not a mark, don't skip. */
428    return false;
429  }
430
431
432  inline bool should_mark_skip_current_glyph (void) const
433  {
434    return should_skip_mark (&buffer->cur(), lookup_props, NULL);
435  }
436
437  inline void set_class (hb_codepoint_t glyph_index, unsigned int class_guess) const
438  {
439    if (likely (has_glyph_classes))
440      buffer->cur().glyph_props() = gdef.get_glyph_props (glyph_index);
441    else if (class_guess)
442      buffer->cur().glyph_props() = class_guess;
443  }
444
445  inline void output_glyph (hb_codepoint_t glyph_index,
446			    unsigned int class_guess = 0) const
447  {
448    set_class (glyph_index, class_guess);
449    buffer->output_glyph (glyph_index);
450  }
451  inline void replace_glyph (hb_codepoint_t glyph_index,
452			     unsigned int class_guess = 0) const
453  {
454    set_class (glyph_index, class_guess);
455    buffer->replace_glyph (glyph_index);
456  }
457  inline void replace_glyph_inplace (hb_codepoint_t glyph_index,
458				     unsigned int class_guess = 0) const
459  {
460    set_class (glyph_index, class_guess);
461    buffer->cur().codepoint = glyph_index;
462  }
463};
464
465
466
467typedef bool (*intersects_func_t) (hb_set_t *glyphs, const USHORT &value, const void *data);
468typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const USHORT &value, const void *data);
469
470struct ContextClosureFuncs
471{
472  intersects_func_t intersects;
473};
474struct ContextApplyFuncs
475{
476  match_func_t match;
477};
478
479static inline bool intersects_glyph (hb_set_t *glyphs, const USHORT &value, const void *data HB_UNUSED)
480{
481  return glyphs->has (value);
482}
483static inline bool intersects_class (hb_set_t *glyphs, const USHORT &value, const void *data)
484{
485  const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
486  return class_def.intersects_class (glyphs, value);
487}
488static inline bool intersects_coverage (hb_set_t *glyphs, const USHORT &value, const void *data)
489{
490  const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
491  return (data+coverage).intersects (glyphs);
492}
493
494static inline bool intersects_array (hb_closure_context_t *c,
495				     unsigned int count,
496				     const USHORT values[],
497				     intersects_func_t intersects_func,
498				     const void *intersects_data)
499{
500  for (unsigned int i = 0; i < count; i++)
501    if (likely (!intersects_func (c->glyphs, values[i], intersects_data)))
502      return false;
503  return true;
504}
505
506
507static inline bool match_glyph (hb_codepoint_t glyph_id, const USHORT &value, const void *data HB_UNUSED)
508{
509  return glyph_id == value;
510}
511static inline bool match_class (hb_codepoint_t glyph_id, const USHORT &value, const void *data)
512{
513  const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
514  return class_def.get_class (glyph_id) == value;
515}
516static inline bool match_coverage (hb_codepoint_t glyph_id, const USHORT &value, const void *data)
517{
518  const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
519  return (data+coverage).get_coverage (glyph_id) != NOT_COVERED;
520}
521
522
523static inline bool would_match_input (hb_would_apply_context_t *c,
524				      unsigned int count, /* Including the first glyph (not matched) */
525				      const USHORT input[], /* Array of input values--start with second glyph */
526				      match_func_t match_func,
527				      const void *match_data)
528{
529  if (count != c->len)
530    return false;
531
532  for (unsigned int i = 1; i < count; i++)
533    if (likely (!match_func (c->glyphs[i], input[i - 1], match_data)))
534      return false;
535
536  return true;
537}
538static inline bool match_input (hb_apply_context_t *c,
539				unsigned int count, /* Including the first glyph (not matched) */
540				const USHORT input[], /* Array of input values--start with second glyph */
541				match_func_t match_func,
542				const void *match_data,
543				unsigned int *end_offset = NULL,
544				bool *p_is_mark_ligature = NULL,
545				unsigned int *p_total_component_count = NULL)
546{
547  TRACE_APPLY (NULL);
548
549  hb_apply_context_t::mark_skipping_forward_iterator_t skippy_iter (c, c->buffer->idx, count - 1);
550  if (skippy_iter.has_no_chance ()) return TRACE_RETURN (false);
551
552  /*
553   * This is perhaps the trickiest part of OpenType...  Remarks:
554   *
555   * - If all components of the ligature were marks, we call this a mark ligature.
556   *
557   * - If there is no GDEF, and the ligature is NOT a mark ligature, we categorize
558   *   it as a ligature glyph.
559   *
560   * - Ligatures cannot be formed across glyphs attached to different components
561   *   of previous ligatures.  Eg. the sequence is LAM,SHADDA,LAM,FATHA,HEH, and
562   *   LAM,LAM,HEH form a ligature, leaving SHADDA,FATHA next to each other.
563   *   However, it would be wrong to ligate that SHADDA,FATHA sequence.
564   *   There is an exception to this: If a ligature tries ligating with marks that
565   *   belong to the ligature itself, go ahead, assuming that the font designer knows what
566   *   they are doing (otherwise it can break Indic stuff when a matra wants to
567   *   ligate with a conjunct...)
568   */
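  /* For illustration: in the LAM,SHADDA,LAM,FATHA,HEH example above, once
   * LAM,LAM,HEH have ligated, SHADDA is attached to component 1 and FATHA to
   * component 2 of that ligature.  A later attempt to match SHADDA,FATHA as
   * input therefore fails below: same ligature id, different components. */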
569
570  bool is_mark_ligature = !!(c->property & HB_OT_LAYOUT_GLYPH_PROPS_MARK);
571
572  unsigned int total_component_count = 0;
573  total_component_count += get_lig_num_comps (c->buffer->cur());
574
575  unsigned int first_lig_id = get_lig_id (c->buffer->cur());
576  unsigned int first_lig_comp = get_lig_comp (c->buffer->cur());
577
578  for (unsigned int i = 1; i < count; i++)
579  {
580    unsigned int property;
581
582    if (!skippy_iter.next (&property)) return TRACE_RETURN (false);
583
584    if (likely (!match_func (c->buffer->info[skippy_iter.idx].codepoint, input[i - 1], match_data))) return TRACE_RETURN (false);
585
586    unsigned int this_lig_id = get_lig_id (c->buffer->info[skippy_iter.idx]);
587    unsigned int this_lig_comp = get_lig_comp (c->buffer->info[skippy_iter.idx]);
588
589    if (first_lig_id && first_lig_comp) {
590      /* If first component was attached to a previous ligature component,
591       * all subsequent components should be attached to the same ligature
592       * component, otherwise we shouldn't ligate them. */
593      if (first_lig_id != this_lig_id || first_lig_comp != this_lig_comp)
594	return TRACE_RETURN (false);
595    } else {
596      /* If first component was NOT attached to a previous ligature component,
597       * all subsequent components should also NOT be attached to any ligature
598       * component, unless they are attached to the first component itself! */
599      if (this_lig_id && this_lig_comp && (this_lig_id != first_lig_id))
600	return TRACE_RETURN (false);
601    }
602
603    is_mark_ligature = is_mark_ligature && (property & HB_OT_LAYOUT_GLYPH_PROPS_MARK);
604    total_component_count += get_lig_num_comps (c->buffer->info[skippy_iter.idx]);
605  }
606
607  if (end_offset)
608    *end_offset = skippy_iter.idx - c->buffer->idx + 1;
609
610  if (p_is_mark_ligature)
611    *p_is_mark_ligature = is_mark_ligature;
612
613  if (p_total_component_count)
614    *p_total_component_count = total_component_count;
615
616  return TRACE_RETURN (true);
617}
618static inline void ligate_input (hb_apply_context_t *c,
619				 unsigned int count, /* Including the first glyph (not matched) */
620				 const USHORT input[], /* Array of input values--start with second glyph */
621				 hb_codepoint_t lig_glyph,
622				 match_func_t match_func,
623				 const void *match_data,
624				 bool is_mark_ligature,
625				 unsigned int total_component_count)
626{
627  /*
628   * - If it *is* a mark ligature, we don't allocate a new ligature id, and leave
629   *   the ligature to keep its old ligature id.  This will allow it to attach to
630   *   a base ligature in GPOS.  Eg. if the sequence is: LAM,LAM,SHADDA,FATHA,HEH,
631   *   and LAM,LAM,HEH form a ligature, they will leave SHADDA and FATHA with a
632   *   ligature id and component value of 2.  Then if SHADDA,FATHA form a ligature
633   *   later, we don't want them to lose their ligature id/component, otherwise
634   *   GPOS will fail to correctly position the mark ligature on top of the
635   *   LAM,LAM,HEH ligature.  See:
636   *     https://bugzilla.gnome.org/show_bug.cgi?id=676343
637   *
638   * - If a ligature is formed of components, some of which are also ligatures
639   *   themselves, and those ligature components had marks attached to *their*
640   *   components, we have to attach the marks to the new ligature component
641   *   positions!  Now *that*'s tricky!  And these marks may be following the
642   *   last component of the whole sequence, so we should loop forward looking
643   *   for them and update them.
644   *
645   *   Eg. the sequence is LAM,LAM,SHADDA,FATHA,HEH, and the font first forms a
646   *   'calt' ligature of LAM,HEH, leaving the SHADDA and FATHA with a ligature
647   *   id and component == 1.  Now, during 'liga', the LAM and the LAM-HEH ligature
648   *   form a LAM-LAM-HEH ligature.  We need to reassign the SHADDA and FATHA to
649   *   the new ligature with a component value of 2.
650   *
651   *   This in fact happened to a font...  See:
652   *   https://bugzilla.gnome.org/show_bug.cgi?id=437633
653   */
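  /* For illustration: in the 'calt' example above, when LAM and the LAM-HEH
   * ligature then ligate during 'liga', components_so_far is 3 and the old
   * ligature contributed last_num_components == 2.  SHADDA and FATHA carried
   * lig_comp == 1 on the old ligature, so the re-adjustment below assigns them
   * new_lig_comp = 3 - 2 + MIN (MAX (1, 1u), 2) = 2, i.e. component 2 of the
   * new LAM-LAM-HEH ligature. */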
654
655  unsigned int klass = is_mark_ligature ? 0 : HB_OT_LAYOUT_GLYPH_PROPS_LIGATURE;
656  unsigned int lig_id = is_mark_ligature ? 0 : allocate_lig_id (c->buffer);
657  unsigned int last_lig_id = get_lig_id (c->buffer->cur());
658  unsigned int last_num_components = get_lig_num_comps (c->buffer->cur());
659  unsigned int components_so_far = last_num_components;
660
661  if (!is_mark_ligature)
662    set_lig_props_for_ligature (c->buffer->cur(), lig_id, total_component_count);
663  c->replace_glyph (lig_glyph, klass);
664
665  for (unsigned int i = 1; i < count; i++)
666  {
667    while (c->should_mark_skip_current_glyph ())
668    {
669      if (!is_mark_ligature) {
670	unsigned int new_lig_comp = components_so_far - last_num_components +
671				    MIN (MAX (get_lig_comp (c->buffer->cur()), 1u), last_num_components);
672	set_lig_props_for_mark (c->buffer->cur(), lig_id, new_lig_comp);
673      }
674      c->buffer->next_glyph ();
675    }
676
677    last_lig_id = get_lig_id (c->buffer->cur());
678    last_num_components = get_lig_num_comps (c->buffer->cur());
679    components_so_far += last_num_components;
680
681    /* Skip the base glyph */
682    c->buffer->idx++;
683  }
684
685  if (!is_mark_ligature && last_lig_id) {
686    /* Re-adjust components for any marks following. */
687    for (unsigned int i = c->buffer->idx; i < c->buffer->len; i++) {
688      if (last_lig_id == get_lig_id (c->buffer->info[i])) {
689	unsigned int new_lig_comp = components_so_far - last_num_components +
690				    MIN (MAX (get_lig_comp (c->buffer->info[i]), 1u), last_num_components);
691	set_lig_props_for_mark (c->buffer->info[i], lig_id, new_lig_comp);
692      } else
693	break;
694    }
695  }
696}
697
698static inline bool match_backtrack (hb_apply_context_t *c,
699				    unsigned int count,
700				    const USHORT backtrack[],
701				    match_func_t match_func,
702				    const void *match_data)
703{
704  TRACE_APPLY (NULL);
705
706  hb_apply_context_t::mark_skipping_backward_iterator_t skippy_iter (c, c->buffer->backtrack_len (), count, true);
707  if (skippy_iter.has_no_chance ())
708    return TRACE_RETURN (false);
709
710  for (unsigned int i = 0; i < count; i++)
711  {
712    if (!skippy_iter.prev ())
713      return TRACE_RETURN (false);
714
715    if (likely (!match_func (c->buffer->out_info[skippy_iter.idx].codepoint, backtrack[i], match_data)))
716      return TRACE_RETURN (false);
717  }
718
719  return TRACE_RETURN (true);
720}
721
722static inline bool match_lookahead (hb_apply_context_t *c,
723				    unsigned int count,
724				    const USHORT lookahead[],
725				    match_func_t match_func,
726				    const void *match_data,
727				    unsigned int offset)
728{
729  TRACE_APPLY (NULL);
730
731  hb_apply_context_t::mark_skipping_forward_iterator_t skippy_iter (c, c->buffer->idx + offset - 1, count, true);
732  if (skippy_iter.has_no_chance ())
733    return TRACE_RETURN (false);
734
735  for (unsigned int i = 0; i < count; i++)
736  {
737    if (!skippy_iter.next ())
738      return TRACE_RETURN (false);
739
740    if (likely (!match_func (c->buffer->info[skippy_iter.idx].codepoint, lookahead[i], match_data)))
741      return TRACE_RETURN (false);
742  }
743
744  return TRACE_RETURN (true);
745}
746
747
748
749struct LookupRecord
750{
751  inline bool sanitize (hb_sanitize_context_t *c) {
752    TRACE_SANITIZE (this);
753    return TRACE_RETURN (c->check_struct (this));
754  }
755
756  USHORT	sequenceIndex;		/* Index into current glyph
757					 * sequence--first glyph = 0 */
758  USHORT	lookupListIndex;	/* Lookup to apply to that
759					 * position--zero-based */
760  public:
761  DEFINE_SIZE_STATIC (4);
762};
763
764
765static inline void closure_lookup (hb_closure_context_t *c,
766				   unsigned int lookupCount,
767				   const LookupRecord lookupRecord[] /* Array of LookupRecords--in design order */)
768{
769  for (unsigned int i = 0; i < lookupCount; i++)
770    c->recurse (lookupRecord[i].lookupListIndex);
771}
772
773static inline bool apply_lookup (hb_apply_context_t *c,
774				 unsigned int count, /* Including the first glyph */
775				 unsigned int lookupCount,
776				 const LookupRecord lookupRecord[] /* Array of LookupRecords--in design order */)
777{
778  TRACE_APPLY (NULL);
779
780  unsigned int end = c->buffer->len;
781  if (unlikely (count == 0 || c->buffer->idx + count > end))
782    return TRACE_RETURN (false);
783
784  /* TODO We don't support lookupRecord arrays that are not increasing:
785   *      Should be easy for in_place ones at least. */
786
787  /* Note: If sublookup is reverse, it will underflow after the first loop
788   * and we jump out of it.  Not entirely disastrous.  So we don't check
789   * for reverse lookup here.
790   */
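  /* For illustration: with count == 3 and lookupRecord == { {0, A}, {2, B} },
   * the loop below recurses into lookup A while positioned on the first matched
   * glyph and into lookup B once i reaches 2, advancing the buffer (and
   * consuming records) in between. */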
791  for (unsigned int i = 0; i < count; /* NOP */)
792  {
793    if (unlikely (c->buffer->idx == end))
794      return TRACE_RETURN (true);
795    while (c->should_mark_skip_current_glyph ())
796    {
797      /* No lookup applied for this index */
798      c->buffer->next_glyph ();
799      if (unlikely (c->buffer->idx == end))
800	return TRACE_RETURN (true);
801    }
802
803    if (lookupCount && i == lookupRecord->sequenceIndex)
804    {
805      unsigned int old_pos = c->buffer->idx;
806
807      /* Apply a lookup */
808      bool done = c->recurse (lookupRecord->lookupListIndex);
809
810      lookupRecord++;
811      lookupCount--;
812      /* Err, this is wrong if the lookup jumped over some glyphs */
813      i += c->buffer->idx - old_pos;
814      if (unlikely (c->buffer->idx == end))
815	return TRACE_RETURN (true);
816
817      if (!done)
818	goto not_applied;
819    }
820    else
821    {
822    not_applied:
823      /* No lookup applied for this index */
824      c->buffer->next_glyph ();
825      i++;
826    }
827  }
828
829  return TRACE_RETURN (true);
830}
831
832
833
834/* Contextual lookups */
835
836struct ContextClosureLookupContext
837{
838  ContextClosureFuncs funcs;
839  const void *intersects_data;
840};
841
842struct ContextApplyLookupContext
843{
844  ContextApplyFuncs funcs;
845  const void *match_data;
846};
847
848static inline void context_closure_lookup (hb_closure_context_t *c,
849					   unsigned int inputCount, /* Including the first glyph (not matched) */
850					   const USHORT input[], /* Array of input values--start with second glyph */
851					   unsigned int lookupCount,
852					   const LookupRecord lookupRecord[],
853					   ContextClosureLookupContext &lookup_context)
854{
855  if (intersects_array (c,
856			inputCount ? inputCount - 1 : 0, input,
857			lookup_context.funcs.intersects, lookup_context.intersects_data))
858    closure_lookup (c,
859		    lookupCount, lookupRecord);
860}
861
862
863static inline bool context_would_apply_lookup (hb_would_apply_context_t *c,
864					       unsigned int inputCount, /* Including the first glyph (not matched) */
865					       const USHORT input[], /* Array of input values--start with second glyph */
866					       unsigned int lookupCount,
867					       const LookupRecord lookupRecord[],
868					       ContextApplyLookupContext &lookup_context)
869{
870  return would_match_input (c,
871			    inputCount, input,
872			    lookup_context.funcs.match, lookup_context.match_data);
873}
874static inline bool context_apply_lookup (hb_apply_context_t *c,
875					 unsigned int inputCount, /* Including the first glyph (not matched) */
876					 const USHORT input[], /* Array of input values--start with second glyph */
877					 unsigned int lookupCount,
878					 const LookupRecord lookupRecord[],
879					 ContextApplyLookupContext &lookup_context)
880{
881  return match_input (c,
882		      inputCount, input,
883		      lookup_context.funcs.match, lookup_context.match_data)
884      && apply_lookup (c,
885		       inputCount,
886		       lookupCount, lookupRecord);
887}
888
889struct Rule
890{
891  inline void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const
892  {
893    TRACE_CLOSURE (this);
894    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (input, input[0].static_size * (inputCount ? inputCount - 1 : 0));
895    context_closure_lookup (c,
896			    inputCount, input,
897			    lookupCount, lookupRecord,
898			    lookup_context);
899  }
900
901  inline bool would_apply (hb_would_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
902  {
903    TRACE_WOULD_APPLY (this);
904    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (input, input[0].static_size * (inputCount ? inputCount - 1 : 0));
905    return TRACE_RETURN (context_would_apply_lookup (c, inputCount, input, lookupCount, lookupRecord, lookup_context));
906  }
907
908  inline bool apply (hb_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
909  {
910    TRACE_APPLY (this);
911    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (input, input[0].static_size * (inputCount ? inputCount - 1 : 0));
912    return TRACE_RETURN (context_apply_lookup (c, inputCount, input, lookupCount, lookupRecord, lookup_context));
913  }
914
915  public:
916  inline bool sanitize (hb_sanitize_context_t *c) {
917    TRACE_SANITIZE (this);
918    return inputCount.sanitize (c)
919	&& lookupCount.sanitize (c)
920	&& c->check_range (input,
921			   input[0].static_size * inputCount
922			   + lookupRecordX[0].static_size * lookupCount);
923  }
924
925  protected:
926  USHORT	inputCount;		/* Total number of glyphs in input
927					 * glyph sequence--includes the first
928					 * glyph */
929  USHORT	lookupCount;		/* Number of LookupRecords */
930  USHORT	input[VAR];		/* Array of match inputs--start with
931					 * second glyph */
932  LookupRecord	lookupRecordX[VAR];	/* Array of LookupRecords--in
933					 * design order */
934  public:
935  DEFINE_SIZE_ARRAY2 (4, input, lookupRecordX);
936};
937
938struct RuleSet
939{
940  inline void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const
941  {
942    TRACE_CLOSURE (this);
943    unsigned int num_rules = rule.len;
944    for (unsigned int i = 0; i < num_rules; i++)
945      (this+rule[i]).closure (c, lookup_context);
946  }
947
948  inline bool would_apply (hb_would_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
949  {
950    TRACE_WOULD_APPLY (this);
951    unsigned int num_rules = rule.len;
952    for (unsigned int i = 0; i < num_rules; i++)
953    {
954      if ((this+rule[i]).would_apply (c, lookup_context))
955        return TRACE_RETURN (true);
956    }
957    return TRACE_RETURN (false);
958  }
959
960  inline bool apply (hb_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
961  {
962    TRACE_APPLY (this);
963    unsigned int num_rules = rule.len;
964    for (unsigned int i = 0; i < num_rules; i++)
965    {
966      if ((this+rule[i]).apply (c, lookup_context))
967        return TRACE_RETURN (true);
968    }
969    return TRACE_RETURN (false);
970  }
971
972  inline bool sanitize (hb_sanitize_context_t *c) {
973    TRACE_SANITIZE (this);
974    return TRACE_RETURN (rule.sanitize (c, this));
975  }
976
977  protected:
978  OffsetArrayOf<Rule>
979		rule;			/* Array of Rule tables
980					 * ordered by preference */
981  public:
982  DEFINE_SIZE_ARRAY (2, rule);
983};
984
985
986struct ContextFormat1
987{
988  inline void closure (hb_closure_context_t *c) const
989  {
990    TRACE_CLOSURE (this);
991
992    const Coverage &cov = (this+coverage);
993
994    struct ContextClosureLookupContext lookup_context = {
995      {intersects_glyph},
996      NULL
997    };
998
999    unsigned int count = ruleSet.len;
1000    for (unsigned int i = 0; i < count; i++)
1001      if (cov.intersects_coverage (c->glyphs, i)) {
1002	const RuleSet &rule_set = this+ruleSet[i];
1003	rule_set.closure (c, lookup_context);
1004      }
1005  }
1006
1007  inline bool would_apply (hb_would_apply_context_t *c) const
1008  {
1009    TRACE_WOULD_APPLY (this);
1010
1011    const RuleSet &rule_set = this+ruleSet[(this+coverage) (c->glyphs[0])];
1012    struct ContextApplyLookupContext lookup_context = {
1013      {match_glyph},
1014      NULL
1015    };
1016    return TRACE_RETURN (rule_set.would_apply (c, lookup_context));
1017  }
1018
1019  inline const Coverage &get_coverage (void) const
1020  {
1021    return this+coverage;
1022  }
1023
1024  inline bool apply (hb_apply_context_t *c) const
1025  {
1026    TRACE_APPLY (this);
1027    unsigned int index = (this+coverage) (c->buffer->cur().codepoint);
1028    if (likely (index == NOT_COVERED))
1029      return TRACE_RETURN (false);
1030
1031    const RuleSet &rule_set = this+ruleSet[index];
1032    struct ContextApplyLookupContext lookup_context = {
1033      {match_glyph},
1034      NULL
1035    };
1036    return TRACE_RETURN (rule_set.apply (c, lookup_context));
1037  }
1038
1039  inline bool sanitize (hb_sanitize_context_t *c) {
1040    TRACE_SANITIZE (this);
1041    return TRACE_RETURN (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
1042  }
1043
1044  protected:
1045  USHORT	format;			/* Format identifier--format = 1 */
1046  OffsetTo<Coverage>
1047		coverage;		/* Offset to Coverage table--from
1048					 * beginning of table */
1049  OffsetArrayOf<RuleSet>
1050		ruleSet;		/* Array of RuleSet tables
1051					 * ordered by Coverage Index */
1052  public:
1053  DEFINE_SIZE_ARRAY (6, ruleSet);
1054};
1055
1056
1057struct ContextFormat2
1058{
1059  inline void closure (hb_closure_context_t *c) const
1060  {
1061    TRACE_CLOSURE (this);
1062    if (!(this+coverage).intersects (c->glyphs))
1063      return;
1064
1065    const ClassDef &class_def = this+classDef;
1066
1067    struct ContextClosureLookupContext lookup_context = {
1068      {intersects_class},
1069      NULL
1070    };
1071
1072    unsigned int count = ruleSet.len;
1073    for (unsigned int i = 0; i < count; i++)
1074      if (class_def.intersects_class (c->glyphs, i)) {
1075	const RuleSet &rule_set = this+ruleSet[i];
1076	rule_set.closure (c, lookup_context);
1077      }
1078  }
1079
1080  inline bool would_apply (hb_would_apply_context_t *c) const
1081  {
1082    TRACE_WOULD_APPLY (this);
1083
1084    const ClassDef &class_def = this+classDef;
1085    unsigned int index = class_def (c->glyphs[0]);
1086    const RuleSet &rule_set = this+ruleSet[index];
1087    struct ContextApplyLookupContext lookup_context = {
1088      {match_class},
1089      &class_def
1090    };
1091    return TRACE_RETURN (rule_set.would_apply (c, lookup_context));
1092  }
1093
1094  inline const Coverage &get_coverage (void) const
1095  {
1096    return this+coverage;
1097  }
1098
1099  inline bool apply (hb_apply_context_t *c) const
1100  {
1101    TRACE_APPLY (this);
1102    unsigned int index = (this+coverage) (c->buffer->cur().codepoint);
1103    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
1104
1105    const ClassDef &class_def = this+classDef;
1106    index = class_def (c->buffer->cur().codepoint);
1107    const RuleSet &rule_set = this+ruleSet[index];
1108    struct ContextApplyLookupContext lookup_context = {
1109      {match_class},
1110      &class_def
1111    };
1112    return TRACE_RETURN (rule_set.apply (c, lookup_context));
1113  }
1114
1115  inline bool sanitize (hb_sanitize_context_t *c) {
1116    TRACE_SANITIZE (this);
1117    return TRACE_RETURN (coverage.sanitize (c, this) && classDef.sanitize (c, this) && ruleSet.sanitize (c, this));
1118  }
1119
1120  protected:
1121  USHORT	format;			/* Format identifier--format = 2 */
1122  OffsetTo<Coverage>
1123		coverage;		/* Offset to Coverage table--from
1124					 * beginning of table */
1125  OffsetTo<ClassDef>
1126		classDef;		/* Offset to glyph ClassDef table--from
1127					 * beginning of table */
1128  OffsetArrayOf<RuleSet>
1129		ruleSet;		/* Array of RuleSet tables
1130					 * ordered by class */
1131  public:
1132  DEFINE_SIZE_ARRAY (8, ruleSet);
1133};
1134
1135
1136struct ContextFormat3
1137{
1138  inline void closure (hb_closure_context_t *c) const
1139  {
1140    TRACE_CLOSURE (this);
1141    if (!(this+coverage[0]).intersects (c->glyphs))
1142      return;
1143
1144    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * glyphCount);
1145    struct ContextClosureLookupContext lookup_context = {
1146      {intersects_coverage},
1147      this
1148    };
1149    context_closure_lookup (c,
1150			    glyphCount, (const USHORT *) (coverage + 1),
1151			    lookupCount, lookupRecord,
1152			    lookup_context);
1153  }
1154
1155  inline bool would_apply (hb_would_apply_context_t *c) const
1156  {
1157    TRACE_WOULD_APPLY (this);
1158
1159    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * glyphCount);
1160    struct ContextApplyLookupContext lookup_context = {
1161      {match_coverage},
1162      this
1163    };
1164    return TRACE_RETURN (context_would_apply_lookup (c, glyphCount, (const USHORT *) (coverage + 1), lookupCount, lookupRecord, lookup_context));
1165  }
1166
1167  inline const Coverage &get_coverage (void) const
1168  {
1169    return this+coverage[0];
1170  }
1171
1172  inline bool apply (hb_apply_context_t *c) const
1173  {
1174    TRACE_APPLY (this);
1175    unsigned int index = (this+coverage[0]) (c->buffer->cur().codepoint);
1176    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
1177
1178    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * glyphCount);
1179    struct ContextApplyLookupContext lookup_context = {
1180      {match_coverage},
1181      this
1182    };
1183    return TRACE_RETURN (context_apply_lookup (c, glyphCount, (const USHORT *) (coverage + 1), lookupCount, lookupRecord, lookup_context));
1184  }
1185
1186  inline bool sanitize (hb_sanitize_context_t *c) {
1187    TRACE_SANITIZE (this);
1188    if (!c->check_struct (this)) return TRACE_RETURN (false);
1189    unsigned int count = glyphCount;
1190    if (!c->check_array (coverage, coverage[0].static_size, count)) return TRACE_RETURN (false);
1191    for (unsigned int i = 0; i < count; i++)
1192      if (!coverage[i].sanitize (c, this)) return TRACE_RETURN (false);
1193    LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * count);
1194    return TRACE_RETURN (c->check_array (lookupRecord, lookupRecord[0].static_size, lookupCount));
1195  }
1196
1197  protected:
1198  USHORT	format;			/* Format identifier--format = 3 */
1199  USHORT	glyphCount;		/* Number of glyphs in the input glyph
1200					 * sequence */
1201  USHORT	lookupCount;		/* Number of LookupRecords */
1202  OffsetTo<Coverage>
1203		coverage[VAR];		/* Array of offsets to Coverage
1204					 * table in glyph sequence order */
1205  LookupRecord	lookupRecordX[VAR];	/* Array of LookupRecords--in
1206					 * design order */
1207  public:
1208  DEFINE_SIZE_ARRAY2 (6, coverage, lookupRecordX);
1209};
1210
1211struct Context
1212{
1213  template <typename context_t>
1214  inline typename context_t::return_t process (context_t *c) const
1215  {
1216    switch (u.format) {
1217    case 1: return c->process (u.format1);
1218    case 2: return c->process (u.format2);
1219    case 3: return c->process (u.format3);
1220    default:return c->default_return_value ();
1221    }
1222  }
1223
1224  inline bool sanitize (hb_sanitize_context_t *c) {
1225    TRACE_SANITIZE (this);
1226    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
1227    switch (u.format) {
1228    case 1: return TRACE_RETURN (u.format1.sanitize (c));
1229    case 2: return TRACE_RETURN (u.format2.sanitize (c));
1230    case 3: return TRACE_RETURN (u.format3.sanitize (c));
1231    default:return TRACE_RETURN (true);
1232    }
1233  }
1234
1235  protected:
1236  union {
1237  USHORT		format;		/* Format identifier */
1238  ContextFormat1	format1;
1239  ContextFormat2	format2;
1240  ContextFormat3	format3;
1241  } u;
1242};
1243
1244
1245/* Chaining Contextual lookups */
1246
1247struct ChainContextClosureLookupContext
1248{
1249  ContextClosureFuncs funcs;
1250  const void *intersects_data[3];
1251};
1252
1253struct ChainContextApplyLookupContext
1254{
1255  ContextApplyFuncs funcs;
1256  const void *match_data[3];
1257};
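/* In the two structs above, the three data slots are indexed 0 = backtrack,
 * 1 = input, 2 = lookahead, matching how the chain_context_*_lookup helpers
 * below pass them to the per-part match/intersect functions. */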
1258
1259static inline void chain_context_closure_lookup (hb_closure_context_t *c,
1260						 unsigned int backtrackCount,
1261						 const USHORT backtrack[],
1262						 unsigned int inputCount, /* Including the first glyph (not matched) */
1263						 const USHORT input[], /* Array of input values--start with second glyph */
1264						 unsigned int lookaheadCount,
1265						 const USHORT lookahead[],
1266						 unsigned int lookupCount,
1267						 const LookupRecord lookupRecord[],
1268						 ChainContextClosureLookupContext &lookup_context)
1269{
1270  if (intersects_array (c,
1271			backtrackCount, backtrack,
1272			lookup_context.funcs.intersects, lookup_context.intersects_data[0])
1273   && intersects_array (c,
1274			inputCount ? inputCount - 1 : 0, input,
1275			lookup_context.funcs.intersects, lookup_context.intersects_data[1])
1276  && intersects_array (c,
1277		       lookaheadCount, lookahead,
1278		       lookup_context.funcs.intersects, lookup_context.intersects_data[2]))
1279    closure_lookup (c,
1280		    lookupCount, lookupRecord);
1281}
1282
1283static inline bool chain_context_would_apply_lookup (hb_would_apply_context_t *c,
1284						     unsigned int backtrackCount,
1285						     const USHORT backtrack[],
1286						     unsigned int inputCount, /* Including the first glyph (not matched) */
1287						     const USHORT input[], /* Array of input values--start with second glyph */
1288						     unsigned int lookaheadCount,
1289						     const USHORT lookahead[],
1290						     unsigned int lookupCount,
1291						     const LookupRecord lookupRecord[],
1292						     ChainContextApplyLookupContext &lookup_context)
1293{
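  /* With zero_context set, only rules that need no backtrack and no lookahead
   * can apply, since the caller supplied just the input glyph sequence. */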
1294  return (c->zero_context ? !backtrackCount && !lookaheadCount : true)
1295      && would_match_input (c,
1296			    inputCount, input,
1297			    lookup_context.funcs.match, lookup_context.match_data[1]);
1298}
1299
1300static inline bool chain_context_apply_lookup (hb_apply_context_t *c,
1301					       unsigned int backtrackCount,
1302					       const USHORT backtrack[],
1303					       unsigned int inputCount, /* Including the first glyph (not matched) */
1304					       const USHORT input[], /* Array of input values--start with second glyph */
1305					       unsigned int lookaheadCount,
1306					       const USHORT lookahead[],
1307					       unsigned int lookupCount,
1308					       const LookupRecord lookupRecord[],
1309					       ChainContextApplyLookupContext &lookup_context)
1310{
1311  unsigned int lookahead_offset = 0;
1312  return match_input (c,
1313		      inputCount, input,
1314		      lookup_context.funcs.match, lookup_context.match_data[1],
1315		      &lookahead_offset)
1316      && match_backtrack (c,
1317			  backtrackCount, backtrack,
1318			  lookup_context.funcs.match, lookup_context.match_data[0])
1319      && match_lookahead (c,
1320			  lookaheadCount, lookahead,
1321			  lookup_context.funcs.match, lookup_context.match_data[2],
1322			  lookahead_offset)
1323      && apply_lookup (c,
1324		       inputCount,
1325		       lookupCount, lookupRecord);
1326}
1327
1328struct ChainRule
1329{
1330  inline void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const
1331  {
1332    TRACE_CLOSURE (this);
1333    const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
1334    const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
1335    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
1336    chain_context_closure_lookup (c,
1337				  backtrack.len, backtrack.array,
1338				  input.len, input.array,
1339				  lookahead.len, lookahead.array,
1340				  lookup.len, lookup.array,
1341				  lookup_context);
1342  }
1343
1344  inline bool would_apply (hb_would_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
1345  {
1346    TRACE_WOULD_APPLY (this);
1347    const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
1348    const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
1349    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
1350    return TRACE_RETURN (chain_context_would_apply_lookup (c,
1351							   backtrack.len, backtrack.array,
1352							   input.len, input.array,
1353							   lookahead.len, lookahead.array, lookup.len,
1354							   lookup.array, lookup_context));
1355  }
1356
1357  inline bool apply (hb_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
1358  {
1359    TRACE_APPLY (this);
1360    const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
1361    const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
1362    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
1363    return TRACE_RETURN (chain_context_apply_lookup (c,
1364						     backtrack.len, backtrack.array,
1365						     input.len, input.array,
1366						     lookahead.len, lookahead.array, lookup.len,
1367						     lookup.array, lookup_context));
1368  }
1369
1370  inline bool sanitize (hb_sanitize_context_t *c) {
1371    TRACE_SANITIZE (this);
1372    if (!backtrack.sanitize (c)) return TRACE_RETURN (false);
1373    HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
1374    if (!input.sanitize (c)) return TRACE_RETURN (false);
1375    ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
1376    if (!lookahead.sanitize (c)) return TRACE_RETURN (false);
1377    ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
1378    return TRACE_RETURN (lookup.sanitize (c));
1379  }
1380
1381  protected:
1382  ArrayOf<USHORT>
1383		backtrack;		/* Array of backtracking values
1384					 * (to be matched before the input
1385					 * sequence) */
1386  HeadlessArrayOf<USHORT>
1387		inputX;			/* Array of input values (start with
1388					 * second glyph) */
1389  ArrayOf<USHORT>
1390		lookaheadX;		/* Array of lookahead values (to be
1391					 * matched after the input sequence) */
1392  ArrayOf<LookupRecord>
1393		lookupX;		/* Array of LookupRecords--in
1394					 * design order */
1395  public:
1396  DEFINE_SIZE_MIN (8);
1397};
1398
1399struct ChainRuleSet
1400{
1401  inline void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const
1402  {
1403    TRACE_CLOSURE (this);
1404    unsigned int num_rules = rule.len;
1405    for (unsigned int i = 0; i < num_rules; i++)
1406      (this+rule[i]).closure (c, lookup_context);
1407  }
1408
1409  inline bool would_apply (hb_would_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
1410  {
1411    TRACE_WOULD_APPLY (this);
1412    unsigned int num_rules = rule.len;
1413    for (unsigned int i = 0; i < num_rules; i++)
1414      if ((this+rule[i]).would_apply (c, lookup_context))
1415        return TRACE_RETURN (true);
1416
1417    return TRACE_RETURN (false);
1418  }
1419
1420  inline bool apply (hb_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
1421  {
1422    TRACE_APPLY (this);
1423    unsigned int num_rules = rule.len;
1424    for (unsigned int i = 0; i < num_rules; i++)
1425      if ((this+rule[i]).apply (c, lookup_context))
1426        return TRACE_RETURN (true);
1427
1428    return TRACE_RETURN (false);
1429  }
1430
1431  inline bool sanitize (hb_sanitize_context_t *c) {
1432    TRACE_SANITIZE (this);
1433    return TRACE_RETURN (rule.sanitize (c, this));
1434  }
1435
1436  protected:
1437  OffsetArrayOf<ChainRule>
1438		rule;			/* Array of ChainRule tables
1439					 * ordered by preference */
1440  public:
1441  DEFINE_SIZE_ARRAY (2, rule);
1442};
1443
1444struct ChainContextFormat1
1445{
1446  inline void closure (hb_closure_context_t *c) const
1447  {
1448    TRACE_CLOSURE (this);
1449    const Coverage &cov = (this+coverage);
1450
1451    struct ChainContextClosureLookupContext lookup_context = {
1452      {intersects_glyph},
1453      {NULL, NULL, NULL}
1454    };
1455
1456    unsigned int count = ruleSet.len;
1457    for (unsigned int i = 0; i < count; i++)
1458      if (cov.intersects_coverage (c->glyphs, i)) {
1459	const ChainRuleSet &rule_set = this+ruleSet[i];
1460	rule_set.closure (c, lookup_context);
1461      }
1462  }
1463
1464  inline bool would_apply (hb_would_apply_context_t *c) const
1465  {
1466    TRACE_WOULD_APPLY (this);
1467
1468    const ChainRuleSet &rule_set = this+ruleSet[(this+coverage) (c->glyphs[0])];
1469    struct ChainContextApplyLookupContext lookup_context = {
1470      {match_glyph},
1471      {NULL, NULL, NULL}
1472    };
1473    return TRACE_RETURN (rule_set.would_apply (c, lookup_context));
1474  }
1475
1476  inline const Coverage &get_coverage (void) const
1477  {
1478    return this+coverage;
1479  }
1480
1481  inline bool apply (hb_apply_context_t *c) const
1482  {
1483    TRACE_APPLY (this);
1484    unsigned int index = (this+coverage) (c->buffer->cur().codepoint);
1485    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
1486
1487    const ChainRuleSet &rule_set = this+ruleSet[index];
1488    struct ChainContextApplyLookupContext lookup_context = {
1489      {match_glyph},
1490      {NULL, NULL, NULL}
1491    };
1492    return TRACE_RETURN (rule_set.apply (c, lookup_context));
1493  }
1494
1495  inline bool sanitize (hb_sanitize_context_t *c) {
1496    TRACE_SANITIZE (this);
1497    return TRACE_RETURN (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
1498  }
1499
1500  protected:
1501  USHORT	format;			/* Format identifier--format = 1 */
1502  OffsetTo<Coverage>
1503		coverage;		/* Offset to Coverage table--from
1504					 * beginning of table */
1505  OffsetArrayOf<ChainRuleSet>
1506		ruleSet;		/* Array of ChainRuleSet tables
1507					 * ordered by Coverage Index */
1508  public:
1509  DEFINE_SIZE_ARRAY (6, ruleSet);
1510};
1511
1512struct ChainContextFormat2
1513{
1514  inline void closure (hb_closure_context_t *c) const
1515  {
1516    TRACE_CLOSURE (this);
1517    if (!(this+coverage).intersects (c->glyphs))
1518      return;
1519
1520    const ClassDef &backtrack_class_def = this+backtrackClassDef;
1521    const ClassDef &input_class_def = this+inputClassDef;
1522    const ClassDef &lookahead_class_def = this+lookaheadClassDef;
1523
1524    struct ChainContextClosureLookupContext lookup_context = {
1525      {intersects_class},
1526      {&backtrack_class_def,
1527       &input_class_def,
1528       &lookahead_class_def}
1529    };
1530
1531    unsigned int count = ruleSet.len;
1532    for (unsigned int i = 0; i < count; i++)
1533      if (input_class_def.intersects_class (c->glyphs, i)) {
1534	const ChainRuleSet &rule_set = this+ruleSet[i];
1535	rule_set.closure (c, lookup_context);
1536      }
1537  }
1538
1539  inline bool would_apply (hb_would_apply_context_t *c) const
1540  {
1541    TRACE_WOULD_APPLY (this);
1542
1543    const ClassDef &input_class_def = this+inputClassDef;
1544
1545    unsigned int index = input_class_def (c->glyphs[0]);
1546    const ChainRuleSet &rule_set = this+ruleSet[index];
1547    struct ChainContextApplyLookupContext lookup_context = {
1548      {match_class},
1549      {NULL, &input_class_def, NULL}
1550    };
1551    return TRACE_RETURN (rule_set.would_apply (c, lookup_context));
1552  }
1553
1554  inline const Coverage &get_coverage (void) const
1555  {
1556    return this+coverage;
1557  }
1558
1559  inline bool apply (hb_apply_context_t *c) const
1560  {
1561    TRACE_APPLY (this);
1562    unsigned int index = (this+coverage) (c->buffer->cur().codepoint);
1563    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
1564
1565    const ClassDef &backtrack_class_def = this+backtrackClassDef;
1566    const ClassDef &input_class_def = this+inputClassDef;
1567    const ClassDef &lookahead_class_def = this+lookaheadClassDef;
1568
1569    index = input_class_def (c->buffer->cur().codepoint);
1570    const ChainRuleSet &rule_set = this+ruleSet[index];
1571    struct ChainContextApplyLookupContext lookup_context = {
1572      {match_class},
1573      {&backtrack_class_def,
1574       &input_class_def,
1575       &lookahead_class_def}
1576    };
1577    return TRACE_RETURN (rule_set.apply (c, lookup_context));
1578  }
1579
1580  inline bool sanitize (hb_sanitize_context_t *c) {
1581    TRACE_SANITIZE (this);
1582    return TRACE_RETURN (coverage.sanitize (c, this) && backtrackClassDef.sanitize (c, this) &&
1583			 inputClassDef.sanitize (c, this) && lookaheadClassDef.sanitize (c, this) &&
1584			 ruleSet.sanitize (c, this));
1585  }
1586
1587  protected:
1588  USHORT	format;			/* Format identifier--format = 2 */
1589  OffsetTo<Coverage>
1590		coverage;		/* Offset to Coverage table--from
1591					 * beginning of table */
1592  OffsetTo<ClassDef>
1593		backtrackClassDef;	/* Offset to glyph ClassDef table
1594					 * containing backtrack sequence
1595					 * data--from beginning of table */
1596  OffsetTo<ClassDef>
1597		inputClassDef;		/* Offset to glyph ClassDef
1598					 * table containing input sequence
1599					 * data--from beginning of table */
1600  OffsetTo<ClassDef>
1601		lookaheadClassDef;	/* Offset to glyph ClassDef table
1602					 * containing lookahead sequence
1603					 * data--from beginning of table */
1604  OffsetArrayOf<ChainRuleSet>
1605		ruleSet;		/* Array of ChainRuleSet tables
1606					 * ordered by class */
1607  public:
1608  DEFINE_SIZE_ARRAY (12, ruleSet);
1609};
1610
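/* ChainContextFormat3: coverage-based chained context.  Instead of rule
 * sets, the subtable stores one Coverage table per position of the
 * backtrack, input, and lookahead sequences; a position matches if the glyph
 * at it is covered.  The trailing arrays are variable-length and laid out
 * back to back, so everything after `backtrack' is located at run time with
 * StructAfter<>; the trailing members below are layout placeholders and are
 * never accessed directly. */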
1611struct ChainContextFormat3
1612{
1613  inline void closure (hb_closure_context_t *c) const
1614  {
1615    TRACE_CLOSURE (this);
1616    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
1617
1618    if (!(this+input[0]).intersects (c->glyphs))
1619      return;
1620
1621    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
1622    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
1623    struct ChainContextClosureLookupContext lookup_context = {
1624      {intersects_coverage},
1625      {this, this, this}
1626    };
1627    chain_context_closure_lookup (c,
1628				  backtrack.len, (const USHORT *) backtrack.array,
1629				  input.len, (const USHORT *) input.array + 1,
1630				  lookahead.len, (const USHORT *) lookahead.array,
1631				  lookup.len, lookup.array,
1632				  lookup_context);
1633  }
1634
1635  inline bool would_apply (hb_would_apply_context_t *c) const
1636  {
1637    TRACE_WOULD_APPLY (this);
1638
1639    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
1640    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
1641    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
1642    struct ChainContextApplyLookupContext lookup_context = {
1643      {match_coverage},
1644      {this, this, this}
1645    };
1646    return TRACE_RETURN (chain_context_would_apply_lookup (c,
1647							   backtrack.len, (const USHORT *) backtrack.array,
1648							   input.len, (const USHORT *) input.array + 1,
1649							   lookahead.len, (const USHORT *) lookahead.array,
1650							   lookup.len, lookup.array, lookup_context));
1651  }
1652
1653  inline const Coverage &get_coverage (void) const
1654  {
1655    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
1656    return this+input[0];
1657  }
1658
1659  inline bool apply (hb_apply_context_t *c) const
1660  {
1661    TRACE_APPLY (this);
1662    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
1663
1664    unsigned int index = (this+input[0]) (c->buffer->cur().codepoint);
1665    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
1666
1667    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
1668    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
1669    struct ChainContextApplyLookupContext lookup_context = {
1670      {match_coverage},
1671      {this, this, this}
1672    };
1673    return TRACE_RETURN (chain_context_apply_lookup (c,
1674						     backtrack.len, (const USHORT *) backtrack.array,
1675						     input.len, (const USHORT *) input.array + 1,
1676						     lookahead.len, (const USHORT *) lookahead.array,
1677						     lookup.len, lookup.array, lookup_context));
1678  }
1679
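  /* Sanitization must walk the trailing arrays in order: each array's length
   * is only trusted after it has been validated, and StructAfter<> needs that
   * length to find where the next array begins. */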
1680  inline bool sanitize (hb_sanitize_context_t *c) {
1681    TRACE_SANITIZE (this);
1682    if (!backtrack.sanitize (c, this)) return TRACE_RETURN (false);
1683    OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
1684    if (!input.sanitize (c, this)) return TRACE_RETURN (false);
1685    OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
1686    if (!lookahead.sanitize (c, this)) return TRACE_RETURN (false);
1687    ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
1688    return TRACE_RETURN (lookup.sanitize (c));
1689  }
1690
1691  protected:
1692  USHORT	format;			/* Format identifier--format = 3 */
1693  OffsetArrayOf<Coverage>
1694		backtrack;		/* Array of coverage tables
					 * in backtracking sequence, in glyph
1696					 * sequence order */
1697  OffsetArrayOf<Coverage>
		inputX;			/* Array of coverage
1699					 * tables in input sequence, in glyph
1700					 * sequence order */
1701  OffsetArrayOf<Coverage>
1702		lookaheadX;		/* Array of coverage tables
1703					 * in lookahead sequence, in glyph
1704					 * sequence order */
1705  ArrayOf<LookupRecord>
1706		lookupX;		/* Array of LookupRecords--in
					 * design order */
1708  public:
1709  DEFINE_SIZE_MIN (10);
1710};
1711
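/* ChainContext: format dispatcher.  process() forwards a generic context
 * (closure, would-apply, apply, ...) to the implementation selected by
 * u.format; unrecognized formats fall back to the context's default return
 * value. */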
1712struct ChainContext
1713{
1714  template <typename context_t>
1715  inline typename context_t::return_t process (context_t *c) const
1716  {
1717    TRACE_PROCESS (this);
1718    switch (u.format) {
1719    case 1: return c->process (u.format1);
1720    case 2: return c->process (u.format2);
1721    case 3: return c->process (u.format3);
1722    default:return c->default_return_value ();
1723    }
1724  }
1725
1726  inline bool sanitize (hb_sanitize_context_t *c) {
1727    TRACE_SANITIZE (this);
1728    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
1729    switch (u.format) {
1730    case 1: return TRACE_RETURN (u.format1.sanitize (c));
1731    case 2: return TRACE_RETURN (u.format2.sanitize (c));
1732    case 3: return TRACE_RETURN (u.format3.sanitize (c));
1733    default:return TRACE_RETURN (true);
1734    }
1735  }
1736
1737  protected:
1738  union {
1739  USHORT		format;	/* Format identifier */
1740  ChainContextFormat1	format1;
1741  ChainContextFormat2	format2;
1742  ChainContextFormat3	format3;
1743  } u;
1744};
1745
1746
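/* Extension lookups work around the 16-bit offsets used for Lookup
 * subtables, which large fonts can overflow.  An Extension subtable wraps
 * the real subtable behind a 32-bit offset: extensionLookupType records the
 * wrapped subtable's lookup type, and extensionOffset points to it relative
 * to the start of the Extension subtable itself. */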
1747struct ExtensionFormat1
1748{
1749  inline unsigned int get_type (void) const { return extensionLookupType; }
1750  inline unsigned int get_offset (void) const { return extensionOffset; }
1751
1752  inline bool sanitize (hb_sanitize_context_t *c) {
1753    TRACE_SANITIZE (this);
1754    return TRACE_RETURN (c->check_struct (this));
1755  }
1756
1757  protected:
1758  USHORT	format;			/* Format identifier. Set to 1. */
1759  USHORT	extensionLookupType;	/* Lookup type of subtable referenced
1760					 * by ExtensionOffset (i.e. the
1761					 * extension subtable). */
  ULONG		extensionOffset;	/* Offset to the extension subtable,
					 * of lookup type extensionLookupType,
					 * relative to the start of this
					 * subtable. */
1764  public:
1765  DEFINE_SIZE_STATIC (8);
1766};
1767
1768template <typename T>
1769struct Extension
1770{
1771  inline unsigned int get_type (void) const
1772  {
1773    switch (u.format) {
1774    case 1: return u.format1.get_type ();
1775    default:return 0;
1776    }
1777  }
1778  inline unsigned int get_offset (void) const
1779  {
1780    switch (u.format) {
1781    case 1: return u.format1.get_offset ();
1782    default:return 0;
1783    }
1784  }
1785
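  /* Resolve the wrapped subtable: a zero offset is treated as missing and
   * maps to the Null object; otherwise the subtable lives extensionOffset
   * bytes past the start of this Extension subtable. */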
1786  template <typename X>
1787  inline const X& get_subtable (void) const
1788  {
1789    unsigned int offset = get_offset ();
1790    if (unlikely (!offset)) return Null(typename T::LookupSubTable);
1791    return StructAtOffset<typename T::LookupSubTable> (this, offset);
1792  }
1793
1794  template <typename context_t>
1795  inline typename context_t::return_t process (context_t *c) const
1796  {
1797    return get_subtable<typename T::LookupSubTable> ().process (c, get_type ());
1798  }
1799
1800  inline bool sanitize_self (hb_sanitize_context_t *c) {
1801    TRACE_SANITIZE (this);
1802    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
1803    switch (u.format) {
1804    case 1: return TRACE_RETURN (u.format1.sanitize (c));
1805    default:return TRACE_RETURN (true);
1806    }
1807  }
1808
1809  inline bool sanitize (hb_sanitize_context_t *c) {
1810    TRACE_SANITIZE (this);
1811    if (!sanitize_self (c)) return TRACE_RETURN (false);
1812    unsigned int offset = get_offset ();
1813    if (unlikely (!offset)) return TRACE_RETURN (true);
1814    return TRACE_RETURN (StructAtOffset<typename T::LookupSubTable> (this, offset).sanitize (c, get_type ()));
1815  }
1816
1817  protected:
1818  union {
1819  USHORT		format;		/* Format identifier */
1820  ExtensionFormat1	format1;
1821  } u;
1822};
1823
1824
1825/*
1826 * GSUB/GPOS Common
1827 */
1828
1829struct GSUBGPOS
1830{
1831  static const hb_tag_t GSUBTag	= HB_OT_TAG_GSUB;
1832  static const hb_tag_t GPOSTag	= HB_OT_TAG_GPOS;
1833
1834  inline unsigned int get_script_count (void) const
1835  { return (this+scriptList).len; }
1836  inline const Tag& get_script_tag (unsigned int i) const
1837  { return (this+scriptList).get_tag (i); }
1838  inline unsigned int get_script_tags (unsigned int start_offset,
1839				       unsigned int *script_count /* IN/OUT */,
1840				       hb_tag_t     *script_tags /* OUT */) const
1841  { return (this+scriptList).get_tags (start_offset, script_count, script_tags); }
1842  inline const Script& get_script (unsigned int i) const
1843  { return (this+scriptList)[i]; }
1844  inline bool find_script_index (hb_tag_t tag, unsigned int *index) const
1845  { return (this+scriptList).find_index (tag, index); }
1846
1847  inline unsigned int get_feature_count (void) const
1848  { return (this+featureList).len; }
1849  inline const Tag& get_feature_tag (unsigned int i) const
1850  { return (this+featureList).get_tag (i); }
1851  inline unsigned int get_feature_tags (unsigned int start_offset,
1852					unsigned int *feature_count /* IN/OUT */,
1853					hb_tag_t     *feature_tags /* OUT */) const
1854  { return (this+featureList).get_tags (start_offset, feature_count, feature_tags); }
1855  inline const Feature& get_feature (unsigned int i) const
1856  { return (this+featureList)[i]; }
1857  inline bool find_feature_index (hb_tag_t tag, unsigned int *index) const
1858  { return (this+featureList).find_index (tag, index); }
1859
1860  inline unsigned int get_lookup_count (void) const
1861  { return (this+lookupList).len; }
1862  inline const Lookup& get_lookup (unsigned int i) const
1863  { return (this+lookupList)[i]; }
1864
1865  inline bool sanitize (hb_sanitize_context_t *c) {
1866    TRACE_SANITIZE (this);
1867    return TRACE_RETURN (version.sanitize (c) && likely (version.major == 1) &&
1868			 scriptList.sanitize (c, this) &&
1869			 featureList.sanitize (c, this) &&
1870			 lookupList.sanitize (c, this));
1871  }
1872
1873  protected:
1874  FixedVersion	version;	/* Version of the GSUB/GPOS table--initially set
1875				 * to 0x00010000 */
1876  OffsetTo<ScriptList>
1877		scriptList;  	/* ScriptList table */
1878  OffsetTo<FeatureList>
1879		featureList; 	/* FeatureList table */
1880  OffsetTo<LookupList>
1881		lookupList; 	/* LookupList table */
1882  public:
1883  DEFINE_SIZE_STATIC (10);
1884};
1885
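/* Illustrative sketch only -- not part of this header, and the variable
 * names are hypothetical.  Given a sanitized GSUBGPOS `table', the accessors
 * above can be used to enumerate scripts and to look up a feature by tag:
 *
 *   unsigned int script_count = table.get_script_count ();
 *   for (unsigned int i = 0; i < script_count; i++)
 *   {
 *     hb_tag_t tag = table.get_script_tag (i);
 *     const Script &script = table.get_script (i);
 *     // ... inspect `script', e.g. walk its language systems ...
 *   }
 *
 *   unsigned int feature_index;
 *   if (table.find_feature_index (HB_TAG ('k','e','r','n'), &feature_index))
 *   {
 *     const Feature &kern = table.get_feature (feature_index);
 *     // ... map the feature's lookup indices through get_lookup () ...
 *   }
 */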
1886
1887} /* namespace OT */
1888
1889
1890#endif /* HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH */
1891