hb-ot-layout-common-private.hh revision b3b89b66586897a69b410ef02e7434691de84ae6
1/*
2 * Copyright © 2007,2008,2009  Red Hat, Inc.
3 * Copyright © 2010,2012  Google, Inc.
4 *
5 *  This is part of HarfBuzz, a text shaping library.
6 *
7 * Permission is hereby granted, without written agreement and without
8 * license or royalty fees, to use, copy, modify, and distribute this
9 * software and its documentation for any purpose, provided that the
10 * above copyright notice and the following two paragraphs appear in
11 * all copies of this software.
12 *
13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
14 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
15 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
16 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
17 * DAMAGE.
18 *
19 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
20 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
21 * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
22 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
23 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
24 *
25 * Red Hat Author(s): Behdad Esfahbod
26 * Google Author(s): Behdad Esfahbod
27 */
28
29#ifndef HB_OT_LAYOUT_COMMON_PRIVATE_HH
30#define HB_OT_LAYOUT_COMMON_PRIVATE_HH
31
32#include "hb-ot-layout-private.hh"
33#include "hb-open-type-private.hh"
34#include "hb-set-private.hh"
35
36
37namespace OT {
38
39
40#define NOT_COVERED		((unsigned int) -1)
41#define MAX_NESTING_LEVEL	8
42
43
44
45/*
46 *
47 * OpenType Layout Common Table Formats
48 *
49 */
50
51
52/*
53 * Script, ScriptList, LangSys, Feature, FeatureList, Lookup, LookupList
54 */
55
template <typename Type>
struct Record
{
  /* Compare this record's tag to `a`; returns <0 / 0 / >0 strcmp-style.
   * Used by the sorted-array binary search in RecordArrayOf. */
  inline int cmp (hb_tag_t a) const {
    return tag.cmp (a);
  }

  /* Validate the record and the object its offset points to.
   * `base` is the start of the table the offset is relative to. */
  inline bool sanitize (hb_sanitize_context_t *c, void *base) {
    TRACE_SANITIZE ();
    return TRACE_RETURN (c->check_struct (this) && offset.sanitize (c, base));
  }

  Tag		tag;		/* 4-byte Tag identifier */
  OffsetTo<Type>
		offset;		/* Offset from beginning of object holding
				 * the Record */
  public:
  DEFINE_SIZE_STATIC (6);
};
75
template <typename Type>
struct RecordArrayOf : SortedArrayOf<Record<Type> > {
  /* Tag of record i, or Null(Tag) if i is out of range. */
  inline const Tag& get_tag (unsigned int i) const
  {
    /* We cheat slightly and don't define separate Null objects
     * for Record types.  Instead, we return the correct Null(Tag)
     * here. */
    if (unlikely (i >= this->len)) return Null(Tag);
    return (*this)[i].tag;
  }
  /* Copy up to *record_count tags, starting at start_offset, into
   * record_tags; *record_count is clamped by sub_array to what is
   * actually available.  Returns the total number of records, so a
   * caller passing record_count=NULL can size its buffer first. */
  inline unsigned int get_tags (unsigned int start_offset,
				unsigned int *record_count /* IN/OUT */,
				hb_tag_t     *record_tags /* OUT */) const
  {
    if (record_count) {
      const Record<Type> *arr = this->sub_array (start_offset, record_count);
      unsigned int count = *record_count;
      for (unsigned int i = 0; i < count; i++)
	record_tags[i] = arr[i].tag;
    }
    return this->len;
  }
  /* Binary-search for `tag` (the array is sorted by tag).  On success
   * stores the record index in *index and returns true; on failure
   * stores Index::NOT_FOUND_INDEX and returns false. */
  inline bool find_index (hb_tag_t tag, unsigned int *index) const
  {
    int i = this->search (tag);
    if (i != -1) {
        if (index) *index = i;
        return true;
    } else {
      if (index) *index = Index::NOT_FOUND_INDEX;
      return false;
    }
  }
};
110
template <typename Type>
struct RecordListOf : RecordArrayOf<Type>
{
  /* Dereference record i's offset and return the pointed-to table;
   * offsets in a record list are relative to the list itself (this). */
  inline const Type& operator [] (unsigned int i) const
  { return this+RecordArrayOf<Type>::operator [](i).offset; }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    /* Pass `this` as the offset base so each record's target is checked too. */
    return TRACE_RETURN (RecordArrayOf<Type>::sanitize (c, this));
  }
};
122
123
struct RangeRecord
{
  /* Position of glyph g relative to [start, end]: -1 before, 0 inside,
   * +1 after.  Enables binary search over a sorted array of ranges. */
  inline int cmp (hb_codepoint_t g) const {
    hb_codepoint_t a = start, b = end;
    return g < a ? -1 : g <= b ? 0 : +1 ;
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return TRACE_RETURN (c->check_struct (this));
  }

  /* True if any glyph in [start, end] is in `glyphs`. */
  inline bool intersects (const hb_set_t *glyphs) const {
    return glyphs->intersects (start, end);
  }

  /* Add every glyph in [start, end] to `glyphs`. */
  template <typename set_t>
  inline void add_coverage (set_t *glyphs) const {
    glyphs->add_range (start, end);
  }

  GlyphID	start;		/* First GlyphID in the range */
  GlyphID	end;		/* Last GlyphID in the range */
  USHORT	value;		/* Value */
  public:
  DEFINE_SIZE_STATIC (6);
};
/* Null object with start > end, so cmp() never returns 0 (no glyph matches).
 * NOTE(review): remaining bytes presumably zero-filled by DEFINE_NULL_DATA. */
DEFINE_NULL_DATA (RangeRecord, "\000\001");
152
153
struct IndexArray : ArrayOf<Index>
{
  /* Copy up to *_count indices, starting at start_offset, into _indexes;
   * *_count is clamped by sub_array to what is actually available.
   * Returns the total number of indices in the array, so a caller
   * passing _count=NULL can size its buffer first. */
  inline unsigned int get_indexes (unsigned int start_offset,
				   unsigned int *_count /* IN/OUT */,
				   unsigned int *_indexes /* OUT */) const
  {
    if (_count) {
      const USHORT *arr = this->sub_array (start_offset, _count);
      unsigned int count = *_count;
      for (unsigned int i = 0; i < count; i++)
	_indexes[i] = arr[i];
    }
    return this->len;
  }
};
169
170
171struct Script;
172struct LangSys;
173struct Feature;
174
175
176struct LangSys
177{
178  inline unsigned int get_feature_count (void) const
179  { return featureIndex.len; }
180  inline hb_tag_t get_feature_index (unsigned int i) const
181  { return featureIndex[i]; }
182  inline unsigned int get_feature_indexes (unsigned int start_offset,
183					   unsigned int *feature_count /* IN/OUT */,
184					   unsigned int *feature_indexes /* OUT */) const
185  { return featureIndex.get_indexes (start_offset, feature_count, feature_indexes); }
186
187  inline bool has_required_feature (void) const { return reqFeatureIndex != 0xffff; }
188  inline unsigned int get_required_feature_index (void) const
189  {
190    if (reqFeatureIndex == 0xffff)
191      return Index::NOT_FOUND_INDEX;
192   return reqFeatureIndex;;
193  }
194
195  inline bool sanitize (hb_sanitize_context_t *c) {
196    TRACE_SANITIZE ();
197    return TRACE_RETURN (c->check_struct (this) && featureIndex.sanitize (c));
198  }
199
200  Offset	lookupOrder;	/* = Null (reserved for an offset to a
201				 * reordering table) */
202  USHORT	reqFeatureIndex;/* Index of a feature required for this
203				 * language system--if no required features
204				 * = 0xFFFF */
205  IndexArray	featureIndex;	/* Array of indices into the FeatureList */
206  public:
207  DEFINE_SIZE_ARRAY (6, featureIndex);
208};
209DEFINE_NULL_DATA (LangSys, "\0\0\xFF\xFF");
210
211
/*
 * Script table: a default language system plus an array of LangSys
 * records sorted by language-system tag.
 */
struct Script
{
  inline unsigned int get_lang_sys_count (void) const
  { return langSys.len; }
  inline const Tag& get_lang_sys_tag (unsigned int i) const
  { return langSys.get_tag (i); }
  /* Copy up to *lang_sys_count tags, starting at start_offset, into
   * lang_sys_tags; returns the total number of language systems. */
  inline unsigned int get_lang_sys_tags (unsigned int start_offset,
					 unsigned int *lang_sys_count /* IN/OUT */,
					 hb_tag_t     *lang_sys_tags /* OUT */) const
  { return langSys.get_tags (start_offset, lang_sys_count, lang_sys_tags); }
  /* LangSys at index i; NOT_FOUND_INDEX maps to the default LangSys. */
  inline const LangSys& get_lang_sys (unsigned int i) const
  {
    if (i == Index::NOT_FOUND_INDEX) return get_default_lang_sys ();
    return this+langSys[i].offset;
  }
  inline bool find_lang_sys_index (hb_tag_t tag, unsigned int *index) const
  { return langSys.find_index (tag, index); }

  /* defaultLangSys offset may be Null (0) == no default language system. */
  inline bool has_default_lang_sys (void) const { return defaultLangSys != 0; }
  inline const LangSys& get_default_lang_sys (void) const { return this+defaultLangSys; }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return TRACE_RETURN (defaultLangSys.sanitize (c, this) && langSys.sanitize (c, this));
  }

  protected:
  OffsetTo<LangSys>
		defaultLangSys;	/* Offset to DefaultLangSys table--from
				 * beginning of Script table--may be Null */
  RecordArrayOf<LangSys>
		langSys;	/* Array of LangSysRecords--listed
				 * alphabetically by LangSysTag */
  public:
  DEFINE_SIZE_ARRAY (4, langSys);
};

typedef RecordListOf<Script> ScriptList;
250
251
/*
 * Feature table: an array of lookup indices (into the LookupList)
 * plus an optional feature-parameters offset.
 */
struct Feature
{
  inline unsigned int get_lookup_count (void) const
  { return lookupIndex.len; }
  inline hb_tag_t get_lookup_index (unsigned int i) const
  { return lookupIndex[i]; }
  /* Copy up to *lookup_count lookup indices, starting at start_index,
   * into lookup_tags; returns the total number of indices. */
  inline unsigned int get_lookup_indexes (unsigned int start_index,
					  unsigned int *lookup_count /* IN/OUT */,
					  unsigned int *lookup_tags /* OUT */) const
  { return lookupIndex.get_indexes (start_index, lookup_count, lookup_tags); }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return TRACE_RETURN (c->check_struct (this) && lookupIndex.sanitize (c));
  }

  Offset	featureParams;	/* Offset to Feature Parameters table (if one
				 * has been defined for the feature), relative
				 * to the beginning of the Feature Table; = Null
				 * if not required */
  IndexArray	 lookupIndex;	/* Array of LookupList indices */
  public:
  DEFINE_SIZE_ARRAY (4, lookupIndex);
};

typedef RecordListOf<Feature> FeatureList;
278
279
/* Bit flags stored in Lookup::lookupFlag (lower 16 bits of lookup_props). */
struct LookupFlag : USHORT
{
  enum Flags {
    RightToLeft		= 0x0001u,
    IgnoreBaseGlyphs	= 0x0002u,
    IgnoreLigatures	= 0x0004u,
    IgnoreMarks		= 0x0008u,
    IgnoreFlags		= 0x000Eu, /* Mask of the three Ignore* bits above */
    UseMarkFilteringSet	= 0x0010u, /* If set, a mark-filtering-set USHORT
				    * follows the subtable offsets in Lookup */
    Reserved		= 0x00E0u,
    MarkAttachmentType	= 0xFF00u  /* Mark attachment class filter, high byte */
  };
  public:
  DEFINE_SIZE_STATIC (2);
};
295
/*
 * Lookup table header: lookup type, flags, and an array of offsets to
 * subtables (whose concrete type depends on GSUB vs GPOS), optionally
 * followed by a mark-filtering-set index.
 */
struct Lookup
{
  inline unsigned int get_subtable_count (void) const { return subTable.len; }

  inline unsigned int get_type (void) const { return lookupType; }

  /* lookup_props is a 32-bit integer where the lower 16-bit is LookupFlag and
   * higher 16-bit is mark-filtering-set if the lookup uses one.
   * Not to be confused with glyph_props which is very similar. */
  inline uint32_t get_props (void) const
  {
    unsigned int flag = lookupFlag;
    if (unlikely (flag & LookupFlag::UseMarkFilteringSet))
    {
      /* The mark-filtering-set USHORT sits right after the subtable array. */
      const USHORT &markFilteringSet = StructAfter<USHORT> (subTable);
      flag += (markFilteringSet << 16);
    }
    return flag;
  }

  /* Write out a Lookup header with num_subtables (as yet unset) subtable
   * offsets; splits lookup_props back into flag and mark-filtering-set. */
  inline bool serialize (hb_serialize_context_t *c,
			 unsigned int lookup_type,
			 uint32_t lookup_props,
			 unsigned int num_subtables)
  {
    TRACE_SERIALIZE ();
    if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
    lookupType.set (lookup_type);
    lookupFlag.set (lookup_props & 0xFFFF);
    if (unlikely (!subTable.serialize (c, num_subtables))) return TRACE_RETURN (false);
    if (lookupFlag & LookupFlag::UseMarkFilteringSet)
    {
      USHORT &markFilteringSet = StructAfter<USHORT> (subTable);
      markFilteringSet.set (lookup_props >> 16);
    }
    return TRACE_RETURN (true);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    /* Real sanitize of the subtables is done by GSUB/GPOS/... */
    if (!(c->check_struct (this) && subTable.sanitize (c))) return TRACE_RETURN (false);
    if (lookupFlag & LookupFlag::UseMarkFilteringSet)
    {
      /* Trailing mark-filtering-set is only present when the flag is set. */
      USHORT &markFilteringSet = StructAfter<USHORT> (subTable);
      if (!markFilteringSet.sanitize (c)) return TRACE_RETURN (false);
    }
    return TRACE_RETURN (true);
  }

  USHORT	lookupType;		/* Different enumerations for GSUB and GPOS */
  USHORT	lookupFlag;		/* Lookup qualifiers */
  ArrayOf<Offset>
		subTable;		/* Array of SubTables */
  USHORT	markFilteringSetX[VAR];	/* Index (base 0) into GDEF mark glyph sets
					 * structure. This field is only present if bit
					 * UseMarkFilteringSet of lookup flags is set. */
  public:
  DEFINE_SIZE_ARRAY2 (6, subTable, markFilteringSetX);
};

typedef OffsetListOf<Lookup> LookupList;
358
359
360/*
361 * Coverage Table
362 */
363
/* Coverage format 1: a sorted list of covered glyphs; coverage index is
 * the glyph's position in the list. */
struct CoverageFormat1
{
  friend struct Coverage;

  private:
  /* Returns the coverage index of glyph_id, or NOT_COVERED.
   * Relies on search() returning -1 on failure, which equals
   * NOT_COVERED once converted to unsigned. */
  inline unsigned int get_coverage (hb_codepoint_t glyph_id) const
  {
    int i = glyphArray.search (glyph_id);
    ASSERT_STATIC (((unsigned int) -1) == NOT_COVERED);
    return i;
  }

  /* Write out num_glyphs glyphs (assumed sorted) from the supplier. */
  inline bool serialize (hb_serialize_context_t *c,
			 Supplier<GlyphID> &glyphs,
			 unsigned int num_glyphs)
  {
    TRACE_SERIALIZE ();
    if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
    glyphArray.len.set (num_glyphs);
    if (unlikely (!c->extend (glyphArray))) return TRACE_RETURN (false);
    for (unsigned int i = 0; i < num_glyphs; i++)
      glyphArray[i].set (glyphs[i]);
    glyphs.advance (num_glyphs);
    return TRACE_RETURN (true);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return TRACE_RETURN (glyphArray.sanitize (c));
  }

  /* True if the glyph at coverage index `index` is in `glyphs`. */
  inline bool intersects_coverage (const hb_set_t *glyphs, unsigned int index) const {
    return glyphs->has (glyphArray[index]);
  }

  /* Add all covered glyphs to `glyphs`. */
  template <typename set_t>
  inline void add_coverage (set_t *glyphs) const {
    unsigned int count = glyphArray.len;
    for (unsigned int i = 0; i < count; i++)
      glyphs->add (glyphArray[i]);
  }

  /* Forward iterator over (glyph, coverage-index) pairs. */
  struct Iter {
    inline void init (const struct CoverageFormat1 &c_) { c = &c_; i = 0; };
    inline bool more (void) { return i < c->glyphArray.len; }
    inline void next (void) { i++; }
    inline uint16_t get_glyph (void) { return c->glyphArray[i]; }
    inline uint16_t get_coverage (void) { return i; }

    private:
    const struct CoverageFormat1 *c;
    unsigned int i;
  };

  protected:
  USHORT	coverageFormat;	/* Format identifier--format = 1 */
  SortedArrayOf<GlyphID>
		glyphArray;	/* Array of GlyphIDs--in numerical order */
  public:
  DEFINE_SIZE_ARRAY (4, glyphArray);
};
425
426struct CoverageFormat2
427{
428  friend struct Coverage;
429
430  private:
431  inline unsigned int get_coverage (hb_codepoint_t glyph_id) const
432  {
433    int i = rangeRecord.search (glyph_id);
434    if (i != -1) {
435      const RangeRecord &range = rangeRecord[i];
436      return (unsigned int) range.value + (glyph_id - range.start);
437    }
438    return NOT_COVERED;
439  }
440
441  inline bool serialize (hb_serialize_context_t *c,
442			 Supplier<GlyphID> &glyphs,
443			 unsigned int num_glyphs)
444  {
445    TRACE_SERIALIZE ();
446    if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
447
448    if (unlikely (!num_glyphs)) return TRACE_RETURN (true);
449
450    unsigned int num_ranges = 1;
451    for (unsigned int i = 1; i < num_glyphs; i++)
452      if (glyphs[i - 1] + 1 != glyphs[i])
453        num_ranges++;
454    rangeRecord.len.set (num_ranges);
455    if (unlikely (!c->extend (rangeRecord))) return TRACE_RETURN (false);
456
457    unsigned int range = 0;
458    rangeRecord[range].start.set (glyphs[0]);
459    rangeRecord[range].value.set (0);
460    for (unsigned int i = 1; i < num_glyphs; i++)
461      if (glyphs[i - 1] + 1 != glyphs[i]) {
462	rangeRecord[range].start.set (glyphs[i]);
463	rangeRecord[range].value.set (i);
464	range++;
465      } else {
466        rangeRecord[range].end = glyphs[i];
467      }
468    glyphs.advance (num_glyphs);
469    return TRACE_RETURN (true);
470  }
471
472  inline bool sanitize (hb_sanitize_context_t *c) {
473    TRACE_SANITIZE ();
474    return TRACE_RETURN (rangeRecord.sanitize (c));
475  }
476
477  inline bool intersects_coverage (const hb_set_t *glyphs, unsigned int index) const {
478    unsigned int i;
479    unsigned int count = rangeRecord.len;
480    for (i = 0; i < count; i++) {
481      const RangeRecord &range = rangeRecord[i];
482      if (range.value <= index &&
483	  index < (unsigned int) range.value + (range.end - range.start) &&
484	  range.intersects (glyphs))
485        return true;
486      else if (index < range.value)
487        return false;
488    }
489    return false;
490  }
491
492  template <typename set_t>
493  inline void add_coverage (set_t *glyphs) const {
494    unsigned int count = rangeRecord.len;
495    for (unsigned int i = 0; i < count; i++)
496      rangeRecord[i].add_coverage (glyphs);
497  }
498
499  struct Iter {
500    inline void init (const CoverageFormat2 &c_) {
501      c = &c_;
502      coverage = 0;
503      i = 0;
504      j = c->rangeRecord.len ? c_.rangeRecord[0].start : 0;
505    }
506    inline bool more (void) { return i < c->rangeRecord.len; }
507    inline void next (void) {
508      coverage++;
509      if (j == c->rangeRecord[i].end) {
510        i++;
511	if (more ())
512	  j = c->rangeRecord[i].start;
513	return;
514      }
515      j++;
516    }
517    inline uint16_t get_glyph (void) { return j; }
518    inline uint16_t get_coverage (void) { return coverage; }
519
520    private:
521    const struct CoverageFormat2 *c;
522    unsigned int i, j, coverage;
523  };
524
525  protected:
526  USHORT	coverageFormat;	/* Format identifier--format = 2 */
527  SortedArrayOf<RangeRecord>
528		rangeRecord;	/* Array of glyph ranges--ordered by
529				 * Start GlyphID. rangeCount entries
530				 * long */
531  public:
532  DEFINE_SIZE_ARRAY (4, rangeRecord);
533};
534
/* Coverage table: dispatches between format 1 (glyph list) and
 * format 2 (glyph ranges).  Maps a glyph id to a coverage index. */
struct Coverage
{
  inline unsigned int operator () (hb_codepoint_t glyph_id) const { return get_coverage (glyph_id); }

  /* Coverage index of glyph_id, or NOT_COVERED. */
  inline unsigned int get_coverage (hb_codepoint_t glyph_id) const
  {
    switch (u.format) {
    case 1: return u.format1.get_coverage(glyph_id);
    case 2: return u.format2.get_coverage(glyph_id);
    default:return NOT_COVERED;
    }
  }

  /* Serialize the glyph list, picking whichever format is smaller
   * (format 1 costs 1 USHORT/glyph, format 2 costs 3 USHORTs/range). */
  inline bool serialize (hb_serialize_context_t *c,
			 Supplier<GlyphID> &glyphs,
			 unsigned int num_glyphs)
  {
    TRACE_SERIALIZE ();
    if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
    unsigned int num_ranges = 1;
    for (unsigned int i = 1; i < num_glyphs; i++)
      if (glyphs[i - 1] + 1 != glyphs[i])
        num_ranges++;
    u.format.set (num_glyphs * 2 < num_ranges * 3 ? 1 : 2);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.serialize (c, glyphs, num_glyphs));
    case 2: return TRACE_RETURN (u.format2.serialize (c, glyphs, num_glyphs));
    default:return TRACE_RETURN (false);
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    case 2: return TRACE_RETURN (u.format2.sanitize (c));
    default:return TRACE_RETURN (true);
    }
  }

  /* True if any covered glyph is in `glyphs`. */
  inline bool intersects (const hb_set_t *glyphs) const {
    /* TODO speed this up */
    Coverage::Iter iter;
    for (iter.init (*this); iter.more (); iter.next ()) {
      if (glyphs->has (iter.get_glyph ()))
        return true;
    }
    return false;
  }

  /* True if the glyph at coverage index `index` is in `glyphs`. */
  inline bool intersects_coverage (const hb_set_t *glyphs, unsigned int index) const {
    switch (u.format) {
    case 1: return u.format1.intersects_coverage (glyphs, index);
    case 2: return u.format2.intersects_coverage (glyphs, index);
    default:return false;
    }
  }

  /* Add all covered glyphs to `glyphs`. */
  template <typename set_t>
  inline void add_coverage (set_t *glyphs) const {
    switch (u.format) {
    case 1: u.format1.add_coverage (glyphs); break;
    case 2: u.format2.add_coverage (glyphs); break;
    default:                                 break;
    }
  }

  /* Format-dispatching iterator over (glyph, coverage-index) pairs.
   * An unknown format (0) iterates nothing: more() returns true but
   * callers only reach here after sanitize accepted the format.
   * NOTE(review): default branches of get_glyph/get_coverage return
   * `true` (i.e. 1) for a uint16_t -- looks like dead code; confirm. */
  struct Iter {
    Iter (void) : format (0) {};
    inline void init (const Coverage &c_) {
      format = c_.u.format;
      switch (format) {
      case 1: return u.format1.init (c_.u.format1);
      case 2: return u.format2.init (c_.u.format2);
      default:return;
      }
    }
    inline bool more (void) {
      switch (format) {
      case 1: return u.format1.more ();
      case 2: return u.format2.more ();
      default:return true;
      }
    }
    inline void next (void) {
      switch (format) {
      case 1: u.format1.next (); break;
      case 2: u.format2.next (); break;
      default:                   break;
      }
    }
    inline uint16_t get_glyph (void) {
      switch (format) {
      case 1: return u.format1.get_glyph ();
      case 2: return u.format2.get_glyph ();
      default:return true;
      }
    }
    inline uint16_t get_coverage (void) {
      switch (format) {
      case 1: return u.format1.get_coverage ();
      case 2: return u.format2.get_coverage ();
      default:return true;
      }
    }

    private:
    unsigned int format;
    union {
    CoverageFormat1::Iter	format1;
    CoverageFormat2::Iter	format2;
    } u;
  };

  protected:
  union {
  USHORT		format;		/* Format identifier */
  CoverageFormat1	format1;
  CoverageFormat2	format2;
  } u;
  public:
  DEFINE_SIZE_UNION (2, format);
};
659
660
661/*
662 * Class Definition Table
663 */
664
/* Class definition format 1: a contiguous run of glyphs starting at
 * startGlyph, with one class value per glyph. */
struct ClassDefFormat1
{
  friend struct ClassDef;

  private:
  /* Class of glyph_id; 0 (the default class) if outside the run.
   * The unsigned subtraction wraps for glyph_id < startGlyph, so a
   * single compare rejects both below-range and above-range ids. */
  inline unsigned int get_class (hb_codepoint_t glyph_id) const
  {
    if (unlikely ((unsigned int) (glyph_id - startGlyph) < classValue.len))
      return classValue[glyph_id - startGlyph];
    return 0;
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return TRACE_RETURN (c->check_struct (this) && classValue.sanitize (c));
  }

  /* True if any glyph of class `klass` is in `glyphs`. */
  inline bool intersects_class (const hb_set_t *glyphs, unsigned int klass) const {
    unsigned int count = classValue.len;
    for (unsigned int i = 0; i < count; i++)
      if (classValue[i] == klass && glyphs->has (startGlyph + i))
        return true;
    return false;
  }

  protected:
  USHORT	classFormat;		/* Format identifier--format = 1 */
  GlyphID	startGlyph;		/* First GlyphID of the classValueArray */
  ArrayOf<USHORT>
		classValue;		/* Array of Class Values--one per GlyphID */
  public:
  DEFINE_SIZE_ARRAY (6, classValue);
};
698
/* Class definition format 2: sorted glyph ranges, each mapped to one
 * class value. */
struct ClassDefFormat2
{
  friend struct ClassDef;

  private:
  /* Class of glyph_id; 0 (the default class) if no range contains it. */
  inline unsigned int get_class (hb_codepoint_t glyph_id) const
  {
    int i = rangeRecord.search (glyph_id);
    if (i != -1)
      return rangeRecord[i].value;
    return 0;
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return TRACE_RETURN (rangeRecord.sanitize (c));
  }

  /* True if any glyph of class `klass` is in `glyphs`. */
  inline bool intersects_class (const hb_set_t *glyphs, unsigned int klass) const {
    unsigned int count = rangeRecord.len;
    for (unsigned int i = 0; i < count; i++)
      if (rangeRecord[i].value == klass && rangeRecord[i].intersects (glyphs))
        return true;
    return false;
  }

  protected:
  USHORT	classFormat;	/* Format identifier--format = 2 */
  SortedArrayOf<RangeRecord>
		rangeRecord;	/* Array of glyph ranges--ordered by
				 * Start GlyphID */
  public:
  DEFINE_SIZE_ARRAY (4, rangeRecord);
};
733
/* ClassDef table: dispatches between format 1 (glyph run) and
 * format 2 (glyph ranges).  Maps a glyph id to a class value;
 * unknown formats yield class 0. */
struct ClassDef
{
  inline unsigned int operator () (hb_codepoint_t glyph_id) const { return get_class (glyph_id); }

  inline unsigned int get_class (hb_codepoint_t glyph_id) const
  {
    switch (u.format) {
    case 1: return u.format1.get_class(glyph_id);
    case 2: return u.format2.get_class(glyph_id);
    default:return 0;
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    case 2: return TRACE_RETURN (u.format2.sanitize (c));
    default:return TRACE_RETURN (true);
    }
  }

  /* True if any glyph of class `klass` is in `glyphs`. */
  inline bool intersects_class (const hb_set_t *glyphs, unsigned int klass) const {
    switch (u.format) {
    case 1: return u.format1.intersects_class (glyphs, klass);
    case 2: return u.format2.intersects_class (glyphs, klass);
    default:return false;
    }
  }

  protected:
  union {
  USHORT		format;		/* Format identifier */
  ClassDefFormat1	format1;
  ClassDefFormat2	format2;
  } u;
  public:
  DEFINE_SIZE_UNION (2, format);
};
774
775
776/*
777 * Device Tables
778 */
779
/* Device table: per-ppem-size pixel adjustments, stored as packed
 * signed values (1/2/4/8 bits each depending on deltaFormat). */
struct Device
{

  inline hb_position_t get_x_delta (hb_font_t *font) const
  { return get_delta (font->x_ppem, font->x_scale); }

  inline hb_position_t get_y_delta (hb_font_t *font) const
  { return get_delta (font->y_ppem, font->y_scale); }

  /* Pixel delta at `ppem`, converted to font-space position units. */
  inline int get_delta (unsigned int ppem, int scale) const
  {
    if (!ppem) return 0;

    int pixels = get_delta_pixels (ppem);

    if (!pixels) return 0;

    /* 64-bit intermediate to avoid overflow of pixels * scale. */
    return pixels * (int64_t) scale / ppem;
  }


  /* Raw pixel delta for ppem_size, or 0 if the format is invalid or the
   * size is outside [startSize, endSize]. */
  inline int get_delta_pixels (unsigned int ppem_size) const
  {
    unsigned int f = deltaFormat;
    if (unlikely (f < 1 || f > 3))
      return 0;

    if (ppem_size < startSize || ppem_size > endSize)
      return 0;

    unsigned int s = ppem_size - startSize;

    /* Format f packs (1 << f)-bit values, 16 >> f per USHORT:
     * pick the word, shift the wanted value down, mask it out. */
    unsigned int byte = deltaValue[s >> (4 - f)];
    unsigned int bits = (byte >> (16 - (((s & ((1 << (4 - f)) - 1)) + 1) << f)));
    unsigned int mask = (0xFFFF >> (16 - (1 << f)));

    int delta = bits & mask;

    /* Sign-extend: values in the top half of the range are negative. */
    if ((unsigned int) delta >= ((mask + 1) >> 1))
      delta -= mask + 1;

    return delta;
  }

  /* Total byte size of the table: 3 header USHORTs plus the packed
   * delta words; degenerates to header-only for invalid contents. */
  inline unsigned int get_size (void) const
  {
    unsigned int f = deltaFormat;
    if (unlikely (f < 1 || f > 3 || startSize > endSize)) return 3 * USHORT::static_size;
    return USHORT::static_size * (4 + ((endSize - startSize) >> (4 - f)));
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return TRACE_RETURN (c->check_struct (this) && c->check_range (this, this->get_size ()));
  }

  protected:
  USHORT	startSize;		/* Smallest size to correct--in ppem */
  USHORT	endSize;		/* Largest size to correct--in ppem */
  USHORT	deltaFormat;		/* Format of DeltaValue array data: 1, 2, or 3
					 * 1	Signed 2-bit value, 8 values per uint16
					 * 2	Signed 4-bit value, 4 values per uint16
					 * 3	Signed 8-bit value, 2 values per uint16
					 */
  USHORT	deltaValue[VAR];	/* Array of compressed data */
  public:
  DEFINE_SIZE_ARRAY (6, deltaValue);
};
848
849
850} // namespace OT
851
852
853#endif /* HB_OT_LAYOUT_COMMON_PRIVATE_HH */
854