/*
 * Copyright © 2007,2008,2009,2010  Red Hat, Inc.
 * Copyright © 2012  Google, Inc.
 *
 *  This is part of HarfBuzz, a text shaping library.
 *
 * Permission is hereby granted, without written agreement and without
 * license or royalty fees, to use, copy, modify, and distribute this
 * software and its documentation for any purpose, provided that the
 * above copyright notice and the following two paragraphs appear in
 * all copies of this software.
 *
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 *
 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 *
 * Red Hat Author(s): Behdad Esfahbod
 * Google Author(s): Behdad Esfahbod
 */

#ifndef HB_OPEN_TYPE_PRIVATE_HH
#define HB_OPEN_TYPE_PRIVATE_HH

#include "hb-private.hh"


namespace OT {



/*
 * Casts
 */

/* Cast to struct T, reference to reference */
template<typename Type, typename TObject>
static inline const Type& CastR(const TObject &X)
{ return reinterpret_cast<const Type&> (X); }
template<typename Type, typename TObject>
static inline Type& CastR(TObject &X)
{ return reinterpret_cast<Type&> (X); }

/* Cast to struct T, pointer to pointer */
template<typename Type, typename TObject>
static inline const Type* CastP(const TObject *X)
{ return reinterpret_cast<const Type*> (X); }
template<typename Type, typename TObject>
static inline Type* CastP(TObject *X)
{ return reinterpret_cast<Type*> (X); }

/* StructAtOffset<T>(P,Ofs) returns the struct T& that is placed at memory
 * location pointed to by P plus Ofs bytes. */
template<typename Type>
static inline const Type& StructAtOffset(const void *P, unsigned int offset)
{ return * reinterpret_cast<const Type*> ((const char *) P + offset); }
template<typename Type>
static inline Type& StructAtOffset(void *P, unsigned int offset)
{ return * reinterpret_cast<Type*> ((char *) P + offset); }

/* StructAfter<T>(X) returns the struct T& that is placed after X.
 * Works with X of variable size also.  X must implement get_size() */
template<typename Type, typename TObject>
static inline const Type& StructAfter(const TObject &X)
{ return StructAtOffset<Type>(&X, X.get_size()); }
template<typename Type, typename TObject>
static inline Type& StructAfter(TObject &X)
{ return StructAtOffset<Type>(&X, X.get_size()); }
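
/* Illustrative sketch (not part of the library): given a buffer that holds a
 * header followed by a variable-sized table, the helpers above are meant to
 * be used roughly as follows.  `ExampleHeader' and `ExampleTable' are
 * hypothetical types assumed to provide get_size():
 *
 *   const ExampleHeader &header = StructAtOffset<ExampleHeader> (data, 0);
 *   const ExampleTable  &table  = StructAfter<ExampleTable> (header);
 *
 * StructAfter() simply advances by header.get_size() bytes from &header, so
 * it works even when the header's size is only known at run time. */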



/*
 * Size checking
 */

/* Check _assertion in a method environment */
#define _DEFINE_INSTANCE_ASSERTION1(_line, _assertion) \
  inline void _instance_assertion_on_line_##_line (void) const \
  { \
    ASSERT_STATIC (_assertion); \
    ASSERT_INSTANCE_POD (*this); /* Make sure it's POD. */ \
  }
# define _DEFINE_INSTANCE_ASSERTION0(_line, _assertion) _DEFINE_INSTANCE_ASSERTION1 (_line, _assertion)
# define DEFINE_INSTANCE_ASSERTION(_assertion) _DEFINE_INSTANCE_ASSERTION0 (__LINE__, _assertion)

/* Check that _code compiles in a method environment */
#define _DEFINE_COMPILES_ASSERTION1(_line, _code) \
  inline void _compiles_assertion_on_line_##_line (void) const \
  { _code; }
# define _DEFINE_COMPILES_ASSERTION0(_line, _code) _DEFINE_COMPILES_ASSERTION1 (_line, _code)
# define DEFINE_COMPILES_ASSERTION(_code) _DEFINE_COMPILES_ASSERTION0 (__LINE__, _code)


#define DEFINE_SIZE_STATIC(size) \
  DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size)); \
  static const unsigned int static_size = (size); \
  static const unsigned int min_size = (size)

#define DEFINE_SIZE_UNION(size, _member) \
  DEFINE_INSTANCE_ASSERTION (this->u._member.static_size == (size)); \
  static const unsigned int min_size = (size)

#define DEFINE_SIZE_MIN(size) \
  DEFINE_INSTANCE_ASSERTION (sizeof (*this) >= (size)); \
  static const unsigned int min_size = (size)

#define DEFINE_SIZE_ARRAY(size, array) \
  DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size) + sizeof (array[0])); \
  DEFINE_COMPILES_ASSERTION ((void) array[0].static_size) \
  static const unsigned int min_size = (size)

#define DEFINE_SIZE_ARRAY2(size, array1, array2) \
  DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size) + sizeof (this->array1[0]) + sizeof (this->array2[0])); \
  DEFINE_COMPILES_ASSERTION ((void) array1[0].static_size; (void) array2[0].static_size) \
  static const unsigned int min_size = (size)
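
/* Illustrative sketch (not part of the library): a hypothetical pair of
 * structs using the size-check macros above; USHORT is the big-endian
 * integer type defined further below.  DEFINE_SIZE_ARRAY sets min_size to
 * the size of the fixed part only; the trailing array is accounted for by
 * the struct's own get_size():
 *
 *   struct ExampleRecord
 *   {
 *     USHORT value;
 *     public:
 *     DEFINE_SIZE_STATIC (2);
 *   };
 *
 *   struct ExampleList
 *   {
 *     inline unsigned int get_size (void) const
 *     { return count.static_size + count * ExampleRecord::static_size; }
 *     USHORT        count;
 *     ExampleRecord records[VAR];
 *     public:
 *     DEFINE_SIZE_ARRAY (2, records);
 *   };
 */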



/*
 * Null objects
 */

/* Global nul-content Null pool.  Enlarge as necessary. */
/* TODO This really should be an extern HB_INTERNAL and defined somewhere... */
static const void *_NullPool[(256+8) / sizeof (void *)];

/* Generic nul-content Null objects. */
template <typename Type>
static inline const Type& Null (void) {
  ASSERT_STATIC (sizeof (Type) <= sizeof (_NullPool));
  return *CastP<Type> (_NullPool);
}

/* Specialization for arbitrary-content arbitrary-sized Null objects. */
#define DEFINE_NULL_DATA(Type, data) \
static const char _Null##Type[sizeof (Type) + 1] = data; /* +1 is for nul-termination in data */ \
template <> \
/*static*/ inline const Type& Null<Type> (void) { \
  return *CastP<Type> (_Null##Type); \
} /* The following line really exists such that we end in a place needing semicolon */ \
ASSERT_STATIC (Type::min_size + 1 <= sizeof (_Null##Type))

/* Accessor macro. */
#define Null(Type) Null<Type>()
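
/* Illustrative note (not part of the library): Null<Type>() returns a shared,
 * read-only, zero-filled instance that accessors hand back instead of
 * dereferencing a bad offset or index, e.g.:
 *
 *   const GlyphID &g = Null(GlyphID);   // an all-zero GlyphID
 *
 * Types whose all-zero image is not an acceptable "null" value override the
 * default with DEFINE_NULL_DATA, as done for Tag and Index further below. */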


/*
 * Dispatch
 */

template <typename Context, typename Return, unsigned int MaxDebugDepth>
struct hb_dispatch_context_t
{
  static const unsigned int max_debug_depth = MaxDebugDepth;
  typedef Return return_t;
  template <typename T, typename F>
  inline bool may_dispatch (const T *obj, const F *format) { return true; }
  static return_t no_dispatch_return_value (void) { return Context::default_return_value (); }
};


/*
 * Sanitize
 */

#ifndef HB_DEBUG_SANITIZE
#define HB_DEBUG_SANITIZE (HB_DEBUG+0)
#endif


#define TRACE_SANITIZE(this) \
	hb_auto_trace_t<HB_DEBUG_SANITIZE, bool> trace \
	(&c->debug_depth, c->get_name (), this, HB_FUNC, \
	 "");

/* This limits sanitizing time on really broken fonts. */
#ifndef HB_SANITIZE_MAX_EDITS
#define HB_SANITIZE_MAX_EDITS 32
#endif

struct hb_sanitize_context_t :
       hb_dispatch_context_t<hb_sanitize_context_t, bool, HB_DEBUG_SANITIZE>
{
  inline hb_sanitize_context_t (void) :
	debug_depth (0),
	start (NULL), end (NULL),
	writable (false), edit_count (0),
	blob (NULL) {}

  inline const char *get_name (void) { return "SANITIZE"; }
  template <typename T, typename F>
  inline bool may_dispatch (const T *obj, const F *format)
  { return format->sanitize (this); }
  template <typename T>
  inline return_t dispatch (const T &obj) { return obj.sanitize (this); }
  static return_t default_return_value (void) { return true; }
  static return_t no_dispatch_return_value (void) { return false; }
  bool stop_sublookup_iteration (const return_t r) const { return !r; }

  inline void init (hb_blob_t *b)
  {
    this->blob = hb_blob_reference (b);
    this->writable = false;
  }

  inline void start_processing (void)
  {
    this->start = hb_blob_get_data (this->blob, NULL);
    this->end = this->start + hb_blob_get_length (this->blob);
    assert (this->start <= this->end); /* Must not overflow. */
    this->edit_count = 0;
    this->debug_depth = 0;

    DEBUG_MSG_LEVEL (SANITIZE, start, 0, +1,
		     "start [%p..%p] (%lu bytes)",
		     this->start, this->end,
		     (unsigned long) (this->end - this->start));
  }

  inline void end_processing (void)
  {
    DEBUG_MSG_LEVEL (SANITIZE, this->start, 0, -1,
		     "end [%p..%p] %u edit requests",
		     this->start, this->end, this->edit_count);

    hb_blob_destroy (this->blob);
    this->blob = NULL;
    this->start = this->end = NULL;
  }

  inline bool check_range (const void *base, unsigned int len) const
  {
    const char *p = (const char *) base;
    bool ok = this->start <= p && p <= this->end && (unsigned int) (this->end - p) >= len;

    DEBUG_MSG_LEVEL (SANITIZE, p, this->debug_depth+1, 0,
       "check_range [%p..%p] (%d bytes) in [%p..%p] -> %s",
       p, p + len, len,
       this->start, this->end,
       ok ? "OK" : "OUT-OF-RANGE");

    return likely (ok);
  }

  inline bool check_array (const void *base, unsigned int record_size, unsigned int len) const
  {
    const char *p = (const char *) base;
    bool overflows = _hb_unsigned_int_mul_overflows (len, record_size);
    unsigned int array_size = record_size * len;
    bool ok = !overflows && this->check_range (base, array_size);

    DEBUG_MSG_LEVEL (SANITIZE, p, this->debug_depth+1, 0,
       "check_array [%p..%p] (%d*%d=%d bytes) in [%p..%p] -> %s",
       p, p + (record_size * len), record_size, len, (unsigned int) array_size,
       this->start, this->end,
       overflows ? "OVERFLOWS" : ok ? "OK" : "OUT-OF-RANGE");

    return likely (ok);
  }

  template <typename Type>
  inline bool check_struct (const Type *obj) const
  {
    return likely (this->check_range (obj, obj->min_size));
  }

  inline bool may_edit (const void *base HB_UNUSED, unsigned int len HB_UNUSED)
  {
    if (this->edit_count >= HB_SANITIZE_MAX_EDITS)
      return false;

    const char *p = (const char *) base;
    this->edit_count++;

    DEBUG_MSG_LEVEL (SANITIZE, p, this->debug_depth+1, 0,
       "may_edit(%u) [%p..%p] (%d bytes) in [%p..%p] -> %s",
       this->edit_count,
       p, p + len, len,
       this->start, this->end,
       this->writable ? "GRANTED" : "DENIED");

    return this->writable;
  }

  template <typename Type, typename ValueType>
  inline bool try_set (const Type *obj, const ValueType &v) {
    if (this->may_edit (obj, obj->static_size)) {
      const_cast<Type *> (obj)->set (v);
      return true;
    }
    return false;
  }

  mutable unsigned int debug_depth;
  const char *start, *end;
  bool writable;
  unsigned int edit_count;
  hb_blob_t *blob;
};



/* Template to sanitize an object. */
template <typename Type>
struct Sanitizer
{
  static hb_blob_t *sanitize (hb_blob_t *blob) {
    hb_sanitize_context_t c[1];
    bool sane;

    /* TODO is_sane() stuff */

    c->init (blob);

  retry:
    DEBUG_MSG_FUNC (SANITIZE, c->start, "start");

    c->start_processing ();

    if (unlikely (!c->start)) {
      c->end_processing ();
      return blob;
    }

    Type *t = CastP<Type> (const_cast<char *> (c->start));

    sane = t->sanitize (c);
    if (sane) {
      if (c->edit_count) {
	DEBUG_MSG_FUNC (SANITIZE, c->start, "passed first round with %d edits; going for second round", c->edit_count);

        /* sanitize again to ensure no toe-stepping */
        c->edit_count = 0;
	sane = t->sanitize (c);
	if (c->edit_count) {
	  DEBUG_MSG_FUNC (SANITIZE, c->start, "requested %d edits in second round; FAILING", c->edit_count);
	  sane = false;
	}
      }
    } else {
      unsigned int edit_count = c->edit_count;
      if (edit_count && !c->writable) {
        c->start = hb_blob_get_data_writable (blob, NULL);
	c->end = c->start + hb_blob_get_length (blob);

	if (c->start) {
	  c->writable = true;
	  /* ok, we made it writable by relocating.  try again */
	  DEBUG_MSG_FUNC (SANITIZE, c->start, "retry");
	  goto retry;
	}
      }
    }

    c->end_processing ();

    DEBUG_MSG_FUNC (SANITIZE, c->start, sane ? "PASSED" : "FAILED");
    if (sane)
      return blob;
    else {
      hb_blob_destroy (blob);
      return hb_blob_get_empty ();
    }
  }

  static const Type* lock_instance (hb_blob_t *blob) {
    hb_blob_make_immutable (blob);
    const char *base = hb_blob_get_data (blob, NULL);
    return unlikely (!base) ? &Null(Type) : CastP<Type> (base);
  }
};
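
/* Illustrative sketch (not part of the library): a table blob is normally run
 * through the Sanitizer before its data is trusted; the face variable and
 * `SomeTable' type here are assumptions for the example only:
 *
 *   hb_blob_t *raw  = hb_face_reference_table (face, HB_TAG ('G','S','U','B'));
 *   hb_blob_t *safe = Sanitizer<SomeTable>::sanitize (raw);  // consumes `raw'
 *   const SomeTable *table = Sanitizer<SomeTable>::lock_instance (safe);
 *   ...
 *   hb_blob_destroy (safe);
 *
 * sanitize() either returns the (possibly edited) blob or destroys it and
 * returns the empty blob, so callers never see unvalidated data. */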



/*
 * Serialize
 */

#ifndef HB_DEBUG_SERIALIZE
#define HB_DEBUG_SERIALIZE (HB_DEBUG+0)
#endif


#define TRACE_SERIALIZE(this) \
	hb_auto_trace_t<HB_DEBUG_SERIALIZE, bool> trace \
	(&c->debug_depth, "SERIALIZE", c, HB_FUNC, \
	 "");


struct hb_serialize_context_t
{
  inline hb_serialize_context_t (void *start_, unsigned int size)
  {
    this->start = (char *) start_;
    this->end = this->start + size;

    this->ran_out_of_room = false;
    this->head = this->start;
    this->debug_depth = 0;
  }

  template <typename Type>
  inline Type *start_serialize (void)
  {
    DEBUG_MSG_LEVEL (SERIALIZE, this->start, 0, +1,
		     "start [%p..%p] (%lu bytes)",
		     this->start, this->end,
		     (unsigned long) (this->end - this->start));

    return start_embed<Type> ();
  }

  inline void end_serialize (void)
  {
    DEBUG_MSG_LEVEL (SERIALIZE, this->start, 0, -1,
		     "end [%p..%p] serialized %d bytes; %s",
		     this->start, this->end,
		     (int) (this->head - this->start),
		     this->ran_out_of_room ? "RAN OUT OF ROOM" : "did not run out of room");

  }

  template <typename Type>
  inline Type *copy (void)
  {
    assert (!this->ran_out_of_room);
    unsigned int len = this->head - this->start;
    void *p = malloc (len);
    if (p)
      memcpy (p, this->start, len);
    return reinterpret_cast<Type *> (p);
  }

  template <typename Type>
  inline Type *allocate_size (unsigned int size)
  {
    if (unlikely (this->ran_out_of_room || this->end - this->head < ptrdiff_t (size))) {
      this->ran_out_of_room = true;
      return NULL;
    }
    memset (this->head, 0, size);
    char *ret = this->head;
    this->head += size;
    return reinterpret_cast<Type *> (ret);
  }

  template <typename Type>
  inline Type *allocate_min (void)
  {
    return this->allocate_size<Type> (Type::min_size);
  }

  template <typename Type>
  inline Type *start_embed (void)
  {
    Type *ret = reinterpret_cast<Type *> (this->head);
    return ret;
  }

  template <typename Type>
  inline Type *embed (const Type &obj)
  {
    unsigned int size = obj.get_size ();
    Type *ret = this->allocate_size<Type> (size);
    if (unlikely (!ret)) return NULL;
    memcpy (ret, &obj, size);
    return ret;
  }

  template <typename Type>
  inline Type *extend_min (Type &obj)
  {
    unsigned int size = obj.min_size;
    assert (this->start <= (char *) &obj && (char *) &obj <= this->head && (char *) &obj + size >= this->head);
    if (unlikely (!this->allocate_size<Type> (((char *) &obj) + size - this->head))) return NULL;
    return reinterpret_cast<Type *> (&obj);
  }

  template <typename Type>
  inline Type *extend (Type &obj)
  {
    unsigned int size = obj.get_size ();
    assert (this->start < (char *) &obj && (char *) &obj <= this->head && (char *) &obj + size >= this->head);
    if (unlikely (!this->allocate_size<Type> (((char *) &obj) + size - this->head))) return NULL;
    return reinterpret_cast<Type *> (&obj);
  }

  inline void truncate (void *new_head)
  {
    assert (this->start < new_head && new_head <= this->head);
    this->head = (char *) new_head;
  }

  unsigned int debug_depth;
  char *start, *end, *head;
  bool ran_out_of_room;
};
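
/* Illustrative sketch (not part of the library): typical use of the serialize
 * context over a caller-provided scratch buffer; `ExampleTable' is a
 * hypothetical type with a serialize() method:
 *
 *   char buf[4096];
 *   hb_serialize_context_t c (buf, sizeof (buf));
 *   ExampleTable *t = c.start_serialize<ExampleTable> ();
 *   bool ok = t->serialize (&c, ...);                  // table-specific args
 *   c.end_serialize ();
 *   ExampleTable *out = ok ? c.copy<ExampleTable> () : NULL;  // malloc'ed copy
 *
 * If the buffer is too small, allocate_size() sets ran_out_of_room and keeps
 * returning NULL, so serialize() calls fail instead of writing past the end. */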

template <typename Type>
struct Supplier
{
  inline Supplier (const Type *array, unsigned int len_)
  {
    head = array;
    len = len_;
  }
  inline const Type operator [] (unsigned int i) const
  {
    if (unlikely (i >= len)) return Type ();
    return head[i];
  }

  inline void advance (unsigned int count)
  {
    if (unlikely (count > len))
      count = len;
    len -= count;
    head += count;
  }

  private:
  inline Supplier (const Supplier<Type> &); /* Disallow copy */
  inline Supplier<Type>& operator= (const Supplier<Type> &); /* Disallow copy */

  unsigned int len;
  const Type *head;
};




/*
 *
 * The OpenType Font File: Data Types
 */


/* "The following data types are used in the OpenType font file.
 *  All OpenType fonts use Motorola-style byte ordering (Big Endian):" */

/*
 * Int types
 */


template <typename Type, int Bytes> struct BEInt;

template <typename Type>
struct BEInt<Type, 1>
{
  public:
  inline void set (Type V)
  {
    v = V;
  }
  inline operator Type (void) const
  {
    return v;
  }
  private: uint8_t v;
};
template <typename Type>
struct BEInt<Type, 2>
{
  public:
  inline void set (Type V)
  {
    v[0] = (V >>  8) & 0xFF;
    v[1] = (V      ) & 0xFF;
  }
  inline operator Type (void) const
  {
    return (v[0] <<  8)
         + (v[1]      );
  }
  private: uint8_t v[2];
};
template <typename Type>
struct BEInt<Type, 3>
{
  public:
  inline void set (Type V)
  {
    v[0] = (V >> 16) & 0xFF;
    v[1] = (V >>  8) & 0xFF;
    v[2] = (V      ) & 0xFF;
  }
  inline operator Type (void) const
  {
    return (v[0] << 16)
         + (v[1] <<  8)
         + (v[2]      );
  }
  private: uint8_t v[3];
};
template <typename Type>
struct BEInt<Type, 4>
{
  public:
  inline void set (Type V)
  {
    v[0] = (V >> 24) & 0xFF;
    v[1] = (V >> 16) & 0xFF;
    v[2] = (V >>  8) & 0xFF;
    v[3] = (V      ) & 0xFF;
  }
  inline operator Type (void) const
  {
    return (v[0] << 24)
         + (v[1] << 16)
         + (v[2] <<  8)
         + (v[3]      );
  }
  private: uint8_t v[4];
};
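
/* Illustrative note (not part of the library): BEInt stores its value one
 * byte at a time, so reads and writes are alignment-safe and always
 * big-endian regardless of host endianness.  For example, after
 * BEInt<uint16_t, 2> b; b.set (0x1234); the underlying bytes are
 * { 0x12, 0x34 }, and (uint16_t) b recomposes 0x1234 on any platform. */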

/* Integer types in big-endian order with no alignment requirement */
template <typename Type, unsigned int Size>
struct IntType
{
  inline void set (Type i) { v.set (i); }
  inline operator Type(void) const { return v; }
  inline bool operator == (const IntType<Type,Size> &o) const { return (Type) v == (Type) o.v; }
  inline bool operator != (const IntType<Type,Size> &o) const { return !(*this == o); }
  static inline int cmp (const IntType<Type,Size> *a, const IntType<Type,Size> *b) { return b->cmp (*a); }
  inline int cmp (Type a) const
  {
    Type b = v;
    if (sizeof (Type) < sizeof (int))
      return (int) a - (int) b;
    else
      return a < b ? -1 : a == b ? 0 : +1;
  }
  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (likely (c->check_struct (this)));
  }
  protected:
  BEInt<Type, Size> v;
  public:
  DEFINE_SIZE_STATIC (Size);
};

typedef IntType<uint8_t,  1> BYTE;	/* 8-bit unsigned integer. */
typedef IntType<uint16_t, 2> USHORT;	/* 16-bit unsigned integer. */
typedef IntType<int16_t,  2> SHORT;	/* 16-bit signed integer. */
typedef IntType<uint32_t, 4> ULONG;	/* 32-bit unsigned integer. */
typedef IntType<int32_t,  4> LONG;	/* 32-bit signed integer. */
typedef IntType<uint32_t, 3> UINT24;	/* 24-bit unsigned integer. */

/* 16-bit signed integer (SHORT) that describes a quantity in FUnits. */
typedef SHORT FWORD;

/* 16-bit unsigned integer (USHORT) that describes a quantity in FUnits. */
typedef USHORT UFWORD;

/* 16-bit signed fixed number with the low 14 bits of fraction (2.14). */
struct F2DOT14 : SHORT
{
  //inline float to_float (void) const { return ???; }
  //inline void set_float (float f) { v.set (f * ???); }
  public:
  DEFINE_SIZE_STATIC (2);
};

/* Date represented in number of seconds since 12:00 midnight, January 1,
 * 1904. The value is represented as a signed 64-bit integer. */
struct LONGDATETIME
{
  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (likely (c->check_struct (this)));
  }
  protected:
  LONG major;
  ULONG minor;
  public:
  DEFINE_SIZE_STATIC (8);
};

/* Array of four uint8s (length = 32 bits) used to identify a script, language
 * system, feature, or baseline */
struct Tag : ULONG
{
  /* What the char* converters return is NOT nul-terminated.  Print using "%.4s" */
  inline operator const char* (void) const { return reinterpret_cast<const char *> (&this->v); }
  inline operator char* (void) { return reinterpret_cast<char *> (&this->v); }
  public:
  DEFINE_SIZE_STATIC (4);
};
DEFINE_NULL_DATA (Tag, "    ");

/* Glyph index number, same as uint16 (length = 16 bits) */
struct GlyphID : USHORT {
  static inline int cmp (const GlyphID *a, const GlyphID *b) { return b->USHORT::cmp (*a); }
  inline int cmp (hb_codepoint_t a) const { return (int) a - (int) *this; }
};

/* Script/language-system/feature index */
struct Index : USHORT {
  static const unsigned int NOT_FOUND_INDEX = 0xFFFFu;
};
DEFINE_NULL_DATA (Index, "\xff\xff");

/* Offset, Null offset = 0 */
template <typename Type=USHORT>
struct Offset : Type
{
  inline bool is_null (void) const { return 0 == *this; }
  public:
  DEFINE_SIZE_STATIC (sizeof(Type));
};


/* CheckSum */
struct CheckSum : ULONG
{
  /* This is the reference implementation from the spec. */
  static inline uint32_t CalcTableChecksum (const ULONG *Table, uint32_t Length)
  {
    uint32_t Sum = 0L;
    const ULONG *EndPtr = Table+((Length+3) & ~3) / ULONG::static_size;

    while (Table < EndPtr)
      Sum += *Table++;
    return Sum;
  }

  /* Note: data should be 4-byte aligned and have 4-byte padding at the end. */
  inline void set_for_data (const void *data, unsigned int length)
  { set (CalcTableChecksum ((const ULONG *) data, length)); }

  public:
  DEFINE_SIZE_STATIC (4);
};
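
/* Illustrative note (not part of the library): CalcTableChecksum() sums the
 * table as big-endian ULONGs, rounding the length up to a multiple of four;
 * that is why callers must pad their data to a 4-byte boundary.  E.g. an
 * 18-byte table is summed as five ULONGs, the last one reading its tail
 * bytes from the (zeroed) padding. */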


/*
 * Version Numbers
 */

template <typename FixedType=USHORT>
struct FixedVersion
{
  inline uint32_t to_int (void) const { return (major << (sizeof(FixedType) * 8)) + minor; }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  FixedType major;
  FixedType minor;
  public:
  DEFINE_SIZE_STATIC (2 * sizeof(FixedType));
};
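
/* Illustrative note (not part of the library): with the default
 * FixedType=USHORT, to_int() packs major/minor into the familiar 16.16
 * value, so a version 1.0 table compares equal to 0x00010000u. */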



/*
 * Template subclasses of Offset that do the dereferencing.
 * Use: (base+offset)
 */

template <typename Type, typename OffsetType=USHORT>
struct OffsetTo : Offset<OffsetType>
{
  inline const Type& operator () (const void *base) const
  {
    unsigned int offset = *this;
    if (unlikely (!offset)) return Null(Type);
    return StructAtOffset<Type> (base, offset);
  }

  inline Type& serialize (hb_serialize_context_t *c, const void *base)
  {
    Type *t = c->start_embed<Type> ();
    this->set ((char *) t - (char *) base); /* TODO(serialize) Overflow? */
    return *t;
  }

  inline bool sanitize (hb_sanitize_context_t *c, const void *base) const
  {
    TRACE_SANITIZE (this);
    if (unlikely (!c->check_struct (this))) return_trace (false);
    unsigned int offset = *this;
    if (unlikely (!offset)) return_trace (true);
    const Type &obj = StructAtOffset<Type> (base, offset);
    return_trace (likely (obj.sanitize (c)) || neuter (c));
  }
  template <typename T>
  inline bool sanitize (hb_sanitize_context_t *c, const void *base, T user_data) const
  {
    TRACE_SANITIZE (this);
    if (unlikely (!c->check_struct (this))) return_trace (false);
    unsigned int offset = *this;
    if (unlikely (!offset)) return_trace (true);
    const Type &obj = StructAtOffset<Type> (base, offset);
    return_trace (likely (obj.sanitize (c, user_data)) || neuter (c));
  }

  /* Set the offset to Null */
  inline bool neuter (hb_sanitize_context_t *c) const {
    return c->try_set (this, 0);
  }
  DEFINE_SIZE_STATIC (sizeof(OffsetType));
};
template <typename Base, typename OffsetType, typename Type>
static inline const Type& operator + (const Base &base, const OffsetTo<Type, OffsetType> &offset) { return offset (base); }
template <typename Base, typename OffsetType, typename Type>
static inline Type& operator + (Base &base, OffsetTo<Type, OffsetType> &offset) { return offset (base); }
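
/* Illustrative note (not part of the library): with the operator+ overloads
 * above, a struct holding an OffsetTo member dereferences it relative to any
 * base, usually itself, without manual pointer arithmetic; `exampleOffset'
 * is a hypothetical OffsetTo<ExampleTable> member:
 *
 *   const ExampleTable &t = this + exampleOffset;  // Null(ExampleTable) if 0
 *
 * OffsetListOf below uses exactly this form (this+this->array[i]); a null
 * offset resolves to the shared Null object rather than to garbage. */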


/*
 * Array Types
 */

/* An array with a number of elements. */
template <typename Type, typename LenType=USHORT>
struct ArrayOf
{
  const Type *sub_array (unsigned int start_offset, unsigned int *pcount /* IN/OUT */) const
  {
    unsigned int count = len;
    if (unlikely (start_offset > count))
      count = 0;
    else
      count -= start_offset;
    count = MIN (count, *pcount);
    *pcount = count;
    return array + start_offset;
  }

  inline const Type& operator [] (unsigned int i) const
  {
    if (unlikely (i >= len)) return Null(Type);
    return array[i];
  }
  inline Type& operator [] (unsigned int i)
  {
    return array[i];
  }
  inline unsigned int get_size (void) const
  { return len.static_size + len * Type::static_size; }

  inline bool serialize (hb_serialize_context_t *c,
			 unsigned int items_len)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return_trace (false);
    len.set (items_len); /* TODO(serialize) Overflow? */
    if (unlikely (!c->extend (*this))) return_trace (false);
    return_trace (true);
  }

  inline bool serialize (hb_serialize_context_t *c,
			 Supplier<Type> &items,
			 unsigned int items_len)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!serialize (c, items_len))) return_trace (false);
    for (unsigned int i = 0; i < items_len; i++)
      array[i] = items[i];
    items.advance (items_len);
    return_trace (true);
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (unlikely (!sanitize_shallow (c))) return_trace (false);

    /* Note: for structs that do not reference other structs,
     * we do not need to call their sanitize() as we already did
     * a bound check on the aggregate array size.  We just include
     * a small unreachable expression to make sure the structs
     * pointed to do have a simple sanitize(), i.e. they do not
     * reference other structs via offsets.
     */
    (void) (false && array[0].sanitize (c));

    return_trace (true);
  }
  inline bool sanitize (hb_sanitize_context_t *c, const void *base) const
  {
    TRACE_SANITIZE (this);
    if (unlikely (!sanitize_shallow (c))) return_trace (false);
    unsigned int count = len;
    for (unsigned int i = 0; i < count; i++)
      if (unlikely (!array[i].sanitize (c, base)))
        return_trace (false);
    return_trace (true);
  }
  template <typename T>
  inline bool sanitize (hb_sanitize_context_t *c, const void *base, T user_data) const
  {
    TRACE_SANITIZE (this);
    if (unlikely (!sanitize_shallow (c))) return_trace (false);
    unsigned int count = len;
    for (unsigned int i = 0; i < count; i++)
      if (unlikely (!array[i].sanitize (c, base, user_data)))
        return_trace (false);
    return_trace (true);
  }

  template <typename SearchType>
  inline int lsearch (const SearchType &x) const
  {
    unsigned int count = len;
    for (unsigned int i = 0; i < count; i++)
      if (!this->array[i].cmp (x))
        return i;
    return -1;
  }

  private:
  inline bool sanitize_shallow (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && c->check_array (array, Type::static_size, len));
  }

  public:
  LenType len;
  Type array[VAR];
  public:
  DEFINE_SIZE_ARRAY (sizeof (LenType), array);
};
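
/* Illustrative sketch (not part of the library): filling an ArrayOf during
 * serialization from a plain C array via a Supplier; the names below are
 * assumptions for the example only:
 *
 *   GlyphID gids[3];                            // filled by the caller
 *   Supplier<GlyphID> supplier (gids, 3);
 *   ArrayOf<GlyphID> *arr = c->start_embed<ArrayOf<GlyphID> > ();
 *   arr->serialize (c, supplier, 3);            // writes len, copies 3 items
 *
 * serialize() first extend_min()s the context to cover the len field, stores
 * the length, then extend()s to the full get_size() and copies the items,
 * advancing the supplier past what it consumed. */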

/* Array of Offsets */
template <typename Type>
struct OffsetArrayOf : ArrayOf<OffsetTo<Type> > {};

/* Array of offsets relative to the beginning of the array itself. */
template <typename Type>
struct OffsetListOf : OffsetArrayOf<Type>
{
  inline const Type& operator [] (unsigned int i) const
  {
    if (unlikely (i >= this->len)) return Null(Type);
    return this+this->array[i];
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (OffsetArrayOf<Type>::sanitize (c, this));
  }
  template <typename T>
  inline bool sanitize (hb_sanitize_context_t *c, T user_data) const
  {
    TRACE_SANITIZE (this);
    return_trace (OffsetArrayOf<Type>::sanitize (c, this, user_data));
  }
};


/* An array starting at the second element. */
template <typename Type, typename LenType=USHORT>
struct HeadlessArrayOf
{
  inline const Type& operator [] (unsigned int i) const
  {
    if (unlikely (i >= len || !i)) return Null(Type);
    return array[i-1];
  }
  inline unsigned int get_size (void) const
  { return len.static_size + (len ? len - 1 : 0) * Type::static_size; }

  inline bool serialize (hb_serialize_context_t *c,
			 Supplier<Type> &items,
			 unsigned int items_len)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return_trace (false);
    len.set (items_len); /* TODO(serialize) Overflow? */
    if (unlikely (!items_len)) return_trace (true);
    if (unlikely (!c->extend (*this))) return_trace (false);
    for (unsigned int i = 0; i < items_len - 1; i++)
      array[i] = items[i];
    items.advance (items_len - 1);
    return_trace (true);
  }

  inline bool sanitize_shallow (hb_sanitize_context_t *c) const
  {
    return c->check_struct (this)
	&& c->check_array (this, Type::static_size, len);
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (unlikely (!sanitize_shallow (c))) return_trace (false);

    /* Note: for structs that do not reference other structs,
     * we do not need to call their sanitize() as we already did
     * a bound check on the aggregate array size.  We just include
     * a small unreachable expression to make sure the structs
     * pointed to do have a simple sanitize(), i.e. they do not
     * reference other structs via offsets.
     */
    (void) (false && array[0].sanitize (c));

    return_trace (true);
  }

  LenType len;
  Type array[VAR];
  public:
  DEFINE_SIZE_ARRAY (sizeof (LenType), array);
};


/* An array with sorted elements.  Supports binary searching. */
template <typename Type, typename LenType=USHORT>
struct SortedArrayOf : ArrayOf<Type, LenType>
{
  template <typename SearchType>
  inline int bsearch (const SearchType &x) const
  {
    /* Hand-coded bsearch here since this is in the hot inner loop. */
    int min = 0, max = (int) this->len - 1;
    while (min <= max)
    {
      int mid = (min + max) / 2;
      int c = this->array[mid].cmp (x);
      if (c < 0)
        max = mid - 1;
      else if (c > 0)
        min = mid + 1;
      else
        return mid;
    }
    return -1;
  }
};
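
/* Illustrative note (not part of the library): bsearch() above assumes the
 * array is sorted by the same key that Type::cmp() compares against; e.g. a
 * SortedArrayOf<GlyphID> can be probed directly with a glyph id:
 *
 *   int i = glyphs.bsearch (gid);   // index, or -1 if not present
 */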


} /* namespace OT */


#endif /* HB_OPEN_TYPE_PRIVATE_HH */