hb-open-type-private.hh revision fd0de881f4fc004da6f36d50a91d0e62f8eb4d8c
1/* 2 * Copyright © 2007,2008,2009,2010 Red Hat, Inc. 3 * Copyright © 2012 Google, Inc. 4 * 5 * This is part of HarfBuzz, a text shaping library. 6 * 7 * Permission is hereby granted, without written agreement and without 8 * license or royalty fees, to use, copy, modify, and distribute this 9 * software and its documentation for any purpose, provided that the 10 * above copyright notice and the following two paragraphs appear in 11 * all copies of this software. 12 * 13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR 14 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES 15 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN 16 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH 17 * DAMAGE. 18 * 19 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, 20 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND 21 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS 22 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO 23 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. 
 *
 * Red Hat Author(s): Behdad Esfahbod
 * Google Author(s): Behdad Esfahbod
 */

#ifndef HB_OPEN_TYPE_PRIVATE_HH
#define HB_OPEN_TYPE_PRIVATE_HH

#include "hb-private.hh"

#include "hb-blob.h"


namespace OT {


/*
 * Casts
 */

/* Cast to struct T, reference to reference */
template<typename Type, typename TObject>
inline const Type& CastR(const TObject &X)
{ return reinterpret_cast<const Type&> (X); }
template<typename Type, typename TObject>
inline Type& CastR(TObject &X)
{ return reinterpret_cast<Type&> (X); }

/* Cast to struct T, pointer to pointer */
template<typename Type, typename TObject>
inline const Type* CastP(const TObject *X)
{ return reinterpret_cast<const Type*> (X); }
template<typename Type, typename TObject>
inline Type* CastP(TObject *X)
{ return reinterpret_cast<Type*> (X); }

/* StructAtOffset<T>(P,Ofs) returns the struct T& that is placed at memory
 * location pointed to by P plus Ofs bytes.
 * NOTE(review): no bounds checking here; callers are expected to have
 * validated the offset via the sanitize machinery below. */
template<typename Type>
inline const Type& StructAtOffset(const void *P, unsigned int offset)
{ return * reinterpret_cast<const Type*> ((const char *) P + offset); }
template<typename Type>
inline Type& StructAtOffset(void *P, unsigned int offset)
{ return * reinterpret_cast<Type*> ((char *) P + offset); }

/* StructAfter<T>(X) returns the struct T& that is placed after X.
 * Works with X of variable size also.  X must implement get_size() */
template<typename Type, typename TObject>
inline const Type& StructAfter(const TObject &X)
{ return StructAtOffset<Type>(&X, X.get_size()); }
template<typename Type, typename TObject>
inline Type& StructAfter(TObject &X)
{ return StructAtOffset<Type>(&X, X.get_size()); }



/*
 * Size checking
 *
 * The DEFINE_SIZE_* macros below inject, into each struct, the
 * compile-time constants `static_size` / `min_size` plus dummy methods
 * whose only purpose is to host compile-time assertions about the
 * struct's layout (exact size, and POD-ness via ASSERT_INSTANCE_POD).
 */

/* Check _assertion in a method environment */
#define _DEFINE_INSTANCE_ASSERTION1(_line, _assertion) \
  inline void _instance_assertion_on_line_##_line (void) const \
  { \
    ASSERT_STATIC (_assertion); \
    ASSERT_INSTANCE_POD (*this); /* Make sure it's POD. */ \
  }
# define _DEFINE_INSTANCE_ASSERTION0(_line, _assertion) _DEFINE_INSTANCE_ASSERTION1 (_line, _assertion)
# define DEFINE_INSTANCE_ASSERTION(_assertion) _DEFINE_INSTANCE_ASSERTION0 (__LINE__, _assertion)

/* Check that _code compiles in a method environment */
#define _DEFINE_COMPILES_ASSERTION1(_line, _code) \
  inline void _compiles_assertion_on_line_##_line (void) const \
  { _code; }
# define _DEFINE_COMPILES_ASSERTION0(_line, _code) _DEFINE_COMPILES_ASSERTION1 (_line, _code)
# define DEFINE_COMPILES_ASSERTION(_code) _DEFINE_COMPILES_ASSERTION0 (__LINE__, _code)


/* Fixed-size struct: static_size == min_size == sizeof (*this). */
#define DEFINE_SIZE_STATIC(size) \
  DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size)); \
  static const unsigned int static_size = (size); \
  static const unsigned int min_size = (size)

/* Size signifying variable-sized array */
#define VAR 1

/* Union: min_size taken from one designated member. */
#define DEFINE_SIZE_UNION(size, _member) \
  DEFINE_INSTANCE_ASSERTION (this->u._member.static_size == (size)); \
  static const unsigned int min_size = (size)

#define DEFINE_SIZE_MIN(size) \
  DEFINE_INSTANCE_ASSERTION (sizeof (*this) >= (size)); \
  static const unsigned int min_size = (size)

/* Struct ending in a variable-sized array (declared with [VAR], i.e.
 * one nominal element); min_size is the header size without elements. */
#define DEFINE_SIZE_ARRAY(size, array) \
  DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size) + sizeof (array[0])); \
  DEFINE_COMPILES_ASSERTION ((void) array[0].static_size) \
  static const unsigned int min_size = (size)

#define DEFINE_SIZE_ARRAY2(size, array1, array2) \
  DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size) + sizeof (this->array1[0]) + sizeof (this->array2[0])); \
  DEFINE_COMPILES_ASSERTION ((void) array1[0].static_size; (void) array2[0].static_size) \
  static const unsigned int min_size = (size)



/*
 * Null objects
 */

/* Global nul-content Null pool.  Enlarge as necessary. */
/* TODO This really should be a extern HB_INTERNAL and defined somewhere... */
/* Static storage duration => zero-initialized; Null<Type>() aliases it. */
static const void *_NullPool[64 / sizeof (void *)];

/* Generic nul-content Null objects. */
template <typename Type>
static inline const Type& Null (void) {
  ASSERT_STATIC (sizeof (Type) <= sizeof (_NullPool));
  return *CastP<Type> (_NullPool);
}

/* Specialization for arbitrary-content arbitrary-sized Null objects. */
#define DEFINE_NULL_DATA(Type, data) \
static const char _Null##Type[sizeof (Type) + 1] = data; /* +1 is for nul-termination in data */ \
template <> \
inline const Type& Null<Type> (void) { \
  return *CastP<Type> (_Null##Type); \
} /* The following line really exists such that we end in a place needing semicolon */ \
ASSERT_STATIC (Type::min_size + 1 <= sizeof (_Null##Type))

/* Accessor macro.
*/ 155#define Null(Type) Null<Type>() 156 157 158 159/* 160 * Sanitize 161 */ 162 163#ifndef HB_DEBUG_SANITIZE 164#define HB_DEBUG_SANITIZE (HB_DEBUG+0) 165#endif 166 167 168#define TRACE_SANITIZE() \ 169 hb_auto_trace_t<HB_DEBUG_SANITIZE> trace (&c->debug_depth, "SANITIZE", this, HB_FUNC, ""); 170 171 172struct hb_sanitize_context_t 173{ 174 inline void init (hb_blob_t *b) 175 { 176 this->blob = hb_blob_reference (b); 177 this->writable = false; 178 } 179 180 inline void start_processing (void) 181 { 182 this->start = hb_blob_get_data (this->blob, NULL); 183 this->end = this->start + hb_blob_get_length (this->blob); 184 this->edit_count = 0; 185 this->debug_depth = 0; 186 187 DEBUG_MSG_LEVEL (SANITIZE, this->blob, 0, +1, 188 "start [%p..%p] (%lu bytes)", 189 this->start, this->end, 190 (unsigned long) (this->end - this->start)); 191 } 192 193 inline void end_processing (void) 194 { 195 DEBUG_MSG_LEVEL (SANITIZE, this->blob, 0, -1, 196 "end [%p..%p] %u edit requests", 197 this->start, this->end, this->edit_count); 198 199 hb_blob_destroy (this->blob); 200 this->blob = NULL; 201 this->start = this->end = NULL; 202 } 203 204 inline bool check_range (const void *base, unsigned int len) const 205 { 206 const char *p = (const char *) base; 207 208 hb_auto_trace_t<HB_DEBUG_SANITIZE> trace (&this->debug_depth, "SANITIZE", this->blob, NULL, 209 "check_range [%p..%p] (%d bytes) in [%p..%p]", 210 p, p + len, len, 211 this->start, this->end); 212 213 return TRACE_RETURN (likely (this->start <= p && p <= this->end && (unsigned int) (this->end - p) >= len)); 214 } 215 216 inline bool check_array (const void *base, unsigned int record_size, unsigned int len) const 217 { 218 const char *p = (const char *) base; 219 bool overflows = _hb_unsigned_int_mul_overflows (len, record_size); 220 221 hb_auto_trace_t<HB_DEBUG_SANITIZE> trace (&this->debug_depth, "SANITIZE", this->blob, NULL, 222 "check_array [%p..%p] (%d*%d=%ld bytes) in [%p..%p]", 223 p, p + (record_size * len), 
record_size, len, (unsigned long) record_size * len, 224 this->start, this->end); 225 226 return TRACE_RETURN (likely (!overflows && this->check_range (base, record_size * len))); 227 } 228 229 template <typename Type> 230 inline bool check_struct (const Type *obj) const 231 { 232 return likely (this->check_range (obj, obj->min_size)); 233 } 234 235 inline bool may_edit (const void *base HB_UNUSED, unsigned int len HB_UNUSED) 236 { 237 const char *p = (const char *) base; 238 this->edit_count++; 239 240 hb_auto_trace_t<HB_DEBUG_SANITIZE> trace (&this->debug_depth, "SANITIZE", this->blob, NULL, 241 "may_edit(%u) [%p..%p] (%d bytes) in [%p..%p] -> %s", 242 this->edit_count, 243 p, p + len, len, 244 this->start, this->end); 245 246 return TRACE_RETURN (this->writable); 247 } 248 249 mutable unsigned int debug_depth; 250 const char *start, *end; 251 bool writable; 252 unsigned int edit_count; 253 hb_blob_t *blob; 254}; 255 256 257 258/* Template to sanitize an object. */ 259template <typename Type> 260struct Sanitizer 261{ 262 static hb_blob_t *sanitize (hb_blob_t *blob) { 263 hb_sanitize_context_t c[1] = {{0}}; 264 bool sane; 265 266 /* TODO is_sane() stuff */ 267 268 c->init (blob); 269 270 retry: 271 DEBUG_MSG_FUNC (SANITIZE, blob, "start"); 272 273 c->start_processing (); 274 275 if (unlikely (!c->start)) { 276 c->end_processing (); 277 return blob; 278 } 279 280 Type *t = CastP<Type> (const_cast<char *> (c->start)); 281 282 sane = t->sanitize (c); 283 if (sane) { 284 if (c->edit_count) { 285 DEBUG_MSG_FUNC (SANITIZE, blob, "passed first round with %d edits; going for second round", c->edit_count); 286 287 /* sanitize again to ensure no toe-stepping */ 288 c->edit_count = 0; 289 sane = t->sanitize (c); 290 if (c->edit_count) { 291 DEBUG_MSG_FUNC (SANITIZE, blob, "requested %d edits in second round; FAILLING", c->edit_count); 292 sane = false; 293 } 294 } 295 } else { 296 unsigned int edit_count = c->edit_count; 297 if (edit_count && !c->writable) { 298 c->start = 
hb_blob_get_data_writable (blob, NULL); 299 c->end = c->start + hb_blob_get_length (blob); 300 301 if (c->start) { 302 c->writable = true; 303 /* ok, we made it writable by relocating. try again */ 304 DEBUG_MSG_FUNC (SANITIZE, blob, "retry"); 305 goto retry; 306 } 307 } 308 } 309 310 c->end_processing (); 311 312 DEBUG_MSG_FUNC (SANITIZE, blob, sane ? "PASSED" : "FAILED"); 313 if (sane) 314 return blob; 315 else { 316 hb_blob_destroy (blob); 317 return hb_blob_get_empty (); 318 } 319 } 320 321 static const Type* lock_instance (hb_blob_t *blob) { 322 hb_blob_make_immutable (blob); 323 const char *base = hb_blob_get_data (blob, NULL); 324 return unlikely (!base) ? &Null(Type) : CastP<Type> (base); 325 } 326}; 327 328 329 330/* 331 * Serialize 332 */ 333 334#ifndef HB_DEBUG_SERIALIZE 335#define HB_DEBUG_SERIALIZE (HB_DEBUG+0) 336#endif 337 338 339#define TRACE_SERIALIZE() \ 340 hb_auto_trace_t<HB_DEBUG_SERIALIZE> trace (&c->debug_depth, "SERIALIZE", c, HB_FUNC, ""); 341 342 343struct hb_serialize_context_t 344{ 345 inline hb_serialize_context_t (void *start, unsigned int size) 346 { 347 this->start = (char *) start; 348 this->end = this->start + size; 349 350 this->ran_out_of_room = false; 351 this->head = this->start; 352 this->debug_depth = 0; 353 } 354 355 template <typename Type> 356 inline Type *start_serialize (void) 357 { 358 DEBUG_MSG_LEVEL (SERIALIZE, this->start, 0, +1, 359 "start [%p..%p] (%lu bytes)", 360 this->start, this->end, 361 (unsigned long) (this->end - this->start)); 362 363 return start_embed<Type> (); 364 } 365 366 inline void end_serialize (void) 367 { 368 DEBUG_MSG_LEVEL (SERIALIZE, this->start, 0, -1, 369 "end [%p..%p] serialized %d bytes; %s", 370 this->start, this->end, 371 (int) (this->head - this->start), 372 this->ran_out_of_room ? 
"RAN OUT OF ROOM" : "did not ran out of room"); 373 374 } 375 376 template <typename Type> 377 inline Type *copy (void) 378 { 379 assert (!this->ran_out_of_room); 380 unsigned int len = this->head - this->start; 381 void *p = malloc (len); 382 if (p) 383 memcpy (p, this->start, len); 384 return reinterpret_cast<Type *> (p); 385 } 386 387 template <typename Type> 388 inline Type *allocate_size (unsigned int size) 389 { 390 if (unlikely (this->ran_out_of_room || this->end - this->head < size)) { 391 this->ran_out_of_room = true; 392 return NULL; 393 } 394 memset (this->head, 0, size); 395 char *ret = this->head; 396 this->head += size; 397 return reinterpret_cast<Type *> (ret); 398 } 399 400 template <typename Type> 401 inline Type *allocate_min (void) 402 { 403 return this->allocate_size<Type> (Type::min_size); 404 } 405 406 template <typename Type> 407 inline Type *start_embed (void) 408 { 409 Type *ret = reinterpret_cast<Type *> (this->head); 410 return ret; 411 } 412 413 template <typename Type> 414 inline Type *embed (const Type &obj) 415 { 416 unsigned int size = obj.get_size (); 417 Type *ret = this->allocate_size<Type> (size); 418 if (unlikely (!ret)) return NULL; 419 memcpy (ret, obj, size); 420 return ret; 421 } 422 423 template <typename Type> 424 inline Type *extend_min (Type &obj) 425 { 426 unsigned int size = obj.min_size; 427 assert (this->start <= (char *) &obj && (char *) &obj <= this->head && (char *) &obj + size >= this->head); 428 if (unlikely (!this->allocate_size<Type> (((char *) &obj) + size - this->head))) return NULL; 429 return reinterpret_cast<Type *> (&obj); 430 } 431 432 template <typename Type> 433 inline Type *extend (Type &obj) 434 { 435 unsigned int size = obj.get_size (); 436 assert (this->start < (char *) &obj && (char *) &obj <= this->head && (char *) &obj + size >= this->head); 437 if (unlikely (!this->allocate_size<Type> (((char *) &obj) + size - this->head))) return NULL; 438 return reinterpret_cast<Type *> (&obj); 439 } 440 441 
inline void truncate (void *head) 442 { 443 assert (this->start < head && head <= this->head); 444 this->head = (char *) head; 445 } 446 447 unsigned int debug_depth; 448 char *start, *end, *head; 449 bool ran_out_of_room; 450}; 451 452template <typename Type> 453struct Supplier 454{ 455 inline Supplier (const Type *array, unsigned int len_) 456 { 457 head = array; 458 len = len_; 459 } 460 inline const Type operator [] (unsigned int i) const 461 { 462 if (unlikely (i >= len)) return Type (); 463 return head[i]; 464 } 465 466 inline void advance (unsigned int count) 467 { 468 if (unlikely (count > len)) 469 count = len; 470 len -= count; 471 head += count; 472 } 473 474 private: 475 inline Supplier (const Supplier<Type> &); /* Disallow copy */ 476 inline Supplier<Type>& operator= (const Supplier<Type> &); /* Disallow copy */ 477 478 unsigned int len; 479 const Type *head; 480}; 481 482 483 484 485/* 486 * 487 * The OpenType Font File: Data Types 488 */ 489 490 491/* "The following data types are used in the OpenType font file. 
 * All OpenType fonts use Motorola-style byte ordering (Big Endian):" */

/*
 * Int types
 */


/* Big-endian integer of a fixed byte width; only the 2- and 4-byte
 * specializations below are defined. */
template <typename Type, int Bytes> struct BEInt;

template <typename Type>
struct BEInt<Type, 2>
{
  public:
  inline void set (Type i) { hb_be_uint16_put (v,i); }
  inline operator Type (void) const { return hb_be_uint16_get (v); }
  inline bool operator == (const BEInt<Type, 2>& o) const { return hb_be_uint16_eq (v, o.v); }
  inline bool operator != (const BEInt<Type, 2>& o) const { return !(*this == o); }
  private: uint8_t v[2]; /* raw big-endian bytes; byte array => no alignment requirement */
};
template <typename Type>
struct BEInt<Type, 4>
{
  public:
  inline void set (Type i) { hb_be_uint32_put (v,i); }
  inline operator Type (void) const { return hb_be_uint32_get (v); }
  inline bool operator == (const BEInt<Type, 4>& o) const { return hb_be_uint32_eq (v, o.v); }
  inline bool operator != (const BEInt<Type, 4>& o) const { return !(*this == o); }
  private: uint8_t v[4]; /* raw big-endian bytes */
};

/* Integer types in big-endian order and no alignment requirement */
template <typename Type>
struct IntType
{
  inline void set (Type i) { v.set (i); }
  inline operator Type(void) const { return v; }
  inline bool operator == (const IntType<Type> &o) const { return v == o.v; }
  inline bool operator != (const IntType<Type> &o) const { return v != o.v; }
  /* qsort/bsearch-style comparator; note the operands are swapped so it
   * delegates to the member cmp below. */
  static inline int cmp (const IntType<Type> *a, const IntType<Type> *b) { return b->cmp (*a); }
  /* Returns -1/0/+1 for a <,==,> this value. */
  inline int cmp (IntType<Type> va) const { Type a = va; Type b = v; return a < b ? -1 : a == b ? 0 : +1; }
  inline int cmp (Type a) const { Type b = v; return a < b ? -1 : a == b ? 0 : +1; }
  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return TRACE_RETURN (likely (c->check_struct (this)));
  }
  protected:
  BEInt<Type, sizeof (Type)> v;
  public:
  DEFINE_SIZE_STATIC (sizeof (Type));
};

typedef IntType<uint16_t> USHORT;	/* 16-bit unsigned integer. */
typedef IntType<int16_t> SHORT;	/* 16-bit signed integer. */
typedef IntType<uint32_t> ULONG;	/* 32-bit unsigned integer. */
typedef IntType<int32_t> LONG;	/* 32-bit signed integer. */

/* 16-bit signed integer (SHORT) that describes a quantity in FUnits. */
typedef SHORT FWORD;

/* 16-bit unsigned integer (USHORT) that describes a quantity in FUnits. */
typedef USHORT UFWORD;

/* Date represented in number of seconds since 12:00 midnight, January 1,
 * 1904. The value is represented as a signed 64-bit integer. */
struct LONGDATETIME
{
  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return TRACE_RETURN (likely (c->check_struct (this)));
  }
  private:
  /* Stored as two big-endian 32-bit halves; never interpreted here. */
  LONG major;
  ULONG minor;
  public:
  DEFINE_SIZE_STATIC (8);
};

/* Array of four uint8s (length = 32 bits) used to identify a script, language
 * system, feature, or baseline */
struct Tag : ULONG
{
  /* What the char* converters return is NOT nul-terminated.  Print using "%.4s" */
  inline operator const char* (void) const { return reinterpret_cast<const char *> (&this->v); }
  inline operator char* (void) { return reinterpret_cast<char *> (&this->v); }
  public:
  DEFINE_SIZE_STATIC (4);
};
DEFINE_NULL_DATA (Tag, " ");

/* Glyph index number, same as uint16 (length = 16 bits) */
typedef USHORT GlyphID;

/* Script/language-system/feature index */
struct Index : USHORT {
  static const unsigned int NOT_FOUND_INDEX = 0xFFFF;
};
DEFINE_NULL_DATA (Index, "\xff\xff"); /* Null index == NOT_FOUND_INDEX */

/* Offset to a table, same as uint16 (length = 16 bits), Null offset = 0x0000 */
typedef USHORT Offset;

/* LongOffset to a table, same as uint32 (length = 32 bits), Null offset = 0x00000000 */
typedef ULONG LongOffset;


/* CheckSum */
struct CheckSum : ULONG
{
  /* Sum the table as big-endian ULONGs.  Length is in bytes and is
   * rounded up to a multiple of 4 before converting to a ULONG count;
   * the table data is assumed to be zero-padded to that boundary. */
  static uint32_t CalcTableChecksum (ULONG *Table, uint32_t Length)
  {
    uint32_t Sum = 0L;
    ULONG *EndPtr = Table+((Length+3) & ~3) / ULONG::static_size;

    while (Table < EndPtr)
      Sum += *Table++;
    return Sum;
  }
  public:
  DEFINE_SIZE_STATIC (4);
};


/*
 * Version Numbers
 */

struct FixedVersion
{
  /* Pack major.minor into a single uint32 (major in the high 16 bits). */
  inline uint32_t to_int (void) const { return (major << 16) + minor; }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return TRACE_RETURN (c->check_struct (this));
  }

  USHORT major;
  USHORT minor;
  public:
  DEFINE_SIZE_STATIC (4);
};



/*
 * Template subclasses of Offset and LongOffset that do the dereferencing.
 * Use: (base+offset)
 */

/* An offset that knows how to dereference itself relative to a base:
 * offset(base) — or the operator+ sugar below — returns the Type placed
 * at base+offset.  A zero offset is the Null offset and maps to the
 * shared Null(Type) object (const path only). */
template <typename OffsetType, typename Type>
struct GenericOffsetTo : OffsetType
{
  inline const Type& operator () (const void *base) const
  {
    unsigned int offset = *this;
    if (unlikely (!offset)) return Null(Type);
    return StructAtOffset<Type> (base, offset);
  }
  /* NOTE(review): unlike the const accessor, this one does NOT special-case
   * a zero offset; callers appear responsible for not dereferencing Null
   * offsets mutably. */
  inline Type& operator () (void *base)
  {
    unsigned int offset = *this;
    return StructAtOffset<Type> (base, offset);
  }

  /* Point this offset at the object about to be serialized at c->head,
   * and return that object for the caller to fill in. */
  inline Type& serialize (hb_serialize_context_t *c, void *base)
  {
    Type *t = c->start_embed<Type> ();
    this->set ((char *) t - (char *) base); /* TODO(serialize) Overflow? */
    return *t;
  }

  /* Sanitize the offset itself, then the object it points to.  If the
   * target fails to sanitize, try to neuter the offset (set it to Null)
   * instead of failing outright. */
  inline bool sanitize (hb_sanitize_context_t *c, void *base) {
    TRACE_SANITIZE ();
    if (unlikely (!c->check_struct (this))) return TRACE_RETURN (false);
    unsigned int offset = *this;
    if (unlikely (!offset)) return TRACE_RETURN (true);
    Type &obj = StructAtOffset<Type> (base, offset);
    return TRACE_RETURN (likely (obj.sanitize (c)) || neuter (c));
  }
  /* Same, forwarding extra user_data to the target's sanitize(). */
  template <typename T>
  inline bool sanitize (hb_sanitize_context_t *c, void *base, T user_data) {
    TRACE_SANITIZE ();
    if (unlikely (!c->check_struct (this))) return TRACE_RETURN (false);
    unsigned int offset = *this;
    if (unlikely (!offset)) return TRACE_RETURN (true);
    Type &obj = StructAtOffset<Type> (base, offset);
    return TRACE_RETURN (likely (obj.sanitize (c, user_data)) || neuter (c));
  }

  private:
  /* Set the offset to Null (only if the sanitize context allows edits). */
  inline bool neuter (hb_sanitize_context_t *c) {
    if (c->may_edit (this, this->static_size)) {
      this->set (0); /* 0 is Null offset */
      return true;
    }
    return false;
  }
};
/* Sugar: base + offset dereferences the offset relative to base. */
template <typename Base, typename OffsetType, typename Type>
inline const Type& operator + (const Base &base, const GenericOffsetTo<OffsetType, Type> &offset) { return offset (base); }
template <typename Base, typename OffsetType, typename Type>
inline Type& operator + (Base &base, GenericOffsetTo<OffsetType, Type> &offset) { return offset (base); }

template <typename Type>
struct OffsetTo : GenericOffsetTo<Offset, Type> {};

template <typename Type>
struct LongOffsetTo : GenericOffsetTo<LongOffset, Type> {};


/*
 * Array Types
 */

/* Length-prefixed inline array: a LenType count followed immediately by
 * `len` elements of Type. */
template <typename LenType, typename Type>
struct GenericArrayOf
{
  /* Pointer to the sub-array starting at start_offset; *pcount is
   * clamped on output to the number of elements actually available. */
  const Type *sub_array (unsigned int start_offset, unsigned int *pcount /* IN/OUT */) const
  {
    unsigned int count = len;
    if (unlikely (start_offset > count))
      count = 0;
    else
      count -= start_offset;
    count = MIN (count, *pcount);
    *pcount = count;
    return array + start_offset;
  }

  inline const Type& operator [] (unsigned int i) const
  {
    if (unlikely (i >= len)) return Null(Type);
    return array[i];
  }
  /* NOTE(review): the mutable accessor is unchecked. */
  inline Type& operator [] (unsigned int i)
  {
    return array[i];
  }
  inline unsigned int get_size (void) const
  { return len.static_size + len * Type::static_size; }

  /* Serialize the length and reserve room for items_len elements. */
  inline bool serialize (hb_serialize_context_t *c,
			 unsigned int items_len)
  {
    TRACE_SERIALIZE ();
    if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
    len.set (items_len); /* TODO(serialize) Overflow? */
    if (unlikely (!c->extend (*this))) return TRACE_RETURN (false);
    return TRACE_RETURN (true);
  }

  /* Serialize the length then copy items_len elements from the supplier. */
  inline bool serialize (hb_serialize_context_t *c,
			 Supplier<Type> &items,
			 unsigned int items_len)
  {
    TRACE_SERIALIZE ();
    if (unlikely (!serialize (c, items_len))) return TRACE_RETURN (false);
    for (unsigned int i = 0; i < items_len; i++)
      array[i] = items[i];
    items.advance (items_len);
    return TRACE_RETURN (true);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    if (unlikely (!sanitize_shallow (c))) return TRACE_RETURN (false);

    /* Note: for structs that do not reference other structs,
     * we do not need to call their sanitize() as we already did
     * a bound check on the aggregate array size.  We just include
     * a small unreachable expression to make sure the structs
     * pointed to do have a simple sanitize(), ie. they do not
     * reference other structs via offsets.
     */
    (void) (false && array[0].sanitize (c));

    return TRACE_RETURN (true);
  }
  /* Deep sanitize: each element sanitized against the given base. */
  inline bool sanitize (hb_sanitize_context_t *c, void *base) {
    TRACE_SANITIZE ();
    if (unlikely (!sanitize_shallow (c))) return TRACE_RETURN (false);
    unsigned int count = len;
    for (unsigned int i = 0; i < count; i++)
      if (unlikely (!array[i].sanitize (c, base)))
        return TRACE_RETURN (false);
    return TRACE_RETURN (true);
  }
  template <typename T>
  inline bool sanitize (hb_sanitize_context_t *c, void *base, T user_data) {
    TRACE_SANITIZE ();
    if (unlikely (!sanitize_shallow (c))) return TRACE_RETURN (false);
    unsigned int count = len;
    for (unsigned int i = 0; i < count; i++)
      if (unlikely (!array[i].sanitize (c, base, user_data)))
        return TRACE_RETURN (false);
    return TRACE_RETURN (true);
  }

  private:
  /* Bound-check the header and the aggregate element storage only. */
  inline bool sanitize_shallow (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return TRACE_RETURN (c->check_struct (this) && c->check_array (this, Type::static_size, len));
  }

  public:
  LenType len;
  Type array[VAR];
  public:
  DEFINE_SIZE_ARRAY (sizeof (LenType), array);
};

/* An array with a USHORT number of elements. */
template <typename Type>
struct ArrayOf : GenericArrayOf<USHORT, Type> {};

/* An array with a ULONG number of elements. */
template <typename Type>
struct LongArrayOf : GenericArrayOf<ULONG, Type> {};

/* Array of Offset's */
template <typename Type>
struct OffsetArrayOf : ArrayOf<OffsetTo<Type> > {};

/* Array of LongOffset's */
template <typename Type>
struct LongOffsetArrayOf : ArrayOf<LongOffsetTo<Type> > {};

/* LongArray of LongOffset's */
template <typename Type>
struct LongOffsetLongArrayOf : LongArrayOf<LongOffsetTo<Type> > {};

/* Array of offsets relative to the beginning of the array itself. */
template <typename Type>
struct OffsetListOf : OffsetArrayOf<Type>
{
  inline const Type& operator [] (unsigned int i) const
  {
    if (unlikely (i >= this->len)) return Null(Type);
    return this+this->array[i];
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    return TRACE_RETURN (OffsetArrayOf<Type>::sanitize (c, this));
  }
  template <typename T>
  inline bool sanitize (hb_sanitize_context_t *c, T user_data) {
    TRACE_SANITIZE ();
    return TRACE_RETURN (OffsetArrayOf<Type>::sanitize (c, this, user_data));
  }
};


/* An array with a USHORT number of elements,
 * starting at second element.
 * That is: element 0 is counted in `len` but not stored, so valid
 * indices for operator[] are 1..len-1 (0 yields Null). */
template <typename Type>
struct HeadlessArrayOf
{
  inline const Type& operator [] (unsigned int i) const
  {
    if (unlikely (i >= len || !i)) return Null(Type);
    return array[i-1]; /* element 0 is implicit, storage starts at index 1 */
  }
  inline unsigned int get_size (void) const
  { return len.static_size + (len ? len - 1 : 0) * Type::static_size; }

  /* Serialize len then the items_len-1 stored elements (first element
   * is implicit and skipped on the storage side as well). */
  inline bool serialize (hb_serialize_context_t *c,
			 Supplier<Type> &items,
			 unsigned int items_len)
  {
    TRACE_SERIALIZE ();
    if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
    len.set (items_len); /* TODO(serialize) Overflow? */
    if (unlikely (!items_len)) return TRACE_RETURN (true);
    if (unlikely (!c->extend (*this))) return TRACE_RETURN (false);
    for (unsigned int i = 0; i < items_len - 1; i++)
      array[i] = items[i];
    items.advance (items_len - 1);
    return TRACE_RETURN (true);
  }

  inline bool sanitize_shallow (hb_sanitize_context_t *c) {
    return c->check_struct (this)
	&& c->check_array (this, Type::static_size, len);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE ();
    if (unlikely (!sanitize_shallow (c))) return TRACE_RETURN (false);

    /* Note: for structs that do not reference other structs,
     * we do not need to call their sanitize() as we already did
     * a bound check on the aggregate array size.  We just include
     * a small unreachable expression to make sure the structs
     * pointed to do have a simple sanitize(), ie. they do not
     * reference other structs via offsets.
     */
    (void) (false && array[0].sanitize (c));

    return TRACE_RETURN (true);
  }

  USHORT len;
  Type array[VAR];
  public:
  DEFINE_SIZE_ARRAY (sizeof (USHORT), array);
};


/* An array with sorted elements.  Supports binary searching. */
template <typename Type>
struct SortedArrayOf : ArrayOf<Type> {

  /* Return the index of the element comparing equal to x, or -1. */
  template <typename SearchType>
  inline int search (const SearchType &x) const {
    unsigned int count = this->len;
    /* Linear search is *much* faster for small counts. */
    if (likely (count < 32)) {
      for (unsigned int i = 0; i < count; i++)
	if (this->array[i].cmp (x) == 0)
	  return i;
      return -1;
    } else {
      struct Cmp {
	static int cmp (const SearchType *a, const Type *b) { return b->cmp (*a); }
      };
      const Type *p = (const Type *) bsearch (&x, this->array, this->len, sizeof (this->array[0]), (hb_compare_func_t) Cmp::cmp);
      return p ? p - this->array : -1;
    }
  }
};


} // namespace OT


#endif /* HB_OPEN_TYPE_PRIVATE_HH */