/*
 * Copyright © 2007,2008,2009,2010 Red Hat, Inc.
 * Copyright © 2012 Google, Inc.
 *
 * This is part of HarfBuzz, a text shaping library.
 *
 * Permission is hereby granted, without written agreement and without
 * license or royalty fees, to use, copy, modify, and distribute this
 * software and its documentation for any purpose, provided that the
 * above copyright notice and the following two paragraphs appear in
 * all copies of this software.
 *
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 *
 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 *
 * Red Hat Author(s): Behdad Esfahbod
 * Google Author(s): Behdad Esfahbod
 */

#ifndef HB_OPEN_TYPE_PRIVATE_HH
#define HB_OPEN_TYPE_PRIVATE_HH

#include "hb-private.hh"


namespace OT {



/*
 * Casts
 */

/* Cast to struct T, reference to reference */
template<typename Type, typename TObject>
static inline const Type& CastR(const TObject &X)
{ return reinterpret_cast<const Type&> (X); }
template<typename Type, typename TObject>
static inline Type& CastR(TObject &X)
{ return reinterpret_cast<Type&> (X); }

/* Cast to struct T, pointer to pointer */
template<typename Type, typename TObject>
static inline const Type* CastP(const TObject *X)
{ return reinterpret_cast<const Type*> (X); }
template<typename Type, typename TObject>
static inline Type* CastP(TObject *X)
{ return reinterpret_cast<Type*> (X); }

/* StructAtOffset<T>(P,Ofs) returns the struct T& that is placed at memory
 * location pointed to by P plus Ofs bytes. */
template<typename Type>
static inline const Type& StructAtOffset(const void *P, unsigned int offset)
{ return * reinterpret_cast<const Type*> ((const char *) P + offset); }
template<typename Type>
static inline Type& StructAtOffset(void *P, unsigned int offset)
{ return * reinterpret_cast<Type*> ((char *) P + offset); }

/* StructAfter<T>(X) returns the struct T& that is placed after X.
 * Works with X of variable size also. X must implement get_size() */
template<typename Type, typename TObject>
static inline const Type& StructAfter(const TObject &X)
{ return StructAtOffset<Type>(&X, X.get_size()); }
template<typename Type, typename TObject>
static inline Type& StructAfter(TObject &X)
{ return StructAtOffset<Type>(&X, X.get_size()); }
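
/* Illustrative usage (assumed caller code, not part of the original header):
 * StructAfter<> walks variable-sized structs laid out back to back, using
 * the preceding struct's get_size() to locate the next one. E.g. with the
 * ArrayOf<> type defined further down in this file:
 *
 *   const ArrayOf<GlyphID> &glyphs = ...;              // variable-sized
 *   const USHORT &next = StructAfter<USHORT> (glyphs); // struct right after it
 */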



/*
 * Size checking
 */

/* Check _assertion in a method environment */
#define _DEFINE_INSTANCE_ASSERTION1(_line, _assertion) \
  inline void _instance_assertion_on_line_##_line (void) const \
  { \
    ASSERT_STATIC (_assertion); \
    ASSERT_INSTANCE_POD (*this); /* Make sure it's POD. */ \
  }
# define _DEFINE_INSTANCE_ASSERTION0(_line, _assertion) _DEFINE_INSTANCE_ASSERTION1 (_line, _assertion)
# define DEFINE_INSTANCE_ASSERTION(_assertion) _DEFINE_INSTANCE_ASSERTION0 (__LINE__, _assertion)

/* Check that _code compiles in a method environment */
#define _DEFINE_COMPILES_ASSERTION1(_line, _code) \
  inline void _compiles_assertion_on_line_##_line (void) const \
  { _code; }
# define _DEFINE_COMPILES_ASSERTION0(_line, _code) _DEFINE_COMPILES_ASSERTION1 (_line, _code)
# define DEFINE_COMPILES_ASSERTION(_code) _DEFINE_COMPILES_ASSERTION0 (__LINE__, _code)


#define DEFINE_SIZE_STATIC(size) \
  DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size)); \
  static const unsigned int static_size = (size); \
  static const unsigned int min_size = (size)

/* Size signifying variable-sized array */
#define VAR 1

#define DEFINE_SIZE_UNION(size, _member) \
  DEFINE_INSTANCE_ASSERTION (this->u._member.static_size == (size)); \
  static const unsigned int min_size = (size)

#define DEFINE_SIZE_MIN(size) \
  DEFINE_INSTANCE_ASSERTION (sizeof (*this) >= (size)); \
  static const unsigned int min_size = (size)

#define DEFINE_SIZE_ARRAY(size, array) \
  DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size) + sizeof (array[0])); \
  DEFINE_COMPILES_ASSERTION ((void) array[0].static_size) \
  static const unsigned int min_size = (size)

#define DEFINE_SIZE_ARRAY2(size, array1, array2) \
  DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size) + sizeof (this->array1[0]) + sizeof (this->array2[0])); \
  DEFINE_COMPILES_ASSERTION ((void) array1[0].static_size; (void) array2[0].static_size) \
  static const unsigned int min_size = (size)
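
/* Illustrative sketch (hypothetical structs, mirroring how the real types
 * later in this file use these macros): a fixed-size struct declares
 * DEFINE_SIZE_STATIC, while a struct ending in a variable-length array
 * declares DEFINE_SIZE_ARRAY with the size of everything before the array:
 *
 *   struct ExampleRecord
 *   {
 *     USHORT value;
 *     public:
 *     DEFINE_SIZE_STATIC (2);
 *   };
 *
 *   struct ExampleList
 *   {
 *     USHORT        count;
 *     ExampleRecord record[VAR];
 *     public:
 *     DEFINE_SIZE_ARRAY (2, record);
 *   };
 */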



/*
 * Null objects
 */

/* Global nul-content Null pool. Enlarge as necessary. */
/* TODO This really should be an extern HB_INTERNAL and defined somewhere... */
static const void *_NullPool[(256+8) / sizeof (void *)];

/* Generic nul-content Null objects. */
template <typename Type>
static inline const Type& Null (void) {
  ASSERT_STATIC (sizeof (Type) <= sizeof (_NullPool));
  return *CastP<Type> (_NullPool);
}

/* Specialization for arbitrary-content arbitrary-sized Null objects. */
#define DEFINE_NULL_DATA(Type, data) \
static const char _Null##Type[sizeof (Type) + 1] = data; /* +1 is for nul-termination in data */ \
template <> \
/*static*/ inline const Type& Null<Type> (void) { \
  return *CastP<Type> (_Null##Type); \
} /* The following line really exists such that we end in a place needing semicolon */ \
ASSERT_STATIC (Type::min_size + 1 <= sizeof (_Null##Type))

/* Accessor macro. */
#define Null(Type) Null<Type>()
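
/* Illustrative usage (assumed caller code): out-of-bounds accessors below
 * return a reference into this pool instead of failing, e.g.
 *
 *   const USHORT &zero = Null(USHORT);   // reads as 0
 *   const Tag &tag = Null(Tag);          // uses the Tag override defined below
 */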



/*
 * Sanitize
 */

#ifndef HB_DEBUG_SANITIZE
#define HB_DEBUG_SANITIZE (HB_DEBUG+0)
#endif


#define TRACE_SANITIZE(this) \
  hb_auto_trace_t<HB_DEBUG_SANITIZE, bool> trace \
  (&c->debug_depth, c->get_name (), this, HB_FUNC, \
   "");

/* This limits sanitizing time on really broken fonts. */
#ifndef HB_SANITIZE_MAX_EDITS
#define HB_SANITIZE_MAX_EDITS 100
#endif

struct hb_sanitize_context_t
{
  inline const char *get_name (void) { return "SANITIZE"; }
  static const unsigned int max_debug_depth = HB_DEBUG_SANITIZE;
  typedef bool return_t;
  template <typename T>
  inline return_t dispatch (const T &obj) { return obj.sanitize (this); }
  static return_t default_return_value (void) { return true; }
  bool stop_sublookup_iteration (const return_t r HB_UNUSED) const { return false; }

  inline void init (hb_blob_t *b)
  {
    this->blob = hb_blob_reference (b);
    this->writable = false;
  }

  inline void start_processing (void)
  {
    this->start = hb_blob_get_data (this->blob, NULL);
    this->end = this->start + hb_blob_get_length (this->blob);
    assert (this->start <= this->end); /* Must not overflow. */
    this->edit_count = 0;
    this->debug_depth = 0;

    DEBUG_MSG_LEVEL (SANITIZE, start, 0, +1,
                     "start [%p..%p] (%lu bytes)",
                     this->start, this->end,
                     (unsigned long) (this->end - this->start));
  }

  inline void end_processing (void)
  {
    DEBUG_MSG_LEVEL (SANITIZE, this->start, 0, -1,
                     "end [%p..%p] %u edit requests",
                     this->start, this->end, this->edit_count);

    hb_blob_destroy (this->blob);
    this->blob = NULL;
    this->start = this->end = NULL;
  }

  inline bool check_range (const void *base, unsigned int len) const
  {
    const char *p = (const char *) base;
    bool ok = this->start <= p && p <= this->end && (unsigned int) (this->end - p) >= len;

    DEBUG_MSG_LEVEL (SANITIZE, p, this->debug_depth+1, 0,
                     "check_range [%p..%p] (%d bytes) in [%p..%p] -> %s",
                     p, p + len, len,
                     this->start, this->end,
                     ok ? "OK" : "OUT-OF-RANGE");

    return likely (ok);
  }

  inline bool check_array (const void *base, unsigned int record_size, unsigned int len) const
  {
    const char *p = (const char *) base;
    bool overflows = _hb_unsigned_int_mul_overflows (len, record_size);
    unsigned int array_size = record_size * len;
    bool ok = !overflows && this->check_range (base, array_size);

    DEBUG_MSG_LEVEL (SANITIZE, p, this->debug_depth+1, 0,
                     "check_array [%p..%p] (%d*%d=%d bytes) in [%p..%p] -> %s",
                     p, p + (record_size * len), record_size, len, (unsigned int) array_size,
                     this->start, this->end,
                     overflows ? "OVERFLOWS" : ok ? "OK" : "OUT-OF-RANGE");

    return likely (ok);
  }

  template <typename Type>
  inline bool check_struct (const Type *obj) const
  {
    return likely (this->check_range (obj, obj->min_size));
  }

  inline bool may_edit (const void *base HB_UNUSED, unsigned int len HB_UNUSED)
  {
    if (this->edit_count >= HB_SANITIZE_MAX_EDITS)
      return false;

    const char *p = (const char *) base;
    this->edit_count++;

    DEBUG_MSG_LEVEL (SANITIZE, p, this->debug_depth+1, 0,
                     "may_edit(%u) [%p..%p] (%d bytes) in [%p..%p] -> %s",
                     this->edit_count,
                     p, p + len, len,
                     this->start, this->end,
                     this->writable ? "GRANTED" : "DENIED");

    return this->writable;
  }

  template <typename Type, typename ValueType>
  inline bool try_set (Type *obj, const ValueType &v) {
    if (this->may_edit (obj, obj->static_size)) {
      obj->set (v);
      return true;
    }
    return false;
  }

  mutable unsigned int debug_depth;
  const char *start, *end;
  bool writable;
  unsigned int edit_count;
  hb_blob_t *blob;
};



/* Template to sanitize an object. */
template <typename Type>
struct Sanitizer
{
  static hb_blob_t *sanitize (hb_blob_t *blob) {
    hb_sanitize_context_t c[1] = {{0, NULL, NULL, false, 0, NULL}};
    bool sane;

    /* TODO is_sane() stuff */

    c->init (blob);

  retry:
    DEBUG_MSG_FUNC (SANITIZE, c->start, "start");

    c->start_processing ();

    if (unlikely (!c->start)) {
      c->end_processing ();
      return blob;
    }

    Type *t = CastP<Type> (const_cast<char *> (c->start));

    sane = t->sanitize (c);
    if (sane) {
      if (c->edit_count) {
        DEBUG_MSG_FUNC (SANITIZE, c->start, "passed first round with %d edits; going for second round", c->edit_count);

        /* sanitize again to ensure no toe-stepping */
        c->edit_count = 0;
        sane = t->sanitize (c);
        if (c->edit_count) {
          DEBUG_MSG_FUNC (SANITIZE, c->start, "requested %d edits in second round; FAILING", c->edit_count);
          sane = false;
        }
      }
    } else {
      unsigned int edit_count = c->edit_count;
      if (edit_count && !c->writable) {
        c->start = hb_blob_get_data_writable (blob, NULL);
        c->end = c->start + hb_blob_get_length (blob);

        if (c->start) {
          c->writable = true;
          /* ok, we made it writable by relocating. try again */
          DEBUG_MSG_FUNC (SANITIZE, c->start, "retry");
          goto retry;
        }
      }
    }

    c->end_processing ();

    DEBUG_MSG_FUNC (SANITIZE, c->start, sane ? "PASSED" : "FAILED");
    if (sane)
      return blob;
    else {
      hb_blob_destroy (blob);
      return hb_blob_get_empty ();
    }
  }

  static const Type* lock_instance (hb_blob_t *blob) {
    hb_blob_make_immutable (blob);
    const char *base = hb_blob_get_data (blob, NULL);
    return unlikely (!base) ? &Null(Type) : CastP<Type> (base);
  }
};
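
/* Illustrative usage (assumed caller code; SomeTable is a placeholder for
 * any table type in this file that implements sanitize()):
 *
 *   hb_blob_t *sane = Sanitizer<SomeTable>::sanitize (raw_blob);
 *   const SomeTable *table = Sanitizer<SomeTable>::lock_instance (sane);
 *
 * sanitize() takes ownership of the blob reference passed in and returns
 * either that blob or the empty blob if the data could not be validated.
 */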



/*
 * Serialize
 */

#ifndef HB_DEBUG_SERIALIZE
#define HB_DEBUG_SERIALIZE (HB_DEBUG+0)
#endif


#define TRACE_SERIALIZE(this) \
  hb_auto_trace_t<HB_DEBUG_SERIALIZE, bool> trace \
  (&c->debug_depth, "SERIALIZE", c, HB_FUNC, \
   "");


struct hb_serialize_context_t
{
  inline hb_serialize_context_t (void *start, unsigned int size)
  {
    this->start = (char *) start;
    this->end = this->start + size;

    this->ran_out_of_room = false;
    this->head = this->start;
    this->debug_depth = 0;
  }

  template <typename Type>
  inline Type *start_serialize (void)
  {
    DEBUG_MSG_LEVEL (SERIALIZE, this->start, 0, +1,
                     "start [%p..%p] (%lu bytes)",
                     this->start, this->end,
                     (unsigned long) (this->end - this->start));

    return start_embed<Type> ();
  }

  inline void end_serialize (void)
  {
    DEBUG_MSG_LEVEL (SERIALIZE, this->start, 0, -1,
                     "end [%p..%p] serialized %d bytes; %s",
                     this->start, this->end,
                     (int) (this->head - this->start),
                     this->ran_out_of_room ? "RAN OUT OF ROOM" : "did not run out of room");

  }

  template <typename Type>
  inline Type *copy (void)
  {
    assert (!this->ran_out_of_room);
    unsigned int len = this->head - this->start;
    void *p = malloc (len);
    if (p)
      memcpy (p, this->start, len);
    return reinterpret_cast<Type *> (p);
  }

  template <typename Type>
  inline Type *allocate_size (unsigned int size)
  {
    if (unlikely (this->ran_out_of_room || this->end - this->head < ptrdiff_t (size))) {
      this->ran_out_of_room = true;
      return NULL;
    }
    memset (this->head, 0, size);
    char *ret = this->head;
    this->head += size;
    return reinterpret_cast<Type *> (ret);
  }

  template <typename Type>
  inline Type *allocate_min (void)
  {
    return this->allocate_size<Type> (Type::min_size);
  }

  template <typename Type>
  inline Type *start_embed (void)
  {
    Type *ret = reinterpret_cast<Type *> (this->head);
    return ret;
  }

  template <typename Type>
  inline Type *embed (const Type &obj)
  {
    unsigned int size = obj.get_size ();
    Type *ret = this->allocate_size<Type> (size);
    if (unlikely (!ret)) return NULL;
    memcpy (ret, obj, size);
    return ret;
  }

  template <typename Type>
  inline Type *extend_min (Type &obj)
  {
    unsigned int size = obj.min_size;
    assert (this->start <= (char *) &obj && (char *) &obj <= this->head && (char *) &obj + size >= this->head);
    if (unlikely (!this->allocate_size<Type> (((char *) &obj) + size - this->head))) return NULL;
    return reinterpret_cast<Type *> (&obj);
  }

  template <typename Type>
  inline Type *extend (Type &obj)
  {
    unsigned int size = obj.get_size ();
    assert (this->start < (char *) &obj && (char *) &obj <= this->head && (char *) &obj + size >= this->head);
    if (unlikely (!this->allocate_size<Type> (((char *) &obj) + size - this->head))) return NULL;
    return reinterpret_cast<Type *> (&obj);
  }

  inline void truncate (void *head)
  {
    assert (this->start < head && head <= this->head);
    this->head = (char *) head;
  }

  unsigned int debug_depth;
  char *start, *end, *head;
  bool ran_out_of_room;
};
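
/* Illustrative sketch (assumed caller code; SomeTable and the buffer size
 * are placeholders):
 *
 *   char buf[4096];
 *   hb_serialize_context_t c (buf, sizeof (buf));
 *   SomeTable *t = c.start_serialize<SomeTable> ();
 *   ... call the table's serialize() methods with &c ...
 *   c.end_serialize ();
 *   if (!c.ran_out_of_room)
 *     SomeTable *out = c.copy<SomeTable> ();   // malloc'ed copy of the result
 */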

template <typename Type>
struct Supplier
{
  inline Supplier (const Type *array, unsigned int len_)
  {
    head = array;
    len = len_;
  }
  inline const Type operator [] (unsigned int i) const
  {
    if (unlikely (i >= len)) return Type ();
    return head[i];
  }

  inline void advance (unsigned int count)
  {
    if (unlikely (count > len))
      count = len;
    len -= count;
    head += count;
  }

  private:
  inline Supplier (const Supplier<Type> &); /* Disallow copy */
  inline Supplier<Type>& operator= (const Supplier<Type> &); /* Disallow copy */

  unsigned int len;
  const Type *head;
};
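
/* Illustrative usage (assumed caller code): a Supplier wraps a plain C array
 * so the serialize() methods below can consume it incrementally:
 *
 *   const GlyphID *glyphs = ...;                       // GlyphID is defined below
 *   Supplier<GlyphID> supplier (glyphs, count);
 *   array_of_glyphs.serialize (&c, supplier, count);   // see ArrayOf below
 */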




/*
 *
 * The OpenType Font File: Data Types
 */


/* "The following data types are used in the OpenType font file.
 * All OpenType fonts use Motorola-style byte ordering (Big Endian):" */

/*
 * Int types
 */


template <typename Type, int Bytes> struct BEInt;

template <typename Type>
struct BEInt<Type, 2>
{
  public:
  inline void set (Type V)
  {
    v[0] = (V >> 8) & 0xFF;
    v[1] = (V     ) & 0xFF;
  }
  inline operator Type (void) const
  {
    return (v[0] << 8)
         + (v[1]     );
  }
  inline bool operator == (const BEInt<Type, 2>& o) const
  {
    return v[0] == o.v[0]
        && v[1] == o.v[1];
  }
  inline bool operator != (const BEInt<Type, 2>& o) const { return !(*this == o); }
  private: uint8_t v[2];
};
template <typename Type>
struct BEInt<Type, 3>
{
  public:
  inline void set (Type V)
  {
    v[0] = (V >> 16) & 0xFF;
    v[1] = (V >>  8) & 0xFF;
    v[2] = (V      ) & 0xFF;
  }
  inline operator Type (void) const
  {
    return (v[0] << 16)
         + (v[1] <<  8)
         + (v[2]      );
  }
  inline bool operator == (const BEInt<Type, 3>& o) const
  {
    return v[0] == o.v[0]
        && v[1] == o.v[1]
        && v[2] == o.v[2];
  }
  inline bool operator != (const BEInt<Type, 3>& o) const { return !(*this == o); }
  private: uint8_t v[3];
};
template <typename Type>
struct BEInt<Type, 4>
{
  public:
  inline void set (Type V)
  {
    v[0] = (V >> 24) & 0xFF;
    v[1] = (V >> 16) & 0xFF;
    v[2] = (V >>  8) & 0xFF;
    v[3] = (V      ) & 0xFF;
  }
  inline operator Type (void) const
  {
    return (v[0] << 24)
         + (v[1] << 16)
         + (v[2] <<  8)
         + (v[3]      );
  }
  inline bool operator == (const BEInt<Type, 4>& o) const
  {
    return v[0] == o.v[0]
        && v[1] == o.v[1]
        && v[2] == o.v[2]
        && v[3] == o.v[3];
  }
  inline bool operator != (const BEInt<Type, 4>& o) const { return !(*this == o); }
  private: uint8_t v[4];
};

/* Integer types in big-endian order and no alignment requirement */
template <typename Type, unsigned int Size>
struct IntType
{
  inline void set (Type i) { v.set (i); }
  inline operator Type(void) const { return v; }
  inline bool operator == (const IntType<Type,Size> &o) const { return v == o.v; }
  inline bool operator != (const IntType<Type,Size> &o) const { return v != o.v; }
  static inline int cmp (const IntType<Type,Size> *a, const IntType<Type,Size> *b) { return b->cmp (*a); }
  inline int cmp (IntType<Type,Size> va) const { Type a = va; Type b = v; return a < b ? -1 : a == b ? 0 : +1; }
  inline int cmp (Type a) const { Type b = v; return a < b ? -1 : a == b ? 0 : +1; }
  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (likely (c->check_struct (this)));
  }
  protected:
  BEInt<Type, Size> v;
  public:
  DEFINE_SIZE_STATIC (Size);
};

typedef uint8_t BYTE;                /* 8-bit unsigned integer. */
typedef IntType<uint16_t, 2> USHORT; /* 16-bit unsigned integer. */
typedef IntType<int16_t,  2> SHORT;  /* 16-bit signed integer. */
typedef IntType<uint32_t, 4> ULONG;  /* 32-bit unsigned integer. */
typedef IntType<int32_t,  4> LONG;   /* 32-bit signed integer. */
typedef IntType<uint32_t, 3> UINT24; /* 24-bit unsigned integer. */
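
/* Illustrative behavior sketch (values assumed for the example): the types
 * above store big-endian bytes regardless of host endianness:
 *
 *   USHORT u;
 *   u.set (0x1234);       // stored in memory as the bytes 0x12, 0x34
 *   unsigned int x = u;   // reads back as 0x1234 on any host
 */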

/* 16-bit signed integer (SHORT) that describes a quantity in FUnits. */
typedef SHORT FWORD;

/* 16-bit unsigned integer (USHORT) that describes a quantity in FUnits. */
typedef USHORT UFWORD;

/* Date represented in number of seconds since 12:00 midnight, January 1,
 * 1904. The value is represented as a signed 64-bit integer. */
struct LONGDATETIME
{
  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (likely (c->check_struct (this)));
  }
  protected:
  LONG major;
  ULONG minor;
  public:
  DEFINE_SIZE_STATIC (8);
};

/* Array of four uint8s (length = 32 bits) used to identify a script, language
 * system, feature, or baseline */
struct Tag : ULONG
{
  /* What the char* converters return is NOT nul-terminated. Print using "%.4s" */
  inline operator const char* (void) const { return reinterpret_cast<const char *> (&this->v); }
  inline operator char* (void) { return reinterpret_cast<char *> (&this->v); }
  public:
  DEFINE_SIZE_STATIC (4);
};
DEFINE_NULL_DATA (Tag, "    ");

/* Glyph index number, same as uint16 (length = 16 bits) */
typedef USHORT GlyphID;

/* Script/language-system/feature index */
struct Index : USHORT {
  static const unsigned int NOT_FOUND_INDEX = 0xFFFFu;
};
DEFINE_NULL_DATA (Index, "\xff\xff");

/* Offset, Null offset = 0 */
template <typename Type=USHORT>
struct Offset : Type
{
  inline bool is_null (void) const { return 0 == *this; }
  public:
  DEFINE_SIZE_STATIC (sizeof(Type));
};


/* CheckSum */
struct CheckSum : ULONG
{
  /* This is the reference implementation from the spec. */
  static inline uint32_t CalcTableChecksum (const ULONG *Table, uint32_t Length)
  {
    uint32_t Sum = 0L;
    const ULONG *EndPtr = Table + ((Length + 3) & ~3) / ULONG::static_size;

    while (Table < EndPtr)
      Sum += *Table++;
    return Sum;
  }

  /* Note: data should be 4-byte aligned and have 4-byte padding at the end. */
  inline void set_for_data (const void *data, unsigned int length)
  { set (CalcTableChecksum ((const ULONG *) data, length)); }

  public:
  DEFINE_SIZE_STATIC (4);
};
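
/* Illustrative usage (assumed caller code; the data must be 4-byte aligned
 * and padded as noted above):
 *
 *   CheckSum sum;
 *   sum.set_for_data (table_data, table_length);
 *   // sum now holds the big-endian sum of the table read as ULONGs.
 */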


/*
 * Version Numbers
 */

struct FixedVersion
{
  inline uint32_t to_int (void) const { return (major << 16) + minor; }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this));
  }

  USHORT major;
  USHORT minor;
  public:
  DEFINE_SIZE_STATIC (4);
};



/*
 * Template subclasses of Offset that do the dereferencing.
 * Use: (base+offset)
 */

template <typename Type, typename OffsetType=USHORT>
struct OffsetTo : Offset<OffsetType>
{
  inline const Type& operator () (const void *base) const
  {
    unsigned int offset = *this;
    if (unlikely (!offset)) return Null(Type);
    return StructAtOffset<Type> (base, offset);
  }

  inline Type& serialize (hb_serialize_context_t *c, void *base)
  {
    Type *t = c->start_embed<Type> ();
    this->set ((char *) t - (char *) base); /* TODO(serialize) Overflow? */
    return *t;
  }

  inline bool sanitize (hb_sanitize_context_t *c, void *base) {
    TRACE_SANITIZE (this);
    if (unlikely (!c->check_struct (this))) return TRACE_RETURN (false);
    unsigned int offset = *this;
    if (unlikely (!offset)) return TRACE_RETURN (true);
    Type &obj = StructAtOffset<Type> (base, offset);
    return TRACE_RETURN (likely (obj.sanitize (c)) || neuter (c));
  }
  template <typename T>
  inline bool sanitize (hb_sanitize_context_t *c, void *base, T user_data) {
    TRACE_SANITIZE (this);
    if (unlikely (!c->check_struct (this))) return TRACE_RETURN (false);
    unsigned int offset = *this;
    if (unlikely (!offset)) return TRACE_RETURN (true);
    Type &obj = StructAtOffset<Type> (base, offset);
    return TRACE_RETURN (likely (obj.sanitize (c, user_data)) || neuter (c));
  }

  /* Set the offset to Null */
  inline bool neuter (hb_sanitize_context_t *c) {
    return c->try_set (this, 0);
  }
  DEFINE_SIZE_STATIC (sizeof(OffsetType));
};
template <typename Base, typename OffsetType, typename Type>
static inline const Type& operator + (const Base &base, const OffsetTo<Type, OffsetType> &offset) { return offset (base); }
template <typename Base, typename OffsetType, typename Type>
static inline Type& operator + (Base &base, OffsetTo<Type, OffsetType> &offset) { return offset (base); }
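
/* Illustrative sketch (hypothetical struct; the (base+offset) idiom is how
 * the rest of HarfBuzz dereferences these offsets):
 *
 *   struct ExampleSubtable
 *   {
 *     OffsetTo<ExampleTarget> target;   // offset relative to this struct
 *
 *     inline bool example (void) const
 *     {
 *       const ExampleTarget &t = this+target;  // picks the operator+ above
 *       ...
 *     }
 *   };
 */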


/*
 * Array Types
 */

/* An array with a number of elements. */
template <typename Type, typename LenType=USHORT>
struct ArrayOf
{
  const Type *sub_array (unsigned int start_offset, unsigned int *pcount /* IN/OUT */) const
  {
    unsigned int count = len;
    if (unlikely (start_offset > count))
      count = 0;
    else
      count -= start_offset;
    count = MIN (count, *pcount);
    *pcount = count;
    return array + start_offset;
  }

  inline const Type& operator [] (unsigned int i) const
  {
    if (unlikely (i >= len)) return Null(Type);
    return array[i];
  }
  inline Type& operator [] (unsigned int i)
  {
    return array[i];
  }
  inline unsigned int get_size (void) const
  { return len.static_size + len * Type::static_size; }

  inline bool serialize (hb_serialize_context_t *c,
                         unsigned int items_len)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
    len.set (items_len); /* TODO(serialize) Overflow? */
    if (unlikely (!c->extend (*this))) return TRACE_RETURN (false);
    return TRACE_RETURN (true);
  }

  inline bool serialize (hb_serialize_context_t *c,
                         Supplier<Type> &items,
                         unsigned int items_len)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!serialize (c, items_len))) return TRACE_RETURN (false);
    for (unsigned int i = 0; i < items_len; i++)
      array[i] = items[i];
    items.advance (items_len);
    return TRACE_RETURN (true);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (unlikely (!sanitize_shallow (c))) return TRACE_RETURN (false);

    /* Note: for structs that do not reference other structs,
     * we do not need to call their sanitize() as we already did
     * a bounds check on the aggregate array size. We just include
     * a small unreachable expression to make sure the structs
     * pointed to do have a simple sanitize(), i.e. they do not
     * reference other structs via offsets.
     */
    (void) (false && array[0].sanitize (c));

    return TRACE_RETURN (true);
  }
  inline bool sanitize (hb_sanitize_context_t *c, void *base) {
    TRACE_SANITIZE (this);
    if (unlikely (!sanitize_shallow (c))) return TRACE_RETURN (false);
    unsigned int count = len;
    for (unsigned int i = 0; i < count; i++)
      if (unlikely (!array[i].sanitize (c, base)))
        return TRACE_RETURN (false);
    return TRACE_RETURN (true);
  }
  template <typename T>
  inline bool sanitize (hb_sanitize_context_t *c, void *base, T user_data) {
    TRACE_SANITIZE (this);
    if (unlikely (!sanitize_shallow (c))) return TRACE_RETURN (false);
    unsigned int count = len;
    for (unsigned int i = 0; i < count; i++)
      if (unlikely (!array[i].sanitize (c, base, user_data)))
        return TRACE_RETURN (false);
    return TRACE_RETURN (true);
  }

  template <typename SearchType>
  inline int lsearch (const SearchType &x) const
  {
    unsigned int count = len;
    for (unsigned int i = 0; i < count; i++)
      if (!this->array[i].cmp (x))
        return i;
    return -1;
  }

  private:
  inline bool sanitize_shallow (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this) && c->check_array (this, Type::static_size, len));
  }

  public:
  LenType len;
  Type array[VAR];
  public:
  DEFINE_SIZE_ARRAY (sizeof (LenType), array);
};
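
/* Illustrative usage (assumed caller code): reading is bounds-checked and
 * falls back to the Null object, and lsearch() does a linear scan:
 *
 *   const ArrayOf<GlyphID> &arr = ...;
 *   unsigned int count = arr.len;
 *   for (unsigned int i = 0; i < count; i++)
 *     use (arr[i]);                     // out of range would give Null(GlyphID)
 *   int pos = arr.lsearch (glyph_id);   // index of match, or -1
 */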

/* Array of Offsets */
template <typename Type>
struct OffsetArrayOf : ArrayOf<OffsetTo<Type> > {};

/* Array of offsets relative to the beginning of the array itself. */
template <typename Type>
struct OffsetListOf : OffsetArrayOf<Type>
{
  inline const Type& operator [] (unsigned int i) const
  {
    if (unlikely (i >= this->len)) return Null(Type);
    return this+this->array[i];
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (OffsetArrayOf<Type>::sanitize (c, this));
  }
  template <typename T>
  inline bool sanitize (hb_sanitize_context_t *c, T user_data) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (OffsetArrayOf<Type>::sanitize (c, this, user_data));
  }
};


/* An array starting at the second element. */
template <typename Type, typename LenType=USHORT>
struct HeadlessArrayOf
{
  inline const Type& operator [] (unsigned int i) const
  {
    if (unlikely (i >= len || !i)) return Null(Type);
    return array[i-1];
  }
  inline unsigned int get_size (void) const
  { return len.static_size + (len ? len - 1 : 0) * Type::static_size; }

  inline bool serialize (hb_serialize_context_t *c,
                         Supplier<Type> &items,
                         unsigned int items_len)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
    len.set (items_len); /* TODO(serialize) Overflow? */
    if (unlikely (!items_len)) return TRACE_RETURN (true);
    if (unlikely (!c->extend (*this))) return TRACE_RETURN (false);
    for (unsigned int i = 0; i < items_len - 1; i++)
      array[i] = items[i];
    items.advance (items_len - 1);
    return TRACE_RETURN (true);
  }

  inline bool sanitize_shallow (hb_sanitize_context_t *c) {
    return c->check_struct (this)
        && c->check_array (this, Type::static_size, len);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (unlikely (!sanitize_shallow (c))) return TRACE_RETURN (false);

    /* Note: for structs that do not reference other structs,
     * we do not need to call their sanitize() as we already did
     * a bounds check on the aggregate array size. We just include
     * a small unreachable expression to make sure the structs
     * pointed to do have a simple sanitize(), i.e. they do not
     * reference other structs via offsets.
     */
    (void) (false && array[0].sanitize (c));

    return TRACE_RETURN (true);
  }

  LenType len;
  Type array[VAR];
  public:
  DEFINE_SIZE_ARRAY (sizeof (LenType), array);
};


/* An array with sorted elements. Supports binary searching. */
template <typename Type, typename LenType=USHORT>
struct SortedArrayOf : ArrayOf<Type, LenType>
{
  template <typename SearchType>
  inline int bsearch (const SearchType &x) const
  {
    /* Hand-coded bsearch here since this is in the hot inner loop. */
    int min = 0, max = (int) this->len - 1;
    while (min <= max)
    {
      int mid = (min + max) / 2;
      int c = this->array[mid].cmp (x);
      if (c < 0)
        max = mid - 1;
      else if (c > 0)
        min = mid + 1;
      else
        return mid;
    }
    return -1;
  }
};
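
/* Illustrative usage (assumed caller code): bsearch() requires the elements
 * to be sorted by the same key their cmp() compares on:
 *
 *   const SortedArrayOf<GlyphID> &sorted = ...;
 *   int i = sorted.bsearch (glyph_id);   // index of match, or -1
 */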


} /* namespace OT */


#endif /* HB_OPEN_TYPE_PRIVATE_HH */