/*
 * Copyright © 2007,2008,2009,2010  Red Hat, Inc.
 * Copyright © 2010,2012,2013  Google, Inc.
 *
 *  This is part of HarfBuzz, a text shaping library.
 *
 * Permission is hereby granted, without written agreement and without
 * license or royalty fees, to use, copy, modify, and distribute this
 * software and its documentation for any purpose, provided that the
 * above copyright notice and the following two paragraphs appear in
 * all copies of this software.
 *
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 *
 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 *
 * Red Hat Author(s): Behdad Esfahbod
 * Google Author(s): Behdad Esfahbod
 */

#ifndef HB_OT_LAYOUT_GSUB_TABLE_HH
#define HB_OT_LAYOUT_GSUB_TABLE_HH

#include "hb-ot-layout-gsubgpos-private.hh"


namespace OT {


struct SingleSubstFormat1
{
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    Coverage::Iter iter;
    for (iter.init (this+coverage); iter.more (); iter.next ()) {
      hb_codepoint_t glyph_id = iter.get_glyph ();
      if (c->glyphs->has (glyph_id))
        c->glyphs->add ((glyph_id + deltaGlyphID) & 0xFFFF);
    }
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    Coverage::Iter iter;
    for (iter.init (this+coverage); iter.more (); iter.next ()) {
      hb_codepoint_t glyph_id = iter.get_glyph ();
      c->input->add (glyph_id);
      c->output->add ((glyph_id + deltaGlyphID) & 0xFFFF);
    }
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);
    return TRACE_RETURN (c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED);
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_codepoint_t glyph_id = c->buffer->cur().codepoint;
    unsigned int index = (this+coverage).get_coverage (glyph_id);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    /* According to the Adobe Annotated OpenType Suite, result is always
     * limited to 16bit. */
    glyph_id = (glyph_id + deltaGlyphID) & 0xFFFF;
    c->replace_glyph (glyph_id);

    return TRACE_RETURN (true);
  }

  inline bool serialize (hb_serialize_context_t *c,
                         Supplier<GlyphID> &glyphs,
                         unsigned int num_glyphs,
                         int delta)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
    if (unlikely (!coverage.serialize (c, this).serialize (c, glyphs, num_glyphs))) return TRACE_RETURN (false);
    deltaGlyphID.set (delta); /* TODO(serialize) overflow? */
    return TRACE_RETURN (true);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (coverage.sanitize (c, this) && deltaGlyphID.sanitize (c));
  }

  protected:
  USHORT     format;        /* Format identifier--format = 1 */
  OffsetTo<Coverage>
             coverage;      /* Offset to Coverage table--from
                             * beginning of Substitution table */
  SHORT      deltaGlyphID;  /* Add to original GlyphID to get
                             * substitute GlyphID */
  public:
  DEFINE_SIZE_STATIC (6);
};
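
/* Illustrative note on the 16-bit delta arithmetic above: deltaGlyphID is a
 * signed 16-bit value and the sum is masked to 16 bits, so the substitution
 * wraps around modulo 65536.  For example, with deltaGlyphID = 5 a covered
 * glyph 0xFFFE maps to (0xFFFE + 5) & 0xFFFF = 0x0003, and with
 * deltaGlyphID = -3 glyph 2 maps to 0xFFFF. */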

struct SingleSubstFormat2
{
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    Coverage::Iter iter;
    for (iter.init (this+coverage); iter.more (); iter.next ()) {
      if (c->glyphs->has (iter.get_glyph ()))
        c->glyphs->add (substitute[iter.get_coverage ()]);
    }
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    Coverage::Iter iter;
    for (iter.init (this+coverage); iter.more (); iter.next ()) {
      c->input->add (iter.get_glyph ());
      c->output->add (substitute[iter.get_coverage ()]);
    }
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);
    return TRACE_RETURN (c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED);
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_codepoint_t glyph_id = c->buffer->cur().codepoint;
    unsigned int index = (this+coverage).get_coverage (glyph_id);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    if (unlikely (index >= substitute.len)) return TRACE_RETURN (false);

    glyph_id = substitute[index];
    c->replace_glyph (glyph_id);

    return TRACE_RETURN (true);
  }

  inline bool serialize (hb_serialize_context_t *c,
                         Supplier<GlyphID> &glyphs,
                         Supplier<GlyphID> &substitutes,
                         unsigned int num_glyphs)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
    if (unlikely (!substitute.serialize (c, substitutes, num_glyphs))) return TRACE_RETURN (false);
    if (unlikely (!coverage.serialize (c, this).serialize (c, glyphs, num_glyphs))) return TRACE_RETURN (false);
    return TRACE_RETURN (true);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (coverage.sanitize (c, this) && substitute.sanitize (c));
  }

  protected:
  USHORT     format;        /* Format identifier--format = 2 */
  OffsetTo<Coverage>
             coverage;      /* Offset to Coverage table--from
                             * beginning of Substitution table */
  ArrayOf<GlyphID>
             substitute;    /* Array of substitute
                             * GlyphIDs--ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (6, substitute);
};

struct SingleSubst
{
  inline bool serialize (hb_serialize_context_t *c,
                         Supplier<GlyphID> &glyphs,
                         Supplier<GlyphID> &substitutes,
                         unsigned int num_glyphs)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (u.format))) return TRACE_RETURN (false);
    unsigned int format = 2;
    int delta;
    if (num_glyphs) {
      format = 1;
      /* TODO(serialize) check for wrap-around */
      delta = substitutes[0] - glyphs[0];
      for (unsigned int i = 1; i < num_glyphs; i++)
        if (delta != substitutes[i] - glyphs[i]) {
          format = 2;
          break;
        }
    }
    u.format.set (format);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.serialize (c, glyphs, num_glyphs, delta));
    case 2: return TRACE_RETURN (u.format2.serialize (c, glyphs, substitutes, num_glyphs));
    default: return TRACE_RETURN (false);
    }
  }

  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this);
    switch (u.format) {
    case 1: return TRACE_RETURN (c->dispatch (u.format1));
    case 2: return TRACE_RETURN (c->dispatch (u.format2));
    default: return TRACE_RETURN (c->default_return_value ());
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    case 2: return TRACE_RETURN (u.format2.sanitize (c));
    default: return TRACE_RETURN (true);
    }
  }

  protected:
  union {
  USHORT             format;   /* Format identifier */
  SingleSubstFormat1 format1;
  SingleSubstFormat2 format2;
  } u;
};
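
/* A worked example of the format choice in SingleSubst::serialize above:
 * mapping glyphs {10, 11, 12} to substitutes {20, 21, 22} gives a constant
 * delta of 10, so the compact format 1 (delta) is emitted; mapping {10, 11}
 * to {20, 30} gives deltas 10 and 19, so format 2 (explicit substitute
 * array) is used instead. */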


struct Sequence
{
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    unsigned int count = substitute.len;
    for (unsigned int i = 0; i < count; i++)
      c->glyphs->add (substitute[i]);
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    unsigned int count = substitute.len;
    for (unsigned int i = 0; i < count; i++)
      c->output->add (substitute[i]);
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    if (unlikely (!substitute.len)) return TRACE_RETURN (false);

    unsigned int klass = _hb_glyph_info_is_ligature (&c->buffer->cur()) ?
                         HB_OT_LAYOUT_GLYPH_PROPS_BASE_GLYPH : 0;
    unsigned int count = substitute.len;
    if (count == 1) /* Special-case to make it in-place. */
    {
      c->replace_glyph (substitute.array[0]);
    }
    else
    {
      for (unsigned int i = 0; i < count; i++) {
        _hb_glyph_info_set_lig_props_for_component (&c->buffer->cur(), i);
        c->output_glyph (substitute.array[i], klass);
      }
      c->buffer->skip_glyph ();
    }

    return TRACE_RETURN (true);
  }

  inline bool serialize (hb_serialize_context_t *c,
                         Supplier<GlyphID> &glyphs,
                         unsigned int num_glyphs)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
    if (unlikely (!substitute.serialize (c, glyphs, num_glyphs))) return TRACE_RETURN (false);
    return TRACE_RETURN (true);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (substitute.sanitize (c));
  }

  protected:
  ArrayOf<GlyphID>
             substitute;    /* String of GlyphIDs to substitute */
  public:
  DEFINE_SIZE_ARRAY (2, substitute);
};
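
/* Sequence::apply above implements the one-to-many step of multiple
 * substitution: a single substitute is written in place, while a longer
 * sequence is emitted via output_glyph() with per-component lig-props and
 * the original glyph is then skipped.  An empty Sequence is rejected rather
 * than used to delete the glyph. */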

struct MultipleSubstFormat1
{
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    Coverage::Iter iter;
    for (iter.init (this+coverage); iter.more (); iter.next ()) {
      if (c->glyphs->has (iter.get_glyph ()))
        (this+sequence[iter.get_coverage ()]).closure (c);
    }
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);
    unsigned int count = sequence.len;
    for (unsigned int i = 0; i < count; i++)
      (this+sequence[i]).collect_glyphs (c);
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);
    return TRACE_RETURN (c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED);
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);

    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    return TRACE_RETURN ((this+sequence[index]).apply (c));
  }

  inline bool serialize (hb_serialize_context_t *c,
                         Supplier<GlyphID> &glyphs,
                         Supplier<unsigned int> &substitute_len_list,
                         unsigned int num_glyphs,
                         Supplier<GlyphID> &substitute_glyphs_list)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
    if (unlikely (!sequence.serialize (c, num_glyphs))) return TRACE_RETURN (false);
    for (unsigned int i = 0; i < num_glyphs; i++)
      if (unlikely (!sequence[i].serialize (c, this).serialize (c,
                                                                substitute_glyphs_list,
                                                                substitute_len_list[i]))) return TRACE_RETURN (false);
    substitute_len_list.advance (num_glyphs);
    if (unlikely (!coverage.serialize (c, this).serialize (c, glyphs, num_glyphs))) return TRACE_RETURN (false);
    return TRACE_RETURN (true);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (coverage.sanitize (c, this) && sequence.sanitize (c, this));
  }

  protected:
  USHORT     format;        /* Format identifier--format = 1 */
  OffsetTo<Coverage>
             coverage;      /* Offset to Coverage table--from
                             * beginning of Substitution table */
  OffsetArrayOf<Sequence>
             sequence;      /* Array of Sequence tables
                             * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (6, sequence);
};

struct MultipleSubst
{
  inline bool serialize (hb_serialize_context_t *c,
                         Supplier<GlyphID> &glyphs,
                         Supplier<unsigned int> &substitute_len_list,
                         unsigned int num_glyphs,
                         Supplier<GlyphID> &substitute_glyphs_list)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (u.format))) return TRACE_RETURN (false);
    unsigned int format = 1;
    u.format.set (format);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.serialize (c, glyphs, substitute_len_list, num_glyphs, substitute_glyphs_list));
    default: return TRACE_RETURN (false);
    }
  }

  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this);
    switch (u.format) {
    case 1: return TRACE_RETURN (c->dispatch (u.format1));
    default: return TRACE_RETURN (c->default_return_value ());
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    default: return TRACE_RETURN (true);
    }
  }

  protected:
  union {
  USHORT               format;   /* Format identifier */
  MultipleSubstFormat1 format1;
  } u;
};


typedef ArrayOf<GlyphID> AlternateSet;  /* Array of alternate GlyphIDs--in
                                         * arbitrary order */

struct AlternateSubstFormat1
{
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    Coverage::Iter iter;
    for (iter.init (this+coverage); iter.more (); iter.next ()) {
      if (c->glyphs->has (iter.get_glyph ())) {
        const AlternateSet &alt_set = this+alternateSet[iter.get_coverage ()];
        unsigned int count = alt_set.len;
        for (unsigned int i = 0; i < count; i++)
          c->glyphs->add (alt_set[i]);
      }
    }
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    Coverage::Iter iter;
    for (iter.init (this+coverage); iter.more (); iter.next ()) {
      c->input->add (iter.get_glyph ());
      const AlternateSet &alt_set = this+alternateSet[iter.get_coverage ()];
      unsigned int count = alt_set.len;
      for (unsigned int i = 0; i < count; i++)
        c->output->add (alt_set[i]);
    }
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);
    return TRACE_RETURN (c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED);
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_codepoint_t glyph_id = c->buffer->cur().codepoint;

    unsigned int index = (this+coverage).get_coverage (glyph_id);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    const AlternateSet &alt_set = this+alternateSet[index];

    if (unlikely (!alt_set.len)) return TRACE_RETURN (false);

    hb_mask_t glyph_mask = c->buffer->cur().mask;
    hb_mask_t lookup_mask = c->lookup_mask;

    /* Note: This breaks badly if two features enabled this lookup together. */
    unsigned int shift = _hb_ctz (lookup_mask);
    unsigned int alt_index = ((lookup_mask & glyph_mask) >> shift);

    if (unlikely (alt_index > alt_set.len || alt_index == 0)) return TRACE_RETURN (false);

    glyph_id = alt_set[alt_index - 1];

    c->replace_glyph (glyph_id);

    return TRACE_RETURN (true);
  }

  inline bool serialize (hb_serialize_context_t *c,
                         Supplier<GlyphID> &glyphs,
                         Supplier<unsigned int> &alternate_len_list,
                         unsigned int num_glyphs,
                         Supplier<GlyphID> &alternate_glyphs_list)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
    if (unlikely (!alternateSet.serialize (c, num_glyphs))) return TRACE_RETURN (false);
    for (unsigned int i = 0; i < num_glyphs; i++)
      if (unlikely (!alternateSet[i].serialize (c, this).serialize (c,
                                                                    alternate_glyphs_list,
                                                                    alternate_len_list[i]))) return TRACE_RETURN (false);
    alternate_len_list.advance (num_glyphs);
    if (unlikely (!coverage.serialize (c, this).serialize (c, glyphs, num_glyphs))) return TRACE_RETURN (false);
    return TRACE_RETURN (true);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (coverage.sanitize (c, this) && alternateSet.sanitize (c, this));
  }

  protected:
  USHORT     format;        /* Format identifier--format = 1 */
  OffsetTo<Coverage>
             coverage;      /* Offset to Coverage table--from
                             * beginning of Substitution table */
  OffsetArrayOf<AlternateSet>
             alternateSet;  /* Array of AlternateSet tables
                             * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (6, alternateSet);
};
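
/* A worked example of the mask arithmetic in AlternateSubstFormat1::apply
 * above: if the lookup is enabled with lookup_mask = 0x0F00, then shift = 8,
 * and a glyph whose mask carries 0x0300 within those bits yields
 * alt_index = (0x0F00 & 0x0300) >> 8 = 3, selecting alt_set[2].  Alternates
 * are thus 1-indexed from the caller's point of view, and alt_index == 0
 * means no alternate was requested. */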

struct AlternateSubst
{
  inline bool serialize (hb_serialize_context_t *c,
                         Supplier<GlyphID> &glyphs,
                         Supplier<unsigned int> &alternate_len_list,
                         unsigned int num_glyphs,
                         Supplier<GlyphID> &alternate_glyphs_list)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (u.format))) return TRACE_RETURN (false);
    unsigned int format = 1;
    u.format.set (format);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.serialize (c, glyphs, alternate_len_list, num_glyphs, alternate_glyphs_list));
    default: return TRACE_RETURN (false);
    }
  }

  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this);
    switch (u.format) {
    case 1: return TRACE_RETURN (c->dispatch (u.format1));
    default: return TRACE_RETURN (c->default_return_value ());
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    default: return TRACE_RETURN (true);
    }
  }

  protected:
  union {
  USHORT                format;  /* Format identifier */
  AlternateSubstFormat1 format1;
  } u;
};


struct Ligature
{
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    unsigned int count = component.len;
    for (unsigned int i = 1; i < count; i++)
      if (!c->glyphs->has (component[i]))
        return;
    c->glyphs->add (ligGlyph);
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    unsigned int count = component.len;
    for (unsigned int i = 1; i < count; i++)
      c->input->add (component[i]);
    c->output->add (ligGlyph);
  }

  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);
    if (c->len != component.len)
      return TRACE_RETURN (false);

    for (unsigned int i = 1; i < c->len; i++)
      if (likely (c->glyphs[i] != component[i]))
        return TRACE_RETURN (false);

    return TRACE_RETURN (true);
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int count = component.len;
    if (unlikely (count < 1)) return TRACE_RETURN (false);

    bool is_mark_ligature = false;
    unsigned int total_component_count = 0;

    unsigned int match_length = 0;
    unsigned int match_positions[MAX_CONTEXT_LENGTH];

    if (likely (!match_input (c, count,
                              &component[1],
                              match_glyph,
                              NULL,
                              &match_length,
                              match_positions,
                              &is_mark_ligature,
                              &total_component_count)))
      return TRACE_RETURN (false);

    ligate_input (c,
                  count,
                  match_positions,
                  match_length,
                  ligGlyph,
                  is_mark_ligature,
                  total_component_count);

    return TRACE_RETURN (true);
  }

  inline bool serialize (hb_serialize_context_t *c,
                         GlyphID ligature,
                         Supplier<GlyphID> &components, /* Starting from second */
                         unsigned int num_components /* Including first component */)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
    ligGlyph = ligature;
    if (unlikely (!component.serialize (c, components, num_components))) return TRACE_RETURN (false);
    return TRACE_RETURN (true);
  }

  public:
  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (ligGlyph.sanitize (c) && component.sanitize (c));
  }

  protected:
  GlyphID    ligGlyph;      /* GlyphID of ligature to substitute */
  HeadlessArrayOf<GlyphID>
             component;     /* Array of component GlyphIDs--start
                             * with the second component--ordered
                             * in writing direction */
  public:
  DEFINE_SIZE_ARRAY (4, component);
};
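
/* Note on the component array above: it is a HeadlessArrayOf, so
 * component.len counts all components including the first, but only the
 * second and later components are stored; the first component is the glyph
 * that selected this Ligature through the Coverage table.  For an "ffi"
 * ligature, for instance, component.len is 3 and the array stores just the
 * trailing "f" and "i". */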

struct LigatureSet
{
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    unsigned int num_ligs = ligature.len;
    for (unsigned int i = 0; i < num_ligs; i++)
      (this+ligature[i]).closure (c);
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    unsigned int num_ligs = ligature.len;
    for (unsigned int i = 0; i < num_ligs; i++)
      (this+ligature[i]).collect_glyphs (c);
  }

  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);
    unsigned int num_ligs = ligature.len;
    for (unsigned int i = 0; i < num_ligs; i++)
    {
      const Ligature &lig = this+ligature[i];
      if (lig.would_apply (c))
        return TRACE_RETURN (true);
    }
    return TRACE_RETURN (false);
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int num_ligs = ligature.len;
    for (unsigned int i = 0; i < num_ligs; i++)
    {
      const Ligature &lig = this+ligature[i];
      if (lig.apply (c)) return TRACE_RETURN (true);
    }

    return TRACE_RETURN (false);
  }

  inline bool serialize (hb_serialize_context_t *c,
                         Supplier<GlyphID> &ligatures,
                         Supplier<unsigned int> &component_count_list,
                         unsigned int num_ligatures,
                         Supplier<GlyphID> &component_list /* Starting from second for each ligature */)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
    if (unlikely (!ligature.serialize (c, num_ligatures))) return TRACE_RETURN (false);
    for (unsigned int i = 0; i < num_ligatures; i++)
      if (unlikely (!ligature[i].serialize (c, this).serialize (c,
                                                                ligatures[i],
                                                                component_list,
                                                                component_count_list[i]))) return TRACE_RETURN (false);
    ligatures.advance (num_ligatures);
    component_count_list.advance (num_ligatures);
    return TRACE_RETURN (true);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (ligature.sanitize (c, this));
  }

  protected:
  OffsetArrayOf<Ligature>
             ligature;      /* Array of Ligature tables
                             * ordered by preference */
  public:
  DEFINE_SIZE_ARRAY (2, ligature);
};

struct LigatureSubstFormat1
{
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    Coverage::Iter iter;
    for (iter.init (this+coverage); iter.more (); iter.next ()) {
      if (c->glyphs->has (iter.get_glyph ()))
        (this+ligatureSet[iter.get_coverage ()]).closure (c);
    }
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    Coverage::Iter iter;
    for (iter.init (this+coverage); iter.more (); iter.next ()) {
      c->input->add (iter.get_glyph ());
      (this+ligatureSet[iter.get_coverage ()]).collect_glyphs (c);
    }
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);
    unsigned int index = (this+coverage).get_coverage (c->glyphs[0]);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    const LigatureSet &lig_set = this+ligatureSet[index];
    return TRACE_RETURN (lig_set.would_apply (c));
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_codepoint_t glyph_id = c->buffer->cur().codepoint;

    unsigned int index = (this+coverage).get_coverage (glyph_id);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    const LigatureSet &lig_set = this+ligatureSet[index];
    return TRACE_RETURN (lig_set.apply (c));
  }

  inline bool serialize (hb_serialize_context_t *c,
                         Supplier<GlyphID> &first_glyphs,
                         Supplier<unsigned int> &ligature_per_first_glyph_count_list,
                         unsigned int num_first_glyphs,
                         Supplier<GlyphID> &ligatures_list,
                         Supplier<unsigned int> &component_count_list,
                         Supplier<GlyphID> &component_list /* Starting from second for each ligature */)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
    if (unlikely (!ligatureSet.serialize (c, num_first_glyphs))) return TRACE_RETURN (false);
    for (unsigned int i = 0; i < num_first_glyphs; i++)
      if (unlikely (!ligatureSet[i].serialize (c, this).serialize (c,
                                                                   ligatures_list,
                                                                   component_count_list,
                                                                   ligature_per_first_glyph_count_list[i],
                                                                   component_list))) return TRACE_RETURN (false);
    ligature_per_first_glyph_count_list.advance (num_first_glyphs);
    if (unlikely (!coverage.serialize (c, this).serialize (c, first_glyphs, num_first_glyphs))) return TRACE_RETURN (false);
    return TRACE_RETURN (true);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (coverage.sanitize (c, this) && ligatureSet.sanitize (c, this));
  }

  protected:
  USHORT     format;        /* Format identifier--format = 1 */
  OffsetTo<Coverage>
             coverage;      /* Offset to Coverage table--from
                             * beginning of Substitution table */
  OffsetArrayOf<LigatureSet>
             ligatureSet;   /* Array of LigatureSet tables
                             * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (6, ligatureSet);
};

struct LigatureSubst
{
  inline bool serialize (hb_serialize_context_t *c,
                         Supplier<GlyphID> &first_glyphs,
                         Supplier<unsigned int> &ligature_per_first_glyph_count_list,
                         unsigned int num_first_glyphs,
                         Supplier<GlyphID> &ligatures_list,
                         Supplier<unsigned int> &component_count_list,
                         Supplier<GlyphID> &component_list /* Starting from second for each ligature */)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (u.format))) return TRACE_RETURN (false);
    unsigned int format = 1;
    u.format.set (format);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.serialize (c, first_glyphs, ligature_per_first_glyph_count_list, num_first_glyphs,
                                                      ligatures_list, component_count_list, component_list));
    default: return TRACE_RETURN (false);
    }
  }

  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this);
    switch (u.format) {
    case 1: return TRACE_RETURN (c->dispatch (u.format1));
    default: return TRACE_RETURN (c->default_return_value ());
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    default: return TRACE_RETURN (true);
    }
  }

  protected:
  union {
  USHORT               format;   /* Format identifier */
  LigatureSubstFormat1 format1;
  } u;
};


struct ContextSubst : Context {};

struct ChainContextSubst : ChainContext {};

struct ExtensionSubst : Extension<ExtensionSubst>
{
  typedef struct SubstLookupSubTable LookupSubTable;

  inline bool is_reverse (void) const;
};


struct ReverseChainSingleSubstFormat1
{
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (backtrack);

    unsigned int count;

    count = backtrack.len;
    for (unsigned int i = 0; i < count; i++)
      if (!(this+backtrack[i]).intersects (c->glyphs))
        return;

    count = lookahead.len;
    for (unsigned int i = 0; i < count; i++)
      if (!(this+lookahead[i]).intersects (c->glyphs))
        return;

    const ArrayOf<GlyphID> &substitute = StructAfter<ArrayOf<GlyphID> > (lookahead);
    Coverage::Iter iter;
    for (iter.init (this+coverage); iter.more (); iter.next ()) {
      if (c->glyphs->has (iter.get_glyph ()))
        c->glyphs->add (substitute[iter.get_coverage ()]);
    }
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);

    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (backtrack);

    unsigned int count;

    (this+coverage).add_coverage (c->input);

    count = backtrack.len;
    for (unsigned int i = 0; i < count; i++)
      (this+backtrack[i]).add_coverage (c->before);

    count = lookahead.len;
    for (unsigned int i = 0; i < count; i++)
      (this+lookahead[i]).add_coverage (c->after);

    const ArrayOf<GlyphID> &substitute = StructAfter<ArrayOf<GlyphID> > (lookahead);
    count = substitute.len;
    for (unsigned int i = 0; i < count; i++)
      c->output->add (substitute[i]);
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);
    return TRACE_RETURN (c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED);
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    if (unlikely (c->nesting_level_left != MAX_NESTING_LEVEL))
      return TRACE_RETURN (false); /* No chaining to this type */

    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
    const ArrayOf<GlyphID> &substitute = StructAfter<ArrayOf<GlyphID> > (lookahead);

    if (match_backtrack (c,
                         backtrack.len, (USHORT *) backtrack.array,
                         match_coverage, this) &&
        match_lookahead (c,
                         lookahead.len, (USHORT *) lookahead.array,
                         match_coverage, this,
                         1))
    {
      c->replace_glyph_inplace (substitute[index]);
      /* Note: We DON'T decrease buffer->idx.  The main loop does it
       * for us.  This is useful for preventing surprises if someone
       * calls us through a Context lookup. */
      return TRACE_RETURN (true);
    }

    return TRACE_RETURN (false);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!(coverage.sanitize (c, this) && backtrack.sanitize (c, this)))
      return TRACE_RETURN (false);
    OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
    if (!lookahead.sanitize (c, this))
      return TRACE_RETURN (false);
    ArrayOf<GlyphID> &substitute = StructAfter<ArrayOf<GlyphID> > (lookahead);
    return TRACE_RETURN (substitute.sanitize (c));
  }

  protected:
  USHORT     format;        /* Format identifier--format = 1 */
  OffsetTo<Coverage>
             coverage;      /* Offset to Coverage table--from
                             * beginning of table */
  OffsetArrayOf<Coverage>
             backtrack;     /* Array of coverage tables
                             * in backtracking sequence, in glyph
                             * sequence order */
  OffsetArrayOf<Coverage>
             lookaheadX;    /* Array of coverage tables
                             * in lookahead sequence, in glyph
                             * sequence order */
  ArrayOf<GlyphID>
             substituteX;   /* Array of substitute
                             * GlyphIDs--ordered by Coverage Index */
  public:
  DEFINE_SIZE_MIN (10);
};
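
/* Note on the layout of ReverseChainSingleSubstFormat1: only format, coverage
 * and backtrack sit at fixed offsets.  The lookahead coverage array and the
 * substitute glyph array follow the variable-length backtrack array in the
 * binary data, which is why the methods above locate them with StructAfter<>
 * rather than through named members; lookaheadX and substituteX are
 * placeholders that are never accessed directly. */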

struct ReverseChainSingleSubst
{
  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this);
    switch (u.format) {
    case 1: return TRACE_RETURN (c->dispatch (u.format1));
    default: return TRACE_RETURN (c->default_return_value ());
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    default: return TRACE_RETURN (true);
    }
  }

  protected:
  union {
  USHORT                         format;  /* Format identifier */
  ReverseChainSingleSubstFormat1 format1;
  } u;
};



/*
 * SubstLookup
 */

struct SubstLookupSubTable
{
  friend struct SubstLookup;

  enum Type {
    Single             = 1,
    Multiple           = 2,
    Alternate          = 3,
    Ligature           = 4,
    Context            = 5,
    ChainContext       = 6,
    Extension          = 7,
    ReverseChainSingle = 8
  };

  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c, unsigned int lookup_type) const
  {
    TRACE_DISPATCH (this);
    switch (lookup_type) {
    case Single:             return TRACE_RETURN (u.single.dispatch (c));
    case Multiple:           return TRACE_RETURN (u.multiple.dispatch (c));
    case Alternate:          return TRACE_RETURN (u.alternate.dispatch (c));
    case Ligature:           return TRACE_RETURN (u.ligature.dispatch (c));
    case Context:            return TRACE_RETURN (u.context.dispatch (c));
    case ChainContext:       return TRACE_RETURN (u.chainContext.dispatch (c));
    case Extension:          return TRACE_RETURN (u.extension.dispatch (c));
    case ReverseChainSingle: return TRACE_RETURN (u.reverseChainContextSingle.dispatch (c));
    default:                 return TRACE_RETURN (c->default_return_value ());
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c, unsigned int lookup_type) {
    TRACE_SANITIZE (this);
    if (!u.header.sub_format.sanitize (c))
      return TRACE_RETURN (false);
    switch (lookup_type) {
    case Single:             return TRACE_RETURN (u.single.sanitize (c));
    case Multiple:           return TRACE_RETURN (u.multiple.sanitize (c));
    case Alternate:          return TRACE_RETURN (u.alternate.sanitize (c));
    case Ligature:           return TRACE_RETURN (u.ligature.sanitize (c));
    case Context:            return TRACE_RETURN (u.context.sanitize (c));
    case ChainContext:       return TRACE_RETURN (u.chainContext.sanitize (c));
    case Extension:          return TRACE_RETURN (u.extension.sanitize (c));
    case ReverseChainSingle: return TRACE_RETURN (u.reverseChainContextSingle.sanitize (c));
    default:                 return TRACE_RETURN (true);
    }
  }

  protected:
  union {
  struct {
    USHORT sub_format;
  } header;
  SingleSubst             single;
  MultipleSubst           multiple;
  AlternateSubst          alternate;
  LigatureSubst           ligature;
  ContextSubst            context;
  ChainContextSubst       chainContext;
  ExtensionSubst          extension;
  ReverseChainSingleSubst reverseChainContextSingle;
  } u;
  public:
  DEFINE_SIZE_UNION (2, header.sub_format);
};


struct SubstLookup : Lookup
{
  inline const SubstLookupSubTable& get_subtable (unsigned int i) const
  { return this+CastR<OffsetArrayOf<SubstLookupSubTable> > (subTable)[i]; }

  inline static bool lookup_type_is_reverse (unsigned int lookup_type)
  { return lookup_type == SubstLookupSubTable::ReverseChainSingle; }

  inline bool is_reverse (void) const
  {
    unsigned int type = get_type ();
    if (unlikely (type == SubstLookupSubTable::Extension))
      return CastR<ExtensionSubst> (get_subtable (0)).is_reverse ();
    return lookup_type_is_reverse (type);
  }

  inline hb_closure_context_t::return_t closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    c->set_recurse_func (dispatch_recurse_func<hb_closure_context_t>);
    return TRACE_RETURN (dispatch (c));
  }

  inline hb_collect_glyphs_context_t::return_t collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    c->set_recurse_func (dispatch_recurse_func<hb_collect_glyphs_context_t>);
    return TRACE_RETURN (dispatch (c));
  }

  template <typename set_t>
  inline void add_coverage (set_t *glyphs) const
  {
    hb_get_coverage_context_t c;
    const Coverage *last = NULL;
    unsigned int count = get_subtable_count ();
    for (unsigned int i = 0; i < count; i++) {
      const Coverage *coverage = &get_subtable (i).dispatch (&c, get_type ());
      if (coverage != last) {
        coverage->add_coverage (glyphs);
        last = coverage;
      }
    }
  }

  inline bool would_apply (hb_would_apply_context_t *c, const hb_set_digest_t *digest) const
  {
    TRACE_WOULD_APPLY (this);
    if (unlikely (!c->len)) return TRACE_RETURN (false);
    if (!digest->may_have (c->glyphs[0])) return TRACE_RETURN (false);
    return TRACE_RETURN (dispatch (c));
  }

  inline bool apply_once (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    if (!c->check_glyph_property (&c->buffer->cur(), c->lookup_props))
      return TRACE_RETURN (false);
    return TRACE_RETURN (dispatch (c));
  }

  static bool apply_recurse_func (hb_apply_context_t *c, unsigned int lookup_index);

  inline SubstLookupSubTable& serialize_subtable (hb_serialize_context_t *c,
                                                  unsigned int i)
  { return CastR<OffsetArrayOf<SubstLookupSubTable> > (subTable)[i].serialize (c, this); }

  inline bool serialize_single (hb_serialize_context_t *c,
                                uint32_t lookup_props,
                                Supplier<GlyphID> &glyphs,
                                Supplier<GlyphID> &substitutes,
                                unsigned int num_glyphs)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!Lookup::serialize (c, SubstLookupSubTable::Single, lookup_props, 1))) return TRACE_RETURN (false);
    return TRACE_RETURN (serialize_subtable (c, 0).u.single.serialize (c, glyphs, substitutes, num_glyphs));
  }

  inline bool serialize_multiple (hb_serialize_context_t *c,
                                  uint32_t lookup_props,
                                  Supplier<GlyphID> &glyphs,
                                  Supplier<unsigned int> &substitute_len_list,
                                  unsigned int num_glyphs,
                                  Supplier<GlyphID> &substitute_glyphs_list)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!Lookup::serialize (c, SubstLookupSubTable::Multiple, lookup_props, 1))) return TRACE_RETURN (false);
    return TRACE_RETURN (serialize_subtable (c, 0).u.multiple.serialize (c, glyphs, substitute_len_list, num_glyphs,
                                                                         substitute_glyphs_list));
  }

  inline bool serialize_alternate (hb_serialize_context_t *c,
                                   uint32_t lookup_props,
                                   Supplier<GlyphID> &glyphs,
                                   Supplier<unsigned int> &alternate_len_list,
                                   unsigned int num_glyphs,
                                   Supplier<GlyphID> &alternate_glyphs_list)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!Lookup::serialize (c, SubstLookupSubTable::Alternate, lookup_props, 1))) return TRACE_RETURN (false);
    return TRACE_RETURN (serialize_subtable (c, 0).u.alternate.serialize (c, glyphs, alternate_len_list, num_glyphs,
                                                                          alternate_glyphs_list));
  }

  inline bool serialize_ligature (hb_serialize_context_t *c,
                                  uint32_t lookup_props,
                                  Supplier<GlyphID> &first_glyphs,
                                  Supplier<unsigned int> &ligature_per_first_glyph_count_list,
                                  unsigned int num_first_glyphs,
                                  Supplier<GlyphID> &ligatures_list,
                                  Supplier<unsigned int> &component_count_list,
                                  Supplier<GlyphID> &component_list /* Starting from second for each ligature */)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!Lookup::serialize (c, SubstLookupSubTable::Ligature, lookup_props, 1))) return TRACE_RETURN (false);
    return TRACE_RETURN (serialize_subtable (c, 0).u.ligature.serialize (c, first_glyphs, ligature_per_first_glyph_count_list, num_first_glyphs,
                                                                         ligatures_list, component_count_list, component_list));
  }

  template <typename context_t>
  static inline typename context_t::return_t dispatch_recurse_func (context_t *c, unsigned int lookup_index);

  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this);
    unsigned int lookup_type = get_type ();
    unsigned int count = get_subtable_count ();
    for (unsigned int i = 0; i < count; i++) {
      typename context_t::return_t r = get_subtable (i).dispatch (c, lookup_type);
      if (c->stop_sublookup_iteration (r))
        return TRACE_RETURN (r);
    }
    return TRACE_RETURN (c->default_return_value ());
  }

  inline bool sanitize (hb_sanitize_context_t *c)
  {
    TRACE_SANITIZE (this);
    if (unlikely (!Lookup::sanitize (c))) return TRACE_RETURN (false);
    OffsetArrayOf<SubstLookupSubTable> &list = CastR<OffsetArrayOf<SubstLookupSubTable> > (subTable);
    if (unlikely (!list.sanitize (c, this, get_type ()))) return TRACE_RETURN (false);

    if (unlikely (get_type () == SubstLookupSubTable::Extension))
    {
      /* The spec says all subtables of an Extension lookup should
       * have the same type.  This is especially important if one has
       * a reverse type! */
      unsigned int type = get_subtable (0).u.extension.get_type ();
      unsigned int count = get_subtable_count ();
      for (unsigned int i = 1; i < count; i++)
        if (get_subtable (i).u.extension.get_type () != type)
          return TRACE_RETURN (false);
    }
    return TRACE_RETURN (true);
  }
};

typedef OffsetListOf<SubstLookup> SubstLookupList;

/*
 * GSUB -- The Glyph Substitution Table
 */

struct GSUB : GSUBGPOS
{
  static const hb_tag_t tableTag = HB_OT_TAG_GSUB;

  inline const SubstLookup& get_lookup (unsigned int i) const
  { return CastR<SubstLookup> (GSUBGPOS::get_lookup (i)); }

  static inline void substitute_start (hb_font_t *font, hb_buffer_t *buffer);
  static inline void substitute_finish (hb_font_t *font, hb_buffer_t *buffer);

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (unlikely (!GSUBGPOS::sanitize (c))) return TRACE_RETURN (false);
    OffsetTo<SubstLookupList> &list = CastR<OffsetTo<SubstLookupList> > (lookupList);
    return TRACE_RETURN (list.sanitize (c, this));
  }
  public:
  DEFINE_SIZE_STATIC (10);
};


void
GSUB::substitute_start (hb_font_t *font, hb_buffer_t *buffer)
{
  _hb_buffer_allocate_gsubgpos_vars (buffer);

  const GDEF &gdef = *hb_ot_layout_from_face (font->face)->gdef;
  unsigned int count = buffer->len;
  for (unsigned int i = 0; i < count; i++)
  {
    _hb_glyph_info_set_glyph_props (&buffer->info[i], gdef.get_glyph_props (buffer->info[i].codepoint));
    _hb_glyph_info_clear_lig_props (&buffer->info[i]);
    buffer->info[i].syllable() = 0;
  }
}

void
GSUB::substitute_finish (hb_font_t *font HB_UNUSED, hb_buffer_t *buffer HB_UNUSED)
{
}


/* Out-of-class implementation for methods recursing */

inline bool ExtensionSubst::is_reverse (void) const
{
  unsigned int type = get_type ();
  if (unlikely (type == SubstLookupSubTable::Extension))
    return CastR<ExtensionSubst> (get_subtable<SubstLookupSubTable> ()).is_reverse ();
  return SubstLookup::lookup_type_is_reverse (type);
}

template <typename context_t>
inline typename context_t::return_t SubstLookup::dispatch_recurse_func (context_t *c, unsigned int lookup_index)
{
  const GSUB &gsub = *(hb_ot_layout_from_face (c->face)->gsub);
  const SubstLookup &l = gsub.get_lookup (lookup_index);
  return l.dispatch (c);
}

inline bool SubstLookup::apply_recurse_func (hb_apply_context_t *c, unsigned int lookup_index)
{
  const GSUB &gsub = *(hb_ot_layout_from_face (c->face)->gsub);
  const SubstLookup &l = gsub.get_lookup (lookup_index);
  unsigned int saved_lookup_props = c->lookup_props;
  c->set_lookup (l);
  bool ret = l.apply_once (c);
  c->lookup_props = saved_lookup_props;
  return ret;
}


} /* namespace OT */


#endif /* HB_OT_LAYOUT_GSUB_TABLE_HH */