gfx/harfbuzz/src/hb-ot-layout-gsubgpos-private.hh

author       Michael Schloh von Bennewitz <michael@schloh.com>
date         Fri, 16 Jan 2015 18:13:44 +0100
branch       TOR_BUG_9701
changeset    14:925c144e1f1f
permissions  -rw-r--r--

Integrate suggestion from review to improve consistency with existing code.

michael@0 1 /*
michael@0 2 * Copyright © 2007,2008,2009,2010 Red Hat, Inc.
michael@0 3 * Copyright © 2010,2012 Google, Inc.
michael@0 4 *
michael@0 5 * This is part of HarfBuzz, a text shaping library.
michael@0 6 *
michael@0 7 * Permission is hereby granted, without written agreement and without
michael@0 8 * license or royalty fees, to use, copy, modify, and distribute this
michael@0 9 * software and its documentation for any purpose, provided that the
michael@0 10 * above copyright notice and the following two paragraphs appear in
michael@0 11 * all copies of this software.
michael@0 12 *
michael@0 13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
michael@0 14 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
michael@0 15 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
michael@0 16 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
michael@0 17 * DAMAGE.
michael@0 18 *
michael@0 19 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
michael@0 20 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
michael@0 21 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
michael@0 22 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
michael@0 23 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
michael@0 24 *
michael@0 25 * Red Hat Author(s): Behdad Esfahbod
michael@0 26 * Google Author(s): Behdad Esfahbod
michael@0 27 */
michael@0 28
michael@0 29 #ifndef HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH
michael@0 30 #define HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH
michael@0 31
michael@0 32 #include "hb-buffer-private.hh"
michael@0 33 #include "hb-ot-layout-gdef-table.hh"
michael@0 34 #include "hb-set-private.hh"
michael@0 35
michael@0 36
michael@0 37 namespace OT {
michael@0 38
michael@0 39
michael@0 40
michael@0 41 #define TRACE_DISPATCH(this) \
michael@0 42 hb_auto_trace_t<context_t::max_debug_depth, typename context_t::return_t> trace \
michael@0 43 (&c->debug_depth, c->get_name (), this, HB_FUNC, \
michael@0 44 "");
michael@0 45
michael@0 46 #ifndef HB_DEBUG_CLOSURE
michael@0 47 #define HB_DEBUG_CLOSURE (HB_DEBUG+0)
michael@0 48 #endif
michael@0 49
michael@0 50 #define TRACE_CLOSURE(this) \
michael@0 51 hb_auto_trace_t<HB_DEBUG_CLOSURE, hb_void_t> trace \
michael@0 52 (&c->debug_depth, c->get_name (), this, HB_FUNC, \
michael@0 53 "");
michael@0 54
michael@0 55 struct hb_closure_context_t
michael@0 56 {
michael@0 57 inline const char *get_name (void) { return "CLOSURE"; }
michael@0 58 static const unsigned int max_debug_depth = HB_DEBUG_CLOSURE;
michael@0 59 typedef hb_void_t return_t;
michael@0 60 typedef return_t (*recurse_func_t) (hb_closure_context_t *c, unsigned int lookup_index);
michael@0 61 template <typename T>
michael@0 62 inline return_t dispatch (const T &obj) { obj.closure (this); return HB_VOID; }
michael@0 63 static return_t default_return_value (void) { return HB_VOID; }
michael@0 64 bool stop_sublookup_iteration (return_t r HB_UNUSED) const { return false; }
michael@0 65 return_t recurse (unsigned int lookup_index)
michael@0 66 {
michael@0 67 if (unlikely (nesting_level_left == 0 || !recurse_func))
michael@0 68 return default_return_value ();
michael@0 69
michael@0 70 nesting_level_left--;
michael@0 71 recurse_func (this, lookup_index);
michael@0 72 nesting_level_left++;
michael@0 73 return HB_VOID;
michael@0 74 }
michael@0 75
michael@0 76 hb_face_t *face;
michael@0 77 hb_set_t *glyphs;
michael@0 78 recurse_func_t recurse_func;
michael@0 79 unsigned int nesting_level_left;
michael@0 80 unsigned int debug_depth;
michael@0 81
michael@0 82 hb_closure_context_t (hb_face_t *face_,
michael@0 83 hb_set_t *glyphs_,
michael@0 84 unsigned int nesting_level_left_ = MAX_NESTING_LEVEL) :
michael@0 85 face (face_),
michael@0 86 glyphs (glyphs_),
michael@0 87 recurse_func (NULL),
michael@0 88 nesting_level_left (nesting_level_left_),
michael@0 89 debug_depth (0) {}
michael@0 90
michael@0 91 void set_recurse_func (recurse_func_t func) { recurse_func = func; }
michael@0 92 };
michael@0 93
michael@0 94
michael@0 95
michael@0 96 #ifndef HB_DEBUG_WOULD_APPLY
michael@0 97 #define HB_DEBUG_WOULD_APPLY (HB_DEBUG+0)
michael@0 98 #endif
michael@0 99
michael@0 100 #define TRACE_WOULD_APPLY(this) \
michael@0 101 hb_auto_trace_t<HB_DEBUG_WOULD_APPLY, bool> trace \
michael@0 102 (&c->debug_depth, c->get_name (), this, HB_FUNC, \
michael@0 103 "%d glyphs", c->len);
michael@0 104
michael@0 105 struct hb_would_apply_context_t
michael@0 106 {
michael@0 107 inline const char *get_name (void) { return "WOULD_APPLY"; }
michael@0 108 static const unsigned int max_debug_depth = HB_DEBUG_WOULD_APPLY;
michael@0 109 typedef bool return_t;
michael@0 110 template <typename T>
michael@0 111 inline return_t dispatch (const T &obj) { return obj.would_apply (this); }
michael@0 112 static return_t default_return_value (void) { return false; }
michael@0 113 bool stop_sublookup_iteration (return_t r) const { return r; }
michael@0 114
michael@0 115 hb_face_t *face;
michael@0 116 const hb_codepoint_t *glyphs;
michael@0 117 unsigned int len;
michael@0 118 bool zero_context;
michael@0 119 unsigned int debug_depth;
michael@0 120
michael@0 121 hb_would_apply_context_t (hb_face_t *face_,
michael@0 122 const hb_codepoint_t *glyphs_,
michael@0 123 unsigned int len_,
michael@0 124 bool zero_context_) :
michael@0 125 face (face_),
michael@0 126 glyphs (glyphs_),
michael@0 127 len (len_),
michael@0 128 zero_context (zero_context_),
michael@0 129 debug_depth (0) {}
michael@0 130 };
michael@0 131
michael@0 132
michael@0 133
michael@0 134 #ifndef HB_DEBUG_COLLECT_GLYPHS
michael@0 135 #define HB_DEBUG_COLLECT_GLYPHS (HB_DEBUG+0)
michael@0 136 #endif
michael@0 137
michael@0 138 #define TRACE_COLLECT_GLYPHS(this) \
michael@0 139 hb_auto_trace_t<HB_DEBUG_COLLECT_GLYPHS, hb_void_t> trace \
michael@0 140 (&c->debug_depth, c->get_name (), this, HB_FUNC, \
michael@0 141 "");
michael@0 142
michael@0 143 struct hb_collect_glyphs_context_t
michael@0 144 {
michael@0 145 inline const char *get_name (void) { return "COLLECT_GLYPHS"; }
michael@0 146 static const unsigned int max_debug_depth = HB_DEBUG_COLLECT_GLYPHS;
michael@0 147 typedef hb_void_t return_t;
michael@0 148 typedef return_t (*recurse_func_t) (hb_collect_glyphs_context_t *c, unsigned int lookup_index);
michael@0 149 template <typename T>
michael@0 150 inline return_t dispatch (const T &obj) { obj.collect_glyphs (this); return HB_VOID; }
michael@0 151 static return_t default_return_value (void) { return HB_VOID; }
michael@0 152 bool stop_sublookup_iteration (return_t r HB_UNUSED) const { return false; }
michael@0 153 return_t recurse (unsigned int lookup_index)
michael@0 154 {
michael@0 155 if (unlikely (nesting_level_left == 0 || !recurse_func))
michael@0 156 return default_return_value ();
michael@0 157
michael@0 158 /* Note that GPOS sets recurse_func to NULL already, so it doesn't get
michael@0 159 * past the previous check. For GSUB, we only want to collect the output
michael@0 160 * glyphs in the recursion. If output is not requested, we can go home now.
michael@0 161 *
michael@0 162 * Note further that the above is not exactly correct. A recursed lookup
michael@0 163 * is allowed to match input that is not matched in the context, but that's
michael@0 164 * not how most fonts are built. It's possible to relax that and recurse
michael@0 165 * with all sets here if it proves to be an issue.
michael@0 166 */
michael@0 167
michael@0 168 if (output == hb_set_get_empty ())
michael@0 169 return HB_VOID;
michael@0 170
michael@0 171 hb_set_t *old_before = before;
michael@0 172 hb_set_t *old_input = input;
michael@0 173 hb_set_t *old_after = after;
michael@0 174 before = input = after = hb_set_get_empty ();
michael@0 175
michael@0 176 nesting_level_left--;
michael@0 177 recurse_func (this, lookup_index);
michael@0 178 nesting_level_left++;
michael@0 179
michael@0 180 before = old_before;
michael@0 181 input = old_input;
michael@0 182 after = old_after;
michael@0 183
michael@0 184 return HB_VOID;
michael@0 185 }
michael@0 186
michael@0 187 hb_face_t *face;
michael@0 188 hb_set_t *before;
michael@0 189 hb_set_t *input;
michael@0 190 hb_set_t *after;
michael@0 191 hb_set_t *output;
michael@0 192 recurse_func_t recurse_func;
michael@0 193 unsigned int nesting_level_left;
michael@0 194 unsigned int debug_depth;
michael@0 195
michael@0 196 hb_collect_glyphs_context_t (hb_face_t *face_,
michael@0 197 hb_set_t *glyphs_before, /* OUT. May be NULL */
michael@0 198 hb_set_t *glyphs_input, /* OUT. May be NULL */
michael@0 199 hb_set_t *glyphs_after, /* OUT. May be NULL */
michael@0 200 hb_set_t *glyphs_output, /* OUT. May be NULL */
michael@0 201 unsigned int nesting_level_left_ = MAX_NESTING_LEVEL) :
michael@0 202 face (face_),
michael@0 203 before (glyphs_before ? glyphs_before : hb_set_get_empty ()),
michael@0 204 input (glyphs_input ? glyphs_input : hb_set_get_empty ()),
michael@0 205 after (glyphs_after ? glyphs_after : hb_set_get_empty ()),
michael@0 206 output (glyphs_output ? glyphs_output : hb_set_get_empty ()),
michael@0 207 recurse_func (NULL),
michael@0 208 nesting_level_left (nesting_level_left_),
michael@0 209 debug_depth (0) {}
michael@0 210
michael@0 211 void set_recurse_func (recurse_func_t func) { recurse_func = func; }
michael@0 212 };
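/* An illustrative, non-normative sketch of how this context is typically set
 * up.  Slots passed as NULL alias the shared empty set, so only the requested
 * sets accumulate glyphs, and recurse () above bails out early whenever the
 * caller did not ask for output glyphs at all.  The driver code around the
 * constructor is schematic, not an API taken from this file.
 */
#if 0
hb_set_t *output = hb_set_create ();
hb_collect_glyphs_context_t c (face,
			       NULL /* before */,
			       NULL /* input  */,
			       NULL /* after  */,
			       output);
/* ... dispatch the lookup's collect_glyphs () with &c, then consume `output'
 * and release it with hb_set_destroy () ... */
#endif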
michael@0 213
michael@0 214
michael@0 215
michael@0 216 struct hb_get_coverage_context_t
michael@0 217 {
michael@0 218 inline const char *get_name (void) { return "GET_COVERAGE"; }
michael@0 219 static const unsigned int max_debug_depth = 0;
michael@0 220 typedef const Coverage &return_t;
michael@0 221 template <typename T>
michael@0 222 inline return_t dispatch (const T &obj) { return obj.get_coverage (); }
michael@0 223 static return_t default_return_value (void) { return Null(Coverage); }
michael@0 224
michael@0 225 hb_get_coverage_context_t (void) :
michael@0 226 debug_depth (0) {}
michael@0 227
michael@0 228 unsigned int debug_depth;
michael@0 229 };
michael@0 230
michael@0 231
michael@0 232
michael@0 233 #ifndef HB_DEBUG_APPLY
michael@0 234 #define HB_DEBUG_APPLY (HB_DEBUG+0)
michael@0 235 #endif
michael@0 236
michael@0 237 #define TRACE_APPLY(this) \
michael@0 238 hb_auto_trace_t<HB_DEBUG_APPLY, bool> trace \
michael@0 239 (&c->debug_depth, c->get_name (), this, HB_FUNC, \
michael@0 240 "idx %d codepoint %u", c->buffer->idx, c->buffer->cur().codepoint);
michael@0 241
michael@0 242 struct hb_apply_context_t
michael@0 243 {
michael@0 244 inline const char *get_name (void) { return "APPLY"; }
michael@0 245 static const unsigned int max_debug_depth = HB_DEBUG_APPLY;
michael@0 246 typedef bool return_t;
michael@0 247 typedef return_t (*recurse_func_t) (hb_apply_context_t *c, unsigned int lookup_index);
michael@0 248 template <typename T>
michael@0 249 inline return_t dispatch (const T &obj) { return obj.apply (this); }
michael@0 250 static return_t default_return_value (void) { return false; }
michael@0 251 bool stop_sublookup_iteration (return_t r) const { return r; }
michael@0 252 return_t recurse (unsigned int lookup_index)
michael@0 253 {
michael@0 254 if (unlikely (nesting_level_left == 0 || !recurse_func))
michael@0 255 return default_return_value ();
michael@0 256
michael@0 257 nesting_level_left--;
michael@0 258 bool ret = recurse_func (this, lookup_index);
michael@0 259 nesting_level_left++;
michael@0 260 return ret;
michael@0 261 }
michael@0 262
michael@0 263 unsigned int table_index; /* GSUB/GPOS */
michael@0 264 hb_font_t *font;
michael@0 265 hb_face_t *face;
michael@0 266 hb_buffer_t *buffer;
michael@0 267 hb_direction_t direction;
michael@0 268 hb_mask_t lookup_mask;
michael@0 269 bool auto_zwj;
michael@0 270 recurse_func_t recurse_func;
michael@0 271 unsigned int nesting_level_left;
michael@0 272 unsigned int lookup_props;
michael@0 273 const GDEF &gdef;
michael@0 274 bool has_glyph_classes;
michael@0 275 unsigned int debug_depth;
michael@0 276
michael@0 277
michael@0 278 hb_apply_context_t (unsigned int table_index_,
michael@0 279 hb_font_t *font_,
michael@0 280 hb_buffer_t *buffer_) :
michael@0 281 table_index (table_index_),
michael@0 282 font (font_), face (font->face), buffer (buffer_),
michael@0 283 direction (buffer_->props.direction),
michael@0 284 lookup_mask (1),
michael@0 285 auto_zwj (true),
michael@0 286 recurse_func (NULL),
michael@0 287 nesting_level_left (MAX_NESTING_LEVEL),
michael@0 288 lookup_props (0),
michael@0 289 gdef (*hb_ot_layout_from_face (face)->gdef),
michael@0 290 has_glyph_classes (gdef.has_glyph_classes ()),
michael@0 291 debug_depth (0) {}
michael@0 292
michael@0 293 inline void set_lookup_mask (hb_mask_t mask) { lookup_mask = mask; }
michael@0 294 inline void set_auto_zwj (bool auto_zwj_) { auto_zwj = auto_zwj_; }
michael@0 295 inline void set_recurse_func (recurse_func_t func) { recurse_func = func; }
michael@0 296 inline void set_lookup_props (unsigned int lookup_props_) { lookup_props = lookup_props_; }
michael@0 297 inline void set_lookup (const Lookup &l) { lookup_props = l.get_props (); }
michael@0 298
michael@0 299 struct matcher_t
michael@0 300 {
michael@0 301 inline matcher_t (void) :
michael@0 302 lookup_props (0),
michael@0 303 ignore_zwnj (false),
michael@0 304 ignore_zwj (false),
michael@0 305 mask (-1),
michael@0 306 #define arg1(arg) (arg) /* Remove the macro to see why it's needed! */
michael@0 307 syllable arg1(0),
michael@0 308 #undef arg1
michael@0 309 match_func (NULL),
michael@0 310 match_data (NULL) {};
michael@0 311
michael@0 312 typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const USHORT &value, const void *data);
michael@0 313
michael@0 314 inline void set_ignore_zwnj (bool ignore_zwnj_) { ignore_zwnj = ignore_zwnj_; }
michael@0 315 inline void set_ignore_zwj (bool ignore_zwj_) { ignore_zwj = ignore_zwj_; }
michael@0 316 inline void set_lookup_props (unsigned int lookup_props_) { lookup_props = lookup_props_; }
michael@0 317 inline void set_mask (hb_mask_t mask_) { mask = mask_; }
michael@0 318 inline void set_syllable (uint8_t syllable_) { syllable = syllable_; }
michael@0 319 inline void set_match_func (match_func_t match_func_,
michael@0 320 const void *match_data_)
michael@0 321 { match_func = match_func_; match_data = match_data_; }
michael@0 322
michael@0 323 enum may_match_t {
michael@0 324 MATCH_NO,
michael@0 325 MATCH_YES,
michael@0 326 MATCH_MAYBE
michael@0 327 };
michael@0 328
michael@0 329 inline may_match_t may_match (const hb_glyph_info_t &info,
michael@0 330 const USHORT *glyph_data) const
michael@0 331 {
michael@0 332 if (!(info.mask & mask) ||
michael@0 333 (syllable && syllable != info.syllable ()))
michael@0 334 return MATCH_NO;
michael@0 335
michael@0 336 if (match_func)
michael@0 337 return match_func (info.codepoint, *glyph_data, match_data) ? MATCH_YES : MATCH_NO;
michael@0 338
michael@0 339 return MATCH_MAYBE;
michael@0 340 }
michael@0 341
michael@0 342 enum may_skip_t {
michael@0 343 SKIP_NO,
michael@0 344 SKIP_YES,
michael@0 345 SKIP_MAYBE
michael@0 346 };
michael@0 347
michael@0 348 inline may_skip_t
michael@0 349 may_skip (const hb_apply_context_t *c,
michael@0 350 const hb_glyph_info_t &info) const
michael@0 351 {
michael@0 352 unsigned int property;
michael@0 353
michael@0 354 property = _hb_glyph_info_get_glyph_props (&info);
michael@0 355
michael@0 356 if (!c->match_properties (info.codepoint, property, lookup_props))
michael@0 357 return SKIP_YES;
michael@0 358
michael@0 359 if (unlikely (_hb_glyph_info_is_default_ignorable (&info) &&
michael@0 360 (ignore_zwnj || !_hb_glyph_info_is_zwnj (&info)) &&
michael@0 361 (ignore_zwj || !_hb_glyph_info_is_zwj (&info)) &&
michael@0 362 !_hb_glyph_info_ligated (&info)))
michael@0 363 return SKIP_MAYBE;
michael@0 364
michael@0 365 return SKIP_NO;
michael@0 366 }
michael@0 367
michael@0 368 protected:
michael@0 369 unsigned int lookup_props;
michael@0 370 bool ignore_zwnj;
michael@0 371 bool ignore_zwj;
michael@0 372 hb_mask_t mask;
michael@0 373 uint8_t syllable;
michael@0 374 match_func_t match_func;
michael@0 375 const void *match_data;
michael@0 376 };
michael@0 377
michael@0 378 struct skipping_forward_iterator_t
michael@0 379 {
michael@0 380 inline skipping_forward_iterator_t (hb_apply_context_t *c_,
michael@0 381 unsigned int start_index_,
michael@0 382 unsigned int num_items_,
michael@0 383 bool context_match = false) :
michael@0 384 idx (start_index_),
michael@0 385 c (c_),
michael@0 386 match_glyph_data (NULL),
michael@0 387 num_items (num_items_),
michael@0 388 end (c->buffer->len)
michael@0 389 {
michael@0 390 matcher.set_lookup_props (c->lookup_props);
michael@0 391 /* Ignore ZWNJ if we are matching GSUB context, or matching GPOS. */
michael@0 392 matcher.set_ignore_zwnj (context_match || c->table_index == 1);
michael@0 393 /* Ignore ZWJ if we are matching GSUB context, or matching GPOS, or if asked to. */
michael@0 394 matcher.set_ignore_zwj (context_match || c->table_index == 1 || c->auto_zwj);
michael@0 395 if (!context_match)
michael@0 396 matcher.set_mask (c->lookup_mask);
michael@0 397 matcher.set_syllable (start_index_ == c->buffer->idx ? c->buffer->cur().syllable () : 0);
michael@0 398 }
michael@0 399 inline void set_lookup_props (unsigned int lookup_props) { matcher.set_lookup_props (lookup_props); }
michael@0 400 inline void set_syllable (unsigned int syllable) { matcher.set_syllable (syllable); }
michael@0 401 inline void set_match_func (matcher_t::match_func_t match_func,
michael@0 402 const void *match_data,
michael@0 403 const USHORT glyph_data[])
michael@0 404 {
michael@0 405 matcher.set_match_func (match_func, match_data);
michael@0 406 match_glyph_data = glyph_data;
michael@0 407 }
michael@0 408
michael@0 409 inline bool has_no_chance (void) const { return unlikely (num_items && idx + num_items >= end); }
michael@0 410 inline void reject (void) { num_items++; match_glyph_data--; }
michael@0 411 inline bool next (void)
michael@0 412 {
michael@0 413 assert (num_items > 0);
michael@0 414 while (!has_no_chance ())
michael@0 415 {
michael@0 416 idx++;
michael@0 417 const hb_glyph_info_t &info = c->buffer->info[idx];
michael@0 418
michael@0 419 matcher_t::may_skip_t skip = matcher.may_skip (c, info);
michael@0 420 if (unlikely (skip == matcher_t::SKIP_YES))
michael@0 421 continue;
michael@0 422
michael@0 423 matcher_t::may_match_t match = matcher.may_match (info, match_glyph_data);
michael@0 424 if (match == matcher_t::MATCH_YES ||
michael@0 425 (match == matcher_t::MATCH_MAYBE &&
michael@0 426 skip == matcher_t::SKIP_NO))
michael@0 427 {
michael@0 428 num_items--;
michael@0 429 match_glyph_data++;
michael@0 430 return true;
michael@0 431 }
michael@0 432
michael@0 433 if (skip == matcher_t::SKIP_NO)
michael@0 434 return false;
michael@0 435 }
michael@0 436 return false;
michael@0 437 }
michael@0 438
michael@0 439 unsigned int idx;
michael@0 440 protected:
michael@0 441 hb_apply_context_t *c;
michael@0 442 matcher_t matcher;
michael@0 443 const USHORT *match_glyph_data;
michael@0 444
michael@0 445 unsigned int num_items;
michael@0 446 unsigned int end;
michael@0 447 };
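/* The decision table implemented by next () above (and, symmetrically, by
 * prev () in the backward iterator below), combining may_skip and may_match:
 *
 *   SKIP_YES                       -> glyph is skipped outright
 *   MATCH_YES (and not SKIP_YES)   -> glyph is consumed as a match
 *   MATCH_MAYBE with SKIP_NO       -> glyph is consumed as a match
 *   no match, SKIP_NO              -> matching fails at this glyph
 *   anything else (SKIP_MAYBE)     -> glyph is skipped, iteration continues
 */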
michael@0 448
michael@0 449 struct skipping_backward_iterator_t
michael@0 450 {
michael@0 451 inline skipping_backward_iterator_t (hb_apply_context_t *c_,
michael@0 452 unsigned int start_index_,
michael@0 453 unsigned int num_items_,
michael@0 454 bool context_match = false) :
michael@0 455 idx (start_index_),
michael@0 456 c (c_),
michael@0 457 match_glyph_data (NULL),
michael@0 458 num_items (num_items_)
michael@0 459 {
michael@0 460 matcher.set_lookup_props (c->lookup_props);
michael@0 461 /* Ignore ZWNJ if we are matching GSUB context, or matching GPOS. */
michael@0 462 matcher.set_ignore_zwnj (context_match || c->table_index == 1);
michael@0 463 /* Ignore ZWJ if we are matching GSUB context, or matching GPOS, or if asked to. */
michael@0 464 matcher.set_ignore_zwj (context_match || c->table_index == 1 || c->auto_zwj);
michael@0 465 if (!context_match)
michael@0 466 matcher.set_mask (c->lookup_mask);
michael@0 467 matcher.set_syllable (start_index_ == c->buffer->idx ? c->buffer->cur().syllable () : 0);
michael@0 468 }
michael@0 469 inline void set_lookup_props (unsigned int lookup_props) { matcher.set_lookup_props (lookup_props); }
michael@0 470 inline void set_syllable (unsigned int syllable) { matcher.set_syllable (syllable); }
michael@0 471 inline void set_match_func (matcher_t::match_func_t match_func,
michael@0 472 const void *match_data,
michael@0 473 const USHORT glyph_data[])
michael@0 474 {
michael@0 475 matcher.set_match_func (match_func, match_data);
michael@0 476 match_glyph_data = glyph_data;
michael@0 477 }
michael@0 478
michael@0 479 inline bool has_no_chance (void) const { return unlikely (idx < num_items); }
michael@0 480 inline void reject (void) { num_items++; }
michael@0 481 inline bool prev (void)
michael@0 482 {
michael@0 483 assert (num_items > 0);
michael@0 484 while (!has_no_chance ())
michael@0 485 {
michael@0 486 idx--;
michael@0 487 const hb_glyph_info_t &info = c->buffer->out_info[idx];
michael@0 488
michael@0 489 matcher_t::may_skip_t skip = matcher.may_skip (c, info);
michael@0 490
michael@0 491 if (unlikely (skip == matcher_t::SKIP_YES))
michael@0 492 continue;
michael@0 493
michael@0 494 matcher_t::may_match_t match = matcher.may_match (info, match_glyph_data);
michael@0 495 if (match == matcher_t::MATCH_YES ||
michael@0 496 (match == matcher_t::MATCH_MAYBE &&
michael@0 497 skip == matcher_t::SKIP_NO))
michael@0 498 {
michael@0 499 num_items--;
michael@0 500 match_glyph_data++;
michael@0 501 return true;
michael@0 502 }
michael@0 503
michael@0 504 if (skip == matcher_t::SKIP_NO)
michael@0 505 return false;
michael@0 506 }
michael@0 507 return false;
michael@0 508 }
michael@0 509
michael@0 510 unsigned int idx;
michael@0 511 protected:
michael@0 512 hb_apply_context_t *c;
michael@0 513 matcher_t matcher;
michael@0 514 const USHORT *match_glyph_data;
michael@0 515
michael@0 516 unsigned int num_items;
michael@0 517 };
michael@0 518
michael@0 519 inline bool
michael@0 520 match_properties_mark (hb_codepoint_t glyph,
michael@0 521 unsigned int glyph_props,
michael@0 522 unsigned int lookup_props) const
michael@0 523 {
michael@0 524 /* If using mark filtering sets, the high short of
michael@0 525 * lookup_props has the set index.
michael@0 526 */
michael@0 527 if (lookup_props & LookupFlag::UseMarkFilteringSet)
michael@0 528 return gdef.mark_set_covers (lookup_props >> 16, glyph);
michael@0 529
michael@0 530 /* The second byte of lookup_props has the meaning
michael@0 531 * "ignore marks of attachment type different than
michael@0 532 * the attachment type specified."
michael@0 533 */
michael@0 534 if (lookup_props & LookupFlag::MarkAttachmentType)
michael@0 535 return (lookup_props & LookupFlag::MarkAttachmentType) == (glyph_props & LookupFlag::MarkAttachmentType);
michael@0 536
michael@0 537 return true;
michael@0 538 }
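/* A worked example of the lookup_props packing used above; the flag values
 * follow the OpenType LookupFlag definitions, the set index is hypothetical.
 *
 *   UseMarkFilteringSet with mark filtering set 2:
 *     lookup_props = (2u << 16) | LookupFlag::UseMarkFilteringSet
 *     -> match_properties_mark () reduces to gdef.mark_set_covers (2, glyph)
 *
 *   MarkAttachmentType class 3 (high byte of the flag word):
 *     lookup_props = 3u << 8
 *     -> a mark matches only if the attachment-class byte in its glyph_props
 *        equals 3 as well
 */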
michael@0 539
michael@0 540 inline bool
michael@0 541 match_properties (hb_codepoint_t glyph,
michael@0 542 unsigned int glyph_props,
michael@0 543 unsigned int lookup_props) const
michael@0 544 {
michael@0 545 /* Not covered, if, for example, glyph class is ligature and
michael@0 546 * lookup_props includes LookupFlag::IgnoreLigatures
michael@0 547 */
michael@0 548 if (glyph_props & lookup_props & LookupFlag::IgnoreFlags)
michael@0 549 return false;
michael@0 550
michael@0 551 if (unlikely (glyph_props & HB_OT_LAYOUT_GLYPH_PROPS_MARK))
michael@0 552 return match_properties_mark (glyph, glyph_props, lookup_props);
michael@0 553
michael@0 554 return true;
michael@0 555 }
michael@0 556
michael@0 557 inline bool
michael@0 558 check_glyph_property (hb_glyph_info_t *info,
michael@0 559 unsigned int lookup_props) const
michael@0 560 {
michael@0 561 unsigned int property;
michael@0 562
michael@0 563 property = _hb_glyph_info_get_glyph_props (info);
michael@0 564
michael@0 565 return match_properties (info->codepoint, property, lookup_props);
michael@0 566 }
michael@0 567
michael@0 568 inline void _set_glyph_props (hb_codepoint_t glyph_index,
michael@0 569 unsigned int class_guess = 0,
michael@0 570 bool ligature = false) const
michael@0 571 {
michael@0 572 unsigned int add_in = _hb_glyph_info_get_glyph_props (&buffer->cur()) &
michael@0 573 HB_OT_LAYOUT_GLYPH_PROPS_PRESERVE;
michael@0 574 add_in |= HB_OT_LAYOUT_GLYPH_PROPS_SUBSTITUTED;
michael@0 575 if (ligature)
michael@0 576 add_in |= HB_OT_LAYOUT_GLYPH_PROPS_LIGATED;
michael@0 577 if (likely (has_glyph_classes))
michael@0 578 _hb_glyph_info_set_glyph_props (&buffer->cur(), add_in | gdef.get_glyph_props (glyph_index));
michael@0 579 else if (class_guess)
michael@0 580 _hb_glyph_info_set_glyph_props (&buffer->cur(), add_in | class_guess);
michael@0 581 }
michael@0 582
michael@0 583 inline void replace_glyph (hb_codepoint_t glyph_index) const
michael@0 584 {
michael@0 585 _set_glyph_props (glyph_index);
michael@0 586 buffer->replace_glyph (glyph_index);
michael@0 587 }
michael@0 588 inline void replace_glyph_inplace (hb_codepoint_t glyph_index) const
michael@0 589 {
michael@0 590 _set_glyph_props (glyph_index);
michael@0 591 buffer->cur().codepoint = glyph_index;
michael@0 592 }
michael@0 593 inline void replace_glyph_with_ligature (hb_codepoint_t glyph_index,
michael@0 594 unsigned int class_guess) const
michael@0 595 {
michael@0 596 _set_glyph_props (glyph_index, class_guess, true);
michael@0 597 buffer->replace_glyph (glyph_index);
michael@0 598 }
michael@0 599 inline void output_glyph (hb_codepoint_t glyph_index,
michael@0 600 unsigned int class_guess) const
michael@0 601 {
michael@0 602 _set_glyph_props (glyph_index, class_guess);
michael@0 603 buffer->output_glyph (glyph_index);
michael@0 604 }
michael@0 605 };
michael@0 606
michael@0 607
michael@0 608
michael@0 609 typedef bool (*intersects_func_t) (hb_set_t *glyphs, const USHORT &value, const void *data);
michael@0 610 typedef void (*collect_glyphs_func_t) (hb_set_t *glyphs, const USHORT &value, const void *data);
michael@0 611 typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const USHORT &value, const void *data);
michael@0 612
michael@0 613 struct ContextClosureFuncs
michael@0 614 {
michael@0 615 intersects_func_t intersects;
michael@0 616 };
michael@0 617 struct ContextCollectGlyphsFuncs
michael@0 618 {
michael@0 619 collect_glyphs_func_t collect;
michael@0 620 };
michael@0 621 struct ContextApplyFuncs
michael@0 622 {
michael@0 623 match_func_t match;
michael@0 624 };
michael@0 625
michael@0 626
michael@0 627 static inline bool intersects_glyph (hb_set_t *glyphs, const USHORT &value, const void *data HB_UNUSED)
michael@0 628 {
michael@0 629 return glyphs->has (value);
michael@0 630 }
michael@0 631 static inline bool intersects_class (hb_set_t *glyphs, const USHORT &value, const void *data)
michael@0 632 {
michael@0 633 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
michael@0 634 return class_def.intersects_class (glyphs, value);
michael@0 635 }
michael@0 636 static inline bool intersects_coverage (hb_set_t *glyphs, const USHORT &value, const void *data)
michael@0 637 {
michael@0 638 const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
michael@0 639 return (data+coverage).intersects (glyphs);
michael@0 640 }
michael@0 641
michael@0 642 static inline bool intersects_array (hb_closure_context_t *c,
michael@0 643 unsigned int count,
michael@0 644 const USHORT values[],
michael@0 645 intersects_func_t intersects_func,
michael@0 646 const void *intersects_data)
michael@0 647 {
michael@0 648 for (unsigned int i = 0; i < count; i++)
michael@0 649 if (likely (!intersects_func (c->glyphs, values[i], intersects_data)))
michael@0 650 return false;
michael@0 651 return true;
michael@0 652 }
michael@0 653
michael@0 654
michael@0 655 static inline void collect_glyph (hb_set_t *glyphs, const USHORT &value, const void *data HB_UNUSED)
michael@0 656 {
michael@0 657 glyphs->add (value);
michael@0 658 }
michael@0 659 static inline void collect_class (hb_set_t *glyphs, const USHORT &value, const void *data)
michael@0 660 {
michael@0 661 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
michael@0 662 class_def.add_class (glyphs, value);
michael@0 663 }
michael@0 664 static inline void collect_coverage (hb_set_t *glyphs, const USHORT &value, const void *data)
michael@0 665 {
michael@0 666 const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
michael@0 667 (data+coverage).add_coverage (glyphs);
michael@0 668 }
michael@0 669 static inline void collect_array (hb_collect_glyphs_context_t *c HB_UNUSED,
michael@0 670 hb_set_t *glyphs,
michael@0 671 unsigned int count,
michael@0 672 const USHORT values[],
michael@0 673 collect_glyphs_func_t collect_func,
michael@0 674 const void *collect_data)
michael@0 675 {
michael@0 676 for (unsigned int i = 0; i < count; i++)
michael@0 677 collect_func (glyphs, values[i], collect_data);
michael@0 678 }
michael@0 679
michael@0 680
michael@0 681 static inline bool match_glyph (hb_codepoint_t glyph_id, const USHORT &value, const void *data HB_UNUSED)
michael@0 682 {
michael@0 683 return glyph_id == value;
michael@0 684 }
michael@0 685 static inline bool match_class (hb_codepoint_t glyph_id, const USHORT &value, const void *data)
michael@0 686 {
michael@0 687 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
michael@0 688 return class_def.get_class (glyph_id) == value;
michael@0 689 }
michael@0 690 static inline bool match_coverage (hb_codepoint_t glyph_id, const USHORT &value, const void *data)
michael@0 691 {
michael@0 692 const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
michael@0 693 return (data+coverage).get_coverage (glyph_id) != NOT_COVERED;
michael@0 694 }
michael@0 695
michael@0 696 static inline bool would_match_input (hb_would_apply_context_t *c,
michael@0 697 unsigned int count, /* Including the first glyph (not matched) */
michael@0 698 const USHORT input[], /* Array of input values--start with second glyph */
michael@0 699 match_func_t match_func,
michael@0 700 const void *match_data)
michael@0 701 {
michael@0 702 if (count != c->len)
michael@0 703 return false;
michael@0 704
michael@0 705 for (unsigned int i = 1; i < count; i++)
michael@0 706 if (likely (!match_func (c->glyphs[i], input[i - 1], match_data)))
michael@0 707 return false;
michael@0 708
michael@0 709 return true;
michael@0 710 }
michael@0 711 static inline bool match_input (hb_apply_context_t *c,
michael@0 712 unsigned int count, /* Including the first glyph (not matched) */
michael@0 713 const USHORT input[], /* Array of input values--start with second glyph */
michael@0 714 match_func_t match_func,
michael@0 715 const void *match_data,
michael@0 716 unsigned int *end_offset,
michael@0 717 unsigned int match_positions[MAX_CONTEXT_LENGTH],
michael@0 718 bool *p_is_mark_ligature = NULL,
michael@0 719 unsigned int *p_total_component_count = NULL)
michael@0 720 {
michael@0 721 TRACE_APPLY (NULL);
michael@0 722
michael@0 723 if (unlikely (count > MAX_CONTEXT_LENGTH)) return TRACE_RETURN (false);
michael@0 724
michael@0 725 hb_buffer_t *buffer = c->buffer;
michael@0 726
michael@0 727 hb_apply_context_t::skipping_forward_iterator_t skippy_iter (c, buffer->idx, count - 1);
michael@0 728 skippy_iter.set_match_func (match_func, match_data, input);
michael@0 729 if (skippy_iter.has_no_chance ()) return TRACE_RETURN (false);
michael@0 730
michael@0 731 /*
michael@0 732 * This is perhaps the trickiest part of OpenType... Remarks:
michael@0 733 *
michael@0 734 * - If all components of the ligature were marks, we call this a mark ligature.
michael@0 735 *
michael@0 736 * - If there is no GDEF, and the ligature is NOT a mark ligature, we categorize
michael@0 737 * it as a ligature glyph.
michael@0 738 *
michael@0 739 * - Ligatures cannot be formed across glyphs attached to different components
michael@0 740 * of previous ligatures. Eg. the sequence is LAM,SHADDA,LAM,FATHA,HEH, and
michael@0 741 * LAM,LAM,HEH form a ligature, leaving SHADDA,FATHA next to each other.
michael@0 742 * However, it would be wrong to ligate that SHADDA,FATHA sequence.
michael@0 743 * There is an exception to this: If a ligature tries ligating with marks that
michael@0 744 * belong to the ligature itself, go ahead, assuming that the font designer knows what
michael@0 745 * they are doing (otherwise it can break Indic stuff when a matra wants to
michael@0 746 * ligate with a conjunct...)
michael@0 747 */
michael@0 748
michael@0 749 bool is_mark_ligature = _hb_glyph_info_is_mark (&buffer->cur());
michael@0 750
michael@0 751 unsigned int total_component_count = 0;
michael@0 752 total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->cur());
michael@0 753
michael@0 754 unsigned int first_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
michael@0 755 unsigned int first_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());
michael@0 756
michael@0 757 match_positions[0] = buffer->idx;
michael@0 758 for (unsigned int i = 1; i < count; i++)
michael@0 759 {
michael@0 760 if (!skippy_iter.next ()) return TRACE_RETURN (false);
michael@0 761
michael@0 762 match_positions[i] = skippy_iter.idx;
michael@0 763
michael@0 764 unsigned int this_lig_id = _hb_glyph_info_get_lig_id (&buffer->info[skippy_iter.idx]);
michael@0 765 unsigned int this_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx]);
michael@0 766
michael@0 767 if (first_lig_id && first_lig_comp) {
michael@0 768 /* If first component was attached to a previous ligature component,
michael@0 769 * all subsequent components should be attached to the same ligature
michael@0 770 * component, otherwise we shouldn't ligate them. */
michael@0 771 if (first_lig_id != this_lig_id || first_lig_comp != this_lig_comp)
michael@0 772 return TRACE_RETURN (false);
michael@0 773 } else {
michael@0 774 /* If first component was NOT attached to a previous ligature component,
michael@0 775 * all subsequent components should also NOT be attached to any ligature
michael@0 776 * component, unless they are attached to the first component itself! */
michael@0 777 if (this_lig_id && this_lig_comp && (this_lig_id != first_lig_id))
michael@0 778 return TRACE_RETURN (false);
michael@0 779 }
michael@0 780
michael@0 781 is_mark_ligature = is_mark_ligature && _hb_glyph_info_is_mark (&buffer->info[skippy_iter.idx]);
michael@0 782 total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->info[skippy_iter.idx]);
michael@0 783 }
michael@0 784
michael@0 785 *end_offset = skippy_iter.idx - buffer->idx + 1;
michael@0 786
michael@0 787 if (p_is_mark_ligature)
michael@0 788 *p_is_mark_ligature = is_mark_ligature;
michael@0 789
michael@0 790 if (p_total_component_count)
michael@0 791 *p_total_component_count = total_component_count;
michael@0 792
michael@0 793 return TRACE_RETURN (true);
michael@0 794 }
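/* An illustrative trace of the matching above, with hypothetical buffer
 * contents.  Suppose the buffer holds, starting at idx 4,
 *
 *   4:LAM  5:SHADDA (mark)  6:LAM  7:FATHA (mark)  8:HEH
 *
 * and the lookup ignores marks.  Matching a three-glyph input sequence
 * LAM,LAM,HEH then produces
 *
 *   match_positions[] = { 4, 6, 8 }     (the marks at 5 and 7 are skipped)
 *   *end_offset       = 8 - 4 + 1 = 5   (span in the buffer, including skips)
 *
 * is_mark_ligature comes out false (the matched glyphs are not marks), and
 * total_component_count sums the lig_num_comps of the three matched glyphs.
 */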
michael@0 795 static inline void ligate_input (hb_apply_context_t *c,
michael@0 796 unsigned int count, /* Including the first glyph */
michael@0 797 unsigned int match_positions[MAX_CONTEXT_LENGTH], /* Including the first glyph */
michael@0 798 unsigned int match_length,
michael@0 799 hb_codepoint_t lig_glyph,
michael@0 800 bool is_mark_ligature,
michael@0 801 unsigned int total_component_count)
michael@0 802 {
michael@0 803 TRACE_APPLY (NULL);
michael@0 804
michael@0 805 hb_buffer_t *buffer = c->buffer;
michael@0 806
michael@0 807 buffer->merge_clusters (buffer->idx, buffer->idx + match_length);
michael@0 808
michael@0 809 /*
michael@0 810 * - If it *is* a mark ligature, we don't allocate a new ligature id, and leave
michael@0 811 * the ligature to keep its old ligature id. This will allow it to attach to
michael@0 812 * a base ligature in GPOS. Eg. if the sequence is: LAM,LAM,SHADDA,FATHA,HEH,
michael@0 813 * and LAM,LAM,HEH form a ligature, they will leave SHADDA and FATHA with a
michael@0 814 * ligature id and component value of 2. Then if SHADDA,FATHA form a ligature
michael@0 815 * later, we don't want them to lose their ligature id/component, otherwise
michael@0 816 * GPOS will fail to correctly position the mark ligature on top of the
michael@0 817 * LAM,LAM,HEH ligature. See:
michael@0 818 * https://bugzilla.gnome.org/show_bug.cgi?id=676343
michael@0 819 *
michael@0 820 * - If a ligature is formed of components, some of which are also ligatures
michael@0 821 * themselves, and those ligature components had marks attached to *their*
michael@0 822 * components, we have to attach the marks to the new ligature component
michael@0 823 * positions! Now *that*'s tricky! And these marks may be following the
michael@0 824 * last component of the whole sequence, so we should loop forward looking
michael@0 825 * for them and update them.
michael@0 826 *
michael@0 827 * Eg. the sequence is LAM,LAM,SHADDA,FATHA,HEH, and the font first forms a
michael@0 828 * 'calt' ligature of LAM,HEH, leaving the SHADDA and FATHA with a ligature
michael@0 829 * id and component == 1. Now, during 'liga', the LAM and the LAM-HEH ligature
michael@0 830 * form a LAM-LAM-HEH ligature. We need to reassign the SHADDA and FATHA to
michael@0 831 * the new ligature with a component value of 2.
michael@0 832 *
michael@0 833 * This in fact happened to a font... See:
michael@0 834 * https://bugzilla.gnome.org/show_bug.cgi?id=437633
michael@0 835 */
michael@0 836
michael@0 837 unsigned int klass = is_mark_ligature ? 0 : HB_OT_LAYOUT_GLYPH_PROPS_LIGATURE;
michael@0 838 unsigned int lig_id = is_mark_ligature ? 0 : _hb_allocate_lig_id (buffer);
michael@0 839 unsigned int last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
michael@0 840 unsigned int last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur());
michael@0 841 unsigned int components_so_far = last_num_components;
michael@0 842
michael@0 843 if (!is_mark_ligature)
michael@0 844 {
michael@0 845 _hb_glyph_info_set_lig_props_for_ligature (&buffer->cur(), lig_id, total_component_count);
michael@0 846 if (_hb_glyph_info_get_general_category (&buffer->cur()) == HB_UNICODE_GENERAL_CATEGORY_NON_SPACING_MARK)
michael@0 847 {
michael@0 848 _hb_glyph_info_set_general_category (&buffer->cur(), HB_UNICODE_GENERAL_CATEGORY_OTHER_LETTER);
michael@0 849 _hb_glyph_info_set_modified_combining_class (&buffer->cur(), 0);
michael@0 850 }
michael@0 851 }
michael@0 852 c->replace_glyph_with_ligature (lig_glyph, klass);
michael@0 853
michael@0 854 for (unsigned int i = 1; i < count; i++)
michael@0 855 {
michael@0 856 while (buffer->idx < match_positions[i])
michael@0 857 {
michael@0 858 if (!is_mark_ligature) {
michael@0 859 unsigned int new_lig_comp = components_so_far - last_num_components +
michael@0 860 MIN (MAX (_hb_glyph_info_get_lig_comp (&buffer->cur()), 1u), last_num_components);
michael@0 861 _hb_glyph_info_set_lig_props_for_mark (&buffer->cur(), lig_id, new_lig_comp);
michael@0 862 }
michael@0 863 buffer->next_glyph ();
michael@0 864 }
michael@0 865
michael@0 866 last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
michael@0 867 last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur());
michael@0 868 components_so_far += last_num_components;
michael@0 869
michael@0 870 /* Skip the base glyph */
michael@0 871 buffer->idx++;
michael@0 872 }
michael@0 873
michael@0 874 if (!is_mark_ligature && last_lig_id) {
michael@0 875 /* Re-adjust components for any marks following. */
michael@0 876 for (unsigned int i = buffer->idx; i < buffer->len; i++) {
michael@0 877 if (last_lig_id == _hb_glyph_info_get_lig_id (&buffer->info[i])) {
michael@0 878 unsigned int new_lig_comp = components_so_far - last_num_components +
michael@0 879 MIN (MAX (_hb_glyph_info_get_lig_comp (&buffer->info[i]), 1u), last_num_components);
michael@0 880 _hb_glyph_info_set_lig_props_for_mark (&buffer->info[i], lig_id, new_lig_comp);
michael@0 881 } else
michael@0 882 break;
michael@0 883 }
michael@0 884 }
michael@0 885 }
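/* A concrete walk-through of the component re-assignment above, using the
 * 'calt'/'liga' scenario from the comment (the lig ids are hypothetical).
 *
 *   buffer: LAM   LAM+HEH (lig id 1, 2 comps)   SHADDA (id 1, comp 1)   FATHA (id 1, comp 1)
 *
 *   'liga' ligates LAM with the LAM+HEH ligature:
 *     total_component_count = 1 + 2 = 3, fresh lig id (say 2)
 *     after the loop: components_so_far = 3, last_num_components = 2, last_lig_id = 1
 *
 *   The trailing fix-up then gives SHADDA and FATHA
 *     new_lig_comp = 3 - 2 + MIN (MAX (1, 1u), 2) = 2
 *   i.e. they are re-attached as component 2 of the new three-component
 *   ligature, as the comment above requires.
 */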
michael@0 886
michael@0 887 static inline bool match_backtrack (hb_apply_context_t *c,
michael@0 888 unsigned int count,
michael@0 889 const USHORT backtrack[],
michael@0 890 match_func_t match_func,
michael@0 891 const void *match_data)
michael@0 892 {
michael@0 893 TRACE_APPLY (NULL);
michael@0 894
michael@0 895 hb_apply_context_t::skipping_backward_iterator_t skippy_iter (c, c->buffer->backtrack_len (), count, true);
michael@0 896 skippy_iter.set_match_func (match_func, match_data, backtrack);
michael@0 897 if (skippy_iter.has_no_chance ()) return TRACE_RETURN (false);
michael@0 898
michael@0 899 for (unsigned int i = 0; i < count; i++)
michael@0 900 if (!skippy_iter.prev ())
michael@0 901 return TRACE_RETURN (false);
michael@0 902
michael@0 903 return TRACE_RETURN (true);
michael@0 904 }
michael@0 905
michael@0 906 static inline bool match_lookahead (hb_apply_context_t *c,
michael@0 907 unsigned int count,
michael@0 908 const USHORT lookahead[],
michael@0 909 match_func_t match_func,
michael@0 910 const void *match_data,
michael@0 911 unsigned int offset)
michael@0 912 {
michael@0 913 TRACE_APPLY (NULL);
michael@0 914
michael@0 915 hb_apply_context_t::skipping_forward_iterator_t skippy_iter (c, c->buffer->idx + offset - 1, count, true);
michael@0 916 skippy_iter.set_match_func (match_func, match_data, lookahead);
michael@0 917 if (skippy_iter.has_no_chance ()) return TRACE_RETURN (false);
michael@0 918
michael@0 919 for (unsigned int i = 0; i < count; i++)
michael@0 920 if (!skippy_iter.next ())
michael@0 921 return TRACE_RETURN (false);
michael@0 922
michael@0 923 return TRACE_RETURN (true);
michael@0 924 }
michael@0 925
michael@0 926
michael@0 927
michael@0 928 struct LookupRecord
michael@0 929 {
michael@0 930 inline bool sanitize (hb_sanitize_context_t *c) {
michael@0 931 TRACE_SANITIZE (this);
michael@0 932 return TRACE_RETURN (c->check_struct (this));
michael@0 933 }
michael@0 934
michael@0 935 USHORT sequenceIndex; /* Index into current glyph
michael@0 936 * sequence--first glyph = 0 */
michael@0 937 USHORT lookupListIndex; /* Lookup to apply to that
michael@0 938 * position--zero-based */
michael@0 939 public:
michael@0 940 DEFINE_SIZE_STATIC (4);
michael@0 941 };
michael@0 942
michael@0 943
michael@0 944 template <typename context_t>
michael@0 945 static inline void recurse_lookups (context_t *c,
michael@0 946 unsigned int lookupCount,
michael@0 947 const LookupRecord lookupRecord[] /* Array of LookupRecords--in design order */)
michael@0 948 {
michael@0 949 for (unsigned int i = 0; i < lookupCount; i++)
michael@0 950 c->recurse (lookupRecord[i].lookupListIndex);
michael@0 951 }
michael@0 952
michael@0 953 static inline bool apply_lookup (hb_apply_context_t *c,
michael@0 954 unsigned int count, /* Including the first glyph */
michael@0 955 unsigned int match_positions[MAX_CONTEXT_LENGTH], /* Including the first glyph */
michael@0 956 unsigned int lookupCount,
michael@0 957 const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */
michael@0 958 unsigned int match_length)
michael@0 959 {
michael@0 960 TRACE_APPLY (NULL);
michael@0 961
michael@0 962 hb_buffer_t *buffer = c->buffer;
michael@0 963 unsigned int end;
michael@0 964
michael@0 965 /* All positions are measured from the beginning of the *output* buffer.
michael@0 966 * Adjust. */
michael@0 967 {
michael@0 968 unsigned int bl = buffer->backtrack_len ();
michael@0 969 end = bl + match_length;
michael@0 970
michael@0 971 int delta = bl - buffer->idx;
michael@0 972 /* Convert positions to new indexing. */
michael@0 973 for (unsigned int j = 0; j < count; j++)
michael@0 974 match_positions[j] += delta;
michael@0 975 }
michael@0 976
michael@0 977 for (unsigned int i = 0; i < lookupCount; i++)
michael@0 978 {
michael@0 979 unsigned int idx = lookupRecord[i].sequenceIndex;
michael@0 980 if (idx >= count)
michael@0 981 continue;
michael@0 982
michael@0 983 buffer->move_to (match_positions[idx]);
michael@0 984
michael@0 985 unsigned int orig_len = buffer->backtrack_len () + buffer->lookahead_len ();
michael@0 986 if (!c->recurse (lookupRecord[i].lookupListIndex))
michael@0 987 continue;
michael@0 988
michael@0 989 unsigned int new_len = buffer->backtrack_len () + buffer->lookahead_len ();
michael@0 990 int delta = new_len - orig_len;
michael@0 991
michael@0 992 if (!delta)
michael@0 993 continue;
michael@0 994
michael@0 995 /* Recursed lookup changed buffer len. Adjust. */
michael@0 996
michael@0 997 /* end can't go back past the current match position. */
michael@0 998 end = MAX ((int) match_positions[idx] + 1, int (end) + delta);
michael@0 999
michael@0 1000 unsigned int next = idx + 1; /* next now is the position after the recursed lookup. */
michael@0 1001
michael@0 1002 if (delta > 0)
michael@0 1003 {
michael@0 1004 if (unlikely (delta + count > MAX_CONTEXT_LENGTH))
michael@0 1005 break;
michael@0 1006 }
michael@0 1007 else
michael@0 1008 {
michael@0 1009 /* NOTE: delta is negative. */
michael@0 1010 delta = MAX (delta, (int) next - (int) count);
michael@0 1011 next -= delta;
michael@0 1012 }
michael@0 1013
michael@0 1014 /* Shift! */
michael@0 1015 memmove (match_positions + next + delta, match_positions + next,
michael@0 1016 (count - next) * sizeof (match_positions[0]));
michael@0 1017 next += delta;
michael@0 1018 count += delta;
michael@0 1019
michael@0 1020 /* Fill in new entries. */
michael@0 1021 for (unsigned int j = idx + 1; j < next; j++)
michael@0 1022 match_positions[j] = match_positions[j - 1] + 1;
michael@0 1023
michael@0 1024 /* And fixup the rest. */
michael@0 1025 for (; next < count; next++)
michael@0 1026 match_positions[next] += delta;
michael@0 1027 }
michael@0 1028
michael@0 1029 buffer->move_to (end);
michael@0 1030
michael@0 1031 return TRACE_RETURN (true);
michael@0 1032 }
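/* A worked example of the match_positions fix-up above, with hypothetical
 * numbers.  Say count = 3 and, after conversion to output-buffer coordinates,
 * match_positions[] = { 5, 6, 7 }.  A recursed lookup at sequence index
 * idx = 1 replaces one glyph with two, so delta = +1:
 *
 *   end grows by 1; next = idx + 1 = 2
 *   memmove shifts the old entry 7 from slot 2 to slot 3; count becomes 4
 *   the new slot 2 is filled with match_positions[1] + 1 = 7
 *   the shifted tail gets delta added: slot 3 becomes 7 + 1 = 8
 *
 *   match_positions[] = { 5, 6, 7, 8 }
 *
 * Lookup records with a later sequenceIndex therefore still address the right
 * glyphs even though the buffer grew in the middle of the match.
 */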
michael@0 1033
michael@0 1034
michael@0 1035
michael@0 1036 /* Contextual lookups */
michael@0 1037
michael@0 1038 struct ContextClosureLookupContext
michael@0 1039 {
michael@0 1040 ContextClosureFuncs funcs;
michael@0 1041 const void *intersects_data;
michael@0 1042 };
michael@0 1043
michael@0 1044 struct ContextCollectGlyphsLookupContext
michael@0 1045 {
michael@0 1046 ContextCollectGlyphsFuncs funcs;
michael@0 1047 const void *collect_data;
michael@0 1048 };
michael@0 1049
michael@0 1050 struct ContextApplyLookupContext
michael@0 1051 {
michael@0 1052 ContextApplyFuncs funcs;
michael@0 1053 const void *match_data;
michael@0 1054 };
michael@0 1055
michael@0 1056 static inline void context_closure_lookup (hb_closure_context_t *c,
michael@0 1057 unsigned int inputCount, /* Including the first glyph (not matched) */
michael@0 1058 const USHORT input[], /* Array of input values--start with second glyph */
michael@0 1059 unsigned int lookupCount,
michael@0 1060 const LookupRecord lookupRecord[],
michael@0 1061 ContextClosureLookupContext &lookup_context)
michael@0 1062 {
michael@0 1063 if (intersects_array (c,
michael@0 1064 inputCount ? inputCount - 1 : 0, input,
michael@0 1065 lookup_context.funcs.intersects, lookup_context.intersects_data))
michael@0 1066 recurse_lookups (c,
michael@0 1067 lookupCount, lookupRecord);
michael@0 1068 }
michael@0 1069
michael@0 1070 static inline void context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
michael@0 1071 unsigned int inputCount, /* Including the first glyph (not matched) */
michael@0 1072 const USHORT input[], /* Array of input values--start with second glyph */
michael@0 1073 unsigned int lookupCount,
michael@0 1074 const LookupRecord lookupRecord[],
michael@0 1075 ContextCollectGlyphsLookupContext &lookup_context)
michael@0 1076 {
michael@0 1077 collect_array (c, c->input,
michael@0 1078 inputCount ? inputCount - 1 : 0, input,
michael@0 1079 lookup_context.funcs.collect, lookup_context.collect_data);
michael@0 1080 recurse_lookups (c,
michael@0 1081 lookupCount, lookupRecord);
michael@0 1082 }
michael@0 1083
michael@0 1084 static inline bool context_would_apply_lookup (hb_would_apply_context_t *c,
michael@0 1085 unsigned int inputCount, /* Including the first glyph (not matched) */
michael@0 1086 const USHORT input[], /* Array of input values--start with second glyph */
michael@0 1087 unsigned int lookupCount HB_UNUSED,
michael@0 1088 const LookupRecord lookupRecord[] HB_UNUSED,
michael@0 1089 ContextApplyLookupContext &lookup_context)
michael@0 1090 {
michael@0 1091 return would_match_input (c,
michael@0 1092 inputCount, input,
michael@0 1093 lookup_context.funcs.match, lookup_context.match_data);
michael@0 1094 }
michael@0 1095 static inline bool context_apply_lookup (hb_apply_context_t *c,
michael@0 1096 unsigned int inputCount, /* Including the first glyph (not matched) */
michael@0 1097 const USHORT input[], /* Array of input values--start with second glyph */
michael@0 1098 unsigned int lookupCount,
michael@0 1099 const LookupRecord lookupRecord[],
michael@0 1100 ContextApplyLookupContext &lookup_context)
michael@0 1101 {
michael@0 1102 unsigned int match_length = 0;
michael@0 1103 unsigned int match_positions[MAX_CONTEXT_LENGTH];
michael@0 1104 return match_input (c,
michael@0 1105 inputCount, input,
michael@0 1106 lookup_context.funcs.match, lookup_context.match_data,
michael@0 1107 &match_length, match_positions)
michael@0 1108 && apply_lookup (c,
michael@0 1109 inputCount, match_positions,
michael@0 1110 lookupCount, lookupRecord,
michael@0 1111 match_length);
michael@0 1112 }
michael@0 1113
michael@0 1114 struct Rule
michael@0 1115 {
michael@0 1116 inline void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const
michael@0 1117 {
michael@0 1118 TRACE_CLOSURE (this);
michael@0 1119 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (input, input[0].static_size * (inputCount ? inputCount - 1 : 0));
michael@0 1120 context_closure_lookup (c,
michael@0 1121 inputCount, input,
michael@0 1122 lookupCount, lookupRecord,
michael@0 1123 lookup_context);
michael@0 1124 }
michael@0 1125
michael@0 1126 inline void collect_glyphs (hb_collect_glyphs_context_t *c, ContextCollectGlyphsLookupContext &lookup_context) const
michael@0 1127 {
michael@0 1128 TRACE_COLLECT_GLYPHS (this);
michael@0 1129 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (input, input[0].static_size * (inputCount ? inputCount - 1 : 0));
michael@0 1130 context_collect_glyphs_lookup (c,
michael@0 1131 inputCount, input,
michael@0 1132 lookupCount, lookupRecord,
michael@0 1133 lookup_context);
michael@0 1134 }
michael@0 1135
michael@0 1136 inline bool would_apply (hb_would_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
michael@0 1137 {
michael@0 1138 TRACE_WOULD_APPLY (this);
michael@0 1139 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (input, input[0].static_size * (inputCount ? inputCount - 1 : 0));
michael@0 1140 return TRACE_RETURN (context_would_apply_lookup (c, inputCount, input, lookupCount, lookupRecord, lookup_context));
michael@0 1141 }
michael@0 1142
michael@0 1143 inline bool apply (hb_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
michael@0 1144 {
michael@0 1145 TRACE_APPLY (this);
michael@0 1146 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (input, input[0].static_size * (inputCount ? inputCount - 1 : 0));
michael@0 1147 return TRACE_RETURN (context_apply_lookup (c, inputCount, input, lookupCount, lookupRecord, lookup_context));
michael@0 1148 }
michael@0 1149
michael@0 1150 public:
michael@0 1151 inline bool sanitize (hb_sanitize_context_t *c) {
michael@0 1152 TRACE_SANITIZE (this);
michael@0 1153 return inputCount.sanitize (c)
michael@0 1154 && lookupCount.sanitize (c)
michael@0 1155 && c->check_range (input,
michael@0 1156 input[0].static_size * inputCount
michael@0 1157 + lookupRecordX[0].static_size * lookupCount);
michael@0 1158 }
michael@0 1159
michael@0 1160 protected:
michael@0 1161 USHORT inputCount; /* Total number of glyphs in input
michael@0 1162 * glyph sequence--includes the first
michael@0 1163 * glyph */
michael@0 1164 USHORT lookupCount; /* Number of LookupRecords */
michael@0 1165 USHORT input[VAR]; /* Array of match inputs--start with
michael@0 1166 * second glyph */
michael@0 1167 LookupRecord lookupRecordX[VAR]; /* Array of LookupRecords--in
michael@0 1168 * design order */
michael@0 1169 public:
michael@0 1170 DEFINE_SIZE_ARRAY2 (4, input, lookupRecordX);
michael@0 1171 };
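/* A hypothetical Rule, laid out the way sanitize () and apply () above read
 * it (the glyph id and lookup index are made up):
 *
 *   inputCount     = 2           ; glyphs in the input sequence, incl. the first
 *   lookupCount    = 1
 *   input[]        = { 0x01A5 }  ; the second glyph; the first comes from Coverage
 *   lookupRecord[] = { { sequenceIndex = 0, lookupListIndex = 4 } }
 *
 * apply () hands this to context_apply_lookup (), which match_input ()s the
 * single trailing glyph and, on success, apply_lookup ()s lookup 4 at the
 * first matched position.
 */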
michael@0 1172
michael@0 1173 struct RuleSet
michael@0 1174 {
michael@0 1175 inline void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const
michael@0 1176 {
michael@0 1177 TRACE_CLOSURE (this);
michael@0 1178 unsigned int num_rules = rule.len;
michael@0 1179 for (unsigned int i = 0; i < num_rules; i++)
michael@0 1180 (this+rule[i]).closure (c, lookup_context);
michael@0 1181 }
michael@0 1182
michael@0 1183 inline void collect_glyphs (hb_collect_glyphs_context_t *c, ContextCollectGlyphsLookupContext &lookup_context) const
michael@0 1184 {
michael@0 1185 TRACE_COLLECT_GLYPHS (this);
michael@0 1186 unsigned int num_rules = rule.len;
michael@0 1187 for (unsigned int i = 0; i < num_rules; i++)
michael@0 1188 (this+rule[i]).collect_glyphs (c, lookup_context);
michael@0 1189 }
michael@0 1190
michael@0 1191 inline bool would_apply (hb_would_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
michael@0 1192 {
michael@0 1193 TRACE_WOULD_APPLY (this);
michael@0 1194 unsigned int num_rules = rule.len;
michael@0 1195 for (unsigned int i = 0; i < num_rules; i++)
michael@0 1196 {
michael@0 1197 if ((this+rule[i]).would_apply (c, lookup_context))
michael@0 1198 return TRACE_RETURN (true);
michael@0 1199 }
michael@0 1200 return TRACE_RETURN (false);
michael@0 1201 }
michael@0 1202
michael@0 1203 inline bool apply (hb_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
michael@0 1204 {
michael@0 1205 TRACE_APPLY (this);
michael@0 1206 unsigned int num_rules = rule.len;
michael@0 1207 for (unsigned int i = 0; i < num_rules; i++)
michael@0 1208 {
michael@0 1209 if ((this+rule[i]).apply (c, lookup_context))
michael@0 1210 return TRACE_RETURN (true);
michael@0 1211 }
michael@0 1212 return TRACE_RETURN (false);
michael@0 1213 }
michael@0 1214
michael@0 1215 inline bool sanitize (hb_sanitize_context_t *c) {
michael@0 1216 TRACE_SANITIZE (this);
michael@0 1217 return TRACE_RETURN (rule.sanitize (c, this));
michael@0 1218 }
michael@0 1219
michael@0 1220 protected:
michael@0 1221 OffsetArrayOf<Rule>
michael@0 1222 rule; /* Array of Rule tables
michael@0 1223 * ordered by preference */
michael@0 1224 public:
michael@0 1225 DEFINE_SIZE_ARRAY (2, rule);
michael@0 1226 };
michael@0 1227
michael@0 1228
michael@0 1229 struct ContextFormat1
michael@0 1230 {
michael@0 1231 inline void closure (hb_closure_context_t *c) const
michael@0 1232 {
michael@0 1233 TRACE_CLOSURE (this);
michael@0 1234
michael@0 1235 const Coverage &cov = (this+coverage);
michael@0 1236
michael@0 1237 struct ContextClosureLookupContext lookup_context = {
michael@0 1238 {intersects_glyph},
michael@0 1239 NULL
michael@0 1240 };
michael@0 1241
michael@0 1242 unsigned int count = ruleSet.len;
michael@0 1243 for (unsigned int i = 0; i < count; i++)
michael@0 1244 if (cov.intersects_coverage (c->glyphs, i)) {
michael@0 1245 const RuleSet &rule_set = this+ruleSet[i];
michael@0 1246 rule_set.closure (c, lookup_context);
michael@0 1247 }
michael@0 1248 }
michael@0 1249
michael@0 1250 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
michael@0 1251 {
michael@0 1252 TRACE_COLLECT_GLYPHS (this);
michael@0 1253 (this+coverage).add_coverage (c->input);
michael@0 1254
michael@0 1255 struct ContextCollectGlyphsLookupContext lookup_context = {
michael@0 1256 {collect_glyph},
michael@0 1257 NULL
michael@0 1258 };
michael@0 1259
michael@0 1260 unsigned int count = ruleSet.len;
michael@0 1261 for (unsigned int i = 0; i < count; i++)
michael@0 1262 (this+ruleSet[i]).collect_glyphs (c, lookup_context);
michael@0 1263 }
michael@0 1264
michael@0 1265 inline bool would_apply (hb_would_apply_context_t *c) const
michael@0 1266 {
michael@0 1267 TRACE_WOULD_APPLY (this);
michael@0 1268
michael@0 1269 const RuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
michael@0 1270 struct ContextApplyLookupContext lookup_context = {
michael@0 1271 {match_glyph},
michael@0 1272 NULL
michael@0 1273 };
michael@0 1274 return TRACE_RETURN (rule_set.would_apply (c, lookup_context));
michael@0 1275 }
michael@0 1276
michael@0 1277 inline const Coverage &get_coverage (void) const
michael@0 1278 {
michael@0 1279 return this+coverage;
michael@0 1280 }
michael@0 1281
michael@0 1282 inline bool apply (hb_apply_context_t *c) const
michael@0 1283 {
michael@0 1284 TRACE_APPLY (this);
michael@0 1285 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
michael@0 1286 if (likely (index == NOT_COVERED))
michael@0 1287 return TRACE_RETURN (false);
michael@0 1288
michael@0 1289 const RuleSet &rule_set = this+ruleSet[index];
michael@0 1290 struct ContextApplyLookupContext lookup_context = {
michael@0 1291 {match_glyph},
michael@0 1292 NULL
michael@0 1293 };
michael@0 1294 return TRACE_RETURN (rule_set.apply (c, lookup_context));
michael@0 1295 }
michael@0 1296
michael@0 1297 inline bool sanitize (hb_sanitize_context_t *c) {
michael@0 1298 TRACE_SANITIZE (this);
michael@0 1299 return TRACE_RETURN (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
michael@0 1300 }
michael@0 1301
michael@0 1302 protected:
michael@0 1303 USHORT format; /* Format identifier--format = 1 */
michael@0 1304 OffsetTo<Coverage>
michael@0 1305 coverage; /* Offset to Coverage table--from
michael@0 1306 * beginning of table */
michael@0 1307 OffsetArrayOf<RuleSet>
michael@0 1308 ruleSet; /* Array of RuleSet tables
michael@0 1309 * ordered by Coverage Index */
michael@0 1310 public:
michael@0 1311 DEFINE_SIZE_ARRAY (6, ruleSet);
michael@0 1312 };
michael@0 1313
michael@0 1314
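/*
 * ContextFormat2: class-based contextual lookup.  Input glyphs are matched by
 * the class assigned to them in classDef, and ruleSet is indexed by the class
 * of the first (covered) glyph rather than by Coverage index.
 */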
michael@0 1315 struct ContextFormat2
michael@0 1316 {
michael@0 1317 inline void closure (hb_closure_context_t *c) const
michael@0 1318 {
michael@0 1319 TRACE_CLOSURE (this);
michael@0 1320 if (!(this+coverage).intersects (c->glyphs))
michael@0 1321 return;
michael@0 1322
michael@0 1323 const ClassDef &class_def = this+classDef;
michael@0 1324
michael@0 1325 struct ContextClosureLookupContext lookup_context = {
michael@0 1326 {intersects_class},
michael@0 1327 &class_def
michael@0 1328 };
michael@0 1329
michael@0 1330 unsigned int count = ruleSet.len;
michael@0 1331 for (unsigned int i = 0; i < count; i++)
michael@0 1332 if (class_def.intersects_class (c->glyphs, i)) {
michael@0 1333 const RuleSet &rule_set = this+ruleSet[i];
michael@0 1334 rule_set.closure (c, lookup_context);
michael@0 1335 }
michael@0 1336 }
michael@0 1337
michael@0 1338 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
michael@0 1339 {
michael@0 1340 TRACE_COLLECT_GLYPHS (this);
michael@0 1341 (this+coverage).add_coverage (c->input);
michael@0 1342
michael@0 1343 const ClassDef &class_def = this+classDef;
michael@0 1344 struct ContextCollectGlyphsLookupContext lookup_context = {
michael@0 1345 {collect_class},
michael@0 1346 &class_def
michael@0 1347 };
michael@0 1348
michael@0 1349 unsigned int count = ruleSet.len;
michael@0 1350 for (unsigned int i = 0; i < count; i++)
michael@0 1351 (this+ruleSet[i]).collect_glyphs (c, lookup_context);
michael@0 1352 }
michael@0 1353
michael@0 1354 inline bool would_apply (hb_would_apply_context_t *c) const
michael@0 1355 {
michael@0 1356 TRACE_WOULD_APPLY (this);
michael@0 1357
michael@0 1358 const ClassDef &class_def = this+classDef;
michael@0 1359 unsigned int index = class_def.get_class (c->glyphs[0]);
michael@0 1360 const RuleSet &rule_set = this+ruleSet[index];
michael@0 1361 struct ContextApplyLookupContext lookup_context = {
michael@0 1362 {match_class},
michael@0 1363 &class_def
michael@0 1364 };
michael@0 1365 return TRACE_RETURN (rule_set.would_apply (c, lookup_context));
michael@0 1366 }
michael@0 1367
michael@0 1368 inline const Coverage &get_coverage (void) const
michael@0 1369 {
michael@0 1370 return this+coverage;
michael@0 1371 }
michael@0 1372
michael@0 1373 inline bool apply (hb_apply_context_t *c) const
michael@0 1374 {
michael@0 1375 TRACE_APPLY (this);
michael@0 1376 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
michael@0 1377 if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
michael@0 1378
michael@0 1379 const ClassDef &class_def = this+classDef;
michael@0 1380 index = class_def.get_class (c->buffer->cur().codepoint);
michael@0 1381 const RuleSet &rule_set = this+ruleSet[index];
michael@0 1382 struct ContextApplyLookupContext lookup_context = {
michael@0 1383 {match_class},
michael@0 1384 &class_def
michael@0 1385 };
michael@0 1386 return TRACE_RETURN (rule_set.apply (c, lookup_context));
michael@0 1387 }
michael@0 1388
michael@0 1389 inline bool sanitize (hb_sanitize_context_t *c) {
michael@0 1390 TRACE_SANITIZE (this);
michael@0 1391 return TRACE_RETURN (coverage.sanitize (c, this) && classDef.sanitize (c, this) && ruleSet.sanitize (c, this));
michael@0 1392 }
michael@0 1393
michael@0 1394 protected:
michael@0 1395 USHORT format; /* Format identifier--format = 2 */
michael@0 1396 OffsetTo<Coverage>
michael@0 1397 coverage; /* Offset to Coverage table--from
michael@0 1398 * beginning of table */
michael@0 1399 OffsetTo<ClassDef>
michael@0 1400 classDef; /* Offset to glyph ClassDef table--from
michael@0 1401 * beginning of table */
michael@0 1402 OffsetArrayOf<RuleSet>
michael@0 1403 ruleSet; /* Array of RuleSet tables
michael@0 1404 * ordered by class */
michael@0 1405 public:
michael@0 1406 DEFINE_SIZE_ARRAY (8, ruleSet);
michael@0 1407 };
michael@0 1408
michael@0 1409
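/*
 * ContextFormat3: coverage-based contextual lookup.  A single rule gives one
 * Coverage table per input position; coverage[0] also serves as the lookup's
 * own coverage for the current glyph, which is why get_coverage() and apply()
 * start from it.
 */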
michael@0 1410 struct ContextFormat3
michael@0 1411 {
michael@0 1412 inline void closure (hb_closure_context_t *c) const
michael@0 1413 {
michael@0 1414 TRACE_CLOSURE (this);
michael@0 1415 if (!(this+coverage[0]).intersects (c->glyphs))
michael@0 1416 return;
michael@0 1417
michael@0 1418 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * glyphCount);
michael@0 1419 struct ContextClosureLookupContext lookup_context = {
michael@0 1420 {intersects_coverage},
michael@0 1421 this
michael@0 1422 };
michael@0 1423 context_closure_lookup (c,
michael@0 1424 glyphCount, (const USHORT *) (coverage + 1),
michael@0 1425 lookupCount, lookupRecord,
michael@0 1426 lookup_context);
michael@0 1427 }
michael@0 1428
michael@0 1429 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
michael@0 1430 {
michael@0 1431 TRACE_COLLECT_GLYPHS (this);
michael@0 1432 (this+coverage[0]).add_coverage (c->input);
michael@0 1433
michael@0 1434 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * glyphCount);
michael@0 1435 struct ContextCollectGlyphsLookupContext lookup_context = {
michael@0 1436 {collect_coverage},
michael@0 1437 this
michael@0 1438 };
michael@0 1439
michael@0 1440 context_collect_glyphs_lookup (c,
michael@0 1441 glyphCount, (const USHORT *) (coverage + 1),
michael@0 1442 lookupCount, lookupRecord,
michael@0 1443 lookup_context);
michael@0 1444 }
michael@0 1445
michael@0 1446 inline bool would_apply (hb_would_apply_context_t *c) const
michael@0 1447 {
michael@0 1448 TRACE_WOULD_APPLY (this);
michael@0 1449
michael@0 1450 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * glyphCount);
michael@0 1451 struct ContextApplyLookupContext lookup_context = {
michael@0 1452 {match_coverage},
michael@0 1453 this
michael@0 1454 };
michael@0 1455 return TRACE_RETURN (context_would_apply_lookup (c, glyphCount, (const USHORT *) (coverage + 1), lookupCount, lookupRecord, lookup_context));
michael@0 1456 }
michael@0 1457
michael@0 1458 inline const Coverage &get_coverage (void) const
michael@0 1459 {
michael@0 1460 return this+coverage[0];
michael@0 1461 }
michael@0 1462
michael@0 1463 inline bool apply (hb_apply_context_t *c) const
michael@0 1464 {
michael@0 1465 TRACE_APPLY (this);
michael@0 1466 unsigned int index = (this+coverage[0]).get_coverage (c->buffer->cur().codepoint);
michael@0 1467 if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
michael@0 1468
michael@0 1469 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * glyphCount);
michael@0 1470 struct ContextApplyLookupContext lookup_context = {
michael@0 1471 {match_coverage},
michael@0 1472 this
michael@0 1473 };
michael@0 1474 return TRACE_RETURN (context_apply_lookup (c, glyphCount, (const USHORT *) (coverage + 1), lookupCount, lookupRecord, lookup_context));
michael@0 1475 }
michael@0 1476
michael@0 1477 inline bool sanitize (hb_sanitize_context_t *c) {
michael@0 1478 TRACE_SANITIZE (this);
michael@0 1479 if (!c->check_struct (this)) return TRACE_RETURN (false);
michael@0 1480 unsigned int count = glyphCount;
michael@0 1481 if (!c->check_array (coverage, coverage[0].static_size, count)) return TRACE_RETURN (false);
michael@0 1482 for (unsigned int i = 0; i < count; i++)
michael@0 1483 if (!coverage[i].sanitize (c, this)) return TRACE_RETURN (false);
michael@0 1484 LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * count);
michael@0 1485 return TRACE_RETURN (c->check_array (lookupRecord, lookupRecord[0].static_size, lookupCount));
michael@0 1486 }
michael@0 1487
michael@0 1488 protected:
michael@0 1489 USHORT format; /* Format identifier--format = 3 */
michael@0 1490 USHORT glyphCount; /* Number of glyphs in the input glyph
michael@0 1491 * sequence */
michael@0 1492 USHORT lookupCount; /* Number of LookupRecords */
michael@0 1493 OffsetTo<Coverage>
michael@0 1494 coverage[VAR]; /* Array of offsets to Coverage
michael@0 1495 * table in glyph sequence order */
michael@0 1496 LookupRecord lookupRecordX[VAR]; /* Array of LookupRecords--in
michael@0 1497 * design order */
michael@0 1498 public:
michael@0 1499 DEFINE_SIZE_ARRAY2 (6, coverage, lookupRecordX);
michael@0 1500 };
michael@0 1501
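/*
 * Context: thin dispatcher over the three subtable formats.  The union shares
 * the leading format field, which sanitize() validates before selecting a
 * branch; dispatch() forwards the context to whichever subtable u.format
 * names.
 */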
michael@0 1502 struct Context
michael@0 1503 {
michael@0 1504 template <typename context_t>
michael@0 1505 inline typename context_t::return_t dispatch (context_t *c) const
michael@0 1506 {
michael@0 1507 TRACE_DISPATCH (this);
michael@0 1508 switch (u.format) {
michael@0 1509 case 1: return TRACE_RETURN (c->dispatch (u.format1));
michael@0 1510 case 2: return TRACE_RETURN (c->dispatch (u.format2));
michael@0 1511 case 3: return TRACE_RETURN (c->dispatch (u.format3));
michael@0 1512 default:return TRACE_RETURN (c->default_return_value ());
michael@0 1513 }
michael@0 1514 }
michael@0 1515
michael@0 1516 inline bool sanitize (hb_sanitize_context_t *c) {
michael@0 1517 TRACE_SANITIZE (this);
michael@0 1518 if (!u.format.sanitize (c)) return TRACE_RETURN (false);
michael@0 1519 switch (u.format) {
michael@0 1520 case 1: return TRACE_RETURN (u.format1.sanitize (c));
michael@0 1521 case 2: return TRACE_RETURN (u.format2.sanitize (c));
michael@0 1522 case 3: return TRACE_RETURN (u.format3.sanitize (c));
michael@0 1523 default:return TRACE_RETURN (true);
michael@0 1524 }
michael@0 1525 }
michael@0 1526
michael@0 1527 protected:
michael@0 1528 union {
michael@0 1529 USHORT format; /* Format identifier */
michael@0 1530 ContextFormat1 format1;
michael@0 1531 ContextFormat2 format2;
michael@0 1532 ContextFormat3 format3;
michael@0 1533 } u;
michael@0 1534 };
michael@0 1535
michael@0 1536
michael@0 1537 /* Chaining Contextual lookups */
michael@0 1538
michael@0 1539 struct ChainContextClosureLookupContext
michael@0 1540 {
michael@0 1541 ContextClosureFuncs funcs;
michael@0 1542 const void *intersects_data[3];
michael@0 1543 };
michael@0 1544
michael@0 1545 struct ChainContextCollectGlyphsLookupContext
michael@0 1546 {
michael@0 1547 ContextCollectGlyphsFuncs funcs;
michael@0 1548 const void *collect_data[3];
michael@0 1549 };
michael@0 1550
michael@0 1551 struct ChainContextApplyLookupContext
michael@0 1552 {
michael@0 1553 ContextApplyFuncs funcs;
michael@0 1554 const void *match_data[3];
michael@0 1555 };
michael@0 1556
michael@0 1557 static inline void chain_context_closure_lookup (hb_closure_context_t *c,
michael@0 1558 unsigned int backtrackCount,
michael@0 1559 const USHORT backtrack[],
michael@0 1560 unsigned int inputCount, /* Including the first glyph (not matched) */
michael@0 1561 const USHORT input[], /* Array of input values--start with second glyph */
michael@0 1562 unsigned int lookaheadCount,
michael@0 1563 const USHORT lookahead[],
michael@0 1564 unsigned int lookupCount,
michael@0 1565 const LookupRecord lookupRecord[],
michael@0 1566 ChainContextClosureLookupContext &lookup_context)
michael@0 1567 {
michael@0 1568 if (intersects_array (c,
michael@0 1569 backtrackCount, backtrack,
michael@0 1570 lookup_context.funcs.intersects, lookup_context.intersects_data[0])
michael@0 1571 && intersects_array (c,
michael@0 1572 inputCount ? inputCount - 1 : 0, input,
michael@0 1573 lookup_context.funcs.intersects, lookup_context.intersects_data[1])
michael@0 1574 && intersects_array (c,
michael@0 1575 lookaheadCount, lookahead,
michael@0 1576 lookup_context.funcs.intersects, lookup_context.intersects_data[2]))
michael@0 1577 recurse_lookups (c,
michael@0 1578 lookupCount, lookupRecord);
michael@0 1579 }
michael@0 1580
michael@0 1581 static inline void chain_context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
michael@0 1582 unsigned int backtrackCount,
michael@0 1583 const USHORT backtrack[],
michael@0 1584 unsigned int inputCount, /* Including the first glyph (not matched) */
michael@0 1585 const USHORT input[], /* Array of input values--start with second glyph */
michael@0 1586 unsigned int lookaheadCount,
michael@0 1587 const USHORT lookahead[],
michael@0 1588 unsigned int lookupCount,
michael@0 1589 const LookupRecord lookupRecord[],
michael@0 1590 ChainContextCollectGlyphsLookupContext &lookup_context)
michael@0 1591 {
michael@0 1592 collect_array (c, c->before,
michael@0 1593 backtrackCount, backtrack,
michael@0 1594 lookup_context.funcs.collect, lookup_context.collect_data[0]);
michael@0 1595 collect_array (c, c->input,
michael@0 1596 inputCount ? inputCount - 1 : 0, input,
michael@0 1597 lookup_context.funcs.collect, lookup_context.collect_data[1]);
michael@0 1598 collect_array (c, c->after,
michael@0 1599 lookaheadCount, lookahead,
michael@0 1600 lookup_context.funcs.collect, lookup_context.collect_data[2]);
michael@0 1601 recurse_lookups (c,
michael@0 1602 lookupCount, lookupRecord);
michael@0 1603 }
michael@0 1604
michael@0 1605 static inline bool chain_context_would_apply_lookup (hb_would_apply_context_t *c,
michael@0 1606 unsigned int backtrackCount,
michael@0 1607 const USHORT backtrack[] HB_UNUSED,
michael@0 1608 unsigned int inputCount, /* Including the first glyph (not matched) */
michael@0 1609 const USHORT input[], /* Array of input values--start with second glyph */
michael@0 1610 unsigned int lookaheadCount,
michael@0 1611 const USHORT lookahead[] HB_UNUSED,
michael@0 1612 unsigned int lookupCount HB_UNUSED,
michael@0 1613 const LookupRecord lookupRecord[] HB_UNUSED,
michael@0 1614 ChainContextApplyLookupContext &lookup_context)
michael@0 1615 {
michael@0 1616 return (c->zero_context ? !backtrackCount && !lookaheadCount : true)
michael@0 1617 && would_match_input (c,
michael@0 1618 inputCount, input,
michael@0 1619 lookup_context.funcs.match, lookup_context.match_data[1]);
michael@0 1620 }
michael@0 1621
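/* Matching order matters below: the input sequence is matched first so that
 * match_length and match_positions are known, then the backtrack and
 * lookahead sequences are checked, and only if all three succeed are the
 * nested lookups applied. */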
michael@0 1622 static inline bool chain_context_apply_lookup (hb_apply_context_t *c,
michael@0 1623 unsigned int backtrackCount,
michael@0 1624 const USHORT backtrack[],
michael@0 1625 unsigned int inputCount, /* Including the first glyph (not matched) */
michael@0 1626 const USHORT input[], /* Array of input values--start with second glyph */
michael@0 1627 unsigned int lookaheadCount,
michael@0 1628 const USHORT lookahead[],
michael@0 1629 unsigned int lookupCount,
michael@0 1630 const LookupRecord lookupRecord[],
michael@0 1631 ChainContextApplyLookupContext &lookup_context)
michael@0 1632 {
michael@0 1633 unsigned int match_length = 0;
michael@0 1634 unsigned int match_positions[MAX_CONTEXT_LENGTH];
michael@0 1635 return match_input (c,
michael@0 1636 inputCount, input,
michael@0 1637 lookup_context.funcs.match, lookup_context.match_data[1],
michael@0 1638 &match_length, match_positions)
michael@0 1639 && match_backtrack (c,
michael@0 1640 backtrackCount, backtrack,
michael@0 1641 lookup_context.funcs.match, lookup_context.match_data[0])
michael@0 1642 && match_lookahead (c,
michael@0 1643 lookaheadCount, lookahead,
michael@0 1644 lookup_context.funcs.match, lookup_context.match_data[2],
michael@0 1645 match_length)
michael@0 1646 && apply_lookup (c,
michael@0 1647 inputCount, match_positions,
michael@0 1648 lookupCount, lookupRecord,
michael@0 1649 match_length);
michael@0 1650 }
michael@0 1651
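/*
 * ChainRule stores four variable-length arrays back to back: backtrack, input
 * (without the first glyph), lookahead, and the LookupRecords.  Only
 * `backtrack` sits at a fixed offset; the members suffixed with X are never
 * addressed directly and only document the layout, the actual data being
 * reached with StructAfter<> as done in the struct's methods.
 */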
michael@0 1652 struct ChainRule
michael@0 1653 {
michael@0 1654 inline void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const
michael@0 1655 {
michael@0 1656 TRACE_CLOSURE (this);
michael@0 1657 const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
michael@0 1658 const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
michael@0 1659 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
michael@0 1660 chain_context_closure_lookup (c,
michael@0 1661 backtrack.len, backtrack.array,
michael@0 1662 input.len, input.array,
michael@0 1663 lookahead.len, lookahead.array,
michael@0 1664 lookup.len, lookup.array,
michael@0 1665 lookup_context);
michael@0 1666 }
michael@0 1667
michael@0 1668 inline void collect_glyphs (hb_collect_glyphs_context_t *c, ChainContextCollectGlyphsLookupContext &lookup_context) const
michael@0 1669 {
michael@0 1670 TRACE_COLLECT_GLYPHS (this);
michael@0 1671 const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
michael@0 1672 const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
michael@0 1673 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
michael@0 1674 chain_context_collect_glyphs_lookup (c,
michael@0 1675 backtrack.len, backtrack.array,
michael@0 1676 input.len, input.array,
michael@0 1677 lookahead.len, lookahead.array,
michael@0 1678 lookup.len, lookup.array,
michael@0 1679 lookup_context);
michael@0 1680 }
michael@0 1681
michael@0 1682 inline bool would_apply (hb_would_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
michael@0 1683 {
michael@0 1684 TRACE_WOULD_APPLY (this);
michael@0 1685 const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
michael@0 1686 const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
michael@0 1687 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
michael@0 1688 return TRACE_RETURN (chain_context_would_apply_lookup (c,
michael@0 1689 backtrack.len, backtrack.array,
michael@0 1690 input.len, input.array,
michael@0 1691 lookahead.len, lookahead.array, lookup.len,
michael@0 1692 lookup.array, lookup_context));
michael@0 1693 }
michael@0 1694
michael@0 1695 inline bool apply (hb_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
michael@0 1696 {
michael@0 1697 TRACE_APPLY (this);
michael@0 1698 const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
michael@0 1699 const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
michael@0 1700 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
michael@0 1701 return TRACE_RETURN (chain_context_apply_lookup (c,
michael@0 1702 backtrack.len, backtrack.array,
michael@0 1703 input.len, input.array,
michael@0 1704 lookahead.len, lookahead.array, lookup.len,
michael@0 1705 lookup.array, lookup_context));
michael@0 1706 }
michael@0 1707
michael@0 1708 inline bool sanitize (hb_sanitize_context_t *c) {
michael@0 1709 TRACE_SANITIZE (this);
michael@0 1710 if (!backtrack.sanitize (c)) return TRACE_RETURN (false);
michael@0 1711 HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
michael@0 1712 if (!input.sanitize (c)) return TRACE_RETURN (false);
michael@0 1713 ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
michael@0 1714 if (!lookahead.sanitize (c)) return TRACE_RETURN (false);
michael@0 1715 ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
michael@0 1716 return TRACE_RETURN (lookup.sanitize (c));
michael@0 1717 }
michael@0 1718
michael@0 1719 protected:
michael@0 1720 ArrayOf<USHORT>
michael@0 1721 backtrack; /* Array of backtracking values
michael@0 1722 * (to be matched before the input
michael@0 1723 * sequence) */
michael@0 1724 HeadlessArrayOf<USHORT>
michael@0 1725 inputX; /* Array of input values (start with
michael@0 1726 * second glyph) */
michael@0 1727 ArrayOf<USHORT>
michael@0 1728 lookaheadX; /* Array of lookahead values (to be
michael@0 1729 * matched after the input sequence) */
michael@0 1730 ArrayOf<LookupRecord>
michael@0 1731 lookupX; /* Array of LookupRecords--in
michael@0 1732 * design order */

michael@0 1733 public:
michael@0 1734 DEFINE_SIZE_MIN (8);
michael@0 1735 };
michael@0 1736
michael@0 1737 struct ChainRuleSet
michael@0 1738 {
michael@0 1739 inline void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const
michael@0 1740 {
michael@0 1741 TRACE_CLOSURE (this);
michael@0 1742 unsigned int num_rules = rule.len;
michael@0 1743 for (unsigned int i = 0; i < num_rules; i++)
michael@0 1744 (this+rule[i]).closure (c, lookup_context);
michael@0 1745 }
michael@0 1746
michael@0 1747 inline void collect_glyphs (hb_collect_glyphs_context_t *c, ChainContextCollectGlyphsLookupContext &lookup_context) const
michael@0 1748 {
michael@0 1749 TRACE_COLLECT_GLYPHS (this);
michael@0 1750 unsigned int num_rules = rule.len;
michael@0 1751 for (unsigned int i = 0; i < num_rules; i++)
michael@0 1752 (this+rule[i]).collect_glyphs (c, lookup_context);
michael@0 1753 }
michael@0 1754
michael@0 1755 inline bool would_apply (hb_would_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
michael@0 1756 {
michael@0 1757 TRACE_WOULD_APPLY (this);
michael@0 1758 unsigned int num_rules = rule.len;
michael@0 1759 for (unsigned int i = 0; i < num_rules; i++)
michael@0 1760 if ((this+rule[i]).would_apply (c, lookup_context))
michael@0 1761 return TRACE_RETURN (true);
michael@0 1762
michael@0 1763 return TRACE_RETURN (false);
michael@0 1764 }
michael@0 1765
michael@0 1766 inline bool apply (hb_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
michael@0 1767 {
michael@0 1768 TRACE_APPLY (this);
michael@0 1769 unsigned int num_rules = rule.len;
michael@0 1770 for (unsigned int i = 0; i < num_rules; i++)
michael@0 1771 if ((this+rule[i]).apply (c, lookup_context))
michael@0 1772 return TRACE_RETURN (true);
michael@0 1773
michael@0 1774 return TRACE_RETURN (false);
michael@0 1775 }
michael@0 1776
michael@0 1777 inline bool sanitize (hb_sanitize_context_t *c) {
michael@0 1778 TRACE_SANITIZE (this);
michael@0 1779 return TRACE_RETURN (rule.sanitize (c, this));
michael@0 1780 }
michael@0 1781
michael@0 1782 protected:
michael@0 1783 OffsetArrayOf<ChainRule>
michael@0 1784 rule; /* Array of ChainRule tables
michael@0 1785 * ordered by preference */
michael@0 1786 public:
michael@0 1787 DEFINE_SIZE_ARRAY (2, rule);
michael@0 1788 };
michael@0 1789
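/* ChainContextFormat1: glyph-based chaining context; same shape as
 * ContextFormat1, but each ChainRule carries backtrack and lookahead
 * sequences in addition to the input sequence. */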
michael@0 1790 struct ChainContextFormat1
michael@0 1791 {
michael@0 1792 inline void closure (hb_closure_context_t *c) const
michael@0 1793 {
michael@0 1794 TRACE_CLOSURE (this);
michael@0 1795 const Coverage &cov = (this+coverage);
michael@0 1796
michael@0 1797 struct ChainContextClosureLookupContext lookup_context = {
michael@0 1798 {intersects_glyph},
michael@0 1799 {NULL, NULL, NULL}
michael@0 1800 };
michael@0 1801
michael@0 1802 unsigned int count = ruleSet.len;
michael@0 1803 for (unsigned int i = 0; i < count; i++)
michael@0 1804 if (cov.intersects_coverage (c->glyphs, i)) {
michael@0 1805 const ChainRuleSet &rule_set = this+ruleSet[i];
michael@0 1806 rule_set.closure (c, lookup_context);
michael@0 1807 }
michael@0 1808 }
michael@0 1809
michael@0 1810 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
michael@0 1811 {
michael@0 1812 TRACE_COLLECT_GLYPHS (this);
michael@0 1813 (this+coverage).add_coverage (c->input);
michael@0 1814
michael@0 1815 struct ChainContextCollectGlyphsLookupContext lookup_context = {
michael@0 1816 {collect_glyph},
michael@0 1817 {NULL, NULL, NULL}
michael@0 1818 };
michael@0 1819
michael@0 1820 unsigned int count = ruleSet.len;
michael@0 1821 for (unsigned int i = 0; i < count; i++)
michael@0 1822 (this+ruleSet[i]).collect_glyphs (c, lookup_context);
michael@0 1823 }
michael@0 1824
michael@0 1825 inline bool would_apply (hb_would_apply_context_t *c) const
michael@0 1826 {
michael@0 1827 TRACE_WOULD_APPLY (this);
michael@0 1828
michael@0 1829 const ChainRuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
michael@0 1830 struct ChainContextApplyLookupContext lookup_context = {
michael@0 1831 {match_glyph},
michael@0 1832 {NULL, NULL, NULL}
michael@0 1833 };
michael@0 1834 return TRACE_RETURN (rule_set.would_apply (c, lookup_context));
michael@0 1835 }
michael@0 1836
michael@0 1837 inline const Coverage &get_coverage (void) const
michael@0 1838 {
michael@0 1839 return this+coverage;
michael@0 1840 }
michael@0 1841
michael@0 1842 inline bool apply (hb_apply_context_t *c) const
michael@0 1843 {
michael@0 1844 TRACE_APPLY (this);
michael@0 1845 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
michael@0 1846 if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
michael@0 1847
michael@0 1848 const ChainRuleSet &rule_set = this+ruleSet[index];
michael@0 1849 struct ChainContextApplyLookupContext lookup_context = {
michael@0 1850 {match_glyph},
michael@0 1851 {NULL, NULL, NULL}
michael@0 1852 };
michael@0 1853 return TRACE_RETURN (rule_set.apply (c, lookup_context));
michael@0 1854 }
michael@0 1855
michael@0 1856 inline bool sanitize (hb_sanitize_context_t *c) {
michael@0 1857 TRACE_SANITIZE (this);
michael@0 1858 return TRACE_RETURN (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
michael@0 1859 }
michael@0 1860
michael@0 1861 protected:
michael@0 1862 USHORT format; /* Format identifier--format = 1 */
michael@0 1863 OffsetTo<Coverage>
michael@0 1864 coverage; /* Offset to Coverage table--from
michael@0 1865 * beginning of table */
michael@0 1866 OffsetArrayOf<ChainRuleSet>
michael@0 1867 ruleSet; /* Array of ChainRuleSet tables
michael@0 1868 * ordered by Coverage Index */
michael@0 1869 public:
michael@0 1870 DEFINE_SIZE_ARRAY (6, ruleSet);
michael@0 1871 };
michael@0 1872
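/* ChainContextFormat2: class-based chaining context, with separate ClassDef
 * tables for the backtrack, input, and lookahead sequences. */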
michael@0 1873 struct ChainContextFormat2
michael@0 1874 {
michael@0 1875 inline void closure (hb_closure_context_t *c) const
michael@0 1876 {
michael@0 1877 TRACE_CLOSURE (this);
michael@0 1878 if (!(this+coverage).intersects (c->glyphs))
michael@0 1879 return;
michael@0 1880
michael@0 1881 const ClassDef &backtrack_class_def = this+backtrackClassDef;
michael@0 1882 const ClassDef &input_class_def = this+inputClassDef;
michael@0 1883 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
michael@0 1884
michael@0 1885 struct ChainContextClosureLookupContext lookup_context = {
michael@0 1886 {intersects_class},
michael@0 1887 {&backtrack_class_def,
michael@0 1888 &input_class_def,
michael@0 1889 &lookahead_class_def}
michael@0 1890 };
michael@0 1891
michael@0 1892 unsigned int count = ruleSet.len;
michael@0 1893 for (unsigned int i = 0; i < count; i++)
michael@0 1894 if (input_class_def.intersects_class (c->glyphs, i)) {
michael@0 1895 const ChainRuleSet &rule_set = this+ruleSet[i];
michael@0 1896 rule_set.closure (c, lookup_context);
michael@0 1897 }
michael@0 1898 }
michael@0 1899
michael@0 1900 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
michael@0 1901 {
michael@0 1902 TRACE_COLLECT_GLYPHS (this);
michael@0 1903 (this+coverage).add_coverage (c->input);
michael@0 1904
michael@0 1905 const ClassDef &backtrack_class_def = this+backtrackClassDef;
michael@0 1906 const ClassDef &input_class_def = this+inputClassDef;
michael@0 1907 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
michael@0 1908
michael@0 1909 struct ChainContextCollectGlyphsLookupContext lookup_context = {
michael@0 1910 {collect_class},
michael@0 1911 {&backtrack_class_def,
michael@0 1912 &input_class_def,
michael@0 1913 &lookahead_class_def}
michael@0 1914 };
michael@0 1915
michael@0 1916 unsigned int count = ruleSet.len;
michael@0 1917 for (unsigned int i = 0; i < count; i++)
michael@0 1918 (this+ruleSet[i]).collect_glyphs (c, lookup_context);
michael@0 1919 }
michael@0 1920
michael@0 1921 inline bool would_apply (hb_would_apply_context_t *c) const
michael@0 1922 {
michael@0 1923 TRACE_WOULD_APPLY (this);
michael@0 1924
michael@0 1925 const ClassDef &backtrack_class_def = this+backtrackClassDef;
michael@0 1926 const ClassDef &input_class_def = this+inputClassDef;
michael@0 1927 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
michael@0 1928
michael@0 1929 unsigned int index = input_class_def.get_class (c->glyphs[0]);
michael@0 1930 const ChainRuleSet &rule_set = this+ruleSet[index];
michael@0 1931 struct ChainContextApplyLookupContext lookup_context = {
michael@0 1932 {match_class},
michael@0 1933 {&backtrack_class_def,
michael@0 1934 &input_class_def,
michael@0 1935 &lookahead_class_def}
michael@0 1936 };
michael@0 1937 return TRACE_RETURN (rule_set.would_apply (c, lookup_context));
michael@0 1938 }
michael@0 1939
michael@0 1940 inline const Coverage &get_coverage (void) const
michael@0 1941 {
michael@0 1942 return this+coverage;
michael@0 1943 }
michael@0 1944
michael@0 1945 inline bool apply (hb_apply_context_t *c) const
michael@0 1946 {
michael@0 1947 TRACE_APPLY (this);
michael@0 1948 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
michael@0 1949 if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
michael@0 1950
michael@0 1951 const ClassDef &backtrack_class_def = this+backtrackClassDef;
michael@0 1952 const ClassDef &input_class_def = this+inputClassDef;
michael@0 1953 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
michael@0 1954
michael@0 1955 index = input_class_def.get_class (c->buffer->cur().codepoint);
michael@0 1956 const ChainRuleSet &rule_set = this+ruleSet[index];
michael@0 1957 struct ChainContextApplyLookupContext lookup_context = {
michael@0 1958 {match_class},
michael@0 1959 {&backtrack_class_def,
michael@0 1960 &input_class_def,
michael@0 1961 &lookahead_class_def}
michael@0 1962 };
michael@0 1963 return TRACE_RETURN (rule_set.apply (c, lookup_context));
michael@0 1964 }
michael@0 1965
michael@0 1966 inline bool sanitize (hb_sanitize_context_t *c) {
michael@0 1967 TRACE_SANITIZE (this);
michael@0 1968 return TRACE_RETURN (coverage.sanitize (c, this) && backtrackClassDef.sanitize (c, this) &&
michael@0 1969 inputClassDef.sanitize (c, this) && lookaheadClassDef.sanitize (c, this) &&
michael@0 1970 ruleSet.sanitize (c, this));
michael@0 1971 }
michael@0 1972
michael@0 1973 protected:
michael@0 1974 USHORT format; /* Format identifier--format = 2 */
michael@0 1975 OffsetTo<Coverage>
michael@0 1976 coverage; /* Offset to Coverage table--from
michael@0 1977 * beginning of table */
michael@0 1978 OffsetTo<ClassDef>
michael@0 1979 backtrackClassDef; /* Offset to glyph ClassDef table
michael@0 1980 * containing backtrack sequence
michael@0 1981 * data--from beginning of table */
michael@0 1982 OffsetTo<ClassDef>
michael@0 1983 inputClassDef; /* Offset to glyph ClassDef
michael@0 1984 * table containing input sequence
michael@0 1985 * data--from beginning of table */
michael@0 1986 OffsetTo<ClassDef>
michael@0 1987 lookaheadClassDef; /* Offset to glyph ClassDef table
michael@0 1988 * containing lookahead sequence
michael@0 1989 * data--from beginning of table */
michael@0 1990 OffsetArrayOf<ChainRuleSet>
michael@0 1991 ruleSet; /* Array of ChainRuleSet tables
michael@0 1992 * ordered by class */
michael@0 1993 public:
michael@0 1994 DEFINE_SIZE_ARRAY (12, ruleSet);
michael@0 1995 };
michael@0 1996
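/*
 * ChainContextFormat3: coverage-based chaining context.  One Coverage table
 * per position in each of the backtrack, input, and lookahead sequences;
 * input[0] doubles as the subtable's coverage for the current glyph, which is
 * why get_coverage() and apply() start from it.
 */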
michael@0 1997 struct ChainContextFormat3
michael@0 1998 {
michael@0 1999 inline void closure (hb_closure_context_t *c) const
michael@0 2000 {
michael@0 2001 TRACE_CLOSURE (this);
michael@0 2002 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
michael@0 2003
michael@0 2004 if (!(this+input[0]).intersects (c->glyphs))
michael@0 2005 return;
michael@0 2006
michael@0 2007 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
michael@0 2008 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
michael@0 2009 struct ChainContextClosureLookupContext lookup_context = {
michael@0 2010 {intersects_coverage},
michael@0 2011 {this, this, this}
michael@0 2012 };
michael@0 2013 chain_context_closure_lookup (c,
michael@0 2014 backtrack.len, (const USHORT *) backtrack.array,
michael@0 2015 input.len, (const USHORT *) input.array + 1,
michael@0 2016 lookahead.len, (const USHORT *) lookahead.array,
michael@0 2017 lookup.len, lookup.array,
michael@0 2018 lookup_context);
michael@0 2019 }
michael@0 2020
michael@0 2021 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
michael@0 2022 {
michael@0 2023 TRACE_COLLECT_GLYPHS (this);
michael@0 2024 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
michael@0 2025
michael@0 2026 (this+input[0]).add_coverage (c->input);
michael@0 2027
michael@0 2028 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
michael@0 2029 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
michael@0 2030 struct ChainContextCollectGlyphsLookupContext lookup_context = {
michael@0 2031 {collect_coverage},
michael@0 2032 {this, this, this}
michael@0 2033 };
michael@0 2034 chain_context_collect_glyphs_lookup (c,
michael@0 2035 backtrack.len, (const USHORT *) backtrack.array,
michael@0 2036 input.len, (const USHORT *) input.array + 1,
michael@0 2037 lookahead.len, (const USHORT *) lookahead.array,
michael@0 2038 lookup.len, lookup.array,
michael@0 2039 lookup_context);
michael@0 2040 }
michael@0 2041
michael@0 2042 inline bool would_apply (hb_would_apply_context_t *c) const
michael@0 2043 {
michael@0 2044 TRACE_WOULD_APPLY (this);
michael@0 2045
michael@0 2046 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
michael@0 2047 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
michael@0 2048 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
michael@0 2049 struct ChainContextApplyLookupContext lookup_context = {
michael@0 2050 {match_coverage},
michael@0 2051 {this, this, this}
michael@0 2052 };
michael@0 2053 return TRACE_RETURN (chain_context_would_apply_lookup (c,
michael@0 2054 backtrack.len, (const USHORT *) backtrack.array,
michael@0 2055 input.len, (const USHORT *) input.array + 1,
michael@0 2056 lookahead.len, (const USHORT *) lookahead.array,
michael@0 2057 lookup.len, lookup.array, lookup_context));
michael@0 2058 }
michael@0 2059
michael@0 2060 inline const Coverage &get_coverage (void) const
michael@0 2061 {
michael@0 2062 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
michael@0 2063 return this+input[0];
michael@0 2064 }
michael@0 2065
michael@0 2066 inline bool apply (hb_apply_context_t *c) const
michael@0 2067 {
michael@0 2068 TRACE_APPLY (this);
michael@0 2069 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
michael@0 2070
michael@0 2071 unsigned int index = (this+input[0]).get_coverage (c->buffer->cur().codepoint);
michael@0 2072 if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
michael@0 2073
michael@0 2074 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
michael@0 2075 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
michael@0 2076 struct ChainContextApplyLookupContext lookup_context = {
michael@0 2077 {match_coverage},
michael@0 2078 {this, this, this}
michael@0 2079 };
michael@0 2080 return TRACE_RETURN (chain_context_apply_lookup (c,
michael@0 2081 backtrack.len, (const USHORT *) backtrack.array,
michael@0 2082 input.len, (const USHORT *) input.array + 1,
michael@0 2083 lookahead.len, (const USHORT *) lookahead.array,
michael@0 2084 lookup.len, lookup.array, lookup_context));
michael@0 2085 }
michael@0 2086
michael@0 2087 inline bool sanitize (hb_sanitize_context_t *c) {
michael@0 2088 TRACE_SANITIZE (this);
michael@0 2089 if (!backtrack.sanitize (c, this)) return TRACE_RETURN (false);
michael@0 2090 OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
michael@0 2091 if (!input.sanitize (c, this)) return TRACE_RETURN (false);
michael@0 2092 OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
michael@0 2093 if (!lookahead.sanitize (c, this)) return TRACE_RETURN (false);
michael@0 2094 ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
michael@0 2095 return TRACE_RETURN (lookup.sanitize (c));
michael@0 2096 }
michael@0 2097
michael@0 2098 protected:
michael@0 2099 USHORT format; /* Format identifier--format = 3 */
michael@0 2100 OffsetArrayOf<Coverage>
michael@0 2101 backtrack; /* Array of coverage tables
michael@0 2102 * in backtracking sequence, in glyph
michael@0 2103 * sequence order */
michael@0 2104 OffsetArrayOf<Coverage>
michael@0 2105 inputX; /* Array of coverage
michael@0 2106 * tables in input sequence, in glyph
michael@0 2107 * sequence order */
michael@0 2108 OffsetArrayOf<Coverage>
michael@0 2109 lookaheadX; /* Array of coverage tables
michael@0 2110 * in lookahead sequence, in glyph
michael@0 2111 * sequence order */
michael@0 2112 ArrayOf<LookupRecord>
michael@0 2113 lookupX; /* Array of LookupRecords--in
michael@0 2114 * design order */
michael@0 2115 public:
michael@0 2116 DEFINE_SIZE_MIN (10);
michael@0 2117 };
michael@0 2118
michael@0 2119 struct ChainContext
michael@0 2120 {
michael@0 2121 template <typename context_t>
michael@0 2122 inline typename context_t::return_t dispatch (context_t *c) const
michael@0 2123 {
michael@0 2124 TRACE_DISPATCH (this);
michael@0 2125 switch (u.format) {
michael@0 2126 case 1: return TRACE_RETURN (c->dispatch (u.format1));
michael@0 2127 case 2: return TRACE_RETURN (c->dispatch (u.format2));
michael@0 2128 case 3: return TRACE_RETURN (c->dispatch (u.format3));
michael@0 2129 default:return TRACE_RETURN (c->default_return_value ());
michael@0 2130 }
michael@0 2131 }
michael@0 2132
michael@0 2133 inline bool sanitize (hb_sanitize_context_t *c) {
michael@0 2134 TRACE_SANITIZE (this);
michael@0 2135 if (!u.format.sanitize (c)) return TRACE_RETURN (false);
michael@0 2136 switch (u.format) {
michael@0 2137 case 1: return TRACE_RETURN (u.format1.sanitize (c));
michael@0 2138 case 2: return TRACE_RETURN (u.format2.sanitize (c));
michael@0 2139 case 3: return TRACE_RETURN (u.format3.sanitize (c));
michael@0 2140 default:return TRACE_RETURN (true);
michael@0 2141 }
michael@0 2142 }
michael@0 2143
michael@0 2144 protected:
michael@0 2145 union {
michael@0 2146 USHORT format; /* Format identifier */
michael@0 2147 ChainContextFormat1 format1;
michael@0 2148 ChainContextFormat2 format2;
michael@0 2149 ChainContextFormat3 format3;
michael@0 2150 } u;
michael@0 2151 };
michael@0 2152
michael@0 2153
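/*
 * Extension lookups (GSUB lookup type 7 / GPOS lookup type 9) wrap another
 * subtable behind a 32-bit offset so that it can live beyond the reach of the
 * usual 16-bit offsets; Extension<T> resolves that offset and forwards
 * dispatch() and sanitize() to the wrapped subtable of the recorded lookup
 * type.
 */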
michael@0 2154 struct ExtensionFormat1
michael@0 2155 {
michael@0 2156 inline unsigned int get_type (void) const { return extensionLookupType; }
michael@0 2157 inline unsigned int get_offset (void) const { return extensionOffset; }
michael@0 2158
michael@0 2159 inline bool sanitize (hb_sanitize_context_t *c) {
michael@0 2160 TRACE_SANITIZE (this);
michael@0 2161 return TRACE_RETURN (c->check_struct (this));
michael@0 2162 }
michael@0 2163
michael@0 2164 protected:
michael@0 2165 USHORT format; /* Format identifier. Set to 1. */
michael@0 2166 USHORT extensionLookupType; /* Lookup type of subtable referenced
michael@0 2167 * by ExtensionOffset (i.e. the
michael@0 2168 * extension subtable). */
michael@0 2169 ULONG extensionOffset; /* Offset to the extension subtable,
michael@0 2170 * of type extensionLookupType. */
michael@0 2171 public:
michael@0 2172 DEFINE_SIZE_STATIC (8);
michael@0 2173 };
michael@0 2174
michael@0 2175 template <typename T>
michael@0 2176 struct Extension
michael@0 2177 {
michael@0 2178 inline unsigned int get_type (void) const
michael@0 2179 {
michael@0 2180 switch (u.format) {
michael@0 2181 case 1: return u.format1.get_type ();
michael@0 2182 default:return 0;
michael@0 2183 }
michael@0 2184 }
michael@0 2185 inline unsigned int get_offset (void) const
michael@0 2186 {
michael@0 2187 switch (u.format) {
michael@0 2188 case 1: return u.format1.get_offset ();
michael@0 2189 default:return 0;
michael@0 2190 }
michael@0 2191 }
michael@0 2192
michael@0 2193 template <typename X>
michael@0 2194 inline const X& get_subtable (void) const
michael@0 2195 {
michael@0 2196 unsigned int offset = get_offset ();
michael@0 2197 if (unlikely (!offset)) return Null(typename T::LookupSubTable);
michael@0 2198 return StructAtOffset<typename T::LookupSubTable> (this, offset);
michael@0 2199 }
michael@0 2200
michael@0 2201 template <typename context_t>
michael@0 2202 inline typename context_t::return_t dispatch (context_t *c) const
michael@0 2203 {
michael@0 2204 return get_subtable<typename T::LookupSubTable> ().dispatch (c, get_type ());
michael@0 2205 }
michael@0 2206
michael@0 2207 inline bool sanitize_self (hb_sanitize_context_t *c) {
michael@0 2208 TRACE_SANITIZE (this);
michael@0 2209 if (!u.format.sanitize (c)) return TRACE_RETURN (false);
michael@0 2210 switch (u.format) {
michael@0 2211 case 1: return TRACE_RETURN (u.format1.sanitize (c));
michael@0 2212 default:return TRACE_RETURN (true);
michael@0 2213 }
michael@0 2214 }
michael@0 2215
michael@0 2216 inline bool sanitize (hb_sanitize_context_t *c) {
michael@0 2217 TRACE_SANITIZE (this);
michael@0 2218 if (!sanitize_self (c)) return TRACE_RETURN (false);
michael@0 2219 unsigned int offset = get_offset ();
michael@0 2220 if (unlikely (!offset)) return TRACE_RETURN (true);
michael@0 2221 return TRACE_RETURN (StructAtOffset<typename T::LookupSubTable> (this, offset).sanitize (c, get_type ()));
michael@0 2222 }
michael@0 2223
michael@0 2224 protected:
michael@0 2225 union {
michael@0 2226 USHORT format; /* Format identifier */
michael@0 2227 ExtensionFormat1 format1;
michael@0 2228 } u;
michael@0 2229 };
michael@0 2230
michael@0 2231
michael@0 2232 /*
michael@0 2233 * GSUB/GPOS Common
michael@0 2234 */
michael@0 2235
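/*
 * GSUBGPOS is the common header shared by the GSUB and GPOS tables: a version
 * number followed by offsets to the ScriptList, FeatureList, and LookupList.
 * A caller typically enumerates scripts and features through the accessors
 * below, e.g. (illustrative sketch only):
 *
 *   const OT::GSUBGPOS &g = ...;             // mapped table data
 *   unsigned int count = g.get_script_count ();
 *   for (unsigned int i = 0; i < count; i++)
 *     process (g.get_script_tag (i), g.get_script (i));
 *
 * where process() stands in for whatever the caller does with each script.
 */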
michael@0 2236 struct GSUBGPOS
michael@0 2237 {
michael@0 2238 static const hb_tag_t GSUBTag = HB_OT_TAG_GSUB;
michael@0 2239 static const hb_tag_t GPOSTag = HB_OT_TAG_GPOS;
michael@0 2240
michael@0 2241 inline unsigned int get_script_count (void) const
michael@0 2242 { return (this+scriptList).len; }
michael@0 2243 inline const Tag& get_script_tag (unsigned int i) const
michael@0 2244 { return (this+scriptList).get_tag (i); }
michael@0 2245 inline unsigned int get_script_tags (unsigned int start_offset,
michael@0 2246 unsigned int *script_count /* IN/OUT */,
michael@0 2247 hb_tag_t *script_tags /* OUT */) const
michael@0 2248 { return (this+scriptList).get_tags (start_offset, script_count, script_tags); }
michael@0 2249 inline const Script& get_script (unsigned int i) const
michael@0 2250 { return (this+scriptList)[i]; }
michael@0 2251 inline bool find_script_index (hb_tag_t tag, unsigned int *index) const
michael@0 2252 { return (this+scriptList).find_index (tag, index); }
michael@0 2253
michael@0 2254 inline unsigned int get_feature_count (void) const
michael@0 2255 { return (this+featureList).len; }
michael@0 2256 inline const Tag& get_feature_tag (unsigned int i) const
michael@0 2257 { return (this+featureList).get_tag (i); }
michael@0 2258 inline unsigned int get_feature_tags (unsigned int start_offset,
michael@0 2259 unsigned int *feature_count /* IN/OUT */,
michael@0 2260 hb_tag_t *feature_tags /* OUT */) const
michael@0 2261 { return (this+featureList).get_tags (start_offset, feature_count, feature_tags); }
michael@0 2262 inline const Feature& get_feature (unsigned int i) const
michael@0 2263 { return (this+featureList)[i]; }
michael@0 2264 inline bool find_feature_index (hb_tag_t tag, unsigned int *index) const
michael@0 2265 { return (this+featureList).find_index (tag, index); }
michael@0 2266
michael@0 2267 inline unsigned int get_lookup_count (void) const
michael@0 2268 { return (this+lookupList).len; }
michael@0 2269 inline const Lookup& get_lookup (unsigned int i) const
michael@0 2270 { return (this+lookupList)[i]; }
michael@0 2271
michael@0 2272 inline bool sanitize (hb_sanitize_context_t *c) {
michael@0 2273 TRACE_SANITIZE (this);
michael@0 2274 return TRACE_RETURN (version.sanitize (c) && likely (version.major == 1) &&
michael@0 2275 scriptList.sanitize (c, this) &&
michael@0 2276 featureList.sanitize (c, this) &&
michael@0 2277 lookupList.sanitize (c, this));
michael@0 2278 }
michael@0 2279
michael@0 2280 protected:
michael@0 2281 FixedVersion version; /* Version of the GSUB/GPOS table--initially set
michael@0 2282 * to 0x00010000 */
michael@0 2283 OffsetTo<ScriptList>
michael@0 2284 scriptList; /* ScriptList table */
michael@0 2285 OffsetTo<FeatureList>
michael@0 2286 featureList; /* FeatureList table */
michael@0 2287 OffsetTo<LookupList>
michael@0 2288 lookupList; /* LookupList table */
michael@0 2289 public:
michael@0 2290 DEFINE_SIZE_STATIC (10);
michael@0 2291 };
michael@0 2292
michael@0 2293
michael@0 2294 } /* namespace OT */
michael@0 2295
michael@0 2296
michael@0 2297 #endif /* HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH */
