Merge pull request #66004 from bruvzg/hb52

Update HarfBuzz to version 5.2.0, add new Unicode 15 blocks and scripts.
This commit is contained in:
Rémi Verschelde 2022-09-18 00:43:49 +02:00
commit 09d1ebb7ae
88 changed files with 6628 additions and 3942 deletions

View file

@ -1072,6 +1072,7 @@ static const char *script_list[][2] = {
{ "Jurchen", "Jurc" },
{ "Kayah Li", "Kali" },
{ "Katakana", "Kana" },
{ "Kawi", "Kawi" },
{ "Kharoshthi", "Khar" },
{ "Khmer", "Khmr" },
{ "Khojki", "Khoj" },
@ -1110,6 +1111,7 @@ static const char *script_list[][2] = {
{ "Meitei Mayek", "Mtei" },
{ "Multani", "Mult" },
{ "Myanmar (Burmese)", "Mymr" },
{ "Nag Mundari", "Nagm" },
{ "Nandinagari", "Nand" },
{ "Old North Arabian", "Narb" },
{ "Nabataean", "Nbat" },

View file

@ -302,6 +302,7 @@ static UniRange unicode_ranges[] = {
{ 0x10D00, 0x10D3F, U"Hanifi Rohingya" },
{ 0x10E60, 0x10E7F, U"Rumi Numeral Symbols" },
{ 0x10E80, 0x10EBF, U"Yezidi" },
{ 0x10EC0, 0x10EFF, U"Arabic Extended-C" },
{ 0x10F00, 0x10F2F, U"Old Sogdian" },
{ 0x10F30, 0x10F6F, U"Sogdian" },
{ 0x10F70, 0x10FAF, U"Old Uyghur" },
@ -333,11 +334,13 @@ static UniRange unicode_ranges[] = {
{ 0x11A50, 0x11AAF, U"Soyombo" },
{ 0x11AB0, 0x11ABF, U"Unified Canadian Aboriginal Syllabics Extended-A" },
{ 0x11AC0, 0x11AFF, U"Pau Cin Hau" },
{ 0x11B00, 0x11B5F, U"Devanagari Extended-A" },
{ 0x11C00, 0x11C6F, U"Bhaiksuki" },
{ 0x11C70, 0x11CBF, U"Marchen" },
{ 0x11D00, 0x11D5F, U"Masaram Gondi" },
{ 0x11D60, 0x11DAF, U"Gunjala Gondi" },
{ 0x11EE0, 0x11EFF, U"Makasar" },
{ 0x11F00, 0x11F5F, U"Kawi" },
{ 0x11FB0, 0x11FBF, U"Lisu Supplement" },
{ 0x11FC0, 0x11FFF, U"Tamil Supplement" },
{ 0x12000, 0x123FF, U"Cuneiform" },
@ -370,6 +373,7 @@ static UniRange unicode_ranges[] = {
{ 0x1D000, 0x1D0FF, U"Byzantine Musical Symbols" },
{ 0x1D100, 0x1D1FF, U"Musical Symbols" },
{ 0x1D200, 0x1D24F, U"Ancient Greek Musical Notation" },
{ 0x1D2C0, 0x1D2DF, U"Kaktovik Numerals" },
{ 0x1D2E0, 0x1D2FF, U"Mayan Numerals" },
{ 0x1D300, 0x1D35F, U"Tai Xuan Jing Symbols" },
{ 0x1D360, 0x1D37F, U"Counting Rod Numerals" },
@ -377,9 +381,11 @@ static UniRange unicode_ranges[] = {
{ 0x1D800, 0x1DAAF, U"Sutton SignWriting" },
{ 0x1DF00, 0x1DFFF, U"Latin Extended-G" },
{ 0x1E000, 0x1E02F, U"Glagolitic Supplement" },
{ 0x1E030, 0x1E08F, U"Cyrillic Extended-D" },
{ 0x1E100, 0x1E14F, U"Nyiakeng Puachue Hmong" },
{ 0x1E290, 0x1E2BF, U"Toto" },
{ 0x1E2C0, 0x1E2FF, U"Wancho" },
{ 0x1E4D0, 0x1E4FF, U"Nag Mundari" },
{ 0x1E7E0, 0x1E7FF, U"Ethiopic Extended-B" },
{ 0x1E800, 0x1E8DF, U"Mende Kikakui" },
{ 0x1E900, 0x1E95F, U"Adlam" },
@ -409,6 +415,7 @@ static UniRange unicode_ranges[] = {
{ 0x2CEB0, 0x2EBEF, U"CJK Unified Ideographs Extension F" },
{ 0x2F800, 0x2FA1F, U"CJK Compatibility Ideographs Supplement" },
{ 0x30000, 0x3134F, U"CJK Unified Ideographs Extension G" },
{ 0x31350, 0x323AF, U"CJK Unified Ideographs Extension H" },
//{ 0xE0000, 0xE007F, U"Tags" },
//{ 0xE0100, 0xE01EF, U"Variation Selectors Supplement" },
{ 0xF0000, 0xFFFFF, U"Supplementary Private Use Area-A" },

View file

@ -214,7 +214,7 @@ Files extracted from upstream source:
## harfbuzz
- Upstream: https://github.com/harfbuzz/harfbuzz
- Version: 5.1.0 (f1f2be776bcd994fa9262622e1a7098a066e5cf7, 2022)
- Version: 5.2.0 (4a1d891c6317d2c83e5f3c2607ec5f5ccedffcde, 2022)
- License: MIT
Files extracted from upstream source:

View file

@ -206,7 +206,7 @@ struct Coverage
template <typename IterableOut,
hb_requires (hb_is_sink_of (IterableOut, hb_codepoint_t))>
void intersect_set (const hb_set_t &glyphs, IterableOut &intersect_glyphs) const
void intersect_set (const hb_set_t &glyphs, IterableOut&& intersect_glyphs) const
{
switch (u.format)
{

View file

@ -88,7 +88,7 @@ struct CoverageFormat1_3
template <typename IterableOut,
hb_requires (hb_is_sink_of (IterableOut, hb_codepoint_t))>
void intersect_set (const hb_set_t &glyphs, IterableOut &intersect_glyphs) const
void intersect_set (const hb_set_t &glyphs, IterableOut&& intersect_glyphs) const
{
unsigned count = glyphArray.len;
for (unsigned i = 0; i < count; i++)

View file

@ -140,7 +140,7 @@ struct CoverageFormat2_4
template <typename IterableOut,
hb_requires (hb_is_sink_of (IterableOut, hb_codepoint_t))>
void intersect_set (const hb_set_t &glyphs, IterableOut &intersect_glyphs) const
void intersect_set (const hb_set_t &glyphs, IterableOut&& intersect_glyphs) const
{
for (const auto& range : rangeRecord)
{

View file

@ -58,8 +58,7 @@ struct Anchor
return_trace (bool (reinterpret_cast<Anchor *> (u.format1.copy (c->serializer))));
}
return_trace (bool (reinterpret_cast<Anchor *> (u.format2.copy (c->serializer))));
case 3: return_trace (bool (reinterpret_cast<Anchor *> (u.format3.copy (c->serializer,
c->plan->layout_variation_idx_map))));
case 3: return_trace (u.format3.subset (c));
default:return_trace (false);
}
}

View file

@ -41,24 +41,54 @@ struct AnchorFormat3
*y += (this+yDeviceTable).get_y_delta (font, c->var_store, c->var_store_cache);
}
AnchorFormat3* copy (hb_serialize_context_t *c,
const hb_map_t *layout_variation_idx_map) const
bool subset (hb_subset_context_t *c) const
{
TRACE_SERIALIZE (this);
if (!layout_variation_idx_map) return_trace (nullptr);
TRACE_SUBSET (this);
auto *out = c->serializer->start_embed (*this);
if (unlikely (!out)) return_trace (false);
if (unlikely (!c->serializer->embed (format))) return_trace (false);
if (unlikely (!c->serializer->embed (xCoordinate))) return_trace (false);
if (unlikely (!c->serializer->embed (yCoordinate))) return_trace (false);
auto *out = c->embed<AnchorFormat3> (this);
if (unlikely (!out)) return_trace (nullptr);
unsigned x_varidx = xDeviceTable ? (this+xDeviceTable).get_variation_index () : HB_OT_LAYOUT_NO_VARIATIONS_INDEX;
if (c->plan->layout_variation_idx_delta_map->has (x_varidx))
{
int delta = hb_second (c->plan->layout_variation_idx_delta_map->get (x_varidx));
if (delta != 0)
{
if (!c->serializer->check_assign (out->xCoordinate, xCoordinate + delta,
HB_SERIALIZE_ERROR_INT_OVERFLOW))
return_trace (false);
}
}
out->xDeviceTable.serialize_copy (c, xDeviceTable, this, 0, hb_serialize_context_t::Head, layout_variation_idx_map);
out->yDeviceTable.serialize_copy (c, yDeviceTable, this, 0, hb_serialize_context_t::Head, layout_variation_idx_map);
unsigned y_varidx = yDeviceTable ? (this+yDeviceTable).get_variation_index () : HB_OT_LAYOUT_NO_VARIATIONS_INDEX;
if (c->plan->layout_variation_idx_delta_map->has (y_varidx))
{
int delta = hb_second (c->plan->layout_variation_idx_delta_map->get (y_varidx));
if (delta != 0)
{
if (!c->serializer->check_assign (out->yCoordinate, yCoordinate + delta,
HB_SERIALIZE_ERROR_INT_OVERFLOW))
return_trace (false);
}
}
if (c->plan->all_axes_pinned)
return_trace (c->serializer->check_assign (out->format, 1, HB_SERIALIZE_ERROR_INT_OVERFLOW));
if (!c->serializer->embed (xDeviceTable)) return_trace (false);
if (!c->serializer->embed (yDeviceTable)) return_trace (false);
out->xDeviceTable.serialize_copy (c->serializer, xDeviceTable, this, 0, hb_serialize_context_t::Head, c->plan->layout_variation_idx_delta_map);
out->yDeviceTable.serialize_copy (c->serializer, yDeviceTable, this, 0, hb_serialize_context_t::Head, c->plan->layout_variation_idx_delta_map);
return_trace (out);
}
void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
{
(this+xDeviceTable).collect_variation_indices (c->layout_variation_indices);
(this+yDeviceTable).collect_variation_indices (c->layout_variation_indices);
(this+xDeviceTable).collect_variation_indices (c);
(this+yDeviceTable).collect_variation_indices (c);
}
};

View file

@ -22,7 +22,8 @@ template<typename Iterator, typename SrcLookup>
static void SinglePos_serialize (hb_serialize_context_t *c,
const SrcLookup *src,
Iterator it,
const hb_map_t *layout_variation_idx_map);
const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *layout_variation_idx_delta_map,
bool all_axes_pinned);
}

View file

@ -9,7 +9,7 @@ struct MarkRecord
{
friend struct MarkArray;
protected:
public:
HBUINT16 klass; /* Class defined for this mark */
Offset16To<Anchor>
markAnchor; /* Offset to Anchor table--from

View file

@ -127,6 +127,12 @@ struct PairPosFormat1_3
out->valueFormat[1] = newFormats.second;
}
if (c->plan->all_axes_pinned)
{
out->valueFormat[0] = out->valueFormat[0].drop_device_table_flags ();
out->valueFormat[1] = out->valueFormat[1].drop_device_table_flags ();
}
hb_sorted_vector_t<hb_codepoint_t> new_coverage;
+ hb_zip (this+coverage, pairSet)

View file

@ -274,13 +274,19 @@ struct PairPosFormat2_4
out->valueFormat1 = newFormats.first;
out->valueFormat2 = newFormats.second;
if (c->plan->all_axes_pinned)
{
out->valueFormat1 = out->valueFormat1.drop_device_table_flags ();
out->valueFormat2 = out->valueFormat2.drop_device_table_flags ();
}
for (unsigned class1_idx : + hb_range ((unsigned) class1Count) | hb_filter (klass1_map))
{
for (unsigned class2_idx : + hb_range ((unsigned) class2Count) | hb_filter (klass2_map))
{
unsigned idx = (class1_idx * (unsigned) class2Count + class2_idx) * (len1 + len2);
valueFormat1.copy_values (c->serializer, newFormats.first, this, &values[idx], c->plan->layout_variation_idx_map);
valueFormat2.copy_values (c->serializer, newFormats.second, this, &values[idx + len1], c->plan->layout_variation_idx_map);
valueFormat1.copy_values (c->serializer, out->valueFormat1, this, &values[idx], c->plan->layout_variation_idx_delta_map);
valueFormat2.copy_values (c->serializer, out->valueFormat2, this, &values[idx + len1], c->plan->layout_variation_idx_delta_map);
}
}

View file

@ -163,7 +163,7 @@ struct PairSet
newFormats,
len1,
&glyph_map,
c->plan->layout_variation_idx_map
c->plan->layout_variation_idx_delta_map
};
const PairValueRecord *record = &firstPairValueRecord;

View file

@ -34,7 +34,7 @@ struct PairValueRecord
const ValueFormat *newFormats;
unsigned len1; /* valueFormats[0].get_len() */
const hb_map_t *glyph_map;
const hb_map_t *layout_variation_idx_map;
const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *layout_variation_idx_delta_map;
};
bool subset (hb_subset_context_t *c,
@ -50,12 +50,12 @@ struct PairValueRecord
closure->valueFormats[0].copy_values (s,
closure->newFormats[0],
closure->base, &values[0],
closure->layout_variation_idx_map);
closure->layout_variation_idx_delta_map);
closure->valueFormats[1].copy_values (s,
closure->newFormats[1],
closure->base,
&values[closure->len1],
closure->layout_variation_idx_map);
closure->layout_variation_idx_delta_map);
return_trace (true);
}

View file

@ -38,12 +38,16 @@ struct SinglePos
void serialize (hb_serialize_context_t *c,
const SrcLookup* src,
Iterator glyph_val_iter_pairs,
const hb_map_t *layout_variation_idx_map)
const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *layout_variation_idx_delta_map,
bool all_axes_pinned)
{
if (unlikely (!c->extend_min (u.format))) return;
unsigned format = 2;
ValueFormat new_format = src->get_value_format ();
if (all_axes_pinned)
new_format = new_format.drop_device_table_flags ();
if (glyph_val_iter_pairs)
format = get_format (glyph_val_iter_pairs);
@ -53,13 +57,13 @@ struct SinglePos
src,
glyph_val_iter_pairs,
new_format,
layout_variation_idx_map);
layout_variation_idx_delta_map);
return;
case 2: u.format2.serialize (c,
src,
glyph_val_iter_pairs,
new_format,
layout_variation_idx_map);
layout_variation_idx_delta_map);
return;
default:return;
}
@ -84,8 +88,9 @@ static void
SinglePos_serialize (hb_serialize_context_t *c,
const SrcLookup *src,
Iterator it,
const hb_map_t *layout_variation_idx_map)
{ c->start_embed<SinglePos> ()->serialize (c, src, it, layout_variation_idx_map); }
const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *layout_variation_idx_delta_map,
bool all_axes_pinned)
{ c->start_embed<SinglePos> ()->serialize (c, src, it, layout_variation_idx_delta_map, all_axes_pinned); }
}

View file

@ -87,7 +87,7 @@ struct SinglePosFormat1
const SrcLookup *src,
Iterator it,
ValueFormat newFormat,
const hb_map_t *layout_variation_idx_map)
const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *layout_variation_idx_delta_map)
{
if (unlikely (!c->extend_min (this))) return;
if (unlikely (!c->check_assign (valueFormat,
@ -96,7 +96,7 @@ struct SinglePosFormat1
for (const hb_array_t<const Value>& _ : + it | hb_map (hb_second))
{
src->get_value_format ().copy_values (c, newFormat, src, &_, layout_variation_idx_map);
src->get_value_format ().copy_values (c, newFormat, src, &_, layout_variation_idx_delta_map);
// Only serialize the first entry in the iterator, the rest are assumed to
// be the same.
break;
@ -126,7 +126,7 @@ struct SinglePosFormat1
;
bool ret = bool (it);
SinglePos_serialize (c->serializer, this, it, c->plan->layout_variation_idx_map);
SinglePos_serialize (c->serializer, this, it, c->plan->layout_variation_idx_delta_map, c->plan->all_axes_pinned);
return_trace (ret);
}
};

View file

@ -99,7 +99,7 @@ struct SinglePosFormat2
const SrcLookup *src,
Iterator it,
ValueFormat newFormat,
const hb_map_t *layout_variation_idx_map)
const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *layout_variation_idx_delta_map)
{
auto out = c->extend_min (this);
if (unlikely (!out)) return;
@ -109,7 +109,7 @@ struct SinglePosFormat2
+ it
| hb_map (hb_second)
| hb_apply ([&] (hb_array_t<const Value> _)
{ src->get_value_format ().copy_values (c, newFormat, src, &_, layout_variation_idx_map); })
{ src->get_value_format ().copy_values (c, newFormat, src, &_, layout_variation_idx_delta_map); })
;
auto glyphs =
@ -141,7 +141,7 @@ struct SinglePosFormat2
;
bool ret = bool (it);
SinglePos_serialize (c->serializer, this, it, c->plan->layout_variation_idx_map);
SinglePos_serialize (c->serializer, this, it, c->plan->layout_variation_idx_delta_map, c->plan->all_axes_pinned);
return_trace (ret);
}
};

View file

@ -59,6 +59,24 @@ struct ValueFormat : HBUINT16
unsigned int get_len () const { return hb_popcount ((unsigned int) *this); }
unsigned int get_size () const { return get_len () * Value::static_size; }
hb_vector_t<unsigned> get_device_table_indices () const {
unsigned i = 0;
hb_vector_t<unsigned> result;
unsigned format = *this;
if (format & xPlacement) i++;
if (format & yPlacement) i++;
if (format & xAdvance) i++;
if (format & yAdvance) i++;
if (format & xPlaDevice) result.push (i++);
if (format & yPlaDevice) result.push (i++);
if (format & xAdvDevice) result.push (i++);
if (format & yAdvDevice) result.push (i++);
return result;
}
bool apply_value (hb_ot_apply_context_t *c,
const void *base,
const Value *values,
@ -145,30 +163,50 @@ struct ValueFormat : HBUINT16
unsigned int new_format,
const void *base,
const Value *values,
const hb_map_t *layout_variation_idx_map) const
const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *layout_variation_idx_delta_map) const
{
unsigned int format = *this;
if (!format) return;
if (format & xPlacement) copy_value (c, new_format, xPlacement, *values++);
if (format & yPlacement) copy_value (c, new_format, yPlacement, *values++);
if (format & xAdvance) copy_value (c, new_format, xAdvance, *values++);
if (format & yAdvance) copy_value (c, new_format, yAdvance, *values++);
HBINT16 *x_placement = nullptr, *y_placement = nullptr, *x_adv = nullptr, *y_adv = nullptr;
if (format & xPlacement) x_placement = copy_value (c, new_format, xPlacement, *values++);
if (format & yPlacement) y_placement = copy_value (c, new_format, yPlacement, *values++);
if (format & xAdvance) x_adv = copy_value (c, new_format, xAdvance, *values++);
if (format & yAdvance) y_adv = copy_value (c, new_format, yAdvance, *values++);
if (format & xPlaDevice) copy_device (c, base, values++, layout_variation_idx_map);
if (format & yPlaDevice) copy_device (c, base, values++, layout_variation_idx_map);
if (format & xAdvDevice) copy_device (c, base, values++, layout_variation_idx_map);
if (format & yAdvDevice) copy_device (c, base, values++, layout_variation_idx_map);
if (format & xPlaDevice)
{
add_delta_to_value (x_placement, base, values, layout_variation_idx_delta_map);
copy_device (c, base, values++, layout_variation_idx_delta_map, new_format, xPlaDevice);
}
if (format & yPlaDevice)
{
add_delta_to_value (y_placement, base, values, layout_variation_idx_delta_map);
copy_device (c, base, values++, layout_variation_idx_delta_map, new_format, yPlaDevice);
}
if (format & xAdvDevice)
{
add_delta_to_value (x_adv, base, values, layout_variation_idx_delta_map);
copy_device (c, base, values++, layout_variation_idx_delta_map, new_format, xAdvDevice);
}
if (format & yAdvDevice)
{
add_delta_to_value (y_adv, base, values, layout_variation_idx_delta_map);
copy_device (c, base, values++, layout_variation_idx_delta_map, new_format, yAdvDevice);
}
}
void copy_value (hb_serialize_context_t *c,
unsigned int new_format,
Flags flag,
Value value) const
HBINT16* copy_value (hb_serialize_context_t *c,
unsigned int new_format,
Flags flag,
Value value) const
{
// Filter by new format.
if (!(new_format & flag)) return;
c->copy (value);
if (!(new_format & flag)) return nullptr;
return reinterpret_cast<HBINT16 *> (c->copy (value));
}
void collect_variation_indices (hb_collect_variation_indices_context_t *c,
@ -183,31 +221,40 @@ struct ValueFormat : HBUINT16
if (format & yAdvance) i++;
if (format & xPlaDevice)
{
(base + get_device (&(values[i]))).collect_variation_indices (c->layout_variation_indices);
(base + get_device (&(values[i]))).collect_variation_indices (c);
i++;
}
if (format & ValueFormat::yPlaDevice)
{
(base + get_device (&(values[i]))).collect_variation_indices (c->layout_variation_indices);
(base + get_device (&(values[i]))).collect_variation_indices (c);
i++;
}
if (format & ValueFormat::xAdvDevice)
{
(base + get_device (&(values[i]))).collect_variation_indices (c->layout_variation_indices);
(base + get_device (&(values[i]))).collect_variation_indices (c);
i++;
}
if (format & ValueFormat::yAdvDevice)
{
(base + get_device (&(values[i]))).collect_variation_indices (c->layout_variation_indices);
(base + get_device (&(values[i]))).collect_variation_indices (c);
i++;
}
}
unsigned drop_device_table_flags () const
{
unsigned format = *this;
for (unsigned flag = xPlaDevice; flag <= yAdvDevice; flag = flag << 1)
format = format & ~flag;
return format;
}
private:
bool sanitize_value_devices (hb_sanitize_context_t *c, const void *base, const Value *values) const
{
@ -236,9 +283,27 @@ struct ValueFormat : HBUINT16
return *static_cast<const Offset16To<Device> *> (value);
}
bool copy_device (hb_serialize_context_t *c, const void *base,
const Value *src_value, const hb_map_t *layout_variation_idx_map) const
void add_delta_to_value (HBINT16 *value,
const void *base,
const Value *src_value,
const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *layout_variation_idx_delta_map) const
{
if (!value) return;
unsigned varidx = (base + get_device (src_value)).get_variation_index ();
hb_pair_t<unsigned, int> *varidx_delta;
if (!layout_variation_idx_delta_map->has (varidx, &varidx_delta)) return;
*value += hb_second (*varidx_delta);
}
bool copy_device (hb_serialize_context_t *c, const void *base,
const Value *src_value,
const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *layout_variation_idx_delta_map,
unsigned int new_format, Flags flag) const
{
// Filter by new format.
if (!(new_format & flag)) return true;
Value *dst_value = c->copy (*src_value);
if (!dst_value) return false;
@ -246,7 +311,7 @@ struct ValueFormat : HBUINT16
*dst_value = 0;
c->push ();
if ((base + get_device (src_value)).copy (c, layout_variation_idx_map))
if ((base + get_device (src_value)).copy (c, layout_variation_idx_delta_map))
{
c->add_link (*dst_value, c->pop_pack ());
return true;

View file

@ -35,19 +35,17 @@ struct MultipleSubst
}
}
/* TODO This function is unused and not updated to 24bit GIDs. Should be done by using
* iterators. While at it perhaps using iterator of arrays of hb_codepoint_t instead. */
template<typename Iterator,
hb_requires (hb_is_sorted_iterator (Iterator))>
bool serialize (hb_serialize_context_t *c,
hb_sorted_array_t<const HBGlyphID16> glyphs,
hb_array_t<const unsigned int> substitute_len_list,
hb_array_t<const HBGlyphID16> substitute_glyphs_list)
Iterator it)
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (u.format))) return_trace (false);
unsigned int format = 1;
u.format = format;
switch (u.format) {
case 1: return_trace (u.format1.serialize (c, glyphs, substitute_len_list, substitute_glyphs_list));
case 1: return_trace (u.format1.serialize (c, it));
default:return_trace (false);
}
}

View file

@ -71,22 +71,31 @@ struct MultipleSubstFormat1_2
return_trace ((this+sequence[index]).apply (c));
}
template<typename Iterator,
hb_requires (hb_is_sorted_iterator (Iterator))>
bool serialize (hb_serialize_context_t *c,
hb_sorted_array_t<const HBGlyphID16> glyphs,
hb_array_t<const unsigned int> substitute_len_list,
hb_array_t<const HBGlyphID16> substitute_glyphs_list)
Iterator it)
{
TRACE_SERIALIZE (this);
auto sequences =
+ it
| hb_map (hb_second)
;
auto glyphs =
+ it
| hb_map_retains_sorting (hb_first)
;
if (unlikely (!c->extend_min (this))) return_trace (false);
if (unlikely (!sequence.serialize (c, glyphs.length))) return_trace (false);
for (unsigned int i = 0; i < glyphs.length; i++)
if (unlikely (!sequence.serialize (c, sequences.length))) return_trace (false);
for (auto& pair : hb_zip (sequences, sequence))
{
unsigned int substitute_len = substitute_len_list[i];
if (unlikely (!sequence[i]
.serialize_serialize (c, substitute_glyphs_list.sub_array (0, substitute_len))))
if (unlikely (!pair.second
.serialize_serialize (c, pair.first)))
return_trace (false);
substitute_glyphs_list += substitute_len;
}
return_trace (coverage.serialize_serialize (c, glyphs));
}

View file

@ -42,6 +42,11 @@ struct SingleSubstFormat1_3
hb_codepoint_t d = deltaGlyphID;
hb_codepoint_t mask = get_mask ();
/* Help fuzzer avoid this function as much. */
unsigned pop = (this+coverage).get_population ();
if (pop >= mask)
return;
hb_set_t intersection;
(this+coverage).intersect_set (c->parent_active_glyphs (), intersection);
@ -52,7 +57,7 @@ struct SingleSubstFormat1_3
hb_codepoint_t max_before = intersection.get_max ();
hb_codepoint_t min_after = (min_before + d) & mask;
hb_codepoint_t max_after = (max_before + d) & mask;
if ((this+coverage).get_population () >= max_before - min_before &&
if (pop >= max_before - min_before &&
((min_before <= min_after && min_after <= max_before) ||
(min_before <= max_after && max_after <= max_before)))
return;

View file

@ -41,7 +41,6 @@ struct SingleSubstFormat2_4
| hb_map (hb_second)
| hb_sink (c->output)
;
}
void closure_lookups (hb_closure_lookups_context_t *c) const {}

View file

@ -119,19 +119,16 @@ struct SubstLookup : Lookup
return_trace (false);
}
bool serialize_multiple (hb_serialize_context_t *c,
uint32_t lookup_props,
hb_sorted_array_t<const HBGlyphID16> glyphs,
hb_array_t<const unsigned int> substitute_len_list,
hb_array_t<const HBGlyphID16> substitute_glyphs_list)
template<typename Iterator,
hb_requires (hb_is_sorted_iterator (Iterator))>
bool serialize (hb_serialize_context_t *c,
uint32_t lookup_props,
Iterator it)
{
TRACE_SERIALIZE (this);
if (unlikely (!Lookup::serialize (c, SubTable::Multiple, lookup_props, 1))) return_trace (false);
if (c->push<SubTable> ()->u.multiple.
serialize (c,
glyphs,
substitute_len_list,
substitute_glyphs_list))
serialize (c, it))
{
c->add_link (get_subtables<SubTable> ()[0], c->pop_pack ());
return_trace (true);

View file

@ -105,6 +105,67 @@ struct CompositeGlyphRecord
}
}
unsigned compile_with_deltas (const contour_point_t &p_delta,
char *out) const
{
const HBINT8 *p = &StructAfter<const HBINT8> (flags);
#ifndef HB_NO_BEYOND_64K
if (flags & GID_IS_24BIT)
p += HBGlyphID24::static_size;
else
#endif
p += HBGlyphID16::static_size;
unsigned len = get_size ();
unsigned len_before_val = (const char *)p - (const char *)this;
if (flags & ARG_1_AND_2_ARE_WORDS)
{
// no overflow, copy and update value with deltas
memcpy (out, this, len);
const HBINT16 *px = reinterpret_cast<const HBINT16 *> (p);
HBINT16 *o = reinterpret_cast<HBINT16 *> (out + len_before_val);
o[0] = px[0] + roundf (p_delta.x);
o[1] = px[1] + roundf (p_delta.y);
}
else
{
int new_x = p[0] + roundf (p_delta.x);
int new_y = p[1] + roundf (p_delta.y);
if (new_x <= 127 && new_x >= -128 &&
new_y <= 127 && new_y >= -128)
{
memcpy (out, this, len);
HBINT8 *o = reinterpret_cast<HBINT8 *> (out + len_before_val);
o[0] = new_x;
o[1] = new_y;
}
else
{
// int8 overflows after deltas applied
memcpy (out, this, len_before_val);
//update flags
CompositeGlyphRecord *o = reinterpret_cast<CompositeGlyphRecord *> (out);
o->flags = flags | ARG_1_AND_2_ARE_WORDS;
out += len_before_val;
HBINT16 new_value;
new_value = new_x;
memcpy (out, &new_value, HBINT16::static_size);
out += HBINT16::static_size;
new_value = new_y;
memcpy (out, &new_value, HBINT16::static_size);
out += HBINT16::static_size;
memcpy (out, p+2, len - len_before_val - 2);
len += 2;
}
}
return len;
}
protected:
bool scaled_offsets () const
{ return (flags & (SCALED_COMPONENT_OFFSET | UNSCALED_COMPONENT_OFFSET)) == SCALED_COMPONENT_OFFSET; }
@ -288,6 +349,63 @@ struct CompositeGlyph
return;
glyph_chain.set_overlaps_flag ();
}
bool compile_bytes_with_deltas (const hb_bytes_t &source_bytes,
const contour_point_vector_t &deltas,
hb_bytes_t &dest_bytes /* OUT */)
{
if (source_bytes.length <= GlyphHeader::static_size ||
header.numberOfContours != -1)
{
dest_bytes = hb_bytes_t ();
return true;
}
unsigned source_len = source_bytes.length - GlyphHeader::static_size;
/* try to allocate more memories than source glyph bytes
* in case that there might be an overflow for int8 value
* and we would need to use int16 instead */
char *o = (char *) hb_calloc (source_len + source_len/2, sizeof (char));
if (unlikely (!o)) return false;
const CompositeGlyphRecord *c = reinterpret_cast<const CompositeGlyphRecord *> (source_bytes.arrayZ + GlyphHeader::static_size);
auto it = composite_iter_t (hb_bytes_t ((const char *)c, source_len), c);
char *p = o;
unsigned i = 0, source_comp_len = 0;
for (const auto &component : it)
{
/* last 4 points in deltas are phantom points and should not be included */
if (i >= deltas.length - 4) return false;
unsigned comp_len = component.get_size ();
if (component.is_anchored ())
{
memcpy (p, &component, comp_len);
p += comp_len;
}
else
{
unsigned new_len = component.compile_with_deltas (deltas[i], p);
p += new_len;
}
i++;
source_comp_len += comp_len;
}
//copy instructions if any
if (source_len > source_comp_len)
{
unsigned instr_len = source_len - source_comp_len;
memcpy (p, (const char *)c + source_comp_len, instr_len);
p += instr_len;
}
unsigned len = p - o;
dest_bytes = hb_bytes_t (o, len);
return true;
}
};

View file

@ -72,12 +72,117 @@ struct Glyph
}
}
void update_mtx (const hb_subset_plan_t *plan,
int xMin, int yMax,
const contour_point_vector_t &all_points) const
{
hb_codepoint_t new_gid = 0;
if (!plan->new_gid_for_old_gid (gid, &new_gid))
return;
unsigned len = all_points.length;
float leftSideX = all_points[len - 4].x;
float rightSideX = all_points[len - 3].x;
float topSideY = all_points[len - 2].y;
float bottomSideY = all_points[len - 1].y;
int hori_aw = roundf (rightSideX - leftSideX);
if (hori_aw < 0) hori_aw = 0;
int lsb = roundf (xMin - leftSideX);
plan->hmtx_map->set (new_gid, hb_pair (hori_aw, lsb));
int vert_aw = roundf (topSideY - bottomSideY);
if (vert_aw < 0) vert_aw = 0;
int tsb = roundf (topSideY - yMax);
plan->vmtx_map->set (new_gid, hb_pair (vert_aw, tsb));
}
bool compile_header_bytes (const hb_subset_plan_t *plan,
const contour_point_vector_t &all_points,
hb_bytes_t &dest_bytes /* OUT */) const
{
GlyphHeader *glyph_header = nullptr;
if (all_points.length > 4)
{
glyph_header = (GlyphHeader *) hb_calloc (1, GlyphHeader::static_size);
if (unlikely (!glyph_header)) return false;
}
int xMin, xMax;
xMin = xMax = roundf (all_points[0].x);
int yMin, yMax;
yMin = yMax = roundf (all_points[0].y);
for (unsigned i = 1; i < all_points.length - 4; i++)
{
float rounded_x = roundf (all_points[i].x);
float rounded_y = roundf (all_points[i].y);
xMin = hb_min (xMin, rounded_x);
xMax = hb_max (xMax, rounded_x);
yMin = hb_min (yMin, rounded_y);
yMax = hb_max (yMax, rounded_y);
}
update_mtx (plan, xMin, yMax, all_points);
/*for empty glyphs: all_points only include phantom points.
*just update metrics and then return */
if (all_points.length == 4)
return true;
glyph_header->numberOfContours = header->numberOfContours;
glyph_header->xMin = xMin;
glyph_header->yMin = yMin;
glyph_header->xMax = xMax;
glyph_header->yMax = yMax;
dest_bytes = hb_bytes_t ((const char *)glyph_header, GlyphHeader::static_size);
return true;
}
bool compile_bytes_with_deltas (const hb_subset_plan_t *plan,
hb_font_t *font,
const glyf_accelerator_t &glyf,
hb_bytes_t &dest_start, /* IN/OUT */
hb_bytes_t &dest_end /* OUT */) const
{
contour_point_vector_t all_points, deltas;
get_points (font, glyf, all_points, &deltas, false);
switch (type) {
case COMPOSITE:
if (!CompositeGlyph (*header, bytes).compile_bytes_with_deltas (dest_start,
deltas,
dest_end))
return false;
break;
case SIMPLE:
if (!SimpleGlyph (*header, bytes).compile_bytes_with_deltas (all_points,
plan->flags & HB_SUBSET_FLAGS_NO_HINTING,
dest_end))
return false;
break;
default:
/* set empty bytes for empty glyph
* do not use source glyph's pointers */
dest_start = hb_bytes_t ();
dest_end = hb_bytes_t ();
break;
}
return compile_header_bytes (plan, all_points, dest_start);
}
/* Note: Recursively calls itself.
* all_points includes phantom points
*/
template <typename accelerator_t>
bool get_points (hb_font_t *font, const accelerator_t &glyf_accelerator,
contour_point_vector_t &all_points /* OUT */,
contour_point_vector_t *deltas = nullptr, /* OUT */
bool use_my_metrics = true,
bool phantom_only = false,
unsigned int depth = 0) const
{
@ -130,10 +235,28 @@ struct Glyph
phantoms[PHANTOM_BOTTOM].y = v_orig - (int) v_adv;
}
if (deltas != nullptr && depth == 0 && type == COMPOSITE)
{
if (unlikely (!deltas->resize (points.length))) return false;
for (unsigned i = 0 ; i < points.length; i++)
deltas->arrayZ[i] = points.arrayZ[i];
}
#ifndef HB_NO_VAR
glyf_accelerator.gvar->apply_deltas_to_points (gid, font, points.as_array ());
#endif
// mainly used by CompositeGlyph calculating new X/Y offset value so no need to extend it
// with child glyphs' points
if (deltas != nullptr && depth == 0 && type == COMPOSITE)
{
for (unsigned i = 0 ; i < points.length; i++)
{
deltas->arrayZ[i].x = points.arrayZ[i].x - deltas->arrayZ[i].x;
deltas->arrayZ[i].y = points.arrayZ[i].y - deltas->arrayZ[i].y;
}
}
switch (type) {
case SIMPLE:
if (!inplace)
@ -148,11 +271,11 @@ struct Glyph
comp_points.reset ();
if (unlikely (!glyf_accelerator.glyph_for_gid (item.get_gid ())
.get_points (font, glyf_accelerator, comp_points,
phantom_only, depth + 1)))
deltas, use_my_metrics, phantom_only, depth + 1)))
return false;
/* Copy phantom points from component if USE_MY_METRICS flag set */
if (item.is_use_my_metrics ())
if (use_my_metrics && item.is_use_my_metrics ())
for (unsigned int i = 0; i < PHANTOM_COUNT; i++)
phantoms[i] = comp_points[comp_points.length - PHANTOM_COUNT + i];

View file

@ -206,6 +206,132 @@ struct SimpleGlyph
&& read_points (p, points_, end, &contour_point_t::y,
FLAG_Y_SHORT, FLAG_Y_SAME);
}
static void encode_coord (int value,
uint8_t &flag,
const simple_glyph_flag_t short_flag,
const simple_glyph_flag_t same_flag,
hb_vector_t<uint8_t> &coords /* OUT */)
{
if (value == 0)
{
flag |= same_flag;
}
else if (value >= -255 && value <= 255)
{
flag |= short_flag;
if (value > 0) flag |= same_flag;
else value = -value;
coords.push ((uint8_t)value);
}
else
{
int16_t val = value;
coords.push (val >> 8);
coords.push (val & 0xff);
}
}
static void encode_flag (uint8_t &flag,
uint8_t &repeat,
uint8_t &lastflag,
hb_vector_t<uint8_t> &flags /* OUT */)
{
if (flag == lastflag && repeat != 255)
{
repeat = repeat + 1;
if (repeat == 1)
{
flags.push(flag);
}
else
{
unsigned len = flags.length;
flags[len-2] = flag | FLAG_REPEAT;
flags[len-1] = repeat;
}
}
else
{
repeat = 0;
flags.push (flag);
}
lastflag = flag;
}
/* Re-encodes this simple glyph's point data (flag bytes plus x/y deltas)
 * from the instance-specific absolute positions in `all_points`.
 *
 * all_points holds the glyph's outline points followed by 4 phantom points,
 * hence num_points = all_points.length - 4.  If `no_hinting` is set, the
 * instruction bytes are omitted from the output.
 *
 * On success, dest_bytes receives a freshly hb_calloc'ed byte run covering
 * everything after the GlyphHeader (endpoint array, [instructions], flags,
 * x coords, y coords); the caller owns and must free it.  Returns false
 * only on allocation failure. */
bool compile_bytes_with_deltas (const contour_point_vector_t &all_points,
bool no_hinting,
hb_bytes_t &dest_bytes /* OUT */)
{
/* Empty glyph (or phantom-points-only): nothing to re-encode. */
if (header.numberOfContours == 0 || all_points.length <= 4)
{
dest_bytes = hb_bytes_t ();
return true;
}
//convert absolute values to relative values
unsigned num_points = all_points.length - 4;
hb_vector_t<hb_pair_t<int, int>> deltas;
deltas.resize (num_points);
for (unsigned i = 0; i < num_points; i++)
{
/* Coordinates are stored as deltas from the previous point; the first
 * point is relative to (0, 0).  Round before differencing so the
 * encoded stream matches the rounded positions exactly. */
deltas[i].first = i == 0 ? roundf (all_points[i].x) : roundf (all_points[i].x) - roundf (all_points[i-1].x);
deltas[i].second = i == 0 ? roundf (all_points[i].y) : roundf (all_points[i].y) - roundf (all_points[i-1].y);
}
hb_vector_t<uint8_t> flags, x_coords, y_coords;
flags.alloc (num_points);
/* Worst case: every coordinate needs the 2-byte long form. */
x_coords.alloc (2*num_points);
y_coords.alloc (2*num_points);
uint8_t lastflag = 0, repeat = 0;
for (unsigned i = 0; i < num_points; i++)
{
uint8_t flag = all_points[i].flag;
/* Keep only the flag bits that survive re-encoding; SHORT/SAME bits are
 * recomputed from the deltas below. */
flag &= FLAG_ON_CURVE + FLAG_OVERLAP_SIMPLE;
encode_coord (deltas[i].first, flag, FLAG_X_SHORT, FLAG_X_SAME, x_coords);
encode_coord (deltas[i].second, flag, FLAG_Y_SHORT, FLAG_Y_SAME, y_coords);
if (i == 0) lastflag = flag + 1; //make lastflag != flag for the first point
encode_flag (flag, repeat, lastflag, flags);
}
/* endPtsOfContours array plus the 2-byte instructionLength field. */
unsigned len_before_instrs = 2 * header.numberOfContours + 2;
unsigned len_instrs = instructions_length ();
unsigned total_len = len_before_instrs + flags.length + x_coords.length + y_coords.length;
if (!no_hinting)
total_len += len_instrs;
char *p = (char *) hb_calloc (total_len, sizeof (char));
if (unlikely (!p)) return false;
/* Copy the unchanged prefix (endpoints + instructionLength) from the
 * original glyph bytes, then append the re-encoded streams. */
const char *src = bytes.arrayZ + GlyphHeader::static_size;
char *cur = p;
memcpy (p, src, len_before_instrs);
cur += len_before_instrs;
src += len_before_instrs;
if (!no_hinting)
{
memcpy (cur, src, len_instrs);
cur += len_instrs;
}
memcpy (cur, flags.arrayZ, flags.length);
cur += flags.length;
memcpy (cur, x_coords.arrayZ, x_coords.length);
cur += x_coords.length;
memcpy (cur, y_coords.arrayZ, y_coords.length);
dest_bytes = hb_bytes_t (p, total_len);
return true;
}
};

View file

@ -6,6 +6,9 @@
namespace OT {
struct glyf_accelerator_t;
namespace glyf_impl {
@ -55,6 +58,17 @@ struct SubsetGlyph
return_trace (true);
}
bool compile_bytes_with_deltas (const hb_subset_plan_t *plan,
hb_font_t *font,
const glyf_accelerator_t &glyf)
{ return source_glyph.compile_bytes_with_deltas (plan, font, glyf, dest_start, dest_end); }
void free_compiled_bytes ()
{
dest_start.fini ();
dest_end.fini ();
}
void drop_hints_bytes ()
{ source_glyph.drop_hints_bytes (dest_start, dest_end); }

View file

@ -16,7 +16,7 @@ template<typename IteratorIn, typename IteratorOut,
hb_requires (hb_is_source_of (IteratorIn, unsigned int)),
hb_requires (hb_is_sink_of (IteratorOut, unsigned))>
static void
_write_loca (IteratorIn it, bool short_offsets, IteratorOut dest)
_write_loca (IteratorIn&& it, bool short_offsets, IteratorOut&& dest)
{
unsigned right_shift = short_offsets ? 1 : 0;
unsigned int offset = 0;

View file

@ -24,7 +24,6 @@ namespace OT {
*/
#define HB_OT_TAG_glyf HB_TAG('g','l','y','f')
struct glyf
{
friend struct glyf_accelerator_t;
@ -75,6 +74,9 @@ struct glyf
hb_vector_t<glyf_impl::SubsetGlyph> glyphs;
_populate_subset_glyphs (c->plan, &glyphs);
if (!c->plan->pinned_at_default)
_compile_subset_glyphs_with_deltas (c->plan, &glyphs);
auto padded_offsets =
+ hb_iter (glyphs)
| hb_map (&glyf_impl::SubsetGlyph::padded_size)
@ -93,6 +95,8 @@ struct glyf
}
if (!c->plan->pinned_at_default)
_free_compiled_subset_glyphs (&glyphs);
if (unlikely (c->serializer->in_error ())) return_trace (false);
return_trace (c->serializer->check_success (glyf_impl::_add_loca_and_head (c->plan,
padded_offsets,
@ -102,6 +106,16 @@ struct glyf
void
_populate_subset_glyphs (const hb_subset_plan_t *plan,
hb_vector_t<glyf_impl::SubsetGlyph> *glyphs /* OUT */) const;
void
_compile_subset_glyphs_with_deltas (const hb_subset_plan_t *plan,
hb_vector_t<glyf_impl::SubsetGlyph> *glyphs /* OUT */) const;
void _free_compiled_subset_glyphs (hb_vector_t<glyf_impl::SubsetGlyph> *glyphs) const
{
for (auto _ : *glyphs)
_.free_compiled_bytes ();
}
protected:
UnsizedArrayOf<HBUINT8>
@ -166,7 +180,7 @@ struct glyf_accelerator_t
contour_point_vector_t all_points;
bool phantom_only = !consumer.is_consuming_contour_points ();
if (unlikely (!glyph_for_gid (gid).get_points (font, *this, all_points, phantom_only)))
if (unlikely (!glyph_for_gid (gid).get_points (font, *this, all_points, nullptr, true, phantom_only)))
return false;
if (consumer.is_consuming_contour_points ())
@ -389,6 +403,30 @@ glyf::_populate_subset_glyphs (const hb_subset_plan_t *plan,
;
}
inline void
glyf::_compile_subset_glyphs_with_deltas (const hb_subset_plan_t *plan,
hb_vector_t<glyf_impl::SubsetGlyph> *glyphs /* OUT */) const
{
OT::glyf_accelerator_t glyf (plan->source);
hb_font_t *font = hb_font_create (plan->source);
hb_vector_t<hb_variation_t> vars;
vars.alloc (plan->user_axes_location->get_population ());
for (auto _ : *plan->user_axes_location)
{
hb_variation_t var;
var.tag = _.first;
var.value = _.second;
vars.push (var);
}
hb_font_set_variations (font, vars.arrayZ, plan->user_axes_location->get_population ());
for (auto& subset_glyph : *glyphs)
const_cast<glyf_impl::SubsetGlyph &> (subset_glyph).compile_bytes_with_deltas (plan, font, glyf);
hb_font_destroy (font);
}
} /* namespace OT */

View file

@ -0,0 +1,216 @@
/*
* Copyright © 2022 Google, Inc.
*
* This is part of HarfBuzz, a text shaping library.
*
* Permission is hereby granted, without written agreement and without
* license or royalty fees, to use, copy, modify, and distribute this
* software and its documentation for any purpose, provided that the
* above copyright notice and the following two paragraphs appear in
* all copies of this software.
*
* IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
* DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
* ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
* IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
* DAMAGE.
*
* THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
* ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
* PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
*
* Google Author(s): Garret Rieger
*/
#include "graph.hh"
#include "../hb-ot-layout-common.hh"
#ifndef GRAPH_CLASSDEF_GRAPH_HH
#define GRAPH_CLASSDEF_GRAPH_HH
namespace graph {
struct ClassDefFormat1 : public OT::ClassDefFormat1_3<SmallTypes>
{
  /* Checks that the vertex's byte extent is large enough to hold this
   * format-1 ClassDef, including its full classValue array. */
  bool sanitize (graph_t::vertex_t& vertex) const
  {
    constexpr unsigned min_size = OT::ClassDefFormat1_3<SmallTypes>::min_size;
    const int64_t available = vertex.obj.tail - vertex.obj.head;
    /* The fixed header must be fully present before the array length can be
     * trusted; only then check the array payload. */
    return available >= min_size
        && available >= min_size + classValue.get_size () - classValue.len.get_size ();
  }
};
struct ClassDefFormat2 : public OT::ClassDefFormat2_4<SmallTypes>
{
  /* Checks that the vertex's byte extent is large enough to hold this
   * format-2 ClassDef, including its full rangeRecord array. */
  bool sanitize (graph_t::vertex_t& vertex) const
  {
    constexpr unsigned min_size = OT::ClassDefFormat2_4<SmallTypes>::min_size;
    const int64_t available = vertex.obj.tail - vertex.obj.head;
    /* Header first, then the variable-length range-record payload. */
    return available >= min_size
        && available >= min_size + rangeRecord.get_size () - rangeRecord.len.get_size ();
  }
};
struct ClassDef : public OT::ClassDef
{
  /* Serializes a new ClassDef from the (glyph, class) iterator into a fresh
   * graph node and links it from parent_id via a 16-bit offset stored at
   * link_position.  Returns false if serialization (or allocation) fails. */
  template<typename It>
  static bool add_class_def (gsubgpos_graph_context_t& c,
                             unsigned parent_id,
                             unsigned link_position,
                             It glyph_and_class,
                             unsigned max_size)
  {
    unsigned class_def_prime_id = c.graph.new_node (nullptr, nullptr);
    auto& class_def_prime_vertex = c.graph.vertices_[class_def_prime_id];
    if (!make_class_def (c, glyph_and_class, class_def_prime_id, max_size))
      return false;

    auto* class_def_link = c.graph.vertices_[parent_id].obj.real_links.push ();
    class_def_link->width = SmallTypes::size;
    class_def_link->objidx = class_def_prime_id;
    class_def_link->position = link_position;
    class_def_prime_vertex.parents.push (parent_id);

    return true;
  }

  /* Serializes the (glyph, class) pairs into a scratch buffer, then installs
   * a tight copy of the packed bytes as the contents of dest_obj.  The copy's
   * storage is handed to the graph context, which frees it on destruction. */
  template<typename It>
  static bool make_class_def (gsubgpos_graph_context_t& c,
                              It glyph_and_class,
                              unsigned dest_obj,
                              unsigned max_size)
  {
    char* buffer = (char*) hb_calloc (1, max_size);
    if (unlikely (!buffer)) return false; // Allocation failure; previously unchecked.

    hb_serialize_context_t serializer (buffer, max_size);
    OT::ClassDef_serialize (&serializer, glyph_and_class);
    serializer.end_serialize ();
    if (serializer.in_error ())
    {
      hb_free (buffer);
      return false;
    }

    hb_bytes_t class_def_copy = serializer.copy_bytes ();
    c.add_buffer ((char *) class_def_copy.arrayZ); // Give ownership to the context, it will cleanup the buffer.

    auto& obj = c.graph.vertices_[dest_obj].obj;
    obj.head = (char *) class_def_copy.arrayZ;
    obj.tail = obj.head + class_def_copy.length;

    hb_free (buffer); // Scratch buffer only; the tight copy lives on in `obj`.
    return true;
  }

  /* Bounds-checks this ClassDef against the vertex extents, dispatching on
   * the serialized format field.  Formats 3/4 are not supported here. */
  bool sanitize (graph_t::vertex_t& vertex) const
  {
    int64_t vertex_len = vertex.obj.tail - vertex.obj.head;
    if (vertex_len < OT::ClassDef::min_size) return false;
    switch (u.format)
    {
    case 1: return ((ClassDefFormat1*)this)->sanitize (vertex);
    case 2: return ((ClassDefFormat2*)this)->sanitize (vertex);
#ifndef HB_NO_BORING_EXPANSION
    // Not currently supported
    case 3:
    case 4:
#endif
    default: return false;
    }
  }
};
/* Estimates, per class, how much a Coverage table and a ClassDef table would
 * grow (worst case) if all glyphs of that class were added.  Used when
 * splitting oversized GPOS subtables to decide where to cut. */
struct class_def_size_estimator_t
{
/* Builds per-class glyph sets and range counts from a (glyph, class)
 * iterator.  Glyphs are expected in ascending gid order; any gap makes
 * gids_consecutive false, which disables the ClassDef format-1 estimate. */
template<typename It>
class_def_size_estimator_t (It glyph_and_class)
: gids_consecutive (true), num_ranges_per_class (), glyphs_per_class ()
{
unsigned last_gid = (unsigned) -1;
for (auto p : + glyph_and_class)
{
unsigned gid = p.first;
unsigned klass = p.second;
if (last_gid != (unsigned) -1 && gid != last_gid + 1)
gids_consecutive = false;
last_gid = gid;
/* Accumulate this gid into the set for its class, creating the set on
 * first sight of the class. */
hb_set_t* glyphs;
if (glyphs_per_class.has (klass, &glyphs) && glyphs) {
glyphs->add (gid);
continue;
}
hb_set_t new_glyphs;
new_glyphs.add (gid);
glyphs_per_class.set (klass, std::move (new_glyphs));
}
if (in_error ()) return;
/* Pre-count contiguous gid ranges per class for the format-2 estimate. */
for (unsigned klass : glyphs_per_class.keys ())
{
if (!klass) continue; // class 0 doesn't get encoded.
const hb_set_t& glyphs = glyphs_per_class.get (klass);
hb_codepoint_t start = HB_SET_VALUE_INVALID;
hb_codepoint_t end = HB_SET_VALUE_INVALID;
unsigned count = 0;
while (glyphs.next_range (&start, &end))
count++;
num_ranges_per_class.set (klass, count);
}
}
// Incremental increase in the Coverage and ClassDef table size
// (worst case) if all glyphs associated with 'klass' were added.
unsigned incremental_coverage_size (unsigned klass) const
{
// Coverage takes 2 bytes per glyph worst case,
return 2 * glyphs_per_class.get (klass).get_population ();
}
// Incremental increase in the Coverage and ClassDef table size
// (worst case) if all glyphs associated with 'klass' were added.
unsigned incremental_class_def_size (unsigned klass) const
{
// ClassDef takes 6 bytes per range
unsigned class_def_2_size = 6 * num_ranges_per_class.get (klass);
if (gids_consecutive)
{
// ClassDef1 takes 2 bytes per glyph, but only can be used
// when gids are consecutive.
return hb_min (2 * glyphs_per_class.get (klass).get_population (), class_def_2_size);
}
return class_def_2_size;
}
/* True if any of the backing hashmaps/sets hit an allocation error; the
 * estimates are meaningless in that case. */
bool in_error ()
{
if (num_ranges_per_class.in_error ()) return true;
if (glyphs_per_class.in_error ()) return true;
for (const hb_set_t& s : glyphs_per_class.values ())
{
if (s.in_error ()) return true;
}
return false;
}
private:
bool gids_consecutive;       // all observed gids formed one unbroken run
hb_hashmap_t<unsigned, unsigned> num_ranges_per_class;  // klass -> contiguous range count
hb_hashmap_t<unsigned, hb_set_t> glyphs_per_class;      // klass -> set of gids
};
}
#endif // GRAPH_CLASSDEF_GRAPH_HH

View file

@ -56,6 +56,78 @@ struct CoverageFormat2 : public OT::Layout::Common::CoverageFormat2_4<SmallTypes
struct Coverage : public OT::Layout::Common::Coverage
{
static Coverage* clone_coverage (gsubgpos_graph_context_t& c,
unsigned coverage_id,
unsigned new_parent_id,
unsigned link_position,
unsigned start, unsigned end)
{
unsigned coverage_size = c.graph.vertices_[coverage_id].table_size ();
auto& coverage_v = c.graph.vertices_[coverage_id];
Coverage* coverage_table = (Coverage*) coverage_v.obj.head;
if (!coverage_table || !coverage_table->sanitize (coverage_v))
return nullptr;
auto new_coverage =
+ hb_zip (coverage_table->iter (), hb_range ())
| hb_filter ([&] (hb_pair_t<unsigned, unsigned> p) {
return p.second >= start && p.second < end;
})
| hb_map_retains_sorting (hb_first)
;
return add_coverage (c, new_parent_id, link_position, new_coverage, coverage_size);
}
template<typename It>
static Coverage* add_coverage (gsubgpos_graph_context_t& c,
unsigned parent_id,
unsigned link_position,
It glyphs,
unsigned max_size)
{
unsigned coverage_prime_id = c.graph.new_node (nullptr, nullptr);
auto& coverage_prime_vertex = c.graph.vertices_[coverage_prime_id];
if (!make_coverage (c, glyphs, coverage_prime_id, max_size))
return nullptr;
auto* coverage_link = c.graph.vertices_[parent_id].obj.real_links.push ();
coverage_link->width = SmallTypes::size;
coverage_link->objidx = coverage_prime_id;
coverage_link->position = link_position;
coverage_prime_vertex.parents.push (parent_id);
return (Coverage*) coverage_prime_vertex.obj.head;
}
template<typename It>
static bool make_coverage (gsubgpos_graph_context_t& c,
It glyphs,
unsigned dest_obj,
unsigned max_size)
{
char* buffer = (char*) hb_calloc (1, max_size);
hb_serialize_context_t serializer (buffer, max_size);
OT::Layout::Common::Coverage_serialize (&serializer, glyphs);
serializer.end_serialize ();
if (serializer.in_error ())
{
hb_free (buffer);
return false;
}
hb_bytes_t coverage_copy = serializer.copy_bytes ();
c.add_buffer ((char *) coverage_copy.arrayZ); // Give ownership to the context, it will cleanup the buffer.
auto& obj = c.graph.vertices_[dest_obj].obj;
obj.head = (char *) coverage_copy.arrayZ;
obj.tail = obj.head + coverage_copy.length;
hb_free (buffer);
return true;
}
bool sanitize (graph_t::vertex_t& vertex) const
{
int64_t vertex_len = vertex.obj.tail - vertex.obj.head;

View file

@ -49,6 +49,51 @@ struct graph_t
unsigned end = 0;
unsigned priority = 0;
/* Puts this vertex into a canonical form for byte-wise comparison: sorts its
 * outgoing links and zeroes the serialized offset bytes each link occupies.
 * No information is lost — position/objidx are still recorded on the link
 * itself; only the in-buffer offset values are cleared. */
void normalize ()
{
obj.real_links.qsort ();
for (auto& l : obj.real_links)
{
for (unsigned i = 0; i < l.width; i++)
{
/* Zero every byte of the serialized offset at this link's position. */
obj.head[l.position + i] = 0;
}
}
}
/* Deep structural equality: this vertex's bytes must match other's exactly,
 * and every linked child must compare equal recursively (via links_equal).
 * Intended to be run on normalize()d graphs, where offset bytes are zeroed
 * — otherwise differing offsets would defeat the byte comparison.
 * On mismatch, dumps both byte streams via DEBUG_MSG and returns false. */
bool equals (const vertex_t& other,
const graph_t& graph,
const graph_t& other_graph,
unsigned depth) const
{
if (!(as_bytes () == other.as_bytes ()))
{
/* NOTE(review): %lu assumes table_size () yields an unsigned long —
 * verify against table_size's return type or add explicit casts. */
DEBUG_MSG (SUBSET_REPACK, nullptr,
"vertex [%lu] bytes != [%lu] bytes, depth = %u",
table_size (),
other.table_size (),
depth);
auto a = as_bytes ();
auto b = other.as_bytes ();
/* NOTE(review): when the two byte runs have different lengths, this loop
 * dereferences the exhausted iterator — presumably hb_bytes_t tolerates
 * that in debug dumps; confirm. */
while (a || b)
{
DEBUG_MSG (SUBSET_REPACK, nullptr,
" 0x%x %s 0x%x", *a, (*a == *b) ? "==" : "!=", *b);
a++;
b++;
}
return false;
}
return links_equal (obj.real_links, other.obj.real_links, graph, other_graph, depth);
}
hb_bytes_t as_bytes () const
{
return hb_bytes_t (obj.head, table_size ());
}
friend void swap (vertex_t& a, vertex_t& b)
{
hb_swap (a.obj, b.obj);
@ -60,6 +105,18 @@ struct graph_t
hb_swap (a.priority, b.priority);
}
hb_hashmap_t<unsigned, unsigned>
position_to_index_map () const
{
hb_hashmap_t<unsigned, unsigned> result;
for (const auto& l : obj.real_links) {
result.set (l.position, l.objidx);
}
return result;
}
bool is_shared () const
{
return parents.length > 1;
@ -84,7 +141,7 @@ struct graph_t
{
for (unsigned i = 0; i < obj.real_links.length; i++)
{
auto& link = obj.real_links[i];
auto& link = obj.real_links.arrayZ[i];
if (link.objidx != child_index)
continue;
@ -155,6 +212,57 @@ struct graph_t
return -table_size;
}
private:
/* Pairwise comparison of two link lists: every link's metadata (width,
 * signedness, whence, position, bias) must match, and the child vertices
 * they point to must compare equal recursively.  Also fails if the lists
 * have different lengths.  Assumes both lists are in a consistent order
 * (see normalize (), which sorts links). */
bool links_equal (const hb_vector_t<hb_serialize_context_t::object_t::link_t>& this_links,
const hb_vector_t<hb_serialize_context_t::object_t::link_t>& other_links,
const graph_t& graph,
const graph_t& other_graph,
unsigned depth) const
{
auto a = this_links.iter ();
auto b = other_links.iter ();
while (a && b)
{
const auto& link_a = *a;
const auto& link_b = *b;
if (link_a.width != link_b.width ||
link_a.is_signed != link_b.is_signed ||
link_a.whence != link_b.whence ||
link_a.position != link_b.position ||
link_a.bias != link_b.bias)
return false;
/* Recurse into the children the links reference. */
if (!graph.vertices_[link_a.objidx].equals (
other_graph.vertices_[link_b.objidx], graph, other_graph, depth + 1))
return false;
a++;
b++;
}
/* One list longer than the other => not equal. */
if (bool (a) != bool (b))
return false;
return true;
}
};
template <typename T>
struct vertex_and_table_t
{
vertex_and_table_t () : index (0), vertex (nullptr), table (nullptr)
{}
unsigned index;
vertex_t* vertex;
T* table;
operator bool () {
return table && vertex;
}
};
/*
@ -169,7 +277,8 @@ struct graph_t
: parents_invalid (true),
distance_invalid (true),
positions_invalid (true),
successful (true)
successful (true),
buffers ()
{
num_roots_for_space_.push (1);
bool removed_nil = false;
@ -201,6 +310,20 @@ struct graph_t
~graph_t ()
{
vertices_.fini ();
for (char* b : buffers)
hb_free (b);
}
bool operator== (const graph_t& other) const
{
return root ().equals (other.root (), *this, other, 0);
}
// Sorts links of all objects in a consistent manner and zeroes all offsets.
void normalize ()
{
for (auto& v : vertices_.writer ())
v.normalize ();
}
bool in_error () const
@ -228,6 +351,27 @@ struct graph_t
return vertices_[i].obj;
}
void add_buffer (char* buffer)
{
buffers.push (buffer);
}
/*
* Adds a 16 bit link from parent_id to child_id
*/
template<typename T>
void add_link (T* offset,
unsigned parent_id,
unsigned child_id)
{
auto& v = vertices_[parent_id];
auto* link = v.obj.real_links.push ();
link->width = 2;
link->objidx = child_id;
link->position = (char*) offset - (char*) v.obj.head;
vertices_[child_id].parents.push (parent_id);
}
/*
* Generates a new topological sorting of graph ordered by the shortest
* distance to each node if positions are marked as invalid.
@ -345,13 +489,43 @@ struct graph_t
}
}
unsigned index_for_offset(unsigned node_idx, const void* offset) const
template <typename T, typename ...Ts>
vertex_and_table_t<T> as_table (unsigned parent, const void* offset, Ts... ds)
{
return as_table_from_index<T> (index_for_offset (parent, offset), std::forward<Ts>(ds)...);
}
template <typename T, typename ...Ts>
vertex_and_table_t<T> as_table_from_index (unsigned index, Ts... ds)
{
if (index >= vertices_.length)
return vertex_and_table_t<T> ();
vertex_and_table_t<T> r;
r.vertex = &vertices_[index];
r.table = (T*) r.vertex->obj.head;
r.index = index;
if (!r.table)
return vertex_and_table_t<T> ();
if (!r.table->sanitize (*(r.vertex), std::forward<Ts>(ds)...))
return vertex_and_table_t<T> ();
return r;
}
// Finds the object id of the object pointed to by the offset at 'offset'
// within object[node_idx].
unsigned index_for_offset (unsigned node_idx, const void* offset) const
{
const auto& node = object (node_idx);
if (offset < node.head || offset >= node.tail) return -1;
for (const auto& link : node.real_links)
unsigned length = node.real_links.length;
for (unsigned i = 0; i < length; i++)
{
// Use direct access for increased performance, this is a hot method.
const auto& link = node.real_links.arrayZ[i];
if (offset != node.head + link.position)
continue;
return link.objidx;
@ -360,6 +534,24 @@ struct graph_t
return -1;
}
// Finds the object id of the object pointed to by the offset at 'offset'
// within object[node_idx]. Ensures that the returned object is safe to mutate.
// That is, if the original child object is shared by parents other than node_idx
// it will be duplicated and the duplicate will be returned instead.
/* Resolves the offset at `offset` within object[node_idx] to a child object
 * id that is safe to mutate: if the child is shared with any other parent,
 * it is duplicated and the duplicate's id is returned instead.
 * Returns (unsigned) -1 if no link exists at that offset. */
unsigned mutable_index_for_offset (unsigned node_idx, const void* offset)
{
  unsigned child_idx = index_for_offset (node_idx, offset);
  if (child_idx == (unsigned) -1)
    return child_idx; // No link at that offset; don't index vertices_ with -1.

  auto& child = vertices_[child_idx];
  for (unsigned p : child.parents)
  {
    if (p != node_idx) {
      /* Shared with another parent: detach a private copy for node_idx. */
      return duplicate (node_idx, child_idx);
    }
  }
  return child_idx;
}
/*
* Assign unique space numbers to each connected subgraph of 24 bit and/or 32 bit offset(s).
@ -1039,6 +1231,7 @@ struct graph_t
bool positions_invalid;
bool successful;
hb_vector_t<unsigned> num_roots_for_space_;
hb_vector_t<char*> buffers;
};
}

View file

@ -33,8 +33,7 @@ gsubgpos_graph_context_t::gsubgpos_graph_context_t (hb_tag_t table_tag_,
: table_tag (table_tag_),
graph (graph_),
lookup_list_index (0),
lookups (),
buffers ()
lookups ()
{
if (table_tag_ != HB_OT_TAG_GPOS
&& table_tag_ != HB_OT_TAG_GSUB)
@ -53,7 +52,7 @@ unsigned gsubgpos_graph_context_t::create_node (unsigned size)
if (!buffer)
return -1;
buffers.push (buffer);
add_buffer (buffer);
return graph.new_node (buffer, buffer + size);
}

View file

@ -40,22 +40,16 @@ struct gsubgpos_graph_context_t
graph_t& graph;
unsigned lookup_list_index;
hb_hashmap_t<unsigned, graph::Lookup*> lookups;
hb_vector_t<char*> buffers;
HB_INTERNAL gsubgpos_graph_context_t (hb_tag_t table_tag_,
graph_t& graph_);
~gsubgpos_graph_context_t ()
{
for (char* b : buffers)
hb_free (b);
}
HB_INTERNAL unsigned create_node (unsigned size);
void add_buffer (char* buffer)
{
buffers.push (buffer);
graph.add_buffer (buffer);
}
private:

View file

@ -29,6 +29,7 @@
#include "../OT/Layout/GSUB/ExtensionSubst.hh"
#include "gsubgpos-context.hh"
#include "pairpos-graph.hh"
#include "markbasepos-graph.hh"
#ifndef GRAPH_GSUBGPOS_GRAPH_HH
#define GRAPH_GSUBGPOS_GRAPH_HH
@ -121,10 +122,12 @@ struct Lookup : public OT::Lookup
if (c.table_tag != HB_OT_TAG_GPOS)
return true;
if (!is_ext && type != OT::Layout::GPOS_impl::PosLookupSubTable::Type::Pair)
if (!is_ext &&
type != OT::Layout::GPOS_impl::PosLookupSubTable::Type::Pair &&
type != OT::Layout::GPOS_impl::PosLookupSubTable::Type::MarkBase)
return true;
hb_vector_t<unsigned> all_new_subtables;
hb_vector_t<hb_pair_t<unsigned, hb_vector_t<unsigned>>> all_new_subtables;
for (unsigned i = 0; i < subTable.len; i++)
{
unsigned subtable_index = c.graph.index_for_offset (this_index, &subTable[i]);
@ -133,39 +136,66 @@ struct Lookup : public OT::Lookup
ExtensionFormat1<OT::Layout::GSUB_impl::ExtensionSubst>* extension =
(ExtensionFormat1<OT::Layout::GSUB_impl::ExtensionSubst>*)
c.graph.object (ext_subtable_index).head;
if (!extension->sanitize (c.graph.vertices_[ext_subtable_index]))
if (!extension || !extension->sanitize (c.graph.vertices_[ext_subtable_index]))
continue;
subtable_index = extension->get_subtable_index (c.graph, ext_subtable_index);
type = extension->get_lookup_type ();
if (type != OT::Layout::GPOS_impl::PosLookupSubTable::Type::Pair)
if (type != OT::Layout::GPOS_impl::PosLookupSubTable::Type::Pair
&& type != OT::Layout::GPOS_impl::PosLookupSubTable::Type::MarkBase)
continue;
}
PairPos* pairPos = (PairPos*) c.graph.object (subtable_index).head;
if (!pairPos->sanitize (c.graph.vertices_[subtable_index])) continue;
hb_vector_t<unsigned> new_sub_tables = pairPos->split_subtables (c, subtable_index);
hb_vector_t<unsigned> new_sub_tables;
switch (type)
{
case 2:
new_sub_tables = split_subtable<PairPos> (c, subtable_index); break;
case 4:
new_sub_tables = split_subtable<MarkBasePos> (c, subtable_index); break;
default:
break;
}
if (new_sub_tables.in_error ()) return false;
+ new_sub_tables.iter() | hb_sink (all_new_subtables);
if (!new_sub_tables) continue;
hb_pair_t<unsigned, hb_vector_t<unsigned>>* entry = all_new_subtables.push ();
entry->first = i;
entry->second = std::move (new_sub_tables);
}
if (all_new_subtables)
if (all_new_subtables) {
add_sub_tables (c, this_index, type, all_new_subtables);
}
return true;
}
/* Fetches the subtable at objidx as type T, bounds-checks it against its
 * vertex extents, and delegates to the type's own splitter.  An empty
 * vector means either nothing needed splitting or the table was unusable. */
template<typename T>
hb_vector_t<unsigned> split_subtable (gsubgpos_graph_context_t& c,
                                      unsigned objidx)
{
  auto* table = (T*) c.graph.object (objidx).head;
  if (table && table->sanitize (c.graph.vertices_[objidx]))
    return table->split_subtables (c, objidx);
  return hb_vector_t<unsigned> ();
}
void add_sub_tables (gsubgpos_graph_context_t& c,
unsigned this_index,
unsigned type,
hb_vector_t<unsigned>& subtable_indices)
hb_vector_t<hb_pair_t<unsigned, hb_vector_t<unsigned>>>& subtable_ids)
{
bool is_ext = is_extension (c.table_tag);
auto& v = c.graph.vertices_[this_index];
fix_existing_subtable_links (c, this_index, subtable_ids);
unsigned new_subtable_count = 0;
for (const auto& p : subtable_ids)
new_subtable_count += p.second.length;
size_t new_size = v.table_size ()
+ subtable_indices.length * OT::Offset16::static_size;
+ new_subtable_count * OT::Offset16::static_size;
char* buffer = (char*) hb_calloc (1, new_size);
c.add_buffer (buffer);
memcpy (buffer, v.obj.head, v.table_size());
@ -175,30 +205,61 @@ struct Lookup : public OT::Lookup
Lookup* new_lookup = (Lookup*) buffer;
new_lookup->subTable.len = subTable.len + subtable_indices.length;
unsigned offset_index = subTable.len;
for (unsigned subtable_id : subtable_indices)
unsigned shift = 0;
new_lookup->subTable.len = subTable.len + new_subtable_count;
for (const auto& p : subtable_ids)
{
if (is_ext)
{
unsigned ext_id = create_extension_subtable (c, subtable_id, type);
c.graph.vertices_[subtable_id].parents.push (ext_id);
subtable_id = ext_id;
}
unsigned offset_index = p.first + shift + 1;
shift += p.second.length;
auto* link = v.obj.real_links.push ();
link->width = 2;
link->objidx = subtable_id;
link->position = (char*) &new_lookup->subTable[offset_index++] -
(char*) new_lookup;
c.graph.vertices_[subtable_id].parents.push (this_index);
for (unsigned subtable_id : p.second)
{
if (is_ext)
{
unsigned ext_id = create_extension_subtable (c, subtable_id, type);
c.graph.vertices_[subtable_id].parents.push (ext_id);
subtable_id = ext_id;
}
auto* link = v.obj.real_links.push ();
link->width = 2;
link->objidx = subtable_id;
link->position = (char*) &new_lookup->subTable[offset_index++] -
(char*) new_lookup;
c.graph.vertices_[subtable_id].parents.push (this_index);
}
}
// Repacker sort order depends on link order, which we've messed up so resort it.
v.obj.real_links.qsort ();
// The head location of the lookup has changed, invalidating the lookups map entry
// in the context. Update the map.
c.lookups.set (this_index, new_lookup);
}
/* After new subtables have been spliced into the lookup's subtable offset
 * array, the serialized positions of the lookup's pre-existing links are
 * stale.  For each insertion point (subtable index `p.first`, with
 * `p.second.length` new entries), shift every link whose position lies past
 * that point by the number of inserted Offset16 bytes.  `shift` tracks how
 * many entries earlier insertions have already pushed the indices forward. */
void fix_existing_subtable_links (gsubgpos_graph_context_t& c,
unsigned this_index,
hb_vector_t<hb_pair_t<unsigned, hb_vector_t<unsigned>>>& subtable_ids)
{
auto& v = c.graph.vertices_[this_index];
Lookup* lookup = (Lookup*) v.obj.head;
unsigned shift = 0;
for (const auto& p : subtable_ids)
{
unsigned insert_index = p.first + shift;
unsigned pos_offset = p.second.length * OT::Offset16::static_size;
unsigned insert_offset = (char*) &lookup->subTable[insert_index] - (char*) lookup;
shift += p.second.length;
/* Move every link that serializes after the insertion point. */
for (auto& l : v.obj.all_links_writer ())
{
if (l.position > insert_offset) l.position += pos_offset;
}
}
}
unsigned create_extension_subtable (gsubgpos_graph_context_t& c,
unsigned subtable_index,
unsigned type)
@ -281,7 +342,7 @@ struct GSTAR : public OT::GSUBGPOS
const auto& r = graph.root ();
GSTAR* gstar = (GSTAR*) r.obj.head;
if (!gstar->sanitize (r))
if (!gstar || !gstar->sanitize (r))
return nullptr;
return gstar;
@ -327,17 +388,16 @@ struct GSTAR : public OT::GSUBGPOS
hb_hashmap_t<unsigned, Lookup*>& lookups /* OUT */)
{
unsigned lookup_list_idx = get_lookup_list_index (graph);
const LookupList<Types>* lookupList =
(const LookupList<Types>*) graph.object (lookup_list_idx).head;
if (!lookupList->sanitize (graph.vertices_[lookup_list_idx]))
if (!lookupList || !lookupList->sanitize (graph.vertices_[lookup_list_idx]))
return;
for (unsigned i = 0; i < lookupList->len; i++)
{
unsigned lookup_idx = graph.index_for_offset (lookup_list_idx, &(lookupList->arrayZ[i]));
Lookup* lookup = (Lookup*) graph.object (lookup_idx).head;
if (!lookup->sanitize (graph.vertices_[lookup_idx])) continue;
if (!lookup || !lookup->sanitize (graph.vertices_[lookup_idx])) continue;
lookups.set (lookup_idx, lookup);
}
}

View file

@ -0,0 +1,507 @@
/*
* Copyright © 2022 Google, Inc.
*
* This is part of HarfBuzz, a text shaping library.
*
* Permission is hereby granted, without written agreement and without
* license or royalty fees, to use, copy, modify, and distribute this
* software and its documentation for any purpose, provided that the
* above copyright notice and the following two paragraphs appear in
* all copies of this software.
*
* IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
* DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
* ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
* IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
* DAMAGE.
*
* THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
* ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
* PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
*
* Google Author(s): Garret Rieger
*/
#ifndef GRAPH_MARKBASEPOS_GRAPH_HH
#define GRAPH_MARKBASEPOS_GRAPH_HH
#include "split-helpers.hh"
#include "coverage-graph.hh"
#include "../OT/Layout/GPOS/MarkBasePos.hh"
#include "../OT/Layout/GPOS/PosLookupSubTable.hh"
namespace graph {
/* Repacker-side view of a GPOS AnchorMatrix (rows x classes grid of Offset16
 * anchor offsets), with helpers to shrink it in place or clone a column
 * slice when a MarkBasePos subtable is split by class range. */
struct AnchorMatrix : public OT::Layout::GPOS_impl::AnchorMatrix
{
/* The matrix must hold rows * class_count Offset16 cells past its header. */
bool sanitize (graph_t::vertex_t& vertex, unsigned class_count) const
{
int64_t vertex_len = vertex.obj.tail - vertex.obj.head;
if (vertex_len < AnchorMatrix::min_size) return false;
return vertex_len >= AnchorMatrix::min_size +
OT::Offset16::static_size * class_count * this->rows;
}
/* Narrows the matrix in place from old_class_count to new_class_count
 * columns, remapping every link's position into the tighter indexing.
 * Links to columns >= new_class_count must already have been removed;
 * finding one is an error and returns false. */
bool shrink (gsubgpos_graph_context_t& c,
unsigned this_index,
unsigned old_class_count,
unsigned new_class_count)
{
if (new_class_count >= old_class_count) return false;
auto& o = c.graph.vertices_[this_index].obj;
unsigned base_count = rows;
o.tail = o.head +
AnchorMatrix::min_size +
OT::Offset16::static_size * base_count * new_class_count;
// Reposition links into the new indexing scheme.
for (auto& link : o.real_links.writer ())
{
/* Offsets start right after the rows field, 2 bytes per cell — hence
 * (position - 2) / 2 recovers the flat cell index. */
unsigned index = (link.position - 2) / 2;
unsigned base = index / old_class_count;
unsigned klass = index % old_class_count;
if (klass >= new_class_count)
// should have already been removed
return false;
unsigned new_index = base * new_class_count + klass;
link.position = (char*) &(this->matrixZ[new_index]) - (char*) this;
}
return true;
}
/* Creates a new AnchorMatrix node containing only the class columns
 * [start, end), moving the corresponding anchor links (and their parent
 * bookkeeping) from this node to the clone.  Returns the new node's id,
 * or (unsigned) -1 on allocation failure. */
unsigned clone (gsubgpos_graph_context_t& c,
unsigned this_index,
unsigned start,
unsigned end,
unsigned class_count)
{
unsigned base_count = rows;
unsigned new_class_count = end - start;
unsigned size = AnchorMatrix::min_size +
OT::Offset16::static_size * new_class_count * rows;
unsigned prime_id = c.create_node (size);
if (prime_id == (unsigned) -1) return -1;
AnchorMatrix* prime = (AnchorMatrix*) c.graph.object (prime_id).head;
prime->rows = base_count;
auto& o = c.graph.vertices_[this_index].obj;
int num_links = o.real_links.length;
for (int i = 0; i < num_links; i++)
{
const auto& link = o.real_links[i];
unsigned old_index = (link.position - 2) / OT::Offset16::static_size;
unsigned klass = old_index % class_count;
if (klass < start || klass >= end) continue;
/* Remap (base, klass) into the clone's [0, new_class_count) columns. */
unsigned base = old_index / class_count;
unsigned new_klass = klass - start;
unsigned new_index = base * new_class_count + new_klass;
unsigned child_idx = link.objidx;
c.graph.add_link (&(prime->matrixZ[new_index]),
prime_id,
child_idx);
auto& child = c.graph.vertices_[child_idx];
child.remove_parent (this_index);
/* Remove the moved link; adjust the loop to account for the shrink. */
o.real_links.remove (i);
num_links--;
i--;
}
return prime_id;
}
};
/* Repacker-side view of a GPOS MarkArray (array of (class, anchor-offset)
 * MarkRecords), with helpers to shrink it to a class prefix or clone a
 * subset of marks when a MarkBasePos subtable is split. */
struct MarkArray : public OT::Layout::GPOS_impl::MarkArray
{
/* The vertex must hold at least the header and the full record array. */
bool sanitize (graph_t::vertex_t& vertex) const
{
int64_t vertex_len = vertex.obj.tail - vertex.obj.head;
unsigned min_size = MarkArray::min_size;
if (vertex_len < min_size) return false;
return vertex_len >= get_size ();
}
/* Compacts the array in place, keeping only records whose class is below
 * new_class_count.  All existing links are dropped and then re-added for
 * the surviving records via mark_array_links (old link position -> objidx).
 * The vertex tail is trimmed to the new record count. */
bool shrink (gsubgpos_graph_context_t& c,
const hb_hashmap_t<unsigned, unsigned>& mark_array_links,
unsigned this_index,
unsigned new_class_count)
{
auto& o = c.graph.vertices_[this_index].obj;
/* Detach all current anchor links; surviving ones are re-created below. */
for (const auto& link : o.real_links)
c.graph.vertices_[link.objidx].remove_parent (this_index);
o.real_links.reset ();
unsigned new_index = 0;
for (const auto& record : this->iter ())
{
unsigned klass = record.klass;
if (klass >= new_class_count) continue;
/* Compact the record down to slot new_index. */
(*this)[new_index].klass = klass;
unsigned position = (char*) &record.markAnchor - (char*) this;
unsigned* objidx;
if (!mark_array_links.has (position, &objidx))
{
/* No anchor was linked at this record (null offset); keep as-is. */
new_index++;
continue;
}
c.graph.add_link (&(*this)[new_index].markAnchor, this_index, *objidx);
new_index++;
}
this->len = new_index;
o.tail = o.head + MarkArray::min_size +
OT::Layout::GPOS_impl::MarkRecord::static_size * new_index;
return true;
}
/* Creates a new MarkArray node containing only the records in `marks`
 * (indices into this array), rebasing each class by -start_class and
 * moving the linked anchors over via move_child.  Returns the new node's
 * id, or (unsigned) -1 on allocation failure. */
unsigned clone (gsubgpos_graph_context_t& c,
unsigned this_index,
const hb_hashmap_t<unsigned, unsigned>& pos_to_index,
hb_set_t& marks,
unsigned start_class)
{
unsigned size = MarkArray::min_size +
OT::Layout::GPOS_impl::MarkRecord::static_size *
marks.get_population ();
unsigned prime_id = c.create_node (size);
if (prime_id == (unsigned) -1) return -1;
MarkArray* prime = (MarkArray*) c.graph.object (prime_id).head;
prime->len = marks.get_population ();
unsigned i = 0;
for (hb_codepoint_t mark : marks)
{
(*prime)[i].klass = (*this)[mark].klass - start_class;
unsigned offset_pos = (char*) &((*this)[mark].markAnchor) - (char*) this;
unsigned* anchor_index;
/* Only records with a linked anchor have their child moved; records
 * with a null anchor offset carry no link. */
if (pos_to_index.has (offset_pos, &anchor_index))
c.graph.move_child (this_index,
&((*this)[mark].markAnchor),
prime_id,
&((*prime)[i].markAnchor));
i++;
}
return prime_id;
}
};
// Graph-aware wrapper for a format 1 MarkBasePos subtable.  Implements
// splitting an over-sized subtable along mark-class boundaries so every
// resulting subtable stays addressable with 16 bit offsets.
struct MarkBasePosFormat1 : public OT::Layout::GPOS_impl::MarkBasePosFormat1_2<SmallTypes>
{
  // Checks that the vertex is large enough to hold the fixed-size header.
  bool sanitize (graph_t::vertex_t& vertex) const
  {
    int64_t vertex_len = vertex.obj.tail - vertex.obj.head;
    return vertex_len >= MarkBasePosFormat1::static_size;
  }

  // Computes the mark-class boundaries at which this subtable must be split
  // so each piece stays under the 64kb limit, then performs the split.
  // Returns the object ids of the newly created subtables (empty on no
  // split needed, error-marked vector on failure).
  hb_vector_t<unsigned> split_subtables (gsubgpos_graph_context_t& c, unsigned this_index)
  {
    hb_set_t visited;

    const unsigned base_coverage_id = c.graph.index_for_offset (this_index, &baseCoverage);
    // Fixed overhead present in every subtable produced by the split: this
    // table's header, the (minimal) mark and base arrays, and the base
    // coverage table which is duplicated into each new subtable.
    // Was PairPosFormat1_3<SmallTypes>::min_size — a copy/paste from the
    // PairPos splitter; this table's own header size is what's duplicated.
    const unsigned base_size =
        OT::Layout::GPOS_impl::MarkBasePosFormat1_2<SmallTypes>::min_size +
        MarkArray::min_size +
        AnchorMatrix::min_size +
        c.graph.vertices_[base_coverage_id].table_size ();

    hb_vector_t<class_info_t> class_to_info = get_class_info (c, this_index);

    unsigned class_count = classCount;
    auto base_array = c.graph.as_table<AnchorMatrix> (this_index,
                                                      &baseArray,
                                                      class_count);
    if (!base_array) return hb_vector_t<unsigned> ();
    unsigned base_count = base_array.table->rows;

    unsigned partial_coverage_size = 4; // Coverage format 1 header.
    unsigned accumulated = base_size;
    hb_vector_t<unsigned> split_points;

    for (unsigned klass = 0; klass < class_count; klass++)
    {
      class_info_t& info = class_to_info[klass];
      partial_coverage_size += OT::HBUINT16::static_size * info.marks.get_population ();

      // Size contributed by this class: its mark records, one anchor offset
      // per base row, and the subgraphs of all attached anchor tables.
      unsigned accumulated_delta =
          OT::Layout::GPOS_impl::MarkRecord::static_size * info.marks.get_population () +
          OT::Offset16::static_size * base_count;
      for (unsigned objidx : info.child_indices)
        accumulated_delta += c.graph.find_subgraph_size (objidx, visited);

      accumulated += accumulated_delta;
      unsigned total = accumulated + partial_coverage_size;

      if (total >= (1 << 16))
      {
        split_points.push (klass);
        // Restart the size counters with this class's contribution included.
        accumulated = base_size + accumulated_delta;
        partial_coverage_size = 4 + OT::HBUINT16::static_size * info.marks.get_population ();
        visited.clear (); // node sharing isn't allowed between splits.
      }
    }

    const unsigned mark_array_id = c.graph.index_for_offset (this_index, &markArray);
    split_context_t split_context {
      c,
      this,
      this_index,
      std::move (class_to_info),
      c.graph.vertices_[mark_array_id].position_to_index_map (),
    };

    return actuate_subtable_split<split_context_t> (split_context, split_points);
  }

 private:

  // Per-mark-class bookkeeping: the set of mark indices in the class and
  // the object ids of all anchor tables referenced by that class.
  struct class_info_t {
    hb_set_t marks;
    hb_vector_t<unsigned> child_indices;
  };

  // Adapter passed to actuate_subtable_split<> ().
  struct split_context_t {
    gsubgpos_graph_context_t& c;
    MarkBasePosFormat1* thiz;
    unsigned this_index;
    hb_vector_t<class_info_t> class_to_info;
    hb_hashmap_t<unsigned, unsigned> mark_array_links;

    // Union of the mark sets of all classes in [start, end).
    hb_set_t marks_for (unsigned start, unsigned end)
    {
      hb_set_t marks;
      for (unsigned klass = start; klass < end; klass++)
      {
        + class_to_info[klass].marks.iter ()
        | hb_sink (marks)
        ;
      }
      return marks;
    }

    unsigned original_count ()
    {
      return thiz->classCount;
    }

    unsigned clone_range (unsigned start, unsigned end)
    {
      return thiz->clone_range (*this, this->this_index, start, end);
    }

    bool shrink (unsigned count)
    {
      return thiz->shrink (*this, this->this_index, count);
    }
  };

  // Builds the per-class info: which marks belong to each class (from the
  // MarkArray) and which anchor children each class owns (from the real
  // links of the MarkArray and AnchorMatrix vertices).
  hb_vector_t<class_info_t> get_class_info (gsubgpos_graph_context_t& c,
                                            unsigned this_index)
  {
    hb_vector_t<class_info_t> class_to_info;

    unsigned class_count = classCount;
    class_to_info.resize (class_count);

    auto mark_array = c.graph.as_table<MarkArray> (this_index, &markArray);
    if (!mark_array) return hb_vector_t<class_info_t> ();
    unsigned mark_count = mark_array.table->len;
    for (unsigned mark = 0; mark < mark_count; mark++)
    {
      unsigned klass = (*mark_array.table)[mark].get_class ();
      class_to_info[klass].marks.add (mark);
    }

    for (const auto& link : mark_array.vertex->obj.real_links)
    {
      // link.position - 2 skips the MarkArray 'len' field; each MarkRecord
      // holds one anchor offset.
      unsigned mark = (link.position - 2) /
                      OT::Layout::GPOS_impl::MarkRecord::static_size;
      unsigned klass = (*mark_array.table)[mark].get_class ();
      class_to_info[klass].child_indices.push (link.objidx);
    }

    unsigned base_array_id =
        c.graph.index_for_offset (this_index, &baseArray);
    auto& base_array_v = c.graph.vertices_[base_array_id];

    for (const auto& link : base_array_v.obj.real_links)
    {
      // AnchorMatrix rows are laid out base-major; the class is the column.
      unsigned index = (link.position - 2) / OT::Offset16::static_size;
      unsigned klass = index % class_count;
      class_to_info[klass].child_indices.push (link.objidx);
    }

    return class_to_info;
  }

  // Truncates this subtable (in place) to only cover classes [0, count):
  // rewrites the mark coverage, and shrinks the base AnchorMatrix and the
  // MarkArray accordingly.
  bool shrink (split_context_t& sc,
               unsigned this_index,
               unsigned count)
  {
    DEBUG_MSG (SUBSET_REPACK, nullptr,
               "  Shrinking MarkBasePosFormat1 (%u) to [0, %u).",
               this_index,
               count);
    unsigned old_count = classCount;
    if (count >= old_count)
      return true;

    classCount = count;

    auto mark_coverage = sc.c.graph.as_table<Coverage> (this_index,
                                                        &markCoverage);
    if (!mark_coverage) return false;
    hb_set_t marks = sc.marks_for (0, count);
    // Keep only the coverage entries for marks in the surviving classes.
    auto new_coverage =
        + hb_zip (hb_range (), mark_coverage.table->iter ())
        | hb_filter (marks, hb_first)
        | hb_map_retains_sorting (hb_second)
        ;
    if (!Coverage::make_coverage (sc.c, + new_coverage,
                                  mark_coverage.index,
                                  4 + 2 * marks.get_population ()))
      return false;

    auto base_array = sc.c.graph.as_table<AnchorMatrix> (this_index,
                                                         &baseArray,
                                                         old_count);
    if (!base_array || !base_array.table->shrink (sc.c,
                                                  base_array.index,
                                                  old_count,
                                                  count))
      return false;

    auto mark_array = sc.c.graph.as_table<MarkArray> (this_index,
                                                      &markArray);
    if (!mark_array || !mark_array.table->shrink (sc.c,
                                                  sc.mark_array_links,
                                                  mark_array.index,
                                                  count))
      return false;

    return true;
  }

  // Create a new MarkBasePos that has all of the data for classes from
  // [start, end).  Returns the new node's object id, or (unsigned) -1 on
  // failure.
  unsigned clone_range (split_context_t& sc,
                        unsigned this_index,
                        unsigned start, unsigned end) const
  {
    DEBUG_MSG (SUBSET_REPACK, nullptr,
               "  Cloning MarkBasePosFormat1 (%u) range [%u, %u).", this_index, start, end);

    graph_t& graph = sc.c.graph;
    unsigned prime_size = OT::Layout::GPOS_impl::MarkBasePosFormat1_2<SmallTypes>::static_size;

    unsigned prime_id = sc.c.create_node (prime_size);
    if (prime_id == (unsigned) -1) return -1;

    MarkBasePosFormat1* prime = (MarkBasePosFormat1*) graph.object (prime_id).head;
    prime->format = this->format;
    unsigned new_class_count = end - start;
    prime->classCount = new_class_count;

    // The base coverage is shared structure; duplicate it into the clone.
    unsigned base_coverage_id =
        graph.index_for_offset (sc.this_index, &baseCoverage);
    graph.add_link (&(prime->baseCoverage), prime_id, base_coverage_id);
    graph.duplicate (prime_id, base_coverage_id);

    auto mark_coverage = sc.c.graph.as_table<Coverage> (this_index,
                                                        &markCoverage);
    // This function returns unsigned; use the -1 error convention
    // consistently (returning false here would look like object id 0).
    if (!mark_coverage) return -1;
    hb_set_t marks = sc.marks_for (start, end);
    auto new_coverage =
        + hb_zip (hb_range (), mark_coverage.table->iter ())
        | hb_filter (marks, hb_first)
        | hb_map_retains_sorting (hb_second)
        ;
    if (!Coverage::add_coverage (sc.c,
                                 prime_id,
                                 2, // offset of markCoverage in the header.
                                 + new_coverage,
                                 marks.get_population () * 2 + 4))
      return -1;

    auto mark_array =
        graph.as_table<MarkArray> (sc.this_index, &markArray);
    if (!mark_array) return -1;
    unsigned new_mark_array =
        mark_array.table->clone (sc.c,
                                 mark_array.index,
                                 sc.mark_array_links,
                                 marks,
                                 start);
    graph.add_link (&(prime->markArray), prime_id, new_mark_array);

    unsigned class_count = classCount;
    auto base_array =
        graph.as_table<AnchorMatrix> (sc.this_index, &baseArray, class_count);
    if (!base_array) return -1;
    unsigned new_base_array =
        base_array.table->clone (sc.c,
                                 base_array.index,
                                 start, end, this->classCount);
    graph.add_link (&(prime->baseArray), prime_id, new_base_array);

    return prime_id;
  }
};
// Graph-aware wrapper for the top-level MarkBasePos subtable.  Dispatches
// to the format-specific implementation based on the serialized format.
struct MarkBasePos : public OT::Layout::GPOS_impl::MarkBasePos
{
  // Splits this subtable if it risks overflowing 16 bit offsets.  Returns
  // the object ids of the newly created subtables (empty if no split is
  // needed or the format is unsupported).
  hb_vector_t<unsigned> split_subtables (gsubgpos_graph_context_t& c,
                                         unsigned this_index)
  {
    switch (u.format) {
    case 1:
      return ((MarkBasePosFormat1*)(&u.format1))->split_subtables (c, this_index);
#ifndef HB_NO_BORING_EXPANSION
    case 2: HB_FALLTHROUGH;
      // Don't split 24bit MarkBasePos's: they already use wide offsets.
#endif
    default:
      return hb_vector_t<unsigned> ();
    }
  }

  // Validates that the vertex is big enough for the declared format and
  // defers to the format-specific sanitizer.
  bool sanitize (graph_t::vertex_t& vertex) const
  {
    int64_t vertex_len = vertex.obj.tail - vertex.obj.head;
    if (vertex_len < u.format.get_size ()) return false;

    switch (u.format) {
    case 1:
      return ((MarkBasePosFormat1*)(&u.format1))->sanitize (vertex);
#ifndef HB_NO_BORING_EXPANSION
    case 2: HB_FALLTHROUGH;
#endif
    default:
      // Only format 1 is validated here; anything else fails.
      return false;
    }
  }
};
}
#endif // GRAPH_MARKBASEPOS_GRAPH_HH

View file

@ -27,7 +27,9 @@
#ifndef GRAPH_PAIRPOS_GRAPH_HH
#define GRAPH_PAIRPOS_GRAPH_HH
#include "split-helpers.hh"
#include "coverage-graph.hh"
#include "classdef-graph.hh"
#include "../OT/Layout/GPOS/PairPos.hh"
#include "../OT/Layout/GPOS/PosLookupSubTable.hh"
@ -51,68 +53,62 @@ struct PairPosFormat1 : public OT::Layout::GPOS_impl::PairPosFormat1_3<SmallType
const unsigned coverage_id = c.graph.index_for_offset (this_index, &coverage);
const unsigned coverage_size = c.graph.vertices_[coverage_id].table_size ();
const unsigned base_size = OT::Layout::GPOS_impl::PairPosFormat1_3<SmallTypes>::min_size
+ coverage_size;
const unsigned base_size = OT::Layout::GPOS_impl::PairPosFormat1_3<SmallTypes>::min_size;
unsigned partial_coverage_size = 4;
unsigned accumulated = base_size;
hb_vector_t<unsigned> split_points;
for (unsigned i = 0; i < pairSet.len; i++)
{
unsigned pair_set_index = pair_set_graph_index (c, this_index, i);
accumulated += c.graph.find_subgraph_size (pair_set_index, visited);
accumulated += SmallTypes::size; // for PairSet offset.
unsigned accumulated_delta =
c.graph.find_subgraph_size (pair_set_index, visited) +
SmallTypes::size; // for PairSet offset.
partial_coverage_size += OT::HBUINT16::static_size;
// TODO(garretrieger): don't count the size of the largest pairset against the limit, since
// it will be packed last in the order and does not contribute to
// the 64kb limit.
accumulated += accumulated_delta;
unsigned total = accumulated + hb_min (partial_coverage_size, coverage_size);
if (accumulated > (1 << 16))
if (total >= (1 << 16))
{
split_points.push (i);
accumulated = base_size;
visited.clear (); // Pretend node sharing isn't allowed between splits.
accumulated = base_size + accumulated_delta;
partial_coverage_size = 6;
visited.clear (); // node sharing isn't allowed between splits.
}
}
return do_split (c, this_index, split_points);
split_context_t split_context {
c,
this,
this_index,
};
return actuate_subtable_split<split_context_t> (split_context, split_points);
}
private:
// Split this PairPos into two or more PairPos's. split_points defines
// the indices (first index to include in the new table) to split at.
// Returns the object id's of the newly created PairPos subtables.
hb_vector_t<unsigned> do_split (gsubgpos_graph_context_t& c,
unsigned this_index,
const hb_vector_t<unsigned> split_points)
{
hb_vector_t<unsigned> new_objects;
if (!split_points)
return new_objects;
struct split_context_t {
gsubgpos_graph_context_t& c;
PairPosFormat1* thiz;
unsigned this_index;
for (unsigned i = 0; i < split_points.length; i++)
unsigned original_count ()
{
unsigned start = split_points[i];
unsigned end = (i < split_points.length - 1) ? split_points[i + 1] : pairSet.len;
unsigned id = clone_range (c, this_index, start, end);
if (id == (unsigned) -1)
{
new_objects.reset ();
new_objects.allocated = -1; // mark error
return new_objects;
}
new_objects.push (id);
return thiz->pairSet.len;
}
if (!shrink (c, this_index, split_points[0]))
unsigned clone_range (unsigned start, unsigned end)
{
new_objects.reset ();
new_objects.allocated = -1; // mark error
return thiz->clone_range (this->c, this->this_index, start, end);
}
return new_objects;
}
bool shrink (unsigned count)
{
return thiz->shrink (this->c, this->this_index, count);
}
};
bool shrink (gsubgpos_graph_context_t& c,
unsigned this_index,
@ -129,11 +125,12 @@ struct PairPosFormat1 : public OT::Layout::GPOS_impl::PairPosFormat1_3<SmallType
pairSet.len = count;
c.graph.vertices_[this_index].obj.tail -= (old_count - count) * SmallTypes::size;
unsigned coverage_id = c.graph.index_for_offset (this_index, &coverage);
unsigned coverage_id = c.graph.mutable_index_for_offset (this_index, &coverage);
unsigned coverage_size = c.graph.vertices_[coverage_id].table_size ();
auto& coverage_v = c.graph.vertices_[coverage_id];
Coverage* coverage_table = (Coverage*) coverage_v.obj.head;
if (!coverage_table->sanitize (coverage_v))
if (!coverage_table || !coverage_table->sanitize (coverage_v))
return false;
auto new_coverage =
@ -144,7 +141,7 @@ struct PairPosFormat1 : public OT::Layout::GPOS_impl::PairPosFormat1_3<SmallType
| hb_map_retains_sorting (hb_first)
;
return make_coverage (c, new_coverage, coverage_id, coverage_size);
return Coverage::make_coverage (c, new_coverage, coverage_id, coverage_size);
}
// Create a new PairPos including PairSet's from start (inclusive) to end (exclusive).
@ -178,60 +175,17 @@ struct PairPosFormat1 : public OT::Layout::GPOS_impl::PairPosFormat1_3<SmallType
}
unsigned coverage_id = c.graph.index_for_offset (this_index, &coverage);
unsigned coverage_size = c.graph.vertices_[coverage_id].table_size ();
auto& coverage_v = c.graph.vertices_[coverage_id];
Coverage* coverage_table = (Coverage*) coverage_v.obj.head;
if (!coverage_table->sanitize (coverage_v))
return false;
auto new_coverage =
+ hb_zip (coverage_table->iter (), hb_range ())
| hb_filter ([&] (hb_pair_t<unsigned, unsigned> p) {
return p.second >= start && p.second < end;
})
| hb_map_retains_sorting (hb_first)
;
unsigned coverage_prime_id = c.graph.new_node (nullptr, nullptr);
auto& coverage_prime_vertex = c.graph.vertices_[coverage_prime_id];
if (!make_coverage (c, new_coverage, coverage_prime_id, coverage_size))
if (!Coverage::clone_coverage (c,
coverage_id,
pair_pos_prime_id,
2,
start, end))
return -1;
auto* coverage_link = c.graph.vertices_[pair_pos_prime_id].obj.real_links.push ();
coverage_link->width = SmallTypes::size;
coverage_link->objidx = coverage_prime_id;
coverage_link->position = 2;
coverage_prime_vertex.parents.push (pair_pos_prime_id);
return pair_pos_prime_id;
}
template<typename It>
bool make_coverage (gsubgpos_graph_context_t& c,
It glyphs,
unsigned dest_obj,
unsigned max_size) const
{
char* buffer = (char*) hb_calloc (1, max_size);
hb_serialize_context_t serializer (buffer, max_size);
Coverage_serialize (&serializer, glyphs);
serializer.end_serialize ();
if (serializer.in_error ())
{
hb_free (buffer);
return false;
}
hb_bytes_t coverage_copy = serializer.copy_bytes ();
c.add_buffer ((char *) coverage_copy.arrayZ); // Give ownership to the context, it will cleanup the buffer.
auto& obj = c.graph.vertices_[dest_obj].obj;
obj.head = (char *) coverage_copy.arrayZ;
obj.tail = obj.head + coverage_copy.length;
hb_free (buffer);
return true;
}
unsigned pair_set_graph_index (gsubgpos_graph_context_t& c, unsigned this_index, unsigned i) const
{
@ -243,14 +197,409 @@ struct PairPosFormat2 : public OT::Layout::GPOS_impl::PairPosFormat2_4<SmallType
{
bool sanitize (graph_t::vertex_t& vertex) const
{
// TODO(garretrieger): implement me!
return true;
size_t vertex_len = vertex.table_size ();
unsigned min_size = OT::Layout::GPOS_impl::PairPosFormat2_4<SmallTypes>::min_size;
if (vertex_len < min_size) return false;
const unsigned class1_count = class1Count;
return vertex_len >=
min_size + class1_count * get_class1_record_size ();
}
hb_vector_t<unsigned> split_subtables (gsubgpos_graph_context_t& c, unsigned this_index)
{
// TODO(garretrieger): implement me!
return hb_vector_t<unsigned> ();
const unsigned base_size = OT::Layout::GPOS_impl::PairPosFormat2_4<SmallTypes>::min_size;
const unsigned class_def_2_size = size_of (c, this_index, &classDef2);
const Coverage* coverage = get_coverage (c, this_index);
const ClassDef* class_def_1 = get_class_def_1 (c, this_index);
auto gid_and_class =
+ coverage->iter ()
| hb_map_retains_sorting ([&] (hb_codepoint_t gid) {
return hb_pair_t<hb_codepoint_t, hb_codepoint_t> (gid, class_def_1->get_class (gid));
})
;
class_def_size_estimator_t estimator (gid_and_class);
const unsigned class1_count = class1Count;
const unsigned class2_count = class2Count;
const unsigned class1_record_size = get_class1_record_size ();
const unsigned value_1_len = valueFormat1.get_len ();
const unsigned value_2_len = valueFormat2.get_len ();
const unsigned total_value_len = value_1_len + value_2_len;
unsigned accumulated = base_size;
unsigned coverage_size = 4;
unsigned class_def_1_size = 4;
unsigned max_coverage_size = coverage_size;
unsigned max_class_def_1_size = class_def_1_size;
hb_vector_t<unsigned> split_points;
hb_hashmap_t<unsigned, unsigned> device_tables = get_all_device_tables (c, this_index);
hb_vector_t<unsigned> format1_device_table_indices = valueFormat1.get_device_table_indices ();
hb_vector_t<unsigned> format2_device_table_indices = valueFormat2.get_device_table_indices ();
bool has_device_tables = bool(format1_device_table_indices) || bool(format2_device_table_indices);
hb_set_t visited;
for (unsigned i = 0; i < class1_count; i++)
{
unsigned accumulated_delta = class1_record_size;
coverage_size += estimator.incremental_coverage_size (i);
class_def_1_size += estimator.incremental_class_def_size (i);
max_coverage_size = hb_max (max_coverage_size, coverage_size);
max_class_def_1_size = hb_max (max_class_def_1_size, class_def_1_size);
if (has_device_tables) {
for (unsigned j = 0; j < class2_count; j++)
{
unsigned value1_index = total_value_len * (class2_count * i + j);
unsigned value2_index = value1_index + value_1_len;
accumulated_delta += size_of_value_record_children (c,
device_tables,
format1_device_table_indices,
value1_index,
visited);
accumulated_delta += size_of_value_record_children (c,
device_tables,
format2_device_table_indices,
value2_index,
visited);
}
}
accumulated += accumulated_delta;
unsigned total = accumulated
+ coverage_size + class_def_1_size + class_def_2_size
// The largest object will pack last and can exceed the size limit.
- hb_max (hb_max (coverage_size, class_def_1_size), class_def_2_size);
if (total >= (1 << 16))
{
split_points.push (i);
// split does not include i, so add the size for i when we reset the size counters.
accumulated = base_size + accumulated_delta;
coverage_size = 4 + estimator.incremental_coverage_size (i);
class_def_1_size = 4 + estimator.incremental_class_def_size (i);
visited.clear (); // node sharing isn't allowed between splits.
}
}
split_context_t split_context {
c,
this,
this_index,
class1_record_size,
total_value_len,
value_1_len,
value_2_len,
max_coverage_size,
max_class_def_1_size,
device_tables,
format1_device_table_indices,
format2_device_table_indices
};
return actuate_subtable_split<split_context_t> (split_context, split_points);
}
private:
struct split_context_t
{
gsubgpos_graph_context_t& c;
PairPosFormat2* thiz;
unsigned this_index;
unsigned class1_record_size;
unsigned value_record_len;
unsigned value1_record_len;
unsigned value2_record_len;
unsigned max_coverage_size;
unsigned max_class_def_size;
const hb_hashmap_t<unsigned, unsigned>& device_tables;
const hb_vector_t<unsigned>& format1_device_table_indices;
const hb_vector_t<unsigned>& format2_device_table_indices;
unsigned original_count ()
{
return thiz->class1Count;
}
unsigned clone_range (unsigned start, unsigned end)
{
return thiz->clone_range (*this, start, end);
}
bool shrink (unsigned count)
{
return thiz->shrink (*this, count);
}
};
size_t get_class1_record_size () const
{
const size_t class2_count = class2Count;
return
class2_count * (valueFormat1.get_size () + valueFormat2.get_size ());
}
unsigned clone_range (split_context_t& split_context,
unsigned start, unsigned end) const
{
DEBUG_MSG (SUBSET_REPACK, nullptr,
" Cloning PairPosFormat2 (%u) range [%u, %u).", split_context.this_index, start, end);
graph_t& graph = split_context.c.graph;
unsigned num_records = end - start;
unsigned prime_size = OT::Layout::GPOS_impl::PairPosFormat2_4<SmallTypes>::min_size
+ num_records * split_context.class1_record_size;
unsigned pair_pos_prime_id = split_context.c.create_node (prime_size);
if (pair_pos_prime_id == (unsigned) -1) return -1;
PairPosFormat2* pair_pos_prime =
(PairPosFormat2*) graph.object (pair_pos_prime_id).head;
pair_pos_prime->format = this->format;
pair_pos_prime->valueFormat1 = this->valueFormat1;
pair_pos_prime->valueFormat2 = this->valueFormat2;
pair_pos_prime->class1Count = num_records;
pair_pos_prime->class2Count = this->class2Count;
clone_class1_records (split_context,
pair_pos_prime_id,
start,
end);
unsigned coverage_id =
graph.index_for_offset (split_context.this_index, &coverage);
unsigned class_def_1_id =
graph.index_for_offset (split_context.this_index, &classDef1);
auto& coverage_v = graph.vertices_[coverage_id];
auto& class_def_1_v = graph.vertices_[class_def_1_id];
Coverage* coverage_table = (Coverage*) coverage_v.obj.head;
ClassDef* class_def_1_table = (ClassDef*) class_def_1_v.obj.head;
if (!coverage_table
|| !coverage_table->sanitize (coverage_v)
|| !class_def_1_table
|| !class_def_1_table->sanitize (class_def_1_v))
return -1;
auto klass_map =
+ coverage_table->iter ()
| hb_map_retains_sorting ([&] (hb_codepoint_t gid) {
return hb_pair_t<hb_codepoint_t, hb_codepoint_t> (gid, class_def_1_table->get_class (gid));
})
| hb_filter ([&] (hb_codepoint_t klass) {
return klass >= start && klass < end;
}, hb_second)
| hb_map_retains_sorting ([&] (hb_pair_t<hb_codepoint_t, hb_codepoint_t> gid_and_class) {
// Classes must be from 0...N so subtract start
return hb_pair_t<hb_codepoint_t, hb_codepoint_t> (gid_and_class.first, gid_and_class.second - start);
})
;
if (!Coverage::add_coverage (split_context.c,
pair_pos_prime_id,
2,
+ klass_map | hb_map_retains_sorting (hb_first),
split_context.max_coverage_size))
return -1;
// classDef1
if (!ClassDef::add_class_def (split_context.c,
pair_pos_prime_id,
8,
+ klass_map,
split_context.max_class_def_size))
return -1;
// classDef2
unsigned class_def_2_id =
graph.index_for_offset (split_context.this_index, &classDef2);
auto* class_def_link = graph.vertices_[pair_pos_prime_id].obj.real_links.push ();
class_def_link->width = SmallTypes::size;
class_def_link->objidx = class_def_2_id;
class_def_link->position = 10;
graph.vertices_[class_def_2_id].parents.push (pair_pos_prime_id);
graph.duplicate (pair_pos_prime_id, class_def_2_id);
return pair_pos_prime_id;
}
void clone_class1_records (split_context_t& split_context,
unsigned pair_pos_prime_id,
unsigned start, unsigned end) const
{
PairPosFormat2* pair_pos_prime =
(PairPosFormat2*) split_context.c.graph.object (pair_pos_prime_id).head;
char* start_addr = ((char*)&values[0]) + start * split_context.class1_record_size;
unsigned num_records = end - start;
memcpy (&pair_pos_prime->values[0],
start_addr,
num_records * split_context.class1_record_size);
if (!split_context.format1_device_table_indices
&& !split_context.format2_device_table_indices)
// No device tables to move over.
return;
unsigned class2_count = class2Count;
for (unsigned i = start; i < end; i++)
{
for (unsigned j = 0; j < class2_count; j++)
{
unsigned value1_index = split_context.value_record_len * (class2_count * i + j);
unsigned value2_index = value1_index + split_context.value1_record_len;
unsigned new_value1_index = split_context.value_record_len * (class2_count * (i - start) + j);
unsigned new_value2_index = new_value1_index + split_context.value1_record_len;
transfer_device_tables (split_context,
pair_pos_prime_id,
split_context.format1_device_table_indices,
value1_index,
new_value1_index);
transfer_device_tables (split_context,
pair_pos_prime_id,
split_context.format2_device_table_indices,
value2_index,
new_value2_index);
}
}
}
void transfer_device_tables (split_context_t& split_context,
unsigned pair_pos_prime_id,
const hb_vector_t<unsigned>& device_table_indices,
unsigned old_value_record_index,
unsigned new_value_record_index) const
{
PairPosFormat2* pair_pos_prime =
(PairPosFormat2*) split_context.c.graph.object (pair_pos_prime_id).head;
for (unsigned i : device_table_indices)
{
OT::Offset16* record = (OT::Offset16*) &values[old_value_record_index + i];
unsigned record_position = ((char*) record) - ((char*) this);
if (!split_context.device_tables.has (record_position)) continue;
split_context.c.graph.move_child (
split_context.this_index,
record,
pair_pos_prime_id,
(OT::Offset16*) &pair_pos_prime->values[new_value_record_index + i]);
}
}
bool shrink (split_context_t& split_context,
unsigned count)
{
DEBUG_MSG (SUBSET_REPACK, nullptr,
" Shrinking PairPosFormat2 (%u) to [0, %u).",
split_context.this_index,
count);
unsigned old_count = class1Count;
if (count >= old_count)
return true;
graph_t& graph = split_context.c.graph;
class1Count = count;
graph.vertices_[split_context.this_index].obj.tail -=
(old_count - count) * split_context.class1_record_size;
unsigned coverage_id =
graph.mutable_index_for_offset (split_context.this_index, &coverage);
unsigned class_def_1_id =
graph.mutable_index_for_offset (split_context.this_index, &classDef1);
auto& coverage_v = graph.vertices_[coverage_id];
auto& class_def_1_v = graph.vertices_[class_def_1_id];
Coverage* coverage_table = (Coverage*) coverage_v.obj.head;
ClassDef* class_def_1_table = (ClassDef*) class_def_1_v.obj.head;
if (!coverage_table
|| !coverage_table->sanitize (coverage_v)
|| !class_def_1_table
|| !class_def_1_table->sanitize (class_def_1_v))
return false;
auto klass_map =
+ coverage_table->iter ()
| hb_map_retains_sorting ([&] (hb_codepoint_t gid) {
return hb_pair_t<hb_codepoint_t, hb_codepoint_t> (gid, class_def_1_table->get_class (gid));
})
| hb_filter ([&] (hb_codepoint_t klass) {
return klass < count;
}, hb_second)
;
if (!Coverage::make_coverage (split_context.c,
+ klass_map | hb_map_retains_sorting (hb_first),
coverage_id,
coverage_v.table_size ()))
return false;
return ClassDef::make_class_def (split_context.c,
+ klass_map,
class_def_1_id,
class_def_1_v.table_size ());
}
hb_hashmap_t<unsigned, unsigned>
get_all_device_tables (gsubgpos_graph_context_t& c,
unsigned this_index) const
{
const auto& v = c.graph.vertices_[this_index];
return v.position_to_index_map ();
}
const Coverage* get_coverage (gsubgpos_graph_context_t& c,
unsigned this_index) const
{
unsigned coverage_id = c.graph.index_for_offset (this_index, &coverage);
auto& coverage_v = c.graph.vertices_[coverage_id];
Coverage* coverage_table = (Coverage*) coverage_v.obj.head;
if (!coverage_table || !coverage_table->sanitize (coverage_v))
return &Null(Coverage);
return coverage_table;
}
const ClassDef* get_class_def_1 (gsubgpos_graph_context_t& c,
unsigned this_index) const
{
unsigned class_def_1_id = c.graph.index_for_offset (this_index, &classDef1);
auto& class_def_1_v = c.graph.vertices_[class_def_1_id];
ClassDef* class_def_1_table = (ClassDef*) class_def_1_v.obj.head;
if (!class_def_1_table || !class_def_1_table->sanitize (class_def_1_v))
return &Null(ClassDef);
return class_def_1_table;
}
unsigned size_of_value_record_children (gsubgpos_graph_context_t& c,
const hb_hashmap_t<unsigned, unsigned>& device_tables,
const hb_vector_t<unsigned> device_table_indices,
unsigned value_record_index,
hb_set_t& visited)
{
unsigned size = 0;
for (unsigned i : device_table_indices)
{
OT::Layout::GPOS_impl::Value* record = &values[value_record_index + i];
unsigned record_position = ((char*) record) - ((char*) this);
unsigned* obj_idx;
if (!device_tables.has (record_position, &obj_idx)) continue;
size += c.graph.find_subgraph_size (*obj_idx, visited);
}
return size;
}
unsigned size_of (gsubgpos_graph_context_t& c,
unsigned this_index,
const void* offset) const
{
const unsigned id = c.graph.index_for_offset (this_index, offset);
return c.graph.vertices_[id].table_size ();
}
};

View file

@ -0,0 +1,69 @@
/*
* Copyright © 2022 Google, Inc.
*
* This is part of HarfBuzz, a text shaping library.
*
* Permission is hereby granted, without written agreement and without
* license or royalty fees, to use, copy, modify, and distribute this
* software and its documentation for any purpose, provided that the
* above copyright notice and the following two paragraphs appear in
* all copies of this software.
*
* IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
* DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
* ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
* IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
* DAMAGE.
*
* THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
* ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
* PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
*
* Google Author(s): Garret Rieger
*/
#ifndef GRAPH_SPLIT_HELPERS_HH
#define GRAPH_SPLIT_HELPERS_HH
namespace graph {
template<typename Context>
HB_INTERNAL
// Performs a subtable split that has already been planned.  For each entry
// in split_points a clone covering [split_points[i], next point) is created
// via the context, then the original table is shrunk down to the range
// before the first split point.  Returns the new object ids; on any failure
// the returned vector is reset with 'allocated' set to -1 as an error mark.
hb_vector_t<unsigned> actuate_subtable_split (Context& split_context,
                                              const hb_vector_t<unsigned>& split_points)
{
  hb_vector_t<unsigned> result;
  if (!split_points)
    return result;

  const unsigned num_points = split_points.length;
  for (unsigned i = 0; i < num_points; i++)
  {
    unsigned start = split_points[i];
    bool is_last = (i + 1 >= num_points);
    unsigned end = is_last
                   ? split_context.original_count ()
                   : split_points[i + 1];

    unsigned new_id = split_context.clone_range (start, end);
    if (new_id == (unsigned) -1)
    {
      result.reset ();
      result.allocated = -1; // mark error
      return result;
    }
    result.push (new_id);
  }

  if (!split_context.shrink (split_points[0]))
  {
    result.reset ();
    result.allocated = -1; // mark error
  }

  return result;
}
}
#endif // GRAPH_SPLIT_HELPERS_HH

View file

@ -0,0 +1,119 @@
/*
* Copyright © 2022 Google, Inc.
*
* This is part of HarfBuzz, a text shaping library.
*
* Permission is hereby granted, without written agreement and without
* license or royalty fees, to use, copy, modify, and distribute this
* software and its documentation for any purpose, provided that the
* above copyright notice and the following two paragraphs appear in
* all copies of this software.
*
* IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
* DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
* ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
* IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
* DAMAGE.
*
* THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
* ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
* PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
*
* Google Author(s): Garret Rieger
*/
#include "gsubgpos-context.hh"
#include "classdef-graph.hh"
typedef hb_pair_t<hb_codepoint_t, hb_codepoint_t> gid_and_class_t;
typedef hb_vector_t<gid_and_class_t> gid_and_class_list_t;
// Returns true iff the estimator's incremental coverage and class-def size
// estimates for 'klass' over 'list' match the expected values; prints a
// FAIL diagnostic and returns false otherwise.
static bool incremental_size_is (const gid_and_class_list_t& list, unsigned klass,
                                 unsigned cov_expected, unsigned class_def_expected)
{
  graph::class_def_size_estimator_t estimator (list.iter ());

  unsigned cov_actual = estimator.incremental_coverage_size (klass);
  if (cov_actual != cov_expected)
  {
    printf ("FAIL: coverage expected size %u but was %u\n", cov_expected, cov_actual);
    return false;
  }

  unsigned class_def_actual = estimator.incremental_class_def_size (klass);
  if (class_def_actual != class_def_expected)
  {
    printf ("FAIL: class def expected size %u but was %u\n", class_def_expected, class_def_actual);
    return false;
  }

  return true;
}
/* Exercises incremental_size_is over a range of (gid, class) shapes:
 * empty input, class zero only, consecutive gid runs, a gap in the
 * gid sequence, and multiple disjoint ranges within one class. */
static void test_class_and_coverage_size_estimates ()
{
  /* No glyphs: both sizes are zero for any class. */
  gid_and_class_list_t empty = {
  };
  assert (incremental_size_is (empty, 0, 0, 0));
  assert (incremental_size_is (empty, 1, 0, 0));

  /* A single glyph in class 0 costs coverage but no class-def bytes. */
  gid_and_class_list_t class_zero = {
    {5, 0},
  };
  assert (incremental_size_is (class_zero, 0, 2, 0));

  /* Consecutive gids grouped per class. */
  gid_and_class_list_t consecutive = {
    {4, 0}, {5, 0},
    {6, 1}, {7, 1},
    {8, 2}, {9, 2}, {10, 2}, {11, 2},
  };
  assert (incremental_size_is (consecutive, 0, 4, 0));
  assert (incremental_size_is (consecutive, 1, 4, 4));
  assert (incremental_size_is (consecutive, 2, 8, 6));

  /* Same as above but class 2 has a gap (8 is missing). */
  gid_and_class_list_t non_consecutive = {
    {4, 0}, {5, 0},
    {6, 1}, {7, 1},
    {9, 2}, {10, 2}, {11, 2}, {12, 2},
  };
  assert (incremental_size_is (non_consecutive, 0, 4, 0));
  assert (incremental_size_is (non_consecutive, 1, 4, 6));
  assert (incremental_size_is (non_consecutive, 2, 8, 6));

  /* One class split across several disjoint gid ranges. */
  gid_and_class_list_t multiple_ranges = {
    {4, 0}, {5, 0},
    {6, 1}, {7, 1},
    {9, 1},
    {11, 1}, {12, 1}, {13, 1},
  };
  assert (incremental_size_is (multiple_ranges, 0, 4, 0));
  assert (incremental_size_is (multiple_ranges, 1, 2 * 6, 3 * 6));
}
/* Entry point: runs the classdef/coverage size-estimate tests;
 * any failure aborts via assert. */
int
main (int argc, char **argv)
{
  test_class_and_coverage_size_estimates ();
  return 0;
}

View file

@ -342,7 +342,7 @@ struct hb_sorted_array_t :
unsigned int i;
return bfind (x, &i) ? &this->arrayZ[i] : not_found;
}
template <typename T, typename ...Ts>
template <typename T>
const Type *bsearch (const T &x, const Type *not_found = nullptr) const
{
unsigned int i;
@ -389,7 +389,7 @@ struct hb_sorted_array_t :
this->length,
sizeof (Type),
_hb_cmp_method<T, Type, Ts...>,
ds...);
std::forward<Ts> (ds)...);
}
};
template <typename T> inline hb_sorted_array_t<T>

View file

@ -159,10 +159,13 @@ struct hb_atomic_int_t
hb_atomic_int_t () = default;
constexpr hb_atomic_int_t (int v) : v (v) {}
hb_atomic_int_t& operator = (int v_) { set_relaxed (v_); return *this; }
operator int () const { return get_relaxed (); }
void set_relaxed (int v_) { hb_atomic_int_impl_set_relaxed (&v, v_); }
void set (int v_) { hb_atomic_int_impl_set (&v, v_); }
void set_release (int v_) { hb_atomic_int_impl_set (&v, v_); }
int get_relaxed () const { return hb_atomic_int_impl_get_relaxed (&v); }
int get () const { return hb_atomic_int_impl_get (&v); }
int get_acquire () const { return hb_atomic_int_impl_get (&v); }
int inc () { return hb_atomic_int_impl_add (&v, 1); }
int dec () { return hb_atomic_int_impl_add (&v, -1); }
@ -180,11 +183,11 @@ struct hb_atomic_ptr_t
void init (T* v_ = nullptr) { set_relaxed (v_); }
void set_relaxed (T* v_) { hb_atomic_ptr_impl_set_relaxed (&v, v_); }
T *get_relaxed () const { return (T *) hb_atomic_ptr_impl_get_relaxed (&v); }
T *get () const { return (T *) hb_atomic_ptr_impl_get ((void **) &v); }
T *get_acquire () const { return (T *) hb_atomic_ptr_impl_get ((void **) &v); }
bool cmpexch (const T *old, T *new_) const { return hb_atomic_ptr_impl_cmpexch ((void **) &v, (void *) old, (void *) new_); }
T * operator -> () const { return get (); }
template <typename C> operator C * () const { return get (); }
T * operator -> () const { return get_acquire (); }
template <typename C> operator C * () const { return get_acquire (); }
T *v = nullptr;
};

View file

@ -56,7 +56,7 @@ struct hb_bit_set_t
{
successful = true;
population = 0;
last_page_lookup.set_relaxed (0);
last_page_lookup = 0;
page_map.init ();
pages.init ();
}
@ -614,7 +614,7 @@ struct hb_bit_set_t
const auto* page_map_array = page_map.arrayZ;
unsigned int major = get_major (*codepoint);
unsigned int i = last_page_lookup.get_relaxed ();
unsigned int i = last_page_lookup;
if (unlikely (i >= page_map.length || page_map_array[i].major != major))
{
@ -632,7 +632,7 @@ struct hb_bit_set_t
if (pages_array[current.index].next (codepoint))
{
*codepoint += current.major * page_t::PAGE_BITS;
last_page_lookup.set_relaxed (i);
last_page_lookup = i;
return true;
}
i++;
@ -645,11 +645,11 @@ struct hb_bit_set_t
if (m != INVALID)
{
*codepoint = current.major * page_t::PAGE_BITS + m;
last_page_lookup.set_relaxed (i);
last_page_lookup = i;
return true;
}
}
last_page_lookup.set_relaxed (0);
last_page_lookup = 0;
*codepoint = INVALID;
return false;
}
@ -732,7 +732,7 @@ struct hb_bit_set_t
{
const auto* page_map_array = page_map.arrayZ;
unsigned int major = get_major (codepoint);
unsigned int i = last_page_lookup.get_relaxed ();
unsigned int i = last_page_lookup;
if (unlikely (i >= page_map.length || page_map_array[i].major != major))
{
page_map.bfind (major, &i, HB_NOT_FOUND_STORE_CLOSEST);
@ -773,7 +773,7 @@ struct hb_bit_set_t
{
const auto* page_map_array = page_map.arrayZ;
unsigned int major = get_major (codepoint);
unsigned int i = last_page_lookup.get_relaxed ();
unsigned int i = last_page_lookup;
if (unlikely (i >= page_map.length || page_map_array[i].major != major))
{
page_map.bfind(major, &i, HB_NOT_FOUND_STORE_CLOSEST);
@ -900,7 +900,7 @@ struct hb_bit_set_t
/* The extra page_map length is necessary; can't just rely on vector here,
* since the next check would be tricked because a null page also has
 * major==0, which we can't distinguish from an actual major==0 page... */
unsigned i = last_page_lookup.get_relaxed ();
unsigned i = last_page_lookup;
if (likely (i < page_map.length))
{
auto &cached_page = page_map.arrayZ[i];
@ -924,7 +924,7 @@ struct hb_bit_set_t
page_map[i] = map;
}
last_page_lookup.set_relaxed (i);
last_page_lookup = i;
return &pages[page_map[i].index];
}
const page_t *page_for (hb_codepoint_t g) const
@ -934,7 +934,7 @@ struct hb_bit_set_t
/* The extra page_map length is necessary; can't just rely on vector here,
* since the next check would be tricked because a null page also has
 * major==0, which we can't distinguish from an actual major==0 page... */
unsigned i = last_page_lookup.get_relaxed ();
unsigned i = last_page_lookup;
if (likely (i < page_map.length))
{
auto &cached_page = page_map.arrayZ[i];
@ -946,7 +946,7 @@ struct hb_bit_set_t
if (!page_map.bfind (key, &i))
return nullptr;
last_page_lookup.set_relaxed (i);
last_page_lookup = i;
return &pages[page_map[i].index];
}
page_t &page_at (unsigned int i) { return pages[page_map[i].index]; }

View file

@ -32,7 +32,7 @@
#include "hb.hh"
#line 33 "hb-buffer-deserialize-json.hh"
#line 36 "hb-buffer-deserialize-json.hh"
static const unsigned char _deserialize_json_trans_keys[] = {
0u, 0u, 9u, 123u, 9u, 34u, 97u, 117u, 120u, 121u, 34u, 34u, 9u, 58u, 9u, 57u,
48u, 57u, 9u, 125u, 9u, 125u, 9u, 125u, 34u, 34u, 9u, 58u, 9u, 57u, 48u, 57u,
@ -557,12 +557,12 @@ _hb_buffer_deserialize_json (hb_buffer_t *buffer,
hb_glyph_info_t info = {0};
hb_glyph_position_t pos = {0};
#line 554 "hb-buffer-deserialize-json.hh"
#line 561 "hb-buffer-deserialize-json.hh"
{
cs = deserialize_json_start;
}
#line 557 "hb-buffer-deserialize-json.hh"
#line 566 "hb-buffer-deserialize-json.hh"
{
int _slen;
int _trans;
@ -774,7 +774,7 @@ _resume:
*end_ptr = p;
}
break;
#line 735 "hb-buffer-deserialize-json.hh"
#line 778 "hb-buffer-deserialize-json.hh"
}
_again:

View file

@ -32,7 +32,7 @@
#include "hb.hh"
#line 33 "hb-buffer-deserialize-text.hh"
#line 36 "hb-buffer-deserialize-text.hh"
static const unsigned char _deserialize_text_trans_keys[] = {
0u, 0u, 9u, 91u, 85u, 85u, 43u, 43u, 48u, 102u, 9u, 85u, 48u, 57u, 45u, 57u,
48u, 57u, 48u, 57u, 48u, 57u, 45u, 57u, 48u, 57u, 44u, 44u, 45u, 57u, 48u, 57u,
@ -509,12 +509,12 @@ _hb_buffer_deserialize_text (hb_buffer_t *buffer,
hb_glyph_info_t info = {0};
hb_glyph_position_t pos = {0};
#line 506 "hb-buffer-deserialize-text.hh"
#line 513 "hb-buffer-deserialize-text.hh"
{
cs = deserialize_text_start;
}
#line 509 "hb-buffer-deserialize-text.hh"
#line 518 "hb-buffer-deserialize-text.hh"
{
int _slen;
int _trans;
@ -894,7 +894,7 @@ _resume:
*end_ptr = p;
}
break;
#line 826 "hb-buffer-deserialize-text.hh"
#line 898 "hb-buffer-deserialize-text.hh"
}
_again:
@ -1043,7 +1043,7 @@ _again:
*end_ptr = p;
}
break;
#line 953 "hb-buffer-deserialize-text.hh"
#line 1047 "hb-buffer-deserialize-text.hh"
}
}

View file

@ -145,7 +145,7 @@ typedef struct hb_glyph_info_t {
* @HB_GLYPH_FLAG_SAFE_TO_INSERT_TATWEEL: In scripts that use elongation (Arabic,
Mongolian, Syriac, etc.), this flag signifies
that it is safe to insert a U+0640 TATWEEL
character *before* this cluster for elongation.
character before this cluster for elongation.
This flag does not determine the
script-specific elongation places, but only
when it is safe to do the elongation without

View file

@ -32,12 +32,21 @@
/* Implements a lockfree cache for int->int functions. */
template <unsigned int key_bits=16, unsigned int value_bits=8 + 32 - key_bits, unsigned int cache_bits=8>
template <unsigned int key_bits=16,
unsigned int value_bits=8 + 32 - key_bits,
unsigned int cache_bits=8,
bool thread_safe=true>
struct hb_cache_t
{
using item_t = typename std::conditional<thread_safe,
hb_atomic_int_t,
typename std::conditional<key_bits + value_bits - cache_bits <= 16,
short,
int>::type
>::type;
static_assert ((key_bits >= cache_bits), "");
static_assert ((key_bits + value_bits - cache_bits <= 8 * sizeof (hb_atomic_int_t)), "");
static_assert (sizeof (hb_atomic_int_t) == sizeof (unsigned int), "");
static_assert ((key_bits + value_bits - cache_bits <= 8 * sizeof (item_t)), "");
void init () { clear (); }
void fini () {}
@ -45,14 +54,14 @@ struct hb_cache_t
void clear ()
{
for (unsigned i = 0; i < ARRAY_LENGTH (values); i++)
values[i].set_relaxed (-1);
values[i] = -1;
}
bool get (unsigned int key, unsigned int *value) const
{
unsigned int k = key & ((1u<<cache_bits)-1);
unsigned int v = values[k].get_relaxed ();
if ((key_bits + value_bits - cache_bits == 8 * sizeof (hb_atomic_int_t) && v == (unsigned int) -1) ||
unsigned int v = values[k];
if ((key_bits + value_bits - cache_bits == 8 * sizeof (item_t) && v == (unsigned int) -1) ||
(v >> value_bits) != (key >> cache_bits))
return false;
*value = v & ((1u<<value_bits)-1);
@ -65,16 +74,13 @@ struct hb_cache_t
return false; /* Overflows */
unsigned int k = key & ((1u<<cache_bits)-1);
unsigned int v = ((key>>cache_bits)<<value_bits) | value;
values[k].set_relaxed (v);
values[k] = v;
return true;
}
private:
hb_atomic_int_t values[1u<<cache_bits];
item_t values[1u<<cache_bits];
};
typedef hb_cache_t<21, 16, 8> hb_cmap_cache_t;
typedef hb_cache_t<16, 24, 8> hb_advance_cache_t;
#endif /* HB_CACHE_HH */

View file

@ -99,7 +99,7 @@ _hb_options_init ()
}
/* This is idempotent and threadsafe. */
_hb_options.set_relaxed (u.i);
_hb_options = u.i;
}

View file

@ -495,6 +495,8 @@ hb_language_matches (hb_language_t language,
* @HB_SCRIPT_TOTO: `Toto`, Since: 3.0.0
* @HB_SCRIPT_VITHKUQI: `Vith`, Since: 3.0.0
* @HB_SCRIPT_MATH: `Zmth`, Since: 3.4.0
* @HB_SCRIPT_KAWI: `Kawi`, Since: 5.2.0
* @HB_SCRIPT_NAG_MUNDARI: `Nagm`, Since: 5.2.0
* @HB_SCRIPT_INVALID: No script set
*
* Data type for scripts. Each #hb_script_t's value is an #hb_tag_t corresponding
@ -716,6 +718,12 @@ typedef enum
*/
HB_SCRIPT_MATH = HB_TAG ('Z','m','t','h'),
/*
* Since 5.2.0
*/
HB_SCRIPT_KAWI = HB_TAG ('K','a','w','i'), /*15.0*/
HB_SCRIPT_NAG_MUNDARI = HB_TAG ('N','a','g','m'), /*15.0*/
/* No script set. */
HB_SCRIPT_INVALID = HB_TAG_NONE,

View file

@ -35,6 +35,9 @@
#include "config.h"
#endif
#ifndef HB_BORING_EXPANSION
#define HB_NO_BORING_EXPANSION
#endif
#ifdef HB_TINY
#define HB_LEAN

View file

@ -67,12 +67,12 @@ hb_options ()
#endif
/* Make a local copy, so we can access bitfield threadsafely. */
hb_options_union_t u;
u.i = _hb_options.get_relaxed ();
u.i = _hb_options;
if (unlikely (!u.i))
{
_hb_options_init ();
u.i = _hb_options.get_relaxed ();
u.i = _hb_options;
}
return u.opts;

View file

@ -132,7 +132,7 @@ hb_face_create_for_tables (hb_reference_table_func_t reference_table_func,
face->user_data = user_data;
face->destroy = destroy;
face->num_glyphs.set_relaxed (-1);
face->num_glyphs = -1;
face->data.init0 (face);
face->table.init0 (face);
@ -479,7 +479,7 @@ hb_face_set_upem (hb_face_t *face,
if (hb_object_is_immutable (face))
return;
face->upem.set_relaxed (upem);
face->upem = upem;
}
/**
@ -514,7 +514,7 @@ hb_face_set_glyph_count (hb_face_t *face,
if (hb_object_is_immutable (face))
return;
face->num_glyphs.set_relaxed (glyph_count);
face->num_glyphs = glyph_count;
}
/**

View file

@ -83,7 +83,7 @@ struct hb_face_t
unsigned int get_upem () const
{
unsigned int ret = upem.get_relaxed ();
unsigned int ret = upem;
if (unlikely (!ret))
{
return load_upem ();
@ -93,7 +93,7 @@ struct hb_face_t
unsigned int get_num_glyphs () const
{
unsigned int ret = num_glyphs.get_relaxed ();
unsigned int ret = num_glyphs;
if (unlikely (ret == UINT_MAX))
return load_num_glyphs ();
return ret;

View file

@ -80,16 +80,19 @@
*/
using hb_ft_advance_cache_t = hb_cache_t<16, 24, 8, false>;
struct hb_ft_font_t
{
int load_flags;
bool symbol; /* Whether selected cmap is symbol cmap. */
bool unref; /* Whether to destroy ft_face when done. */
bool transform; /* Whether to apply FT_Face's transform. */
mutable hb_mutex_t lock;
FT_Face ft_face;
mutable unsigned cached_serial;
mutable hb_advance_cache_t advance_cache;
mutable hb_ft_advance_cache_t advance_cache;
};
static hb_ft_font_t *
@ -136,6 +139,8 @@ _hb_ft_font_destroy (void *data)
/* hb_font changed, update FT_Face. */
static void _hb_ft_hb_font_changed (hb_font_t *font, FT_Face ft_face)
{
hb_ft_font_t *ft_font = (hb_ft_font_t *) font->user_data;
float x_mult = 1.f, y_mult = 1.f;
if (font->x_scale < 0) x_mult = -x_mult;
@ -173,6 +178,7 @@ static void _hb_ft_hb_font_changed (hb_font_t *font, FT_Face ft_face)
FT_Matrix matrix = { (int) roundf (x_mult * (1<<16)), 0,
0, (int) roundf (y_mult * (1<<16))};
FT_Set_Transform (ft_face, &matrix, nullptr);
ft_font->transform = true;
}
#if defined(HAVE_FT_GET_VAR_BLEND_COORDINATES) && !defined(HB_NO_VAR)
@ -428,13 +434,19 @@ hb_ft_get_glyph_h_advances (hb_font_t* font, void* font_data,
hb_lock_t lock (ft_font->lock);
FT_Face ft_face = ft_font->ft_face;
int load_flags = ft_font->load_flags;
float x_mult;
#ifdef HAVE_FT_GET_TRANSFORM
FT_Matrix matrix;
FT_Get_Transform (ft_face, &matrix, nullptr);
float mult = matrix.xx / 65536.f;
#else
float mult = font->x_scale < 0 ? -1 : +1;
if (ft_font->transform)
{
FT_Matrix matrix;
FT_Get_Transform (ft_face, &matrix, nullptr);
x_mult = sqrtf ((float)matrix.xx * matrix.xx + (float)matrix.xy * matrix.xy) / 65536.f;
}
else
#endif
{
x_mult = font->x_scale < 0 ? -1 : +1;
}
for (unsigned int i = 0; i < count; i++)
{
@ -450,7 +462,7 @@ hb_ft_get_glyph_h_advances (hb_font_t* font, void* font_data,
ft_font->advance_cache.set (glyph, v);
}
*first_advance = (int) (v * mult + (1<<9)) >> 10;
*first_advance = (int) (v * x_mult + (1<<9)) >> 10;
first_glyph = &StructAtOffsetUnaligned<hb_codepoint_t> (first_glyph, glyph_stride);
first_advance = &StructAtOffsetUnaligned<hb_position_t> (first_advance, advance_stride);
}
@ -466,13 +478,19 @@ hb_ft_get_glyph_v_advance (hb_font_t *font,
const hb_ft_font_t *ft_font = (const hb_ft_font_t *) font_data;
hb_lock_t lock (ft_font->lock);
FT_Fixed v;
float y_mult;
#ifdef HAVE_FT_GET_TRANSFORM
FT_Matrix matrix;
FT_Get_Transform (ft_font->ft_face, &matrix, nullptr);
float y_mult = matrix.yy / 65536.f;
#else
float y_mult = font->y_scale < 0 ? -1 : +1;
if (ft_font->transform)
{
FT_Matrix matrix;
FT_Get_Transform (ft_font->ft_face, &matrix, nullptr);
y_mult = sqrtf ((float)matrix.yx * matrix.yx + (float)matrix.yy * matrix.yy) / 65536.f;
}
else
#endif
{
y_mult = font->y_scale < 0 ? -1 : +1;
}
if (unlikely (FT_Get_Advance (ft_font->ft_face, glyph, ft_font->load_flags | FT_LOAD_VERTICAL_LAYOUT, &v)))
return 0;
@ -498,15 +516,21 @@ hb_ft_get_glyph_v_origin (hb_font_t *font,
const hb_ft_font_t *ft_font = (const hb_ft_font_t *) font_data;
hb_lock_t lock (ft_font->lock);
FT_Face ft_face = ft_font->ft_face;
float x_mult, y_mult;
#ifdef HAVE_FT_GET_TRANSFORM
FT_Matrix matrix;
FT_Get_Transform (ft_face, &matrix, nullptr);
float x_mult = matrix.xx / 65536.f;
float y_mult = matrix.yy / 65536.f;
#else
float x_mult = font->x_scale < 0 ? -1 : +1;
float y_mult = font->y_scale < 0 ? -1 : +1;
if (ft_font->transform)
{
FT_Matrix matrix;
FT_Get_Transform (ft_face, &matrix, nullptr);
x_mult = sqrtf ((float)matrix.xx * matrix.xx + (float)matrix.xy * matrix.xy) / 65536.f;
y_mult = sqrtf ((float)matrix.yx * matrix.yx + (float)matrix.yy * matrix.yy) / 65536.f;
}
else
#endif
{
x_mult = font->x_scale < 0 ? -1 : +1;
y_mult = font->y_scale < 0 ? -1 : +1;
}
if (unlikely (FT_Load_Glyph (ft_face, glyph, ft_font->load_flags)))
return false;
@ -553,15 +577,21 @@ hb_ft_get_glyph_extents (hb_font_t *font,
const hb_ft_font_t *ft_font = (const hb_ft_font_t *) font_data;
hb_lock_t lock (ft_font->lock);
FT_Face ft_face = ft_font->ft_face;
float x_mult, y_mult;
#ifdef HAVE_FT_GET_TRANSFORM
FT_Matrix matrix;
FT_Get_Transform (ft_face, &matrix, nullptr);
float x_mult = matrix.xx / 65536.f;
float y_mult = matrix.yy / 65536.f;
#else
float x_mult = font->x_scale < 0 ? -1 : +1;
float y_mult = font->y_scale < 0 ? -1 : +1;
if (ft_font->transform)
{
FT_Matrix matrix;
FT_Get_Transform (ft_face, &matrix, nullptr);
x_mult = sqrtf ((float)matrix.xx * matrix.xx + (float)matrix.xy * matrix.xy) / 65536.f;
y_mult = sqrtf ((float)matrix.yx * matrix.yx + (float)matrix.yy * matrix.yy) / 65536.f;
}
else
#endif
{
x_mult = font->x_scale < 0 ? -1 : +1;
y_mult = font->y_scale < 0 ? -1 : +1;
}
if (unlikely (FT_Load_Glyph (ft_face, glyph, ft_font->load_flags)))
return false;
@ -663,13 +693,19 @@ hb_ft_get_font_h_extents (hb_font_t *font HB_UNUSED,
const hb_ft_font_t *ft_font = (const hb_ft_font_t *) font_data;
hb_lock_t lock (ft_font->lock);
FT_Face ft_face = ft_font->ft_face;
float y_mult;
#ifdef HAVE_FT_GET_TRANSFORM
FT_Matrix matrix;
FT_Get_Transform (ft_face, &matrix, nullptr);
float y_mult = matrix.yy / 65536.f;
#else
float y_mult = font->y_scale < 0 ? -1 : +1;
if (ft_font->transform)
{
FT_Matrix matrix;
FT_Get_Transform (ft_face, &matrix, nullptr);
y_mult = sqrtf ((float)matrix.yx * matrix.yx + (float)matrix.yy * matrix.yy) / 65536.f;
}
else
#endif
{
y_mult = font->y_scale < 0 ? -1 : +1;
}
if (ft_face->units_per_EM != 0)
{
@ -1233,13 +1269,14 @@ hb_ft_font_set_funcs (hb_font_t *font)
if (FT_Select_Charmap (ft_face, FT_ENCODING_MS_SYMBOL))
FT_Select_Charmap (ft_face, FT_ENCODING_UNICODE);
_hb_ft_hb_font_changed (font, ft_face);
ft_face->generic.data = blob;
ft_face->generic.finalizer = (FT_Generic_Finalizer) _release_blob;
_hb_ft_font_set_funcs (font, ft_face, true);
hb_ft_font_set_load_flags (font, FT_LOAD_DEFAULT | FT_LOAD_NO_HINTING);
_hb_ft_hb_font_changed (font, ft_face);
}

View file

@ -176,12 +176,12 @@ struct hb_lazy_loader_t : hb_data_wrapper_t<Data, WheresData>
void init0 () {} /* Init, when memory is already set to 0. No-op for us. */
void init () { instance.set_relaxed (nullptr); }
void fini () { do_destroy (instance.get ()); init (); }
void fini () { do_destroy (instance.get_acquire ()); init (); }
void free_instance ()
{
retry:
Stored *p = instance.get ();
Stored *p = instance.get_acquire ();
if (unlikely (p && !cmpexch (p, nullptr)))
goto retry;
do_destroy (p);
@ -203,7 +203,7 @@ struct hb_lazy_loader_t : hb_data_wrapper_t<Data, WheresData>
Stored * get_stored () const
{
retry:
Stored *p = this->instance.get ();
Stored *p = this->instance.get_acquire ();
if (unlikely (!p))
{
if (unlikely (this->is_inert ()))

View file

@ -144,14 +144,14 @@ struct hb_reference_count_t
{
mutable hb_atomic_int_t ref_count;
void init (int v = 1) { ref_count.set_relaxed (v); }
int get_relaxed () const { return ref_count.get_relaxed (); }
void init (int v = 1) { ref_count = v; }
int get_relaxed () const { return ref_count; }
int inc () const { return ref_count.inc (); }
int dec () const { return ref_count.dec (); }
void fini () { ref_count.set_relaxed (-0x0000DEAD); }
void fini () { ref_count = -0x0000DEAD; }
bool is_inert () const { return !ref_count.get_relaxed (); }
bool is_valid () const { return ref_count.get_relaxed () > 0; }
bool is_inert () const { return !ref_count; }
bool is_valid () const { return ref_count > 0; }
};
@ -214,15 +214,15 @@ static inline void hb_object_trace (const Type *obj, const char *function)
obj ? obj->header.ref_count.get_relaxed () : 0);
}
template <typename Type>
static inline Type *hb_object_create ()
template <typename Type, typename ...Ts>
static inline Type *hb_object_create (Ts... ds)
{
Type *obj = (Type *) hb_calloc (1, sizeof (Type));
if (unlikely (!obj))
return obj;
new (obj) Type;
new (obj) Type (std::forward<Ts> (ds)...);
hb_object_init (obj);
hb_object_trace (obj, HB_FUNC);
@ -233,7 +233,7 @@ template <typename Type>
static inline void hb_object_init (Type *obj)
{
obj->header.ref_count.init ();
obj->header.writable.set_relaxed (true);
obj->header.writable = true;
obj->header.user_data.init ();
}
template <typename Type>
@ -244,12 +244,12 @@ static inline bool hb_object_is_valid (const Type *obj)
template <typename Type>
static inline bool hb_object_is_immutable (const Type *obj)
{
return !obj->header.writable.get_relaxed ();
return !obj->header.writable;
}
template <typename Type>
static inline void hb_object_make_immutable (const Type *obj)
{
obj->header.writable.set_relaxed (false);
obj->header.writable = false;
}
template <typename Type>
static inline Type *hb_object_reference (Type *obj)
@ -273,7 +273,8 @@ static inline bool hb_object_destroy (Type *obj)
hb_object_fini (obj);
obj->~Type ();
if (!std::is_trivially_destructible<Type>::value)
obj->~Type ();
return true;
}
@ -281,7 +282,7 @@ template <typename Type>
static inline void hb_object_fini (Type *obj)
{
obj->header.ref_count.fini (); /* Do this before user_data */
hb_user_data_array_t *user_data = obj->header.user_data.get ();
hb_user_data_array_t *user_data = obj->header.user_data.get_acquire ();
if (user_data)
{
user_data->fini ();
@ -301,7 +302,7 @@ static inline bool hb_object_set_user_data (Type *obj,
assert (hb_object_is_valid (obj));
retry:
hb_user_data_array_t *user_data = obj->header.user_data.get ();
hb_user_data_array_t *user_data = obj->header.user_data.get_acquire ();
if (unlikely (!user_data))
{
user_data = (hb_user_data_array_t *) hb_calloc (sizeof (hb_user_data_array_t), 1);
@ -326,7 +327,7 @@ static inline void *hb_object_get_user_data (Type *obj,
if (unlikely (!obj || obj->header.is_inert ()))
return nullptr;
assert (hb_object_is_valid (obj));
hb_user_data_array_t *user_data = obj->header.user_data.get ();
hb_user_data_array_t *user_data = obj->header.user_data.get_acquire ();
if (!user_data)
return nullptr;
return user_data->get (key);

View file

@ -1379,7 +1379,7 @@ struct cff1
if (unlikely (!len)) return false;
retry:
hb_sorted_vector_t<gname_t> *names = glyph_names.get ();
hb_sorted_vector_t<gname_t> *names = glyph_names.get_acquire ();
if (unlikely (!names))
{
names = (hb_sorted_vector_t<gname_t> *) hb_calloc (sizeof (hb_sorted_vector_t<gname_t>), 1);

View file

@ -59,13 +59,15 @@
* never need to call these functions directly.
**/
using hb_ot_font_advance_cache_t = hb_cache_t<24, 16, 8, true>;
struct hb_ot_font_t
{
const hb_ot_face_t *ot_face;
/* h_advance caching */
mutable hb_atomic_int_t cached_coords_serial;
mutable hb_atomic_ptr_t<hb_advance_cache_t> advance_cache;
mutable hb_atomic_ptr_t<hb_ot_font_advance_cache_t> advance_cache;
};
static hb_ot_font_t *
@ -161,14 +163,14 @@ hb_ot_get_glyph_h_advances (hb_font_t* font, void* font_data,
bool use_cache = false;
#endif
hb_advance_cache_t *cache = nullptr;
hb_ot_font_advance_cache_t *cache = nullptr;
if (use_cache)
{
retry:
cache = ot_font->advance_cache.get ();
cache = ot_font->advance_cache.get_acquire ();
if (unlikely (!cache))
{
cache = (hb_advance_cache_t *) hb_malloc (sizeof (hb_advance_cache_t));
cache = (hb_ot_font_advance_cache_t *) hb_malloc (sizeof (hb_ot_font_advance_cache_t));
if (unlikely (!cache))
{
use_cache = false;
@ -181,7 +183,7 @@ hb_ot_get_glyph_h_advances (hb_font_t* font, void* font_data,
hb_free (cache);
goto retry;
}
ot_font->cached_coords_serial.set (font->serial_coords);
ot_font->cached_coords_serial.set_release (font->serial_coords);
}
}
out:
@ -197,10 +199,10 @@ hb_ot_get_glyph_h_advances (hb_font_t* font, void* font_data,
}
else
{ /* Use cache. */
if (ot_font->cached_coords_serial.get () != (int) font->serial_coords)
if (ot_font->cached_coords_serial.get_acquire () != (int) font->serial_coords)
{
ot_font->advance_cache->init ();
ot_font->cached_coords_serial.set (font->serial_coords);
ot_font->cached_coords_serial.set_release (font->serial_coords);
}
for (unsigned int i = 0; i < count; i++)

View file

@ -73,6 +73,8 @@ struct hmtxvmtx
return_trace (true);
}
const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>>* get_mtx_map (const hb_subset_plan_t *plan) const
{ return T::is_horizontal ? plan->hmtx_map : plan->vmtx_map; }
bool subset_update_header (hb_subset_plan_t *plan,
unsigned int num_hmetrics) const
@ -130,14 +132,15 @@ struct hmtxvmtx
accelerator_t _mtx (c->plan->source);
unsigned num_long_metrics;
const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *mtx_map = get_mtx_map (c->plan);
{
/* Determine num_long_metrics to encode. */
auto& plan = c->plan;
num_long_metrics = plan->num_output_glyphs ();
hb_codepoint_t old_gid = 0;
unsigned int last_advance = plan->old_gid_for_new_gid (num_long_metrics - 1, &old_gid) ? _mtx.get_advance_without_var_unscaled (old_gid) : 0;
unsigned int last_advance = get_new_gid_advance_unscaled (plan, mtx_map, num_long_metrics - 1, _mtx);
while (num_long_metrics > 1 &&
last_advance == (plan->old_gid_for_new_gid (num_long_metrics - 2, &old_gid) ? _mtx.get_advance_without_var_unscaled (old_gid) : 0))
last_advance == get_new_gid_advance_unscaled (plan, mtx_map, num_long_metrics - 2, _mtx))
{
num_long_metrics--;
}
@ -145,14 +148,18 @@ struct hmtxvmtx
auto it =
+ hb_range (c->plan->num_output_glyphs ())
| hb_map ([c, &_mtx] (unsigned _)
| hb_map ([c, &_mtx, mtx_map] (unsigned _)
{
hb_codepoint_t old_gid;
if (!c->plan->old_gid_for_new_gid (_, &old_gid))
return hb_pair (0u, 0);
int lsb = 0;
(void) _mtx.get_leading_bearing_without_var_unscaled (old_gid, &lsb);
return hb_pair (_mtx.get_advance_without_var_unscaled (old_gid), +lsb);
if (!mtx_map->has (_))
{
hb_codepoint_t old_gid;
if (!c->plan->old_gid_for_new_gid (_, &old_gid))
return hb_pair (0u, 0);
int lsb = 0;
(void) _mtx.get_leading_bearing_without_var_unscaled (old_gid, &lsb);
return hb_pair (_mtx.get_advance_without_var_unscaled (old_gid), +lsb);
}
return mtx_map->get (_);
})
;
@ -330,6 +337,24 @@ struct hmtxvmtx
hb_blob_ptr_t<V> var_table;
};
/* get advance: when no variations, call get_advance_without_var_unscaled.
 * when there are variations, get the advance value from mtx_map in subset_plan */
unsigned get_new_gid_advance_unscaled (const hb_subset_plan_t *plan,
const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *mtx_map,
unsigned new_gid,
const accelerator_t &_mtx) const
{
if (mtx_map->is_empty () ||
(new_gid == 0 && !mtx_map->has (new_gid)))
{
hb_codepoint_t old_gid = 0;
return plan->old_gid_for_new_gid (new_gid, &old_gid) ?
_mtx.get_advance_without_var_unscaled (old_gid) : 0;
}
else
{ return mtx_map->get (new_gid).first; }
}
protected:
UnsizedArrayOf<LongMetric>
longMetricZ; /* Paired advance width and leading

View file

@ -84,10 +84,10 @@ using OT::Layout::MediumTypes;
namespace OT {
template<typename Iterator>
static inline void ClassDef_serialize (hb_serialize_context_t *c,
static inline bool ClassDef_serialize (hb_serialize_context_t *c,
Iterator it);
static void ClassDef_remap_and_serialize (
static bool ClassDef_remap_and_serialize (
hb_serialize_context_t *c,
const hb_set_t &klasses,
bool use_class_zero,
@ -186,6 +186,7 @@ struct hb_subset_layout_context_t :
unsigned lookup_index_count;
};
struct VariationStore;
struct hb_collect_variation_indices_context_t :
hb_dispatch_context_t<hb_collect_variation_indices_context_t>
{
@ -194,15 +195,27 @@ struct hb_collect_variation_indices_context_t :
static return_t default_return_value () { return hb_empty_t (); }
hb_set_t *layout_variation_indices;
hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *varidx_delta_map;
hb_font_t *font;
const VariationStore *var_store;
const hb_set_t *glyph_set;
const hb_map_t *gpos_lookups;
float *store_cache;
hb_collect_variation_indices_context_t (hb_set_t *layout_variation_indices_,
hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *varidx_delta_map_,
hb_font_t *font_,
const VariationStore *var_store_,
const hb_set_t *glyph_set_,
const hb_map_t *gpos_lookups_) :
const hb_map_t *gpos_lookups_,
float *store_cache_) :
layout_variation_indices (layout_variation_indices_),
varidx_delta_map (varidx_delta_map_),
font (font_),
var_store (var_store_),
glyph_set (glyph_set_),
gpos_lookups (gpos_lookups_) {}
gpos_lookups (gpos_lookups_),
store_cache (store_cache_) {}
};
template<typename OutputArray>
@ -1380,17 +1393,14 @@ struct LookupOffsetList : List16OfOffsetTo<TLookup, OffsetType>
*/
static void ClassDef_remap_and_serialize (hb_serialize_context_t *c,
static bool ClassDef_remap_and_serialize (hb_serialize_context_t *c,
const hb_set_t &klasses,
bool use_class_zero,
hb_sorted_vector_t<hb_pair_t<hb_codepoint_t, hb_codepoint_t>> &glyph_and_klass, /* IN/OUT */
hb_map_t *klass_map /*IN/OUT*/)
{
if (!klass_map)
{
ClassDef_serialize (c, glyph_and_klass.iter ());
return;
}
return ClassDef_serialize (c, glyph_and_klass.iter ());
/* any glyph not assigned a class value falls into Class zero (0),
* if any glyph assigned to class 0, remapping must start with 0->0*/
@ -1413,7 +1423,7 @@ static void ClassDef_remap_and_serialize (hb_serialize_context_t *c,
}
c->propagate_error (glyph_and_klass, klasses);
ClassDef_serialize (c, glyph_and_klass.iter ());
return ClassDef_serialize (c, glyph_and_klass.iter ());
}
/*
@ -1495,11 +1505,12 @@ struct ClassDefFormat1_3
? hb_len (hb_iter (glyph_map.keys()) | hb_filter (glyph_filter))
: glyph_map.get_population ();
use_class_zero = use_class_zero && glyph_count <= glyph_and_klass.length;
ClassDef_remap_and_serialize (c->serializer,
orig_klasses,
use_class_zero,
glyph_and_klass,
klass_map);
if (!ClassDef_remap_and_serialize (c->serializer,
orig_klasses,
use_class_zero,
glyph_and_klass,
klass_map))
return_trace (false);
return_trace (keep_empty_table || (bool) glyph_and_klass);
}
@ -1736,11 +1747,12 @@ struct ClassDefFormat2_4
? hb_len (hb_iter (glyphset) | hb_filter (glyph_filter))
: glyph_map.get_population ();
use_class_zero = use_class_zero && glyph_count <= glyph_and_klass.length;
ClassDef_remap_and_serialize (c->serializer,
orig_klasses,
use_class_zero,
glyph_and_klass,
klass_map);
if (!ClassDef_remap_and_serialize (c->serializer,
orig_klasses,
use_class_zero,
glyph_and_klass,
klass_map))
return_trace (false);
return_trace (keep_empty_table || (bool) glyph_and_klass);
}
@ -2124,9 +2136,9 @@ struct ClassDef
};
template<typename Iterator>
static inline void ClassDef_serialize (hb_serialize_context_t *c,
static inline bool ClassDef_serialize (hb_serialize_context_t *c,
Iterator it)
{ c->start_embed<ClassDef> ()->serialize (c, it); }
{ return (c->start_embed<ClassDef> ()->serialize (c, it)); }
/*
@ -2280,16 +2292,16 @@ struct VarData
unsigned int count = regionIndices.len;
bool is_long = longWords ();
unsigned word_count = wordCount ();
unsigned int scount = is_long ? count - word_count : word_count;
unsigned int scount = is_long ? count : word_count;
unsigned int lcount = is_long ? word_count : 0;
const HBUINT8 *bytes = get_delta_bytes ();
const HBUINT8 *row = bytes + inner * (scount + count);
const HBUINT8 *row = bytes + inner * get_row_size ();
float delta = 0.;
unsigned int i = 0;
const HBINT16 *lcursor = reinterpret_cast<const HBINT16 *> (row);
const HBINT32 *lcursor = reinterpret_cast<const HBINT32 *> (row);
for (; i < lcount; i++)
{
float scalar = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count, cache);
@ -2563,7 +2575,7 @@ struct VariationStore
bool serialize (hb_serialize_context_t *c,
const VariationStore *src,
const hb_array_t <hb_inc_bimap_t> &inner_maps)
const hb_array_t <const hb_inc_bimap_t> &inner_maps)
{
TRACE_SERIALIZE (this);
#ifdef HB_NO_VAR
@ -2618,7 +2630,7 @@ struct VariationStore
return_trace (true);
}
bool subset (hb_subset_context_t *c) const
bool subset (hb_subset_context_t *c, const hb_array_t<const hb_inc_bimap_t> &inner_maps) const
{
TRACE_SUBSET (this);
#ifdef HB_NO_VAR
@ -2628,22 +2640,7 @@ struct VariationStore
VariationStore *varstore_prime = c->serializer->start_embed<VariationStore> ();
if (unlikely (!varstore_prime)) return_trace (false);
const hb_set_t *variation_indices = c->plan->layout_variation_indices;
if (variation_indices->is_empty ()) return_trace (false);
hb_vector_t<hb_inc_bimap_t> inner_maps;
inner_maps.resize ((unsigned) dataSets.len);
for (unsigned idx : c->plan->layout_variation_indices->iter ())
{
uint16_t major = idx >> 16;
uint16_t minor = idx & 0xFFFF;
if (major >= inner_maps.length)
return_trace (false);
inner_maps[major].add (minor);
}
varstore_prime->serialize (c->serializer, this, inner_maps.as_array ());
varstore_prime->serialize (c->serializer, this, inner_maps);
return_trace (
!c->serializer->in_error()
@ -3169,28 +3166,36 @@ struct VariationDevice
VariationStore::cache_t *store_cache = nullptr) const
{ return font->em_scalef_y (get_delta (font, store, store_cache)); }
VariationDevice* copy (hb_serialize_context_t *c, const hb_map_t *layout_variation_idx_map) const
VariationDevice* copy (hb_serialize_context_t *c,
const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *layout_variation_idx_delta_map) const
{
TRACE_SERIALIZE (this);
if (!layout_variation_idx_delta_map) return_trace (nullptr);
auto snap = c->snapshot ();
auto *out = c->embed (this);
if (unlikely (!out)) return_trace (nullptr);
if (!layout_variation_idx_map || layout_variation_idx_map->is_empty ()) return_trace (out);
/* TODO Just get() and bail if NO_VARIATION. Needs to setup the map to return that. */
if (!layout_variation_idx_map->has (varIdx))
if (!layout_variation_idx_delta_map->has (varIdx))
{
c->revert (snap);
return_trace (nullptr);
}
unsigned new_idx = layout_variation_idx_map->get (varIdx);
unsigned new_idx = hb_first (layout_variation_idx_delta_map->get (varIdx));
out->varIdx = new_idx;
return_trace (out);
}
void record_variation_index (hb_set_t *layout_variation_indices) const
void collect_variation_index (hb_collect_variation_indices_context_t *c) const
{
layout_variation_indices->add (varIdx);
c->layout_variation_indices->add (varIdx);
int delta = 0;
if (c->font && c->var_store)
delta = roundf (get_delta (c->font, *c->var_store, c->store_cache));
/* set new varidx to HB_OT_LAYOUT_NO_VARIATIONS_INDEX here, will remap
* varidx later*/
c->varidx_delta_map->set (varIdx, hb_pair_t<unsigned, int> (HB_OT_LAYOUT_NO_VARIATIONS_INDEX, delta));
}
bool sanitize (hb_sanitize_context_t *c) const
@ -3283,7 +3288,8 @@ struct Device
}
}
Device* copy (hb_serialize_context_t *c, const hb_map_t *layout_variation_idx_map=nullptr) const
Device* copy (hb_serialize_context_t *c,
const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *layout_variation_idx_delta_map=nullptr) const
{
TRACE_SERIALIZE (this);
switch (u.b.format) {
@ -3295,14 +3301,14 @@ struct Device
#endif
#ifndef HB_NO_VAR
case 0x8000:
return_trace (reinterpret_cast<Device *> (u.variation.copy (c, layout_variation_idx_map)));
return_trace (reinterpret_cast<Device *> (u.variation.copy (c, layout_variation_idx_delta_map)));
#endif
default:
return_trace (nullptr);
}
}
void collect_variation_indices (hb_set_t *layout_variation_indices) const
void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
{
switch (u.b.format) {
#ifndef HB_NO_HINTING
@ -3313,7 +3319,7 @@ struct Device
#endif
#ifndef HB_NO_VAR
case 0x8000:
u.variation.record_variation_index (layout_variation_indices);
u.variation.collect_variation_index (c);
return;
#endif
default:
@ -3321,6 +3327,18 @@ struct Device
}
}
unsigned get_variation_index () const
{
switch (u.b.format) {
#ifndef HB_NO_VAR
case 0x8000:
return u.variation.varIdx;
#endif
default:
return HB_OT_LAYOUT_NO_VARIATIONS_INDEX;
}
}
protected:
union {
DeviceHeader b;

View file

@ -200,15 +200,34 @@ struct CaretValueFormat3
bool subset (hb_subset_context_t *c) const
{
TRACE_SUBSET (this);
auto *out = c->serializer->embed (this);
auto *out = c->serializer->start_embed (*this);
if (unlikely (!out)) return_trace (false);
if (!c->serializer->embed (caretValueFormat)) return_trace (false);
if (!c->serializer->embed (coordinate)) return_trace (false);
unsigned varidx = (this+deviceTable).get_variation_index ();
if (c->plan->layout_variation_idx_delta_map->has (varidx))
{
int delta = hb_second (c->plan->layout_variation_idx_delta_map->get (varidx));
if (delta != 0)
{
if (!c->serializer->check_assign (out->coordinate, coordinate + delta, HB_SERIALIZE_ERROR_INT_OVERFLOW))
return_trace (false);
}
}
if (c->plan->all_axes_pinned)
return_trace (c->serializer->check_assign (out->caretValueFormat, 1, HB_SERIALIZE_ERROR_INT_OVERFLOW));
if (!c->serializer->embed (deviceTable))
return_trace (false);
return_trace (out->deviceTable.serialize_copy (c->serializer, deviceTable, this, c->serializer->to_bias (out),
hb_serialize_context_t::Head, c->plan->layout_variation_idx_map));
hb_serialize_context_t::Head, c->plan->layout_variation_idx_delta_map));
}
void collect_variation_indices (hb_set_t *layout_variation_indices) const
{ (this+deviceTable).collect_variation_indices (layout_variation_indices); }
void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
{ (this+deviceTable).collect_variation_indices (c); }
bool sanitize (hb_sanitize_context_t *c) const
{
@ -255,14 +274,14 @@ struct CaretValue
}
}
void collect_variation_indices (hb_set_t *layout_variation_indices) const
void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
{
switch (u.format) {
case 1:
case 2:
return;
case 3:
u.format3.collect_variation_indices (layout_variation_indices);
u.format3.collect_variation_indices (c);
return;
default: return;
}
@ -329,7 +348,7 @@ struct LigGlyph
void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
{
for (const Offset16To<CaretValue>& offset : carets.iter ())
(this+offset).collect_variation_indices (c->layout_variation_indices);
(this+offset).collect_variation_indices (c);
}
bool sanitize (hb_sanitize_context_t *c) const
@ -586,7 +605,10 @@ struct GDEFVersion1_2
bool subset_varstore = false;
if (version.to_int () >= 0x00010003u)
{
subset_varstore = out->varStore.serialize_subset (c, varStore, this);
if (c->plan->all_axes_pinned)
out->varStore = 0;
else
subset_varstore = out->varStore.serialize_subset (c, varStore, this, c->plan->gdef_varstore_inner_maps.as_array ());
}
if (subset_varstore)
@ -846,7 +868,7 @@ struct GDEF
{ get_lig_caret_list ().collect_variation_indices (c); }
void remap_layout_variation_indices (const hb_set_t *layout_variation_indices,
hb_map_t *layout_variation_idx_map /* OUT */) const
hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *layout_variation_idx_delta_map /* OUT */) const
{
if (!has_var_store ()) return;
if (layout_variation_indices->is_empty ()) return;
@ -864,7 +886,11 @@ struct GDEF
}
unsigned new_idx = (new_major << 16) + new_minor;
layout_variation_idx_map->set (idx, new_idx);
if (!layout_variation_idx_delta_map->has (idx))
continue;
int delta = hb_second (layout_variation_idx_delta_map->get (idx));
layout_variation_idx_delta_map->set (idx, hb_pair_t<unsigned, int> (new_idx, delta));
++new_minor;
last_major = major;
}

View file

@ -4261,6 +4261,7 @@ struct GSUBGPOS
}
void prune_langsys (const hb_map_t *duplicate_feature_map,
const hb_set_t *layout_scripts,
hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> *script_langsys_map,
hb_set_t *new_feature_indexes /* OUT */) const
{
@ -4269,6 +4270,8 @@ struct GSUBGPOS
unsigned count = get_script_count ();
for (unsigned script_index = 0; script_index < count; script_index++)
{
const Tag& tag = get_script_tag (script_index);
if (!layout_scripts->has (tag)) continue;
const Script& s = get_script (script_index);
s.prune_langsys (&c, script_index);
}

View file

@ -601,9 +601,13 @@ hb_ot_layout_table_select_script (hb_face_t *face,
* @feature_tags: (out) (array length=feature_count): Array of feature tags found in the table
*
* Fetches a list of all feature tags in the given face's GSUB or GPOS table.
* Note that there might be duplicate feature tags, belonging to different
* script/language-system pairs of the table.
*
* Return value: Total number of feature tags.
*
* Since: 0.6.0
*
**/
unsigned int
hb_ot_layout_table_get_feature_tags (hb_face_t *face,
@ -629,6 +633,9 @@ hb_ot_layout_table_get_feature_tags (hb_face_t *face,
* or GPOS table.
*
* Return value: `true` if the feature is found, `false` otherwise
*
* Since: 0.6.0
*
**/
bool
hb_ot_layout_table_find_feature (hb_face_t *face,
@ -668,6 +675,8 @@ hb_ot_layout_table_find_feature (hb_face_t *face,
*
* Return value: Total number of language tags.
*
* Since: 0.6.0
*
**/
unsigned int
hb_ot_layout_script_get_language_tags (hb_face_t *face,
@ -778,6 +787,8 @@ hb_ot_layout_script_select_language (hb_face_t *face,
*
* Return value: `true` if the feature is found, `false` otherwise
*
* Since: 0.6.0
*
**/
hb_bool_t
hb_ot_layout_language_get_required_feature_index (hb_face_t *face,
@ -846,6 +857,9 @@ hb_ot_layout_language_get_required_feature (hb_face_t *face,
* returned will begin at the offset provided.
*
* Return value: Total number of features.
*
* Since: 0.6.0
*
**/
unsigned int
hb_ot_layout_language_get_feature_indexes (hb_face_t *face,
@ -879,6 +893,9 @@ hb_ot_layout_language_get_feature_indexes (hb_face_t *face,
* returned will begin at the offset provided.
*
* Return value: Total number of feature tags.
*
* Since: 0.6.0
*
**/
unsigned int
hb_ot_layout_language_get_feature_tags (hb_face_t *face,
@ -919,6 +936,8 @@ hb_ot_layout_language_get_feature_tags (hb_face_t *face,
*
* Return value: `true` if the feature is found, `false` otherwise
*
* Since: 0.6.0
*
**/
hb_bool_t
hb_ot_layout_language_find_feature (hb_face_t *face,
@ -1167,9 +1186,12 @@ script_collect_features (hb_collect_features_context_t *c,
* hb_ot_layout_collect_features:
* @face: #hb_face_t to work upon
* @table_tag: #HB_OT_TAG_GSUB or #HB_OT_TAG_GPOS
* @scripts: The array of scripts to collect features for
* @languages: The array of languages to collect features for
* @features: The array of features to collect
* @scripts: (nullable) (array zero-terminated=1): The array of scripts to collect features for,
* terminated by %HB_TAG_NONE
* @languages: (nullable) (array zero-terminated=1): The array of languages to collect features for,
* terminated by %HB_TAG_NONE
* @features: (nullable) (array zero-terminated=1): The array of features to collect,
* terminated by %HB_TAG_NONE
* @feature_indexes: (out): The array of feature indexes found for the query
*
* Fetches a list of all feature indexes in the specified face's GSUB table
@ -1216,9 +1238,12 @@ hb_ot_layout_collect_features (hb_face_t *face,
* hb_ot_layout_collect_lookups:
* @face: #hb_face_t to work upon
* @table_tag: #HB_OT_TAG_GSUB or #HB_OT_TAG_GPOS
* @scripts: The array of scripts to collect lookups for
* @languages: The array of languages to collect lookups for
* @features: The array of features to collect lookups for
* @scripts: (nullable) (array zero-terminated=1): The array of scripts to collect lookups for,
* terminated by %HB_TAG_NONE
* @languages: (nullable) (array zero-terminated=1): The array of languages to collect lookups for,
* terminated by %HB_TAG_NONE
* @features: (nullable) (array zero-terminated=1): The array of features to collect lookups for,
* terminated by %HB_TAG_NONE
* @lookup_indexes: (out): The array of lookup indexes found for the query
*
* Fetches a list of all feature-lookup indexes in the specified face's GSUB
@ -1316,6 +1341,8 @@ hb_ot_layout_lookup_collect_glyphs (hb_face_t *face,
*
* Return value: `true` if feature variations were found, `false` otherwise.
*
* Since: 1.4.0
*
**/
hb_bool_t
hb_ot_layout_table_find_feature_variations (hb_face_t *face,
@ -1347,6 +1374,8 @@ hb_ot_layout_table_find_feature_variations (hb_face_t *face,
*
* Return value: Total number of lookups.
*
* Since: 1.4.0
*
**/
unsigned int
hb_ot_layout_feature_with_variations_get_lookups (hb_face_t *face,
@ -1379,6 +1408,8 @@ hb_ot_layout_feature_with_variations_get_lookups (hb_face_t *face,
*
* Return value: `true` if data found, `false` otherwise
*
* Since: 0.6.0
*
**/
hb_bool_t
hb_ot_layout_has_substitution (hb_face_t *face)
@ -2297,11 +2328,6 @@ struct hb_get_glyph_alternates_dispatch_t :
static return_t default_return_value () { return 0; }
bool stop_sublookup_iteration (return_t r) const { return r; }
hb_face_t *face;
hb_get_glyph_alternates_dispatch_t (hb_face_t *face) :
face (face) {}
private:
template <typename T, typename ...Ts> auto
_dispatch (const T &obj, hb_priority<1>, Ts&&... ds) HB_AUTO_RETURN
@ -2340,7 +2366,7 @@ hb_ot_layout_lookup_get_glyph_alternates (hb_face_t *face,
unsigned *alternate_count /* IN/OUT. May be NULL. */,
hb_codepoint_t *alternate_glyphs /* OUT. May be NULL. */)
{
hb_get_glyph_alternates_dispatch_t c (face);
hb_get_glyph_alternates_dispatch_t c;
const OT::SubstLookup &lookup = face->table.GSUB->table->get_lookup (lookup_index);
auto ret = lookup.dispatch (&c, glyph, start_offset, alternate_count, alternate_glyphs);
if (!ret && alternate_count) *alternate_count = 0;

View file

@ -166,6 +166,47 @@ struct OS2
}
}
float map_wdth_to_widthclass(float width) const
{
if (width < 50) return 1.0f;
if (width > 200) return 9.0f;
float ratio = (width - 50) / 12.5f;
int a = (int) floorf (ratio);
int b = (int) ceilf (ratio);
/* follow this maping:
* https://docs.microsoft.com/en-us/typography/opentype/spec/os2#uswidthclass
*/
if (b <= 6) // 50-125
{
if (a == b) return a + 1.0f;
}
else if (b == 7) // no mapping for 137.5
{
a = 6;
b = 8;
}
else if (b == 8)
{
if (a == b) return 8.0f; // 150
a = 6;
}
else
{
if (a == b && a == 12) return 9.0f; //200
b = 12;
a = 8;
}
float va = 50 + a * 12.5f;
float vb = 50 + b * 12.5f;
float ret = a + (width - va) / (vb - va);
if (a <= 6) ret += 1.0f;
return ret;
}
bool subset (hb_subset_context_t *c) const
{
TRACE_SUBSET (this);
@ -183,6 +224,26 @@ struct OS2
_update_unicode_ranges (c->plan->unicodes, os2_prime->ulUnicodeRange);
if (c->plan->user_axes_location->has (HB_TAG ('w','g','h','t')) &&
!c->plan->pinned_at_default)
{
float weight_class = c->plan->user_axes_location->get (HB_TAG ('w','g','h','t'));
if (!c->serializer->check_assign (os2_prime->usWeightClass,
roundf (hb_clamp (weight_class, 1.0f, 1000.0f)),
HB_SERIALIZE_ERROR_INT_OVERFLOW))
return_trace (false);
}
if (c->plan->user_axes_location->has (HB_TAG ('w','d','t','h')) &&
!c->plan->pinned_at_default)
{
float width = c->plan->user_axes_location->get (HB_TAG ('w','d','t','h'));
if (!c->serializer->check_assign (os2_prime->usWidthClass,
roundf (map_wdth_to_widthclass (width)),
HB_SERIALIZE_ERROR_INT_OVERFLOW))
return_trace (false);
}
return_trace (true);
}

View file

@ -102,6 +102,14 @@ struct post
if (!serialize (c->serializer, glyph_names))
return_trace (false);
if (c->plan->user_axes_location->has (HB_TAG ('s','l','n','t')) &&
!c->plan->pinned_at_default)
{
float italic_angle = c->plan->user_axes_location->get (HB_TAG ('s','l','n','t'));
italic_angle = hb_max (-90.f, hb_min (italic_angle, 90.f));
post_prime->italicAngle.set_float (italic_angle);
}
if (glyph_names && version.major == 2)
return_trace (v2X.subset (c));
@ -133,7 +141,7 @@ struct post
}
~accelerator_t ()
{
hb_free (gids_sorted_by_name.get ());
hb_free (gids_sorted_by_name.get_acquire ());
table.destroy ();
}
@ -160,7 +168,7 @@ struct post
if (unlikely (!len)) return false;
retry:
uint16_t *gids = gids_sorted_by_name.get ();
uint16_t *gids = gids_sorted_by_name.get_acquire ();
if (unlikely (!gids))
{

View file

@ -6,10 +6,10 @@
*
* on files with these headers:
*
* # ArabicShaping-14.0.0.txt
* # Date: 2021-05-21, 01:54:00 GMT [KW, RP]
* # Scripts-14.0.0.txt
* # Date: 2021-07-10, 00:35:31 GMT
* # ArabicShaping-15.0.0.txt
* # Date: 2022-02-14, 18:50:00 GMT [KW, RP]
* # Scripts-15.0.0.txt
* # Date: 2022-04-26, 23:15:02 GMT
*/
#ifndef HB_OT_SHAPER_ARABIC_JOINING_LIST_HH

View file

@ -6,10 +6,10 @@
*
* on files with these headers:
*
* # ArabicShaping-14.0.0.txt
* # Date: 2021-05-21, 01:54:00 GMT [KW, RP]
* # Blocks-14.0.0.txt
* # Date: 2021-01-22, 23:29:00 GMT [KW]
* # ArabicShaping-15.0.0.txt
* # Date: 2022-02-14, 18:50:00 GMT [KW, RP]
* # Blocks-15.0.0.txt
* # Date: 2022-01-28, 20:58:00 GMT [KW]
* UnicodeData.txt does not have a header.
*/

View file

@ -53,7 +53,7 @@ enum indic_syllable_type_t {
};
#line 54 "hb-ot-shaper-indic-machine.hh"
#line 57 "hb-ot-shaper-indic-machine.hh"
#define indic_syllable_machine_ex_A 9u
#define indic_syllable_machine_ex_C 1u
#define indic_syllable_machine_ex_CM 16u
@ -75,7 +75,7 @@ enum indic_syllable_type_t {
#define indic_syllable_machine_ex_ZWNJ 5u
#line 74 "hb-ot-shaper-indic-machine.hh"
#line 79 "hb-ot-shaper-indic-machine.hh"
static const unsigned char _indic_syllable_machine_trans_keys[] = {
8u, 8u, 4u, 8u, 5u, 7u, 5u, 8u, 4u, 8u, 4u, 12u, 4u, 8u, 8u, 8u,
5u, 7u, 5u, 8u, 4u, 8u, 4u, 12u, 4u, 12u, 4u, 12u, 8u, 8u, 5u, 7u,
@ -422,7 +422,7 @@ find_syllables_indic (hb_buffer_t *buffer)
int cs;
hb_glyph_info_t *info = buffer->info;
#line 415 "hb-ot-shaper-indic-machine.hh"
#line 426 "hb-ot-shaper-indic-machine.hh"
{
cs = indic_syllable_machine_start;
ts = 0;
@ -438,7 +438,7 @@ find_syllables_indic (hb_buffer_t *buffer)
unsigned int syllable_serial = 1;
#line 427 "hb-ot-shaper-indic-machine.hh"
#line 442 "hb-ot-shaper-indic-machine.hh"
{
int _slen;
int _trans;
@ -452,7 +452,7 @@ _resume:
#line 1 "NONE"
{ts = p;}
break;
#line 439 "hb-ot-shaper-indic-machine.hh"
#line 456 "hb-ot-shaper-indic-machine.hh"
}
_keys = _indic_syllable_machine_trans_keys + (cs<<1);
@ -555,7 +555,7 @@ _eof_trans:
#line 113 "hb-ot-shaper-indic-machine.rl"
{act = 6;}
break;
#line 521 "hb-ot-shaper-indic-machine.hh"
#line 559 "hb-ot-shaper-indic-machine.hh"
}
_again:
@ -564,7 +564,7 @@ _again:
#line 1 "NONE"
{ts = 0;}
break;
#line 528 "hb-ot-shaper-indic-machine.hh"
#line 568 "hb-ot-shaper-indic-machine.hh"
}
if ( ++p != pe )

View file

@ -6,12 +6,12 @@
*
* on files with these headers:
*
* # IndicSyllabicCategory-14.0.0.txt
* # Date: 2021-05-22, 01:01:00 GMT [KW, RP]
* # IndicPositionalCategory-14.0.0.txt
* # Date: 2021-05-22, 01:01:00 GMT [KW, RP]
* # Blocks-14.0.0.txt
* # Date: 2021-01-22, 23:29:00 GMT [KW]
* # IndicSyllabicCategory-15.0.0.txt
* # Date: 2022-05-26, 02:18:00 GMT [KW, RP]
* # IndicPositionalCategory-15.0.0.txt
* # Date: 2022-05-26, 02:18:00 GMT [KW, RP]
* # Blocks-15.0.0.txt
* # Date: 2022-01-28, 20:58:00 GMT [KW]
*/
#include "hb.hh"
@ -92,7 +92,7 @@ static_assert (OT_VPst == M_Cat(VPst), "");
#define _OT_R OT_Ra /* 14 chars; Ra */
#define _OT_Rf OT_Repha /* 1 chars; Repha */
#define _OT_Rt OT_Robatic /* 3 chars; Robatic */
#define _OT_SM OT_SM /* 55 chars; SM */
#define _OT_SM OT_SM /* 56 chars; SM */
#define _OT_S OT_Symbol /* 22 chars; Symbol */
#define _OT_V OT_V /* 172 chars; V */
#define _OT_VA OT_VAbv /* 18 chars; VAbv */
@ -117,7 +117,7 @@ static_assert (OT_VPst == M_Cat(VPst), "");
#define _POS_R POS_POST_C /* 13 chars; POST_C */
#define _POS_L POS_PRE_C /* 5 chars; PRE_C */
#define _POS_LM POS_PRE_M /* 14 chars; PRE_M */
#define _POS_SM POS_SMVD /* 129 chars; SMVD */
#define _POS_SM POS_SMVD /* 130 chars; SMVD */
#pragma GCC diagnostic pop
@ -301,7 +301,7 @@ static const uint16_t indic_table[] = {
/* 0CD8 */ _(X,X), _(X,X), _(X,X), _(X,X), _(X,X), _(C,C), _(C,C), _(X,X),
/* 0CE0 */ _(V,C), _(V,C), _(M,BS), _(M,BS), _(X,X), _(X,X), _(GB,C), _(GB,C),
/* 0CE8 */ _(GB,C), _(GB,C), _(GB,C), _(GB,C), _(GB,C), _(GB,C), _(GB,C), _(GB,C),
/* 0CF0 */ _(X,X), _(CS,C), _(CS,C), _(X,X), _(X,X), _(X,X), _(X,X), _(X,X),
/* 0CF0 */ _(X,X), _(CS,C), _(CS,C),_(SM,SM), _(X,X), _(X,X), _(X,X), _(X,X),
/* 0CF8 */ _(X,X), _(X,X), _(X,X), _(X,X), _(X,X), _(X,X), _(X,X), _(X,X),
/* Malayalam */

View file

@ -276,7 +276,7 @@ struct indic_shape_plan_t
{
bool load_virama_glyph (hb_font_t *font, hb_codepoint_t *pglyph) const
{
hb_codepoint_t glyph = virama_glyph.get_relaxed ();
hb_codepoint_t glyph = virama_glyph;
if (unlikely (glyph == (hb_codepoint_t) -1))
{
if (!config->virama || !font->get_nominal_glyph (config->virama, &glyph))
@ -286,7 +286,7 @@ struct indic_shape_plan_t
/* Our get_nominal_glyph() function needs a font, so we can't get the virama glyph
* during shape planning... Instead, overwrite it here. */
virama_glyph.set_relaxed ((int) glyph);
virama_glyph = (int) glyph;
}
*pglyph = glyph;
@ -330,7 +330,7 @@ data_create_indic (const hb_ot_shape_plan_t *plan)
#ifndef HB_NO_UNISCRIBE_BUG_COMPATIBLE
indic_plan->uniscribe_bug_compatible = hb_options ().uniscribe_bug_compatible;
#endif
indic_plan->virama_glyph.set_relaxed (-1);
indic_plan->virama_glyph = -1;
/* Use zero-context would_substitute() matching for new-spec of the main
* Indic scripts, and scripts with one spec only, but not for old-specs.
@ -992,7 +992,7 @@ final_reordering_syllable_indic (const hb_ot_shape_plan_t *plan,
* class of I_Cat(H) is desired but has been lost. */
/* We don't call load_virama_glyph(), since we know it's already
* loaded. */
hb_codepoint_t virama_glyph = indic_plan->virama_glyph.get_relaxed ();
hb_codepoint_t virama_glyph = indic_plan->virama_glyph;
if (virama_glyph)
{
for (unsigned int i = start; i < end; i++)

View file

@ -48,7 +48,7 @@ enum khmer_syllable_type_t {
};
#line 49 "hb-ot-shaper-khmer-machine.hh"
#line 52 "hb-ot-shaper-khmer-machine.hh"
#define khmer_syllable_machine_ex_C 1u
#define khmer_syllable_machine_ex_DOTTEDCIRCLE 11u
#define khmer_syllable_machine_ex_H 4u
@ -66,7 +66,7 @@ enum khmer_syllable_type_t {
#define khmer_syllable_machine_ex_ZWNJ 5u
#line 65 "hb-ot-shaper-khmer-machine.hh"
#line 70 "hb-ot-shaper-khmer-machine.hh"
static const unsigned char _khmer_syllable_machine_trans_keys[] = {
5u, 26u, 5u, 26u, 1u, 15u, 5u, 26u, 5u, 26u, 5u, 26u, 5u, 26u, 5u, 26u,
5u, 26u, 5u, 26u, 5u, 26u, 5u, 26u, 5u, 26u, 1u, 15u, 5u, 26u, 5u, 26u,
@ -294,7 +294,7 @@ find_syllables_khmer (hb_buffer_t *buffer)
int cs;
hb_glyph_info_t *info = buffer->info;
#line 287 "hb-ot-shaper-khmer-machine.hh"
#line 298 "hb-ot-shaper-khmer-machine.hh"
{
cs = khmer_syllable_machine_start;
ts = 0;
@ -310,7 +310,7 @@ find_syllables_khmer (hb_buffer_t *buffer)
unsigned int syllable_serial = 1;
#line 299 "hb-ot-shaper-khmer-machine.hh"
#line 314 "hb-ot-shaper-khmer-machine.hh"
{
int _slen;
int _trans;
@ -324,7 +324,7 @@ _resume:
#line 1 "NONE"
{ts = p;}
break;
#line 311 "hb-ot-shaper-khmer-machine.hh"
#line 328 "hb-ot-shaper-khmer-machine.hh"
}
_keys = _khmer_syllable_machine_trans_keys + (cs<<1);
@ -394,7 +394,7 @@ _eof_trans:
#line 98 "hb-ot-shaper-khmer-machine.rl"
{act = 3;}
break;
#line 368 "hb-ot-shaper-khmer-machine.hh"
#line 398 "hb-ot-shaper-khmer-machine.hh"
}
_again:
@ -403,7 +403,7 @@ _again:
#line 1 "NONE"
{ts = 0;}
break;
#line 375 "hb-ot-shaper-khmer-machine.hh"
#line 407 "hb-ot-shaper-khmer-machine.hh"
}
if ( ++p != pe )

View file

@ -50,7 +50,7 @@ enum myanmar_syllable_type_t {
};
#line 51 "hb-ot-shaper-myanmar-machine.hh"
#line 54 "hb-ot-shaper-myanmar-machine.hh"
#define myanmar_syllable_machine_ex_A 9u
#define myanmar_syllable_machine_ex_As 32u
#define myanmar_syllable_machine_ex_C 1u
@ -77,7 +77,7 @@ enum myanmar_syllable_type_t {
#define myanmar_syllable_machine_ex_ZWNJ 5u
#line 76 "hb-ot-shaper-myanmar-machine.hh"
#line 81 "hb-ot-shaper-myanmar-machine.hh"
static const unsigned char _myanmar_syllable_machine_trans_keys[] = {
1u, 41u, 3u, 41u, 5u, 39u, 5u, 8u, 3u, 41u, 3u, 39u, 3u, 39u, 5u, 39u,
5u, 39u, 3u, 39u, 3u, 39u, 3u, 41u, 5u, 39u, 1u, 15u, 3u, 39u, 3u, 39u,
@ -443,7 +443,7 @@ find_syllables_myanmar (hb_buffer_t *buffer)
int cs;
hb_glyph_info_t *info = buffer->info;
#line 436 "hb-ot-shaper-myanmar-machine.hh"
#line 447 "hb-ot-shaper-myanmar-machine.hh"
{
cs = myanmar_syllable_machine_start;
ts = 0;
@ -459,7 +459,7 @@ find_syllables_myanmar (hb_buffer_t *buffer)
unsigned int syllable_serial = 1;
#line 448 "hb-ot-shaper-myanmar-machine.hh"
#line 463 "hb-ot-shaper-myanmar-machine.hh"
{
int _slen;
int _trans;
@ -473,7 +473,7 @@ _resume:
#line 1 "NONE"
{ts = p;}
break;
#line 460 "hb-ot-shaper-myanmar-machine.hh"
#line 477 "hb-ot-shaper-myanmar-machine.hh"
}
_keys = _myanmar_syllable_machine_trans_keys + (cs<<1);
@ -519,7 +519,7 @@ _eof_trans:
#line 113 "hb-ot-shaper-myanmar-machine.rl"
{te = p;p--;{ found_syllable (myanmar_non_myanmar_cluster); }}
break;
#line 498 "hb-ot-shaper-myanmar-machine.hh"
#line 523 "hb-ot-shaper-myanmar-machine.hh"
}
_again:
@ -528,7 +528,7 @@ _again:
#line 1 "NONE"
{ts = 0;}
break;
#line 505 "hb-ot-shaper-myanmar-machine.hh"
#line 532 "hb-ot-shaper-myanmar-machine.hh"
}
if ( ++p != pe )

File diff suppressed because it is too large Load diff

View file

@ -6,18 +6,18 @@
*
* on files with these headers:
*
* # IndicSyllabicCategory-14.0.0.txt
* # Date: 2021-05-22, 01:01:00 GMT [KW, RP]
* # IndicPositionalCategory-14.0.0.txt
* # Date: 2021-05-22, 01:01:00 GMT [KW, RP]
* # ArabicShaping-14.0.0.txt
* # Date: 2021-05-21, 01:54:00 GMT [KW, RP]
* # DerivedCoreProperties-14.0.0.txt
* # Date: 2021-08-12, 23:12:53 GMT
* # Blocks-14.0.0.txt
* # Date: 2021-01-22, 23:29:00 GMT [KW]
* # Scripts-14.0.0.txt
* # Date: 2021-07-10, 00:35:31 GMT
* # IndicSyllabicCategory-15.0.0.txt
* # Date: 2022-05-26, 02:18:00 GMT [KW, RP]
* # IndicPositionalCategory-15.0.0.txt
* # Date: 2022-05-26, 02:18:00 GMT [KW, RP]
* # ArabicShaping-15.0.0.txt
* # Date: 2022-02-14, 18:50:00 GMT [KW, RP]
* # DerivedCoreProperties-15.0.0.txt
* # Date: 2022-08-05, 22:17:05 GMT
* # Blocks-15.0.0.txt
* # Date: 2022-01-28, 20:58:00 GMT [KW]
* # Scripts-15.0.0.txt
* # Date: 2022-04-26, 23:15:02 GMT
* # Override values For Indic_Syllabic_Category
* # Not derivable
* # Initial version based on Unicode 7.0 by Andrew Glass 2014-03-17
@ -90,7 +90,7 @@
#pragma GCC diagnostic pop
static const uint8_t
hb_use_u8[3083] =
hb_use_u8[3115] =
{
16, 50, 51, 51, 51, 52, 51, 83, 118, 131, 51, 57, 58, 179, 195, 61,
51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51,
@ -114,22 +114,22 @@ hb_use_u8[3083] =
47, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 48, 49, 2, 2, 2,
2, 2, 2, 2, 2, 50, 51, 2, 52, 2, 2, 53, 2, 2, 54, 55,
56, 57, 58, 59, 60, 61, 62, 63, 2, 64, 65, 2, 66, 67, 68, 69,
2, 70, 2, 71, 72, 73, 74, 2, 2, 75, 76, 77, 78, 2, 79, 2,
2, 80, 80, 80, 80, 80, 80, 80, 80, 81, 2, 2, 2, 2, 2, 2,
2, 70, 2, 71, 72, 73, 74, 2, 2, 75, 76, 77, 78, 2, 79, 80,
2, 81, 81, 81, 81, 81, 81, 81, 81, 82, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 82, 83, 2, 2, 2, 2, 2, 2, 2, 84,
85, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 80, 80, 80, 86, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 83, 84, 2, 2, 2, 2, 2, 2, 2, 85,
86, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 81, 81, 81, 87, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 87, 88, 2, 2, 2, 2, 2,
2, 2, 2, 89, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 88, 89, 2, 2, 2, 2, 2,
2, 2, 2, 90, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 90, 2, 2, 91, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 92, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 93, 93, 94, 95, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93,
93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93,
93, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1,
2, 2, 2, 91, 2, 2, 92, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 93, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 94, 94, 95, 96, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94,
94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94,
94, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1,
0, 2, 2, 2, 2, 2, 0, 0, 0, 3, 0, 0, 0, 0, 0, 4,
0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 6, 7, 0, 0, 0, 0, 0, 0,
@ -147,7 +147,7 @@ hb_use_u8[3083] =
7, 0, 0, 0, 2, 2, 2, 2, 2, 39, 40, 41, 0, 0, 0, 0,
0, 10, 13, 28, 2, 2, 2, 2, 28, 2, 28, 2, 2, 2, 2, 2,
2, 7, 2, 28, 2, 2, 0, 15, 16, 17, 18, 19, 25, 20, 33, 22,
0, 0, 0, 0, 0, 28, 9, 39, 42, 10, 27, 28, 2, 2, 2, 7,
0, 0, 0, 0, 0, 28, 39, 39, 42, 10, 27, 28, 2, 2, 2, 7,
28, 7, 2, 28, 2, 2, 0, 15, 43, 0, 0, 25, 20, 0, 0, 2,
28, 28, 0, 0, 0, 0, 0, 0, 0, 0, 44, 28, 2, 2, 7, 0,
2, 7, 2, 2, 0, 28, 7, 7, 2, 0, 28, 7, 0, 2, 7, 0,
@ -191,75 +191,77 @@ hb_use_u8[3083] =
43, 100, 12, 0, 0, 0, 0, 0, 0, 2, 2, 59, 16, 46, 21, 111,
100, 100, 100, 112, 113, 0, 0, 0, 0, 2, 2, 2, 2, 2, 0, 28,
2, 9, 44, 114, 114, 114, 9, 114, 114, 13, 114, 114, 114, 24, 0, 38,
0, 0, 0, 115, 116, 9, 3, 0, 0, 0, 0, 0, 0, 0, 117, 0,
0, 0, 0, 0, 0, 0, 4, 118, 119, 40, 40, 3, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 119, 119, 120, 119, 119, 119, 119, 119, 119, 119,
119, 0, 0, 121, 0, 0, 0, 0, 0, 0, 5, 121, 0, 0, 0, 0,
0, 0, 0, 115, 49, 9, 3, 0, 0, 0, 0, 0, 0, 0, 116, 0,
0, 0, 0, 0, 0, 0, 4, 117, 118, 40, 40, 3, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 118, 118, 119, 118, 118, 118, 118, 118, 118, 118,
118, 0, 0, 120, 0, 0, 0, 0, 0, 0, 5, 120, 0, 0, 0, 0,
0, 44, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7,
0, 2, 2, 2, 2, 0, 0, 0, 28, 0, 0, 0, 0, 0, 0, 0,
122, 2, 51, 2, 106, 2, 8, 2, 2, 2, 63, 17, 14, 0, 0, 29,
121, 2, 51, 2, 106, 2, 8, 2, 2, 2, 63, 17, 14, 0, 0, 29,
0, 2, 2, 0, 0, 0, 0, 0, 0, 27, 2, 2, 2, 2, 2, 2,
2, 2, 2, 123, 21, 21, 21, 21, 21, 21, 21, 124, 0, 0, 0, 0,
2, 2, 2, 122, 21, 21, 21, 21, 21, 21, 21, 123, 0, 0, 0, 0,
0, 9, 9, 9, 9, 9, 9, 9, 9, 9, 2, 0, 0, 0, 0, 0,
50, 2, 2, 2, 20, 20, 125, 114, 0, 2, 2, 2, 126, 18, 57, 18,
111, 100, 127, 0, 0, 0, 0, 0, 0, 9, 128, 2, 2, 2, 2, 2,
2, 2, 129, 21, 20, 18, 46, 130, 131, 132, 0, 0, 0, 0, 0, 0,
50, 2, 2, 2, 20, 20, 124, 114, 0, 2, 2, 2, 125, 18, 57, 18,
111, 100, 126, 0, 0, 0, 0, 0, 0, 9, 127, 2, 2, 2, 2, 2,
2, 2, 128, 21, 20, 18, 46, 129, 130, 131, 0, 0, 0, 0, 0, 0,
0, 2, 2, 50, 28, 2, 2, 2, 2, 2, 2, 2, 2, 8, 20, 57,
97, 74, 133, 134, 135, 0, 0, 0, 0, 2, 136, 2, 2, 2, 2, 137,
0, 28, 2, 40, 3, 0, 77, 13, 2, 51, 20, 138, 50, 51, 2, 2,
103, 8, 7, 0, 0, 0, 0, 0, 0, 2, 2, 2, 2, 2, 139, 19,
23, 0, 0, 140, 141, 0, 0, 0, 0, 2, 63, 43, 21, 78, 45, 142,
97, 74, 132, 133, 134, 0, 0, 0, 0, 2, 135, 2, 2, 2, 2, 136,
0, 28, 2, 40, 3, 0, 77, 13, 2, 51, 20, 137, 50, 51, 2, 2,
103, 8, 7, 0, 0, 0, 0, 0, 0, 2, 2, 2, 2, 2, 138, 19,
23, 0, 0, 139, 140, 0, 0, 0, 0, 2, 63, 43, 21, 78, 45, 141,
0, 79, 79, 79, 79, 79, 79, 79, 79, 0, 0, 0, 0, 0, 0, 0,
4, 119, 119, 119, 119, 120, 0, 0, 0, 2, 2, 2, 2, 2, 7, 2,
4, 118, 118, 118, 118, 119, 0, 0, 0, 2, 2, 2, 2, 2, 7, 2,
2, 2, 7, 2, 28, 2, 2, 2, 2, 2, 28, 2, 2, 2, 28, 7,
0, 126, 18, 25, 29, 0, 0, 143, 144, 2, 2, 28, 2, 28, 2, 2,
2, 2, 2, 2, 0, 12, 35, 0, 145, 2, 2, 11, 35, 0, 28, 2,
0, 125, 18, 25, 29, 0, 0, 142, 143, 2, 2, 28, 2, 28, 2, 2,
2, 2, 2, 2, 0, 12, 35, 0, 144, 2, 2, 11, 35, 0, 28, 2,
2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 28, 2, 2,
7, 2, 2, 9, 39, 0, 0, 0, 0, 2, 2, 2, 2, 2, 25, 36,
0, 2, 2, 2, 114, 114, 114, 114, 114, 146, 2, 7, 0, 0, 0, 0,
0, 2, 2, 2, 114, 114, 114, 114, 114, 145, 2, 7, 0, 0, 0, 0,
0, 2, 12, 12, 0, 0, 0, 0, 0, 7, 2, 2, 7, 2, 2, 2,
2, 28, 2, 7, 0, 28, 2, 0, 0, 147, 148, 149, 2, 2, 2, 2,
2, 2, 2, 2, 2, 20, 20, 18, 18, 18, 20, 20, 132, 0, 0, 0,
0, 0, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 2, 2, 2, 2,
2, 51, 50, 51, 0, 0, 0, 0, 151, 9, 72, 2, 2, 2, 2, 2,
2, 28, 2, 7, 0, 28, 2, 0, 0, 146, 147, 148, 2, 2, 2, 2,
2, 2, 2, 2, 2, 20, 20, 18, 18, 18, 20, 20, 131, 0, 0, 0,
0, 0, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 2, 2, 2, 2,
2, 51, 50, 51, 0, 0, 0, 0, 150, 9, 72, 2, 2, 2, 2, 2,
2, 16, 17, 19, 14, 22, 35, 0, 0, 0, 29, 0, 0, 0, 0, 0,
0, 9, 47, 2, 2, 2, 2, 2, 2, 2, 2, 2, 126, 18, 20, 152,
20, 19, 153, 154, 2, 2, 2, 2, 2, 0, 0, 63, 155, 0, 0, 0,
0, 9, 47, 2, 2, 2, 2, 2, 2, 2, 2, 2, 125, 18, 20, 151,
20, 19, 152, 153, 2, 2, 2, 2, 2, 0, 0, 63, 154, 0, 0, 0,
0, 2, 11, 0, 0, 0, 0, 0, 0, 2, 63, 23, 18, 18, 18, 20,
20, 106, 156, 0, 0, 157, 158, 29, 159, 28, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 21, 17, 20, 20, 160, 42, 0, 0, 0,
44, 2, 2, 2, 7, 7, 2, 2, 28, 2, 2, 2, 2, 2, 2, 2,
28, 2, 2, 2, 2, 2, 2, 2, 8, 16, 17, 19, 20, 161, 29, 0,
0, 9, 9, 28, 2, 2, 2, 7, 28, 7, 2, 28, 2, 2, 56, 15,
21, 14, 21, 45, 30, 31, 30, 32, 0, 0, 0, 0, 33, 0, 0, 0,
2, 2, 21, 0, 9, 9, 9, 44, 0, 9, 9, 44, 0, 0, 0, 0,
0, 2, 2, 63, 23, 18, 18, 18, 20, 21, 124, 13, 15, 0, 0, 0,
0, 2, 2, 2, 2, 2, 0, 0, 162, 163, 0, 0, 0, 0, 0, 0,
0, 16, 17, 18, 18, 64, 97, 23, 159, 9, 164, 7, 0, 0, 0, 0,
0, 2, 2, 2, 2, 2, 2, 2, 63, 23, 18, 18, 0, 46, 46, 9,
165, 35, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 18,
0, 21, 17, 18, 18, 19, 14, 80, 165, 36, 0, 0, 0, 0, 0, 0,
0, 2, 2, 2, 2, 2, 8, 166, 23, 18, 20, 20, 164, 7, 0, 0,
0, 2, 2, 2, 2, 2, 7, 41, 134, 21, 20, 18, 74, 19, 20, 0,
0, 2, 2, 2, 7, 0, 0, 0, 0, 2, 2, 2, 2, 2, 2, 16,
17, 18, 19, 20, 103, 165, 35, 0, 0, 2, 2, 2, 7, 28, 0, 2,
2, 2, 2, 28, 7, 2, 2, 2, 2, 21, 21, 16, 30, 31, 10, 167,
168, 169, 170, 0, 0, 0, 0, 0, 0, 2, 2, 2, 2, 0, 2, 2,
2, 63, 23, 18, 18, 0, 20, 21, 27, 106, 0, 31, 0, 0, 0, 0,
0, 50, 18, 20, 20, 20, 138, 2, 2, 2, 171, 172, 9, 13, 173, 70,
174, 0, 0, 1, 145, 0, 0, 0, 0, 50, 18, 20, 14, 17, 18, 2,
2, 2, 2, 156, 156, 156, 175, 175, 175, 175, 175, 175, 13, 176, 0, 28,
0, 20, 18, 18, 29, 20, 20, 9, 165, 0, 59, 59, 59, 59, 59, 59,
59, 64, 19, 80, 44, 0, 0, 0, 0, 2, 2, 2, 7, 2, 28, 2,
2, 50, 20, 20, 29, 0, 36, 20, 25, 9, 158, 177, 173, 0, 0, 0,
0, 2, 2, 2, 28, 7, 2, 2, 2, 2, 2, 2, 2, 2, 21, 21,
45, 20, 33, 80, 66, 0, 0, 0, 0, 2, 178, 64, 45, 0, 0, 0,
20, 106, 155, 0, 0, 156, 157, 29, 158, 28, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 21, 17, 20, 20, 159, 42, 0, 0, 0,
47, 125, 0, 0, 0, 0, 0, 0, 0, 2, 2, 2, 7, 7, 2, 2,
28, 2, 2, 2, 2, 2, 2, 2, 28, 2, 2, 2, 2, 2, 2, 2,
8, 16, 17, 19, 20, 160, 29, 0, 0, 9, 9, 28, 2, 2, 2, 7,
28, 7, 2, 28, 2, 2, 56, 15, 21, 14, 21, 45, 30, 31, 30, 32,
0, 0, 0, 0, 33, 0, 0, 0, 2, 2, 21, 0, 9, 9, 9, 44,
0, 9, 9, 44, 0, 0, 0, 0, 0, 2, 2, 63, 23, 18, 18, 18,
20, 21, 123, 13, 15, 0, 0, 0, 0, 2, 2, 2, 2, 2, 0, 0,
161, 162, 0, 0, 0, 0, 0, 0, 0, 16, 17, 18, 18, 64, 97, 23,
158, 9, 163, 7, 0, 0, 0, 0, 0, 2, 2, 2, 2, 2, 2, 2,
63, 23, 18, 18, 0, 46, 46, 9, 164, 35, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 2, 2, 18, 0, 21, 17, 18, 18, 19, 14, 80,
164, 36, 0, 0, 0, 0, 0, 0, 0, 2, 2, 2, 2, 2, 8, 165,
23, 18, 20, 20, 163, 7, 0, 0, 0, 2, 2, 2, 2, 2, 7, 41,
133, 21, 20, 18, 74, 19, 20, 0, 0, 2, 2, 2, 7, 0, 0, 0,
0, 2, 2, 2, 2, 2, 2, 16, 17, 18, 19, 20, 103, 164, 35, 0,
0, 2, 2, 2, 7, 28, 0, 2, 2, 2, 2, 28, 7, 2, 2, 2,
2, 21, 21, 16, 30, 31, 10, 166, 167, 168, 169, 0, 0, 0, 0, 0,
0, 2, 2, 2, 2, 0, 2, 2, 2, 63, 23, 18, 18, 0, 20, 21,
27, 106, 0, 31, 0, 0, 0, 0, 0, 50, 18, 20, 20, 20, 137, 2,
2, 2, 170, 171, 9, 13, 172, 70, 173, 0, 0, 1, 144, 0, 0, 0,
0, 50, 18, 20, 14, 17, 18, 2, 2, 2, 2, 155, 155, 155, 174, 174,
174, 174, 174, 174, 13, 175, 0, 28, 0, 20, 18, 18, 29, 20, 20, 9,
164, 0, 59, 59, 59, 59, 59, 59, 59, 64, 19, 80, 44, 0, 0, 0,
0, 2, 2, 2, 7, 2, 28, 2, 2, 50, 20, 20, 29, 0, 36, 20,
25, 9, 157, 176, 172, 0, 0, 0, 0, 2, 2, 2, 28, 7, 2, 2,
2, 2, 2, 2, 2, 2, 21, 21, 45, 20, 33, 80, 66, 0, 0, 0,
0, 2, 177, 64, 45, 0, 0, 0, 0, 9, 178, 2, 2, 2, 2, 2,
2, 2, 2, 21, 20, 18, 29, 0, 46, 14, 140, 0, 0, 0, 0, 0,
0, 179, 179, 179, 106, 7, 0, 0, 0, 9, 9, 9, 44, 0, 0, 0,
0, 2, 2, 2, 2, 2, 7, 0, 56, 180, 18, 18, 18, 18, 18, 18,
18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 0, 0, 0,
38, 114, 24, 0, 0, 0, 0, 0, 0, 0, 0, 7, 0, 0, 0, 0,
0, 2, 2, 2, 0, 0, 0, 0, 0, 2, 2, 2, 2, 2, 0, 56,
35, 0, 4, 119, 119, 119, 120, 0, 0, 9, 9, 9, 47, 2, 2, 2,
35, 0, 4, 118, 118, 118, 119, 0, 0, 9, 9, 9, 47, 2, 2, 2,
0, 2, 2, 2, 2, 2, 0, 0, 2, 2, 2, 2, 2, 2, 2, 2,
44, 2, 2, 2, 2, 2, 2, 9, 9, 2, 2, 42, 42, 42, 90, 0,
0, O, O, O, GB, B, B, GB, O, O, WJ,FMPst,FMPst, O, CGJ, B,
@ -268,7 +270,7 @@ hb_use_u8[3083] =
VPst,VMBlw, O, O, VAbv, GB,VMAbv,VMPst,VMPst, O, B, VBlw, O, O, VPre, VPre,
O, VPre, H, O, VPst,FMAbv, O,CMBlw, O, VAbv, O, VAbv, H, O,VMBlw,VMAbv,
CMAbv, GB, GB, O, MBlw,CMAbv,CMAbv, VPst, VAbv,VMAbv, O, VPst, O, VPre, VPre,VMAbv,
B, O, CS, CS, O, B, VAbv, VAbv, B, R, O, HVM, O, O, FBlw, O,
B, O, CS, CS,VMPst, B, VAbv, VAbv, B, R, O, HVM, O, O, FBlw, O,
CMAbv, O,CMBlw, VAbv, VBlw, B, SUB, SUB, SUB, O, SUB, SUB, O, FBlw, O, B,
VPst, VBlw, VPre,VMAbv,VMBlw,VMPst, IS, VAbv, MPst, MPre, MBlw, MBlw, B, MBlw, MBlw, VPst,
VMPst,VMPst, B, MBlw, VPst, VPre, VAbv, VAbv,VMPst,VMPst,VMBlw, B,VMPst, VBlw, VPst, CGJ,
@ -276,18 +278,18 @@ hb_use_u8[3083] =
CGJ, WJ, CGJ, GB,CMAbv,CMAbv, B, GB, B, VAbv, SUB, FPst, FPst,VMBlw, FPst, FPst,
FBlw,VMAbv,FMBlw, VAbv, VPre, B, MPre, MBlw, SUB, FAbv, FAbv, MAbv, SUB, Sk, VPst, VAbv,
VMAbv,VMAbv, FAbv,CMAbv, VPst, H, B, O,SMAbv,SMBlw,SMAbv,SMAbv,SMAbv, VPst, IS, VBlw,
FAbv,VMPre,VMPre,FMAbv,CMBlw,VMBlw,VMBlw,VMAbv, CS, CS,VMPst, O,FMAbv, ZWNJ, CGJ, WJ,
WJ, WJ, O,FMPst, O, O, H, MPst, VPst, H,VMAbv, VAbv,VMBlw, B, VBlw, FPst,
VPst, FAbv,VMPst, B,CMAbv, VAbv, MBlw, MPst, MBlw, H, O, VBlw, MPst, MPre, MAbv, MBlw,
O, B, FAbv, FAbv, FPst, VBlw, B, B, VPre, O,VMPst, IS, O,VMPst, VBlw, VPst,
VMBlw,VMBlw,VMAbv, O, IS,VMBlw, B,VMPst,VMAbv,VMPst, CS, CS, B, N, N, O,
HN, VPre, VBlw, VAbv, IS,CMAbv, O, VPst, B, R, R, O,FMBlw,CMBlw, VAbv, VPre,
VMAbv,VMAbv, H, VAbv,CMBlw,FMAbv, B, CS, CS, H,CMBlw,VMPst, H,VMPst, VAbv,VMAbv,
VPst, IS, R, MPst, R, MPst,CMBlw, B,FMBlw, VBlw,VMAbv, R, MBlw, MBlw, GB, FBlw,
FBlw,CMAbv, IS, VBlw, IS, GB, VAbv, H, H, O, VBlw,
FAbv,VMPre,VMPre,FMAbv,CMBlw,VMBlw,VMBlw,VMAbv, CS, O,FMAbv, ZWNJ, CGJ, WJ, WJ, WJ,
O,FMPst, O, O, H, MPst, VPst, H,VMAbv, VAbv,VMBlw, B, VBlw, FPst, VPst, FAbv,
VMPst, B,CMAbv, VAbv, MBlw, MPst, MBlw, H, O, VBlw, MPst, MPre, MAbv, MBlw, O, B,
FAbv, FAbv, FPst, VBlw, B, B, VPre, O,VMPst, IS, O,VMPst, VBlw, VPst,VMBlw,VMBlw,
VMAbv, O, IS,VMBlw, B,VMPst,VMAbv,VMPst, CS, CS, B, N, N, O, HN, VPre,
VBlw, VAbv, IS,CMAbv, O, VPst, B, R, R, O,FMBlw,CMBlw, VAbv, VPre,VMAbv,VMAbv,
H, VAbv,CMBlw,FMAbv, B, CS, CS, H,CMBlw,VMPst, H,VMPst, VAbv,VMAbv, VPst, IS,
R, MPst, R, MPst,CMBlw, B,FMBlw, VBlw,VMAbv, R, MBlw, MBlw, GB, FBlw, FBlw,CMAbv,
IS, VBlw, IS, GB, VAbv, R,VMPst, H, H, O, VBlw,
};
static const uint16_t
hb_use_u16[768] =
hb_use_u16[776] =
{
0, 0, 1, 2, 0, 0, 0, 0, 0, 0, 3, 4, 0, 5, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 0, 0, 0,
@ -319,24 +321,25 @@ hb_use_u16[768] =
9, 9,173,170, 0, 0, 0, 0, 0, 0, 0, 9,174,175, 0, 9,
176, 0, 0,177,178, 0, 0, 0,179, 9, 9,180,181,182,183,184,
185, 9, 9,186,187, 0, 0, 0,188, 9,189,190,191, 9, 9,192,
185, 9, 9,193,194,105,195,102, 9, 33,196,197, 0, 0, 0, 0,
198,199, 94, 9, 9,200,201, 2,202, 20, 21,203,204,205,206,207,
9, 9, 9,208,209,210,211, 0,195, 9, 9,212,213, 2, 0, 0,
9, 9,214,215,216,217, 0, 0, 9, 9, 9,218,219, 2, 0, 0,
9, 9,220,221, 2, 0, 0, 0, 9,222,223,103,224, 0, 0, 0,
9, 9,225,226, 0, 0, 0, 0,227,228, 9,229,230, 2, 0, 0,
0, 0,231, 9, 9,232,233, 0,234, 9, 9,235,236,237, 9, 9,
238,239, 0, 0, 0, 0, 0, 0, 21, 9,214,240, 7, 9, 70, 18,
9,241, 73,242, 0, 0, 0, 0,243, 9, 9,244,245, 2,246, 9,
247,248, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9,249,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 98,250, 0, 0, 0, 0,
0, 0, 0, 0, 2, 0, 0, 0, 9, 9, 9,251, 0, 0, 0, 0,
9, 9, 9, 9,252,253,254,254,255,256, 0, 0, 0, 0,257, 0,
9, 9, 9, 9, 9,258, 0, 0, 9, 9, 9, 9, 9, 9,105, 70,
94,259, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,260,
9, 9, 70,261,262, 0, 0, 0, 0, 9,263, 0, 9, 9,264, 2,
9, 9, 9, 9,265, 2, 0, 0,129,129,129,129,129,129,129,129,
160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,129,
185, 9, 9,193,194,105,195,102, 9, 33,196,197,198, 0, 0, 0,
199,200, 94, 9, 9,201,202, 2,203, 20, 21,204,205,206,207,208,
9, 9, 9,209,210,211,212, 0,195, 9, 9,213,214, 2, 0, 0,
9, 9,215,216,217,218, 0, 0, 9, 9, 9,219,220, 2, 0, 0,
9, 9,221,222, 2, 0, 0, 0, 9,223,224,103,225, 0, 0, 0,
9, 9,226,227, 0, 0, 0, 0,228,229, 9,230,231, 2, 0, 0,
0, 0,232, 9, 9,233,234, 0,235, 9, 9,236,237,238, 9, 9,
239,240, 0, 0, 0, 0, 0, 0, 21, 9,215,241, 7, 9, 70, 18,
9,242, 73,243, 0, 0, 0, 0,244, 9, 9,245,246, 2,247, 9,
248,249, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9,250,
251, 48, 9,252,253, 2, 0, 0, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 98,254, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0,
9, 9, 9,255, 0, 0, 0, 0, 9, 9, 9, 9,256,257,258,258,
259,260, 0, 0, 0, 0,261, 0, 9, 9, 9, 9, 9,262, 0, 0,
9, 9, 9, 9, 9, 9,105, 70, 94,263, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0,264, 9, 9, 70,265,266, 0, 0, 0,
0, 9,267, 0, 9, 9,268, 2, 9, 9, 9, 9,269, 2, 0, 0,
129,129,129,129,129,129,129,129,160,160,160,160,160,160,160,160,
160,160,160,160,160,160,160,129,
};
static inline unsigned
@ -347,7 +350,7 @@ hb_use_b4 (const uint8_t* a, unsigned i)
static inline uint_fast8_t
hb_use_get_category (unsigned u)
{
return u<921600u?hb_use_u8[2721+(((hb_use_u8[593+(((hb_use_u16[((hb_use_u8[113+(((hb_use_b4(hb_use_u8,u>>1>>3>>3>>5))<<5)+((u>>1>>3>>3)&31u))])<<3)+((u>>1>>3)&7u)])<<3)+((u>>1)&7u))])<<1)+((u)&1u))]:O;
return u<921600u?hb_use_u8[2753+(((hb_use_u8[593+(((hb_use_u16[((hb_use_u8[113+(((hb_use_b4(hb_use_u8,u>>1>>3>>3>>5))<<5)+((u>>1>>3>>3)&31u))])<<3)+((u>>1>>3)&7u)])<<3)+((u>>1)&7u))])<<1)+((u)&1u))]:O;
}
#undef B
@ -393,26 +396,5 @@ hb_use_get_category (unsigned u)
#undef VMPre
#ifdef HB_USE_TABLE_MAIN
int main (int argc, char **argv)
{
if (argc != 2)
{
for (unsigned u = 0; u < 0x10FFFFu; u++)
printf ("U+%04X %d\n", u, hb_use_get_category (u));
return 0;
}
hb_codepoint_t u;
sscanf (argv[1], "%x", &u);
printf ("%d\n", hb_use_get_category (u));
return 0;
}
#endif
#endif /* HB_OT_SHAPER_USE_TABLE_HH */
/* == End of generated table == */

View file

@ -10,8 +10,8 @@
* # Date: 2015-03-12, 21:17:00 GMT [AG]
* # Date: 2019-11-08, 23:22:00 GMT [AG]
*
* # Scripts-14.0.0.txt
* # Date: 2021-07-10, 00:35:31 GMT
* # Scripts-15.0.0.txt
* # Date: 2022-04-26, 23:15:02 GMT
*/
#include "hb.hh"

View file

@ -380,6 +380,10 @@ hb_ot_shaper_categorize (const hb_ot_shape_planner_t *planner)
case HB_SCRIPT_TOTO:
case HB_SCRIPT_VITHKUQI:
/* Unicode-15.0 additions */
case HB_SCRIPT_KAWI:
case HB_SCRIPT_NAG_MUNDARI:
/* If the designer designed the font for the 'DFLT' script,
* (or we ended up arbitrarily pick 'latn'), use the default shaper.
* Otherwise, use the specific shaper.

View file

@ -307,12 +307,12 @@ hb_ot_tags_from_language (const char *lang_str,
hb_tag_t lang_tag = hb_tag_from_string (lang_str, first_len);
static hb_atomic_int_t last_tag_idx; /* Poor man's cache. */
unsigned tag_idx = last_tag_idx.get_relaxed ();
unsigned tag_idx = last_tag_idx;
if (likely (tag_idx < ot_languages_len && ot_languages[tag_idx].language == lang_tag) ||
hb_sorted_array (ot_languages, ot_languages_len).bfind (lang_tag, &tag_idx))
{
last_tag_idx.set_relaxed (tag_idx);
last_tag_idx = tag_idx;
unsigned int i;
while (tag_idx != 0 &&
ot_languages[tag_idx].language == ot_languages[tag_idx - 1].language)

View file

@ -206,11 +206,10 @@ struct avar
v = hb_clamp (v, -(1<<14), +(1<<14));
out.push (v);
}
OT::VariationStore::destroy_cache (var_store_cache);
for (unsigned i = 0; i < coords_length; i++)
coords[i] = out[i];
OT::VariationStore::destroy_cache (var_store_cache);
#endif
}

View file

@ -43,7 +43,7 @@ struct VariationValueRecord
public:
Tag valueTag; /* Four-byte tag identifying a font-wide measure. */
HBUINT32 varIdx; /* Outer/inner index into VariationStore item. */
VarIdx varIdx; /* Outer/inner index into VariationStore item. */
public:
DEFINE_SIZE_STATIC (8);

View file

@ -276,33 +276,17 @@ bool _process_overflows (const hb_vector_t<graph::overflow_record_t>& overflows,
return resolution_attempted;
}
/*
* Attempts to modify the topological sorting of the provided object graph to
* eliminate offset overflows in the links between objects of the graph. If a
* non-overflowing ordering is found the updated graph is serialized it into the
* provided serialization context.
*
* If necessary the structure of the graph may be modified in ways that do not
* affect the functionality of the graph. For example shared objects may be
* duplicated.
*
* For a detailed writeup describing how the algorithm operates see:
* docs/repacker.md
*/
template<typename T>
inline hb_blob_t*
hb_resolve_overflows (const T& packed,
hb_tag_t table_tag,
unsigned max_rounds = 20,
bool recalculate_extensions = false) {
graph_t sorted_graph (packed);
inline bool
hb_resolve_graph_overflows (hb_tag_t table_tag,
unsigned max_rounds ,
bool recalculate_extensions,
graph_t& sorted_graph /* IN/OUT */)
{
sorted_graph.sort_shortest_distance ();
bool will_overflow = graph::will_overflow (sorted_graph);
if (!will_overflow)
{
return graph::serialize (sorted_graph);
}
return true;
graph::gsubgpos_graph_context_t ext_context (table_tag, sorted_graph);
if ((table_tag == HB_OT_TAG_GPOS
@ -314,13 +298,13 @@ hb_resolve_overflows (const T& packed,
DEBUG_MSG (SUBSET_REPACK, nullptr, "Splitting subtables if needed.");
if (!_presplit_subtables_if_needed (ext_context)) {
DEBUG_MSG (SUBSET_REPACK, nullptr, "Subtable splitting failed.");
return nullptr;
return false;
}
DEBUG_MSG (SUBSET_REPACK, nullptr, "Promoting lookups to extensions if needed.");
if (!_promote_extensions_if_needed (ext_context)) {
DEBUG_MSG (SUBSET_REPACK, nullptr, "Extensions promotion failed.");
return nullptr;
return false;
}
}
@ -360,15 +344,41 @@ hb_resolve_overflows (const T& packed,
if (sorted_graph.in_error ())
{
DEBUG_MSG (SUBSET_REPACK, nullptr, "Sorted graph in error state.");
return nullptr;
return false;
}
if (graph::will_overflow (sorted_graph))
{
DEBUG_MSG (SUBSET_REPACK, nullptr, "Offset overflow resolution failed.");
return nullptr;
return false;
}
return true;
}
/*
* Attempts to modify the topological sorting of the provided object graph to
* eliminate offset overflows in the links between objects of the graph. If a
* non-overflowing ordering is found the updated graph is serialized it into the
* provided serialization context.
*
* If necessary the structure of the graph may be modified in ways that do not
* affect the functionality of the graph. For example shared objects may be
* duplicated.
*
* For a detailed writeup describing how the algorithm operates see:
* docs/repacker.md
*/
template<typename T>
inline hb_blob_t*
hb_resolve_overflows (const T& packed,
hb_tag_t table_tag,
unsigned max_rounds = 20,
bool recalculate_extensions = false) {
graph_t sorted_graph (packed);
if (!hb_resolve_graph_overflows (table_tag, max_rounds, recalculate_extensions, sorted_graph))
return nullptr;
return graph::serialize (sorted_graph);
}

View file

@ -139,6 +139,14 @@ struct hb_serialize_context_t
objidx = o.objidx;
}
#endif
HB_INTERNAL static int cmp (const void* a, const void* b)
{
int cmp = ((const link_t*)a)->position - ((const link_t*)b)->position;
if (cmp) return cmp;
return ((const link_t*)a)->objidx - ((const link_t*)b)->objidx;
}
};
char *head;
@ -315,7 +323,7 @@ struct hb_serialize_context_t
{
object_t *obj = current;
if (unlikely (!obj)) return;
if (unlikely (in_error())) return;
if (unlikely (in_error() && !only_overflow ())) return;
current = current->next;
revert (obj->head, obj->tail);

View file

@ -94,7 +94,7 @@ hb_face_t::load_num_glyphs () const
ret = hb_max (ret, load_num_glyphs_from_maxp (this));
num_glyphs.set_relaxed (ret);
num_glyphs = ret;
return ret;
}
@ -102,7 +102,7 @@ unsigned int
hb_face_t::load_upem () const
{
unsigned int ret = table.head->get_upem ();
upem.set_relaxed (ret);
upem = ret;
return ret;
}

View file

@ -391,7 +391,7 @@ hb_subset_input_get_user_data (const hb_subset_input_t *input,
*
* Return value: `true` if success, `false` otherwise
*
* Since: REPLACEME
* Since: EXPERIMENTAL
**/
hb_bool_t
hb_subset_input_pin_axis_to_default (hb_subset_input_t *input,
@ -415,7 +415,7 @@ hb_subset_input_pin_axis_to_default (hb_subset_input_t *input,
*
* Return value: `true` if success, `false` otherwise
*
* Since: REPLACEME
* Since: EXPERIMENTAL
**/
hb_bool_t
hb_subset_input_pin_axis_location (hb_subset_input_t *input,

View file

@ -89,8 +89,6 @@ _remap_indexes (const hb_set_t *indexes,
}
#ifndef HB_NO_SUBSET_LAYOUT
typedef void (*layout_collect_func_t) (hb_face_t *face, hb_tag_t table_tag, const hb_tag_t *scripts, const hb_tag_t *languages, const hb_tag_t *features, hb_set_t *lookup_indexes /* OUT */);
/*
* Removes all tags from 'tags' that are not in filter. Additionally eliminates any duplicates.
@ -130,8 +128,8 @@ static bool _filter_tag_list(hb_vector_t<hb_tag_t>* tags, /* IN/OUT */
template <typename T>
static void _collect_layout_indices (hb_subset_plan_t *plan,
const T& table,
layout_collect_func_t layout_collect_func,
hb_set_t *indices /* OUT */)
hb_set_t *lookup_indices, /* OUT */
hb_set_t *feature_indices /* OUT */)
{
unsigned num_features = table.get_feature_count ();
hb_vector_t<hb_tag_t> features;
@ -149,12 +147,23 @@ static void _collect_layout_indices (hb_subset_plan_t *plan,
|| !plan->check_success (!scripts.in_error ()) || !scripts)
return;
layout_collect_func (plan->source,
T::tableTag,
retain_all_scripts ? nullptr : scripts.arrayZ,
nullptr,
retain_all_features ? nullptr : features.arrayZ,
indices);
hb_ot_layout_collect_features (plan->source,
T::tableTag,
retain_all_scripts ? nullptr : scripts.arrayZ,
nullptr,
retain_all_features ? nullptr : features.arrayZ,
feature_indices);
for (unsigned feature_index : *feature_indices)
{
//TODO: replace HB_OT_LAYOUT_NO_VARIATIONS_INDEX with variation_index for
//instancing
const OT::Feature &f = table.get_feature_variation (feature_index, HB_OT_LAYOUT_NO_VARIATIONS_INDEX);
f.add_lookup_indexes_to (lookup_indices);
}
//TODO: update for instancing: only collect lookups from feature_indexes that have no variations
table.feature_variation_collect_lookups (feature_indices, lookup_indices);
}
@ -232,11 +241,11 @@ _closure_glyphs_lookups_features (hb_subset_plan_t *plan,
{
hb_blob_ptr_t<T> table = plan->source_table<T> ();
hb_tag_t table_tag = table->tableTag;
hb_set_t lookup_indices;
hb_set_t lookup_indices, feature_indices;
_collect_layout_indices<T> (plan,
*table,
hb_ot_layout_collect_lookups,
&lookup_indices);
&lookup_indices,
&feature_indices);
if (table_tag == HB_OT_TAG_GSUB)
hb_ot_layout_lookups_substitute_closure (plan->source,
@ -247,19 +256,13 @@ _closure_glyphs_lookups_features (hb_subset_plan_t *plan,
&lookup_indices);
_remap_indexes (&lookup_indices, lookups);
// Collect and prune features
hb_set_t feature_indices;
_collect_layout_indices<T> (plan,
*table,
hb_ot_layout_collect_features,
&feature_indices);
// prune features
table->prune_features (lookups, &feature_indices);
hb_map_t duplicate_feature_map;
_GSUBGPOS_find_duplicate_features (*table, lookups, &feature_indices, &duplicate_feature_map);
feature_indices.clear ();
table->prune_langsys (&duplicate_feature_map, langsys_map, &feature_indices);
table->prune_langsys (&duplicate_feature_map, plan->layout_scripts, langsys_map, &feature_indices);
_remap_indexes (&feature_indices, features);
table.destroy ();
@ -269,11 +272,46 @@ _closure_glyphs_lookups_features (hb_subset_plan_t *plan,
#ifndef HB_NO_VAR
static inline void
_collect_layout_variation_indices (hb_subset_plan_t* plan,
const hb_set_t *glyphset,
const hb_map_t *gpos_lookups,
hb_set_t *layout_variation_indices,
hb_map_t *layout_variation_idx_map)
_generate_varstore_inner_maps (const hb_set_t& varidx_set,
unsigned subtable_count,
hb_vector_t<hb_inc_bimap_t> &inner_maps /* OUT */)
{
if (varidx_set.is_empty () || subtable_count == 0) return;
inner_maps.resize (subtable_count);
for (unsigned idx : varidx_set)
{
uint16_t major = idx >> 16;
uint16_t minor = idx & 0xFFFF;
if (major >= subtable_count)
continue;
inner_maps[major].add (minor);
}
}
static inline hb_font_t*
_get_hb_font_with_variations (const hb_subset_plan_t *plan)
{
hb_font_t *font = hb_font_create (plan->source);
hb_vector_t<hb_variation_t> vars;
vars.alloc (plan->user_axes_location->get_population ());
for (auto _ : *plan->user_axes_location)
{
hb_variation_t var;
var.tag = _.first;
var.value = _.second;
vars.push (var);
}
hb_font_set_variations (font, vars.arrayZ, plan->user_axes_location->get_population ());
return font;
}
static inline void
_collect_layout_variation_indices (hb_subset_plan_t* plan)
{
hb_blob_ptr_t<OT::GDEF> gdef = plan->source_table<OT::GDEF> ();
hb_blob_ptr_t<GPOS> gpos = plan->source_table<GPOS> ();
@ -284,13 +322,40 @@ _collect_layout_variation_indices (hb_subset_plan_t* plan,
gpos.destroy ();
return;
}
OT::hb_collect_variation_indices_context_t c (layout_variation_indices, glyphset, gpos_lookups);
const OT::VariationStore *var_store = nullptr;
hb_set_t varidx_set;
hb_font_t *font = nullptr;
float *store_cache = nullptr;
bool collect_delta = plan->pinned_at_default ? false : true;
if (collect_delta)
{
font = _get_hb_font_with_variations (plan);
if (gdef->has_var_store ())
{
var_store = &(gdef->get_var_store ());
store_cache = var_store->create_cache ();
}
}
OT::hb_collect_variation_indices_context_t c (&varidx_set,
plan->layout_variation_idx_delta_map,
font, var_store,
plan->_glyphset_gsub,
plan->gpos_lookups,
store_cache);
gdef->collect_variation_indices (&c);
if (hb_ot_layout_has_positioning (plan->source))
gpos->collect_variation_indices (&c);
gdef->remap_layout_variation_indices (layout_variation_indices, layout_variation_idx_map);
hb_font_destroy (font);
var_store->destroy_cache (store_cache);
gdef->remap_layout_variation_indices (&varidx_set, plan->layout_variation_idx_delta_map);
unsigned subtable_count = gdef->has_var_store () ? gdef->get_var_store ().get_sub_table_count () : 0;
_generate_varstore_inner_maps (varidx_set, subtable_count, plan->gdef_varstore_inner_maps);
gdef.destroy ();
gpos.destroy ();
@ -506,11 +571,7 @@ _populate_gids_to_retain (hb_subset_plan_t* plan,
#ifndef HB_NO_VAR
if (close_over_gdef)
_collect_layout_variation_indices (plan,
plan->_glyphset_gsub,
plan->gpos_lookups,
plan->layout_variation_indices,
plan->layout_variation_idx_map);
_collect_layout_variation_indices (plan);
#endif
}
@ -585,12 +646,9 @@ _nameid_closure (hb_face_t *face,
#ifndef HB_NO_VAR
static void
_normalize_axes_location (hb_face_t *face,
const hb_hashmap_t<hb_tag_t, float> *user_axes_location,
hb_hashmap_t<hb_tag_t, int> *normalized_axes_location, /* OUT */
bool &all_axes_pinned)
_normalize_axes_location (hb_face_t *face, hb_subset_plan_t *plan)
{
if (user_axes_location->is_empty ())
if (plan->user_axes_location->is_empty ())
return;
hb_array_t<const OT::AxisRecord> axes = face->table.fvar->get_axes ();
@ -605,25 +663,27 @@ _normalize_axes_location (hb_face_t *face,
for (const auto& axis : axes)
{
hb_tag_t axis_tag = axis.get_axis_tag ();
if (!user_axes_location->has (axis_tag))
if (!plan->user_axes_location->has (axis_tag))
{
axis_not_pinned = true;
}
else
{
int normalized_v = axis.normalize_axis_value (user_axes_location->get (axis_tag));
int normalized_v = axis.normalize_axis_value (plan->user_axes_location->get (axis_tag));
if (has_avar && axis_count < face->table.avar->get_axis_count ())
{
normalized_v = seg_maps->map (normalized_v);
}
normalized_axes_location->set (axis_tag, normalized_v);
plan->axes_location->set (axis_tag, normalized_v);
if (normalized_v != 0)
plan->pinned_at_default = false;
}
if (has_avar)
seg_maps = &StructAfter<OT::SegmentMaps> (*seg_maps);
axis_count++;
}
all_axes_pinned = !axis_not_pinned;
plan->all_axes_pinned = !axis_not_pinned;
}
#endif
/**
@ -683,8 +743,8 @@ hb_subset_plan_create_or_fail (hb_face_t *face,
plan->gpos_features = hb_map_create ();
plan->colrv1_layers = hb_map_create ();
plan->colr_palettes = hb_map_create ();
plan->layout_variation_indices = hb_set_create ();
plan->layout_variation_idx_map = hb_map_create ();
plan->check_success (plan->layout_variation_idx_delta_map = hb_hashmap_create<unsigned, hb_pair_t<unsigned, int>> ());
plan->gdef_varstore_inner_maps.init ();
plan->check_success (plan->sanitized_table_cache = hb_hashmap_create<hb_tag_t, hb::unique_ptr<hb_blob_t>> ());
plan->check_success (plan->axes_location = hb_hashmap_create<hb_tag_t, int> ());
@ -692,12 +752,20 @@ hb_subset_plan_create_or_fail (hb_face_t *face,
if (plan->user_axes_location && input->axes_location)
*plan->user_axes_location = *input->axes_location;
plan->all_axes_pinned = false;
plan->pinned_at_default = true;
plan->check_success (plan->vmtx_map = hb_hashmap_create<unsigned, hb_pair_t<unsigned, int>> ());
plan->check_success (plan->hmtx_map = hb_hashmap_create<unsigned, hb_pair_t<unsigned, int>> ());
if (unlikely (plan->in_error ())) {
hb_subset_plan_destroy (plan);
return nullptr;
}
#ifndef HB_NO_VAR
_normalize_axes_location (face, plan);
#endif
_populate_unicodes_to_retain (input->sets.unicodes, input->sets.glyphs, plan);
_populate_gids_to_retain (plan,
@ -725,13 +793,6 @@ hb_subset_plan_create_or_fail (hb_face_t *face,
plan->glyph_map->get(plan->unicode_to_new_gid_list.arrayZ[i].second);
}
#ifndef HB_NO_VAR
_normalize_axes_location (face,
input->axes_location,
plan->axes_location,
plan->all_axes_pinned);
#endif
_nameid_closure (face, plan->name_ids, plan->all_axes_pinned, plan->user_axes_location);
if (unlikely (plan->in_error ())) {
hb_subset_plan_destroy (plan);
@ -754,44 +815,6 @@ hb_subset_plan_destroy (hb_subset_plan_t *plan)
{
if (!hb_object_destroy (plan)) return;
hb_set_destroy (plan->unicodes);
hb_set_destroy (plan->name_ids);
hb_set_destroy (plan->name_languages);
hb_set_destroy (plan->layout_features);
hb_set_destroy (plan->layout_scripts);
hb_set_destroy (plan->glyphs_requested);
hb_set_destroy (plan->drop_tables);
hb_set_destroy (plan->no_subset_tables);
hb_face_destroy (plan->source);
hb_face_destroy (plan->dest);
hb_map_destroy (plan->codepoint_to_glyph);
hb_map_destroy (plan->glyph_map);
hb_map_destroy (plan->reverse_glyph_map);
hb_map_destroy (plan->glyph_map_gsub);
hb_set_destroy (plan->_glyphset);
hb_set_destroy (plan->_glyphset_gsub);
hb_set_destroy (plan->_glyphset_mathed);
hb_set_destroy (plan->_glyphset_colred);
hb_map_destroy (plan->gsub_lookups);
hb_map_destroy (plan->gpos_lookups);
hb_map_destroy (plan->gsub_features);
hb_map_destroy (plan->gpos_features);
hb_map_destroy (plan->colrv1_layers);
hb_map_destroy (plan->colr_palettes);
hb_set_destroy (plan->layout_variation_indices);
hb_map_destroy (plan->layout_variation_idx_map);
hb_hashmap_destroy (plan->gsub_langsys);
hb_hashmap_destroy (plan->gpos_langsys);
hb_hashmap_destroy (plan->axes_location);
hb_hashmap_destroy (plan->sanitized_table_cache);
if (plan->user_axes_location)
{
hb_object_destroy (plan->user_axes_location);
hb_free (plan->user_axes_location);
}
hb_free (plan);
}

View file

@ -33,10 +33,56 @@
#include "hb-subset-input.hh"
#include "hb-map.hh"
#include "hb-bimap.hh"
#include "hb-set.hh"
struct hb_subset_plan_t
{
hb_subset_plan_t ()
{}
~hb_subset_plan_t()
{
hb_set_destroy (unicodes);
hb_set_destroy (name_ids);
hb_set_destroy (name_languages);
hb_set_destroy (layout_features);
hb_set_destroy (layout_scripts);
hb_set_destroy (glyphs_requested);
hb_set_destroy (drop_tables);
hb_set_destroy (no_subset_tables);
hb_face_destroy (source);
hb_face_destroy (dest);
hb_map_destroy (codepoint_to_glyph);
hb_map_destroy (glyph_map);
hb_map_destroy (reverse_glyph_map);
hb_map_destroy (glyph_map_gsub);
hb_set_destroy (_glyphset);
hb_set_destroy (_glyphset_gsub);
hb_set_destroy (_glyphset_mathed);
hb_set_destroy (_glyphset_colred);
hb_map_destroy (gsub_lookups);
hb_map_destroy (gpos_lookups);
hb_map_destroy (gsub_features);
hb_map_destroy (gpos_features);
hb_map_destroy (colrv1_layers);
hb_map_destroy (colr_palettes);
hb_hashmap_destroy (gsub_langsys);
hb_hashmap_destroy (gpos_langsys);
hb_hashmap_destroy (axes_location);
hb_hashmap_destroy (sanitized_table_cache);
hb_hashmap_destroy (hmtx_map);
hb_hashmap_destroy (vmtx_map);
hb_hashmap_destroy (layout_variation_idx_delta_map);
if (user_axes_location)
{
hb_object_destroy (user_axes_location);
hb_free (user_axes_location);
}
}
hb_object_header_t header;
bool successful;
@ -101,10 +147,11 @@ struct hb_subset_plan_t
hb_map_t *colrv1_layers;
hb_map_t *colr_palettes;
//The set of layout item variation store delta set indices to be retained
hb_set_t *layout_variation_indices;
//Old -> New layout item variation store delta set index mapping
hb_map_t *layout_variation_idx_map;
//Old layout item variation index -> (New varidx, delta) mapping
hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *layout_variation_idx_delta_map;
//gdef varstore retained varidx mapping
hb_vector_t<hb_inc_bimap_t> gdef_varstore_inner_maps;
hb_hashmap_t<hb_tag_t, hb::unique_ptr<hb_blob_t>>* sanitized_table_cache;
//normalized axes location map
@ -112,6 +159,12 @@ struct hb_subset_plan_t
//user specified axes location map
hb_hashmap_t<hb_tag_t, float> *user_axes_location;
bool all_axes_pinned;
bool pinned_at_default;
//hmtx metrics map: new gid->(advance, lsb)
hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *hmtx_map;
//vmtx metrics map: new gid->(advance, lsb)
hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *vmtx_map;
public:

View file

@ -405,6 +405,27 @@ _passthrough (hb_subset_plan_t *plan, hb_tag_t tag)
return result;
}
static bool
_dependencies_satisfied (hb_subset_plan_t *plan, hb_tag_t tag,
hb_set_t &visited_set, hb_set_t &revisit_set)
{
switch (tag)
{
case HB_OT_TAG_hmtx:
case HB_OT_TAG_vmtx:
if (!plan->pinned_at_default &&
!visited_set.has (HB_OT_TAG_glyf))
{
revisit_set.add (tag);
return false;
}
return true;
default:
return true;
}
}
static bool
_subset_table (hb_subset_plan_t *plan,
hb_vector_t<char> &buf,
@ -514,7 +535,7 @@ hb_subset_plan_execute_or_fail (hb_subset_plan_t *plan)
return nullptr;
}
hb_set_t tags_set;
hb_set_t tags_set, revisit_set;
bool success = true;
hb_tag_t table_tags[32];
unsigned offset = 0, num_tables = ARRAY_LENGTH (table_tags);
@ -527,10 +548,27 @@ hb_subset_plan_execute_or_fail (hb_subset_plan_t *plan)
{
hb_tag_t tag = table_tags[i];
if (_should_drop_table (plan, tag) && !tags_set.has (tag)) continue;
if (!_dependencies_satisfied (plan, tag, tags_set, revisit_set)) continue;
tags_set.add (tag);
success = _subset_table (plan, buf, tag);
if (unlikely (!success)) goto end;
}
/*delayed subsetting for some tables since they might have dependency on other tables in some cases:
e.g: during instantiating glyf tables, hmetrics/vmetrics are updated and saved in subset plan,
hmtx/vmtx subsetting need to use these updated metrics values*/
while (!revisit_set.is_empty ())
{
hb_set_t revisit_temp;
for (hb_tag_t tag : revisit_set)
{
if (!_dependencies_satisfied (plan, tag, tags_set, revisit_temp)) continue;
tags_set.add (tag);
success = _subset_table (plan, buf, tag);
if (unlikely (!success)) goto end;
}
revisit_set = revisit_temp;
}
offset += num_tables;
}

File diff suppressed because it is too large Load diff

View file

@ -6,16 +6,16 @@
*
* on file with this header:
*
* # emoji-data-14.0.0.txt
* # Date: 2021-08-26, 17:22:22 GMT
* # © 2021 Unicode®, Inc.
* # emoji-data.txt
* # Date: 2022-08-02, 00:26:10 GMT
* # © 2022 Unicode®, Inc.
* # Unicode and the Unicode Logo are registered trademarks of Unicode, Inc. in the U.S. and other countries.
* # For terms of use, see http://www.unicode.org/terms_of_use.html
* # For terms of use, see https://www.unicode.org/terms_of_use.html
* #
* # Emoji Data for UTS #51
* # Used with Emoji Version 14.0 and subsequent minor revisions (if any)
* # Used with Emoji Version 15.0 and subsequent minor revisions (if any)
* #
* # For documentation and usage, see http://www.unicode.org/reports/tr51
* # For documentation and usage, see https://www.unicode.org/reports/tr51
*/
#ifndef HB_UNICODE_EMOJI_TABLE_HH

View file

@ -47,7 +47,7 @@ HB_BEGIN_DECLS
*
* The minor component of the library version available at compile-time.
*/
#define HB_VERSION_MINOR 1
#define HB_VERSION_MINOR 2
/**
* HB_VERSION_MICRO:
*
@ -60,7 +60,7 @@ HB_BEGIN_DECLS
*
* A string literal containing the library version available at compile-time.
*/
#define HB_VERSION_STRING "5.1.0"
#define HB_VERSION_STRING "5.2.0"
/**
* HB_VERSION_ATLEAST: