harfbuzz: Update to 8.3.0

Jakub Marcowski 2024-03-09 01:57:02 +01:00
parent f28964805e
commit ac4cc07301
No known key found for this signature in database
GPG key ID: 10D9E07CFFBC0E6F
90 changed files with 1015 additions and 326 deletions


@ -375,7 +375,7 @@ Files extracted from upstream source:
## harfbuzz
- Upstream: https://github.com/harfbuzz/harfbuzz
- Version: 8.2.2 (18a6e78549e8e04a281129ea8ca784ce85f111b8, 2023)
- Version: 8.3.0 (894a1f72ee93a1fd8dc1d9218cb3fd8f048be29a, 2023)
- License: MIT
Files extracted from upstream source:
@ -383,7 +383,8 @@ Files extracted from upstream source:
- `AUTHORS`, `COPYING`, `THANKS`
- From the `src` folder, recursively:
- All the `.c`, `.cc`, `.h`, `.hh` files
- Except `main.cc`, `harfbuzz*.cc`, `failing-alloc.c`, `test*.cc`, `hb-wasm*.*`
- Except `main.cc`, `harfbuzz*.cc`, `failing-alloc.c`, `test*.cc`, `hb-wasm*.*`,
and the `wasm` folder
## icu4c


@ -204,6 +204,7 @@ struct IndexSubtable
{
TRACE_SANITIZE (this);
if (!u.header.sanitize (c)) return_trace (false);
hb_barrier ();
switch (u.header.indexFormat)
{
case 1: return_trace (u.format1.sanitize (c, glyph_count));
@ -378,6 +379,7 @@ struct IndexSubtableRecord
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
hb_barrier () &&
firstGlyphIndex <= lastGlyphIndex &&
offsetToSubtable.sanitize (c, base, lastGlyphIndex - firstGlyphIndex + 1));
}
@ -635,6 +637,7 @@ struct BitmapSizeTable
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
hb_barrier () &&
indexSubtableArrayOffset.sanitize (c, base, numberOfIndexSubtables) &&
horizontal.sanitize (c) &&
vertical.sanitize (c));
@ -738,7 +741,9 @@ struct CBLC
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
hb_barrier () &&
likely (version.major == 2 || version.major == 3) &&
hb_barrier () &&
sizeTables.sanitize (c, this));
}
@ -975,6 +980,7 @@ struct CBDT
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
hb_barrier () &&
likely (version.major == 2 || version.major == 3));
}


@ -1948,10 +1948,11 @@ struct COLR
bool has_v0_data () const { return numBaseGlyphs; }
bool has_v1_data () const
{
if (version == 1)
return (this+baseGlyphList).len > 0;
if (version != 1)
return false;
hb_barrier ();
return false;
return (this+baseGlyphList).len > 0;
}
unsigned int get_glyph_layers (hb_codepoint_t glyph,
@ -2032,6 +2033,8 @@ struct COLR
hb_set_t *palette_indices) const
{
if (version != 1) return;
hb_barrier ();
hb_set_t visited_glyphs;
hb_colrv1_closure_context_t c (this, &visited_glyphs, layer_indices, palette_indices);
@ -2058,10 +2061,12 @@ struct COLR
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
hb_barrier () &&
(this+baseGlyphsZ).sanitize (c, numBaseGlyphs) &&
(this+layersZ).sanitize (c, numLayers) &&
(version == 0 ||
(version == 1 &&
(hb_barrier () &&
version == 1 &&
baseGlyphList.sanitize (c, this) &&
layerList.sanitize (c, this) &&
clipList.sanitize (c, this) &&
@ -2284,6 +2289,8 @@ struct COLR
{
if (version == 1)
{
hb_barrier ();
const Paint *paint = get_base_glyph_paint (glyph);
return paint != nullptr;
@ -2313,6 +2320,8 @@ struct COLR
if (version == 1)
{
hb_barrier ();
const Paint *paint = get_base_glyph_paint (glyph);
if (paint)
{


@ -214,13 +214,17 @@ struct CPAL
hb_set_t *nameids_to_retain /* OUT */) const
{
if (version == 1)
{
hb_barrier ();
v1 ().collect_name_ids (this, numPalettes, numColors, color_index_map, nameids_to_retain);
}
}
private:
const CPALV1Tail& v1 () const
{
if (version == 0) return Null (CPALV1Tail);
hb_barrier ();
return StructAfter<CPALV1Tail> (*this);
}
@ -312,7 +316,10 @@ struct CPAL
return_trace (false);
if (version == 1)
{
hb_barrier ();
return_trace (v1 ().serialize (c->serializer, numPalettes, numColors, this, color_index_map));
}
return_trace (true);
}
@ -321,6 +328,7 @@ struct CPAL
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
hb_barrier () &&
(this+colorRecordsZ).sanitize (c, numColorRecords) &&
colorRecordIndicesZ.sanitize (c, numPalettes) &&
(version == 0 || v1 ().sanitize (c, this, numPalettes, numColors)));


@ -368,6 +368,7 @@ struct sbix
{
TRACE_SANITIZE (this);
return_trace (likely (c->check_struct (this) &&
hb_barrier () &&
version >= 1 &&
strikes.sanitize (c, this)));
}


@ -56,6 +56,7 @@ struct SVGDocumentIndexEntry
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
hb_barrier () &&
svgDoc.sanitize (c, base, svgDocLength));
}


@ -64,6 +64,7 @@ struct Coverage
{
TRACE_SANITIZE (this);
if (!u.format.sanitize (c)) return_trace (false);
hb_barrier ();
switch (u.format)
{
case 1: return_trace (u.format1.sanitize (c));


@ -291,6 +291,7 @@ struct CaretValue
{
TRACE_SANITIZE (this);
if (!u.format.sanitize (c)) return_trace (false);
hb_barrier ();
switch (u.format) {
case 1: return_trace (u.format1.sanitize (c));
case 2: return_trace (u.format2.sanitize (c));
@ -441,6 +442,20 @@ struct MarkGlyphSetsFormat1
bool covers (unsigned int set_index, hb_codepoint_t glyph_id) const
{ return (this+coverage[set_index]).get_coverage (glyph_id) != NOT_COVERED; }
void collect_used_mark_sets (const hb_set_t& glyph_set,
hb_set_t& used_mark_sets /* OUT */) const
{
unsigned i = 0;
for (const auto &offset : coverage)
{
const auto &cov = this+offset;
if (cov.intersects (&glyph_set))
used_mark_sets.add (i);
i++;
}
}
template <typename set_t>
void collect_coverage (hb_vector_t<set_t> &sets) const
{
@ -461,6 +476,7 @@ struct MarkGlyphSetsFormat1
bool ret = true;
for (const Offset32To<Coverage>& offset : coverage.iter ())
{
auto snap = c->serializer->snapshot ();
auto *o = out->coverage.serialize_append (c->serializer);
if (unlikely (!o))
{
@ -468,11 +484,17 @@ struct MarkGlyphSetsFormat1
break;
}
//not using o->serialize_subset (c, offset, this, out) here because
//OTS doesn't allow null offset.
//See issue: https://github.com/khaledhosny/ots/issues/172
//skip empty coverage
c->serializer->push ();
c->dispatch (this+offset);
bool res = false;
if (offset) res = c->dispatch (this+offset);
if (!res)
{
c->serializer->pop_discard ();
c->serializer->revert (snap);
(out->coverage.len)--;
continue;
}
c->serializer->add_link (*o, c->serializer->pop_pack ());
}
@ -513,6 +535,15 @@ struct MarkGlyphSets
}
}
void collect_used_mark_sets (const hb_set_t& glyph_set,
hb_set_t& used_mark_sets /* OUT */) const
{
switch (u.format) {
case 1: u.format1.collect_used_mark_sets (glyph_set, used_mark_sets); return;
default:return;
}
}
bool subset (hb_subset_context_t *c) const
{
TRACE_SUBSET (this);
@ -526,6 +557,7 @@ struct MarkGlyphSets
{
TRACE_SANITIZE (this);
if (!u.format.sanitize (c)) return_trace (false);
hb_barrier ();
switch (u.format) {
case 1: return_trace (u.format1.sanitize (c));
default:return_trace (true);
@ -600,6 +632,7 @@ struct GDEFVersion1_2
attachList.sanitize (c, this) &&
ligCaretList.sanitize (c, this) &&
markAttachClassDef.sanitize (c, this) &&
hb_barrier () &&
(version.to_int () < 0x00010002u || markGlyphSetsDef.sanitize (c, this)) &&
(version.to_int () < 0x00010003u || varStore.sanitize (c, this)));
}
@ -627,23 +660,28 @@ struct GDEFVersion1_2
bool subset (hb_subset_context_t *c) const
{
TRACE_SUBSET (this);
auto *out = c->serializer->embed (*this);
if (unlikely (!out)) return_trace (false);
auto *out = c->serializer->start_embed (*this);
if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
out->version.major = version.major;
out->version.minor = version.minor;
bool subset_glyphclassdef = out->glyphClassDef.serialize_subset (c, glyphClassDef, this, nullptr, false, true);
bool subset_attachlist = out->attachList.serialize_subset (c, attachList, this);
bool subset_ligcaretlist = out->ligCaretList.serialize_subset (c, ligCaretList, this);
bool subset_markattachclassdef = out->markAttachClassDef.serialize_subset (c, markAttachClassDef, this, nullptr, false, true);
bool subset_markglyphsetsdef = false;
auto snapshot_version0 = c->serializer->snapshot ();
if (version.to_int () >= 0x00010002u)
{
if (unlikely (!c->serializer->embed (markGlyphSetsDef))) return_trace (false);
subset_markglyphsetsdef = out->markGlyphSetsDef.serialize_subset (c, markGlyphSetsDef, this);
}
bool subset_varstore = false;
auto snapshot_version2 = c->serializer->snapshot ();
if (version.to_int () >= 0x00010003u)
{
if (unlikely (!c->serializer->embed (varStore))) return_trace (false);
if (c->plan->all_axes_pinned)
out->varStore = 0;
else if (c->plan->normalized_coords)
@ -666,15 +704,21 @@ struct GDEFVersion1_2
subset_varstore = out->varStore.serialize_subset (c, varStore, this, c->plan->gdef_varstore_inner_maps.as_array ());
}
if (subset_varstore)
{
out->version.minor = 3;
c->plan->has_gdef_varstore = true;
} else if (subset_markglyphsetsdef) {
out->version.minor = 2;
c->serializer->revert (snapshot_version2);
} else {
out->version.minor = 0;
c->serializer->revert (snapshot_version0);
}
bool subset_ligcaretlist = out->ligCaretList.serialize_subset (c, ligCaretList, this);
return_trace (subset_glyphclassdef || subset_attachlist ||
subset_ligcaretlist || subset_markattachclassdef ||
(out->version.to_int () >= 0x00010002u && subset_markglyphsetsdef) ||
@ -709,6 +753,7 @@ struct GDEF
{
TRACE_SANITIZE (this);
if (unlikely (!u.version.sanitize (c))) return_trace (false);
hb_barrier ();
switch (u.version.major) {
case 1: return_trace (u.version1.sanitize (c));
#ifndef HB_NO_BEYOND_64K


@ -25,6 +25,7 @@ struct Anchor
{
TRACE_SANITIZE (this);
if (!u.format.sanitize (c)) return_trace (false);
hb_barrier ();
switch (u.format) {
case 1: return_trace (u.format1.sanitize (c));
case 2: return_trace (u.format2.sanitize (c));


@ -38,9 +38,15 @@ struct AnchorFormat3
*y = font->em_fscale_y (yCoordinate);
if ((font->x_ppem || font->num_coords) && xDeviceTable.sanitize (&c->sanitizer, this))
{
hb_barrier ();
*x += (this+xDeviceTable).get_x_delta (font, c->var_store, c->var_store_cache);
}
if ((font->y_ppem || font->num_coords) && yDeviceTable.sanitize (&c->sanitizer, this))
{
hb_barrier ();
*y += (this+yDeviceTable).get_y_delta (font, c->var_store, c->var_store_cache);
}
}
bool subset (hb_subset_context_t *c) const


@ -8,7 +8,7 @@ namespace GPOS_impl {
struct AnchorMatrix
{
HBUINT16 rows; /* Number of rows */
UnsizedArrayOf<Offset16To<Anchor>>
UnsizedArrayOf<Offset16To<Anchor, AnchorMatrix>>
matrixZ; /* Matrix of offsets to Anchor tables--
* from beginning of AnchorMatrix table */
public:
@ -18,6 +18,7 @@ struct AnchorMatrix
{
TRACE_SANITIZE (this);
if (!c->check_struct (this)) return_trace (false);
hb_barrier ();
if (unlikely (hb_unsigned_mul_overflows (rows, cols))) return_trace (false);
unsigned int count = rows * cols;
if (!c->check_array (matrixZ.arrayZ, count)) return_trace (false);
@ -25,6 +26,7 @@ struct AnchorMatrix
if (c->lazy_some_gpos)
return_trace (true);
hb_barrier ();
for (unsigned int i = 0; i < count; i++)
if (!matrixZ[i].sanitize (c, this)) return_trace (false);
return_trace (true);
@ -38,6 +40,7 @@ struct AnchorMatrix
if (unlikely (row >= rows || col >= cols)) return Null (Anchor);
auto &offset = matrixZ[row * cols + col];
if (unlikely (!offset.sanitize (&c->sanitizer, this))) return Null (Anchor);
hb_barrier ();
*found = !offset.is_null ();
return this+offset;
}
@ -65,15 +68,14 @@ struct AnchorMatrix
if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
out->rows = num_rows;
bool ret = false;
for (const unsigned i : index_iter)
{
auto *offset = c->serializer->embed (matrixZ[i]);
if (!offset) return_trace (false);
ret |= offset->serialize_subset (c, matrixZ[i], this);
offset->serialize_subset (c, matrixZ[i], this);
}
return_trace (ret);
return_trace (true);
}
};


@ -23,7 +23,7 @@ static void SinglePos_serialize (hb_serialize_context_t *c,
const SrcLookup *src,
Iterator it,
const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *layout_variation_idx_delta_map,
bool all_axes_pinned);
unsigned new_format);
}


@ -11,21 +11,21 @@ struct EntryExitRecord
{
friend struct CursivePosFormat1;
bool sanitize (hb_sanitize_context_t *c, const void *base) const
bool sanitize (hb_sanitize_context_t *c, const struct CursivePosFormat1 *base) const
{
TRACE_SANITIZE (this);
return_trace (entryAnchor.sanitize (c, base) && exitAnchor.sanitize (c, base));
}
void collect_variation_indices (hb_collect_variation_indices_context_t *c,
const void *src_base) const
const struct CursivePosFormat1 *src_base) const
{
(src_base+entryAnchor).collect_variation_indices (c);
(src_base+exitAnchor).collect_variation_indices (c);
}
bool subset (hb_subset_context_t *c,
const void *src_base) const
const struct CursivePosFormat1 *src_base) const
{
TRACE_SERIALIZE (this);
auto *out = c->serializer->embed (this);
@ -38,11 +38,11 @@ struct EntryExitRecord
}
protected:
Offset16To<Anchor>
Offset16To<Anchor, struct CursivePosFormat1>
entryAnchor; /* Offset to EntryAnchor table--from
* beginning of CursivePos
* subtable--may be NULL */
Offset16To<Anchor>
Offset16To<Anchor, struct CursivePosFormat1>
exitAnchor; /* Offset to ExitAnchor table--from
* beginning of CursivePos
* subtable--may be NULL */
@ -128,6 +128,7 @@ struct CursivePosFormat1
const EntryExitRecord &this_record = entryExitRecord[(this+coverage).get_coverage (buffer->cur().codepoint)];
if (!this_record.entryAnchor ||
unlikely (!this_record.entryAnchor.sanitize (&c->sanitizer, this))) return_trace (false);
hb_barrier ();
hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
skippy_iter.reset_fast (buffer->idx);
@ -145,6 +146,7 @@ struct CursivePosFormat1
buffer->unsafe_to_concat_from_outbuffer (skippy_iter.idx, buffer->idx + 1);
return_trace (false);
}
hb_barrier ();
unsigned int i = skippy_iter.idx;
unsigned int j = buffer->idx;
@ -262,7 +264,7 @@ struct CursivePosFormat1
hb_requires (hb_is_iterator (Iterator))>
void serialize (hb_subset_context_t *c,
Iterator it,
const void *src_base)
const struct CursivePosFormat1 *src_base)
{
if (unlikely (!c->serializer->extend_min ((*this)))) return;
this->format = 1;


@ -42,6 +42,7 @@ struct MarkMarkPosFormat1_2
mark1Coverage.sanitize (c, this) &&
mark2Coverage.sanitize (c, this) &&
mark1Array.sanitize (c, this) &&
hb_barrier () &&
mark2Array.sanitize (c, this, (unsigned int) classCount));
}


@ -36,6 +36,7 @@ struct PairPosFormat1_3
TRACE_SANITIZE (this);
if (!c->check_struct (this)) return_trace (false);
hb_barrier ();
unsigned int len1 = valueFormat[0].get_len ();
unsigned int len2 = valueFormat[1].get_len ();
@ -131,20 +132,33 @@ struct PairPosFormat1_3
auto *out = c->serializer->start_embed (*this);
if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
out->format = format;
out->valueFormat[0] = valueFormat[0];
out->valueFormat[1] = valueFormat[1];
if (c->plan->flags & HB_SUBSET_FLAGS_NO_HINTING)
{
hb_pair_t<unsigned, unsigned> newFormats = compute_effective_value_formats (glyphset);
out->valueFormat[0] = newFormats.first;
out->valueFormat[1] = newFormats.second;
}
if (c->plan->all_axes_pinned)
hb_pair_t<unsigned, unsigned> newFormats = hb_pair (valueFormat[0], valueFormat[1]);
if (c->plan->normalized_coords)
{
out->valueFormat[0] = out->valueFormat[0].drop_device_table_flags ();
out->valueFormat[1] = out->valueFormat[1].drop_device_table_flags ();
/* all device flags will be dropped when full instancing, no need to strip
* hints, also do not strip empty cause we don't compute the new default
* value during stripping */
newFormats = compute_effective_value_formats (glyphset, false, false, &c->plan->layout_variation_idx_delta_map);
}
/* do not strip hints for VF */
else if (c->plan->flags & HB_SUBSET_FLAGS_NO_HINTING)
{
hb_blob_t* blob = hb_face_reference_table (c->plan->source, HB_TAG ('f','v','a','r'));
bool has_fvar = (blob != hb_blob_get_empty ());
hb_blob_destroy (blob);
bool strip = !has_fvar;
/* special case: strip hints when a VF has no GDEF varstore after
* subsetting */
if (has_fvar && !c->plan->has_gdef_varstore)
strip = true;
newFormats = compute_effective_value_formats (glyphset, strip, true);
}
out->valueFormat[0] = newFormats.first;
out->valueFormat[1] = newFormats.second;
hb_sorted_vector_t<hb_codepoint_t> new_coverage;
@ -175,7 +189,9 @@ struct PairPosFormat1_3
}
hb_pair_t<unsigned, unsigned> compute_effective_value_formats (const hb_set_t& glyphset) const
hb_pair_t<unsigned, unsigned> compute_effective_value_formats (const hb_set_t& glyphset,
bool strip_hints, bool strip_empty,
const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *varidx_delta_map = nullptr) const
{
unsigned record_size = PairSet::get_size (valueFormat);
@ -195,8 +211,8 @@ struct PairPosFormat1_3
{
if (record->intersects (glyphset))
{
format1 = format1 | valueFormat[0].get_effective_format (record->get_values_1 ());
format2 = format2 | valueFormat[1].get_effective_format (record->get_values_2 (valueFormat[0]));
format1 = format1 | valueFormat[0].get_effective_format (record->get_values_1 (), strip_hints, strip_empty, &set, varidx_delta_map);
format2 = format2 | valueFormat[1].get_effective_format (record->get_values_2 (valueFormat[0]), strip_hints, strip_empty, &set, varidx_delta_map);
}
record = &StructAtOffset<const PairValueRecord> (record, record_size);
}


@ -8,7 +8,7 @@ namespace Layout {
namespace GPOS_impl {
template <typename Types>
struct PairPosFormat2_4
struct PairPosFormat2_4 : ValueBase
{
protected:
HBUINT16 format; /* Format identifier--format = 2 */
@ -287,18 +287,31 @@ struct PairPosFormat2_4
unsigned len2 = valueFormat2.get_len ();
hb_pair_t<unsigned, unsigned> newFormats = hb_pair (valueFormat1, valueFormat2);
if (c->plan->flags & HB_SUBSET_FLAGS_NO_HINTING)
newFormats = compute_effective_value_formats (klass1_map, klass2_map);
if (c->plan->normalized_coords)
{
/* in case of full instancing, all var device flags will be dropped so no
* need to strip hints here */
newFormats = compute_effective_value_formats (klass1_map, klass2_map, false, false, &c->plan->layout_variation_idx_delta_map);
}
/* do not strip hints for VF */
else if (c->plan->flags & HB_SUBSET_FLAGS_NO_HINTING)
{
hb_blob_t* blob = hb_face_reference_table (c->plan->source, HB_TAG ('f','v','a','r'));
bool has_fvar = (blob != hb_blob_get_empty ());
hb_blob_destroy (blob);
bool strip = !has_fvar;
/* special case: strip hints when a VF has no GDEF varstore after
* subsetting */
if (has_fvar && !c->plan->has_gdef_varstore)
strip = true;
newFormats = compute_effective_value_formats (klass1_map, klass2_map, strip, true);
}
out->valueFormat1 = newFormats.first;
out->valueFormat2 = newFormats.second;
if (c->plan->all_axes_pinned)
{
out->valueFormat1 = out->valueFormat1.drop_device_table_flags ();
out->valueFormat2 = out->valueFormat2.drop_device_table_flags ();
}
unsigned total_len = len1 + len2;
hb_vector_t<unsigned> class2_idxs (+ hb_range ((unsigned) class2Count) | hb_filter (klass2_map));
for (unsigned class1_idx : + hb_range ((unsigned) class1Count) | hb_filter (klass1_map))
@ -326,7 +339,9 @@ struct PairPosFormat2_4
hb_pair_t<unsigned, unsigned> compute_effective_value_formats (const hb_map_t& klass1_map,
const hb_map_t& klass2_map) const
const hb_map_t& klass2_map,
bool strip_hints, bool strip_empty,
const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *varidx_delta_map = nullptr) const
{
unsigned len1 = valueFormat1.get_len ();
unsigned len2 = valueFormat2.get_len ();
@ -340,8 +355,8 @@ struct PairPosFormat2_4
for (unsigned class2_idx : + hb_range ((unsigned) class2Count) | hb_filter (klass2_map))
{
unsigned idx = (class1_idx * (unsigned) class2Count + class2_idx) * record_size;
format1 = format1 | valueFormat1.get_effective_format (&values[idx]);
format2 = format2 | valueFormat2.get_effective_format (&values[idx + len1]);
format1 = format1 | valueFormat1.get_effective_format (&values[idx], strip_hints, strip_empty, this, varidx_delta_map);
format2 = format2 | valueFormat2.get_effective_format (&values[idx + len1], strip_hints, strip_empty, this, varidx_delta_map);
}
if (format1 == valueFormat1 && format2 == valueFormat2)


@ -9,7 +9,7 @@ namespace GPOS_impl {
template <typename Types>
struct PairSet
struct PairSet : ValueBase
{
template <typename Types2>
friend struct PairPosFormat1_3;
@ -45,10 +45,12 @@ struct PairSet
bool sanitize (hb_sanitize_context_t *c, const sanitize_closure_t *closure) const
{
TRACE_SANITIZE (this);
if (!(c->check_struct (this)
&& c->check_range (&firstPairValueRecord,
if (!(c->check_struct (this) &&
hb_barrier () &&
c->check_range (&firstPairValueRecord,
len,
closure->stride))) return_trace (false);
hb_barrier ();
unsigned int count = len;
const PairValueRecord *record = &firstPairValueRecord;


@ -29,7 +29,7 @@ struct PairValueRecord
struct context_t
{
const void *base;
const ValueBase *base;
const ValueFormat *valueFormats;
const ValueFormat *newFormats;
unsigned len1; /* valueFormats[0].get_len() */
@ -62,7 +62,7 @@ struct PairValueRecord
void collect_variation_indices (hb_collect_variation_indices_context_t *c,
const ValueFormat *valueFormats,
const void *base) const
const ValueBase *base) const
{
unsigned record1_len = valueFormats[0].get_len ();
unsigned record2_len = valueFormats[1].get_len ();


@ -39,14 +39,12 @@ struct SinglePos
const SrcLookup* src,
Iterator glyph_val_iter_pairs,
const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *layout_variation_idx_delta_map,
bool all_axes_pinned)
unsigned newFormat)
{
if (unlikely (!c->extend_min (u.format))) return;
unsigned format = 2;
ValueFormat new_format = src->get_value_format ();
if (all_axes_pinned)
new_format = new_format.drop_device_table_flags ();
ValueFormat new_format;
new_format = newFormat;
if (glyph_val_iter_pairs)
format = get_format (glyph_val_iter_pairs);
@ -89,8 +87,8 @@ SinglePos_serialize (hb_serialize_context_t *c,
const SrcLookup *src,
Iterator it,
const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *layout_variation_idx_delta_map,
bool all_axes_pinned)
{ c->start_embed<SinglePos> ()->serialize (c, src, it, layout_variation_idx_delta_map, all_axes_pinned); }
unsigned new_format)
{ c->start_embed<SinglePos> ()->serialize (c, src, it, layout_variation_idx_delta_map, new_format); }
}


@ -8,7 +8,7 @@ namespace OT {
namespace Layout {
namespace GPOS_impl {
struct SinglePosFormat1
struct SinglePosFormat1 : ValueBase
{
protected:
HBUINT16 format; /* Format identifier--format = 1 */
@ -28,6 +28,7 @@ struct SinglePosFormat1
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
coverage.sanitize (c, this) &&
hb_barrier () &&
/* The coverage table may use a range to represent a set
* of glyphs, which means a small number of bytes can
* generate a large glyph set. Manually modify the
@ -146,6 +147,30 @@ struct SinglePosFormat1
hb_set_t intersection;
(this+coverage).intersect_set (glyphset, intersection);
unsigned new_format = valueFormat;
if (c->plan->normalized_coords)
{
new_format = valueFormat.get_effective_format (values.arrayZ, false, false, this, &c->plan->layout_variation_idx_delta_map);
}
/* do not strip hints for VF */
else if (c->plan->flags & HB_SUBSET_FLAGS_NO_HINTING)
{
hb_blob_t* blob = hb_face_reference_table (c->plan->source, HB_TAG ('f','v','a','r'));
bool has_fvar = (blob != hb_blob_get_empty ());
hb_blob_destroy (blob);
bool strip = !has_fvar;
/* special case: strip hints when a VF has no GDEF varstore after
* subsetting */
if (has_fvar && !c->plan->has_gdef_varstore)
strip = true;
new_format = valueFormat.get_effective_format (values.arrayZ,
strip, /* strip hints */
true, /* strip empty */
this, nullptr);
}
auto it =
+ hb_iter (intersection)
| hb_map_retains_sorting (glyph_map)
@ -153,7 +178,7 @@ struct SinglePosFormat1
;
bool ret = bool (it);
SinglePos_serialize (c->serializer, this, it, &c->plan->layout_variation_idx_delta_map, c->plan->all_axes_pinned);
SinglePos_serialize (c->serializer, this, it, &c->plan->layout_variation_idx_delta_map, new_format);
return_trace (ret);
}
};


@ -7,7 +7,7 @@ namespace OT {
namespace Layout {
namespace GPOS_impl {
struct SinglePosFormat2
struct SinglePosFormat2 : ValueBase
{
protected:
HBUINT16 format; /* Format identifier--format = 2 */
@ -143,6 +143,37 @@ struct SinglePosFormat2
coverage.serialize_serialize (c, glyphs);
}
template<typename Iterator,
hb_requires (hb_is_iterator (Iterator))>
unsigned compute_effective_format (const hb_face_t *face,
Iterator it,
bool is_instancing, bool strip_hints,
bool has_gdef_varstore,
const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *varidx_delta_map) const
{
hb_blob_t* blob = hb_face_reference_table (face, HB_TAG ('f','v','a','r'));
bool has_fvar = (blob != hb_blob_get_empty ());
hb_blob_destroy (blob);
unsigned new_format = 0;
if (is_instancing)
{
new_format = new_format | valueFormat.get_effective_format (+ it | hb_map (hb_second), false, false, this, varidx_delta_map);
}
/* do not strip hints for VF */
else if (strip_hints)
{
bool strip = !has_fvar;
if (has_fvar && !has_gdef_varstore)
strip = true;
new_format = new_format | valueFormat.get_effective_format (+ it | hb_map (hb_second), strip, true, this, nullptr);
}
else
new_format = valueFormat;
return new_format;
}
bool subset (hb_subset_context_t *c) const
{
TRACE_SUBSET (this);
@ -163,8 +194,13 @@ struct SinglePosFormat2
})
;
unsigned new_format = compute_effective_format (c->plan->source, it,
bool (c->plan->normalized_coords),
bool (c->plan->flags & HB_SUBSET_FLAGS_NO_HINTING),
c->plan->has_gdef_varstore,
&c->plan->layout_variation_idx_delta_map);
bool ret = bool (it);
SinglePos_serialize (c->serializer, this, it, &c->plan->layout_variation_idx_delta_map, c->plan->all_axes_pinned);
SinglePos_serialize (c->serializer, this, it, &c->plan->layout_variation_idx_delta_map, new_format);
return_trace (ret);
}
};


@ -9,6 +9,8 @@ namespace GPOS_impl {
typedef HBUINT16 Value;
struct ValueBase {}; // Dummy base class tag for OffsetTo<Value> bases.
typedef UnsizedArrayOf<Value> ValueRecord;
struct ValueFormat : HBUINT16
@ -78,7 +80,7 @@ struct ValueFormat : HBUINT16
}
bool apply_value (hb_ot_apply_context_t *c,
const void *base,
const ValueBase *base,
const Value *values,
hb_glyph_position_t &glyph_pos) const
{
@ -142,11 +144,29 @@ struct ValueFormat : HBUINT16
return ret;
}
unsigned int get_effective_format (const Value *values) const
unsigned int get_effective_format (const Value *values, bool strip_hints, bool strip_empty, const ValueBase *base,
const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *varidx_delta_map) const
{
unsigned int format = *this;
for (unsigned flag = xPlacement; flag <= yAdvDevice; flag = flag << 1) {
if (format & flag) should_drop (*values++, (Flags) flag, &format);
if (format & flag)
{
if (strip_hints && flag >= xPlaDevice)
{
format = format & ~flag;
values++;
continue;
}
if (varidx_delta_map && flag >= xPlaDevice)
{
update_var_flag (values++, (Flags) flag, &format, base, varidx_delta_map);
continue;
}
/* do not strip empty when instancing, cause we don't know whether the new
* default value is 0 or not */
if (strip_empty) should_drop (*values, (Flags) flag, &format);
values++;
}
}
return format;
@ -154,18 +174,19 @@ struct ValueFormat : HBUINT16
template<typename Iterator,
hb_requires (hb_is_iterator (Iterator))>
unsigned int get_effective_format (Iterator it) const {
unsigned int get_effective_format (Iterator it, bool strip_hints, bool strip_empty, const ValueBase *base,
const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *varidx_delta_map) const {
unsigned int new_format = 0;
for (const hb_array_t<const Value>& values : it)
new_format = new_format | get_effective_format (&values);
new_format = new_format | get_effective_format (&values, strip_hints, strip_empty, base, varidx_delta_map);
return new_format;
}
void copy_values (hb_serialize_context_t *c,
unsigned int new_format,
const void *base,
const ValueBase *base,
const Value *values,
const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *layout_variation_idx_delta_map) const
{
@ -217,7 +238,7 @@ struct ValueFormat : HBUINT16
}
void collect_variation_indices (hb_collect_variation_indices_context_t *c,
const void *base,
const ValueBase *base,
const hb_array_t<const Value>& values) const
{
unsigned format = *this;
@ -251,17 +272,8 @@ struct ValueFormat : HBUINT16
}
}
unsigned drop_device_table_flags () const
{
unsigned format = *this;
for (unsigned flag = xPlaDevice; flag <= yAdvDevice; flag = flag << 1)
format = format & ~flag;
return format;
}
private:
bool sanitize_value_devices (hb_sanitize_context_t *c, const void *base, const Value *values) const
bool sanitize_value_devices (hb_sanitize_context_t *c, const ValueBase *base, const Value *values) const
{
unsigned int format = *this;
@ -278,17 +290,17 @@ struct ValueFormat : HBUINT16
return true;
}
static inline Offset16To<Device>& get_device (Value* value)
static inline Offset16To<Device, ValueBase>& get_device (Value* value)
{
return *static_cast<Offset16To<Device> *> (value);
return *static_cast<Offset16To<Device, ValueBase> *> (value);
}
static inline const Offset16To<Device>& get_device (const Value* value)
static inline const Offset16To<Device, ValueBase>& get_device (const Value* value)
{
return *static_cast<const Offset16To<Device> *> (value);
return *static_cast<const Offset16To<Device, ValueBase> *> (value);
}
static inline const Device& get_device (const Value* value,
bool *worked,
const void *base,
const ValueBase *base,
hb_sanitize_context_t &c)
{
if (worked) *worked |= bool (*value);
@ -296,12 +308,13 @@ struct ValueFormat : HBUINT16
if (unlikely (!offset.sanitize (&c, base)))
return Null(Device);
hb_barrier ();
return base + offset;
}
void add_delta_to_value (HBINT16 *value,
const void *base,
const ValueBase *base,
const Value *src_value,
const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *layout_variation_idx_delta_map) const
{
@ -313,7 +326,8 @@ struct ValueFormat : HBUINT16
*value += hb_second (*varidx_delta);
}
bool copy_device (hb_serialize_context_t *c, const void *base,
bool copy_device (hb_serialize_context_t *c,
const ValueBase *base,
const Value *src_value,
const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *layout_variation_idx_delta_map,
unsigned int new_format, Flags flag) const
@ -354,7 +368,7 @@ struct ValueFormat : HBUINT16
return (format & devices) != 0;
}
bool sanitize_value (hb_sanitize_context_t *c, const void *base, const Value *values) const
bool sanitize_value (hb_sanitize_context_t *c, const ValueBase *base, const Value *values) const
{
TRACE_SANITIZE (this);
@ -366,7 +380,7 @@ struct ValueFormat : HBUINT16
return_trace (!has_device () || sanitize_value_devices (c, base, values));
}
bool sanitize_values (hb_sanitize_context_t *c, const void *base, const Value *values, unsigned int count) const
bool sanitize_values (hb_sanitize_context_t *c, const ValueBase *base, const Value *values, unsigned int count) const
{
TRACE_SANITIZE (this);
unsigned size = get_size ();
@ -376,11 +390,12 @@ struct ValueFormat : HBUINT16
if (c->lazy_some_gpos)
return_trace (true);
hb_barrier ();
return_trace (sanitize_values_stride_unsafe (c, base, values, count, size));
}
/* Just sanitize referenced Device tables. Doesn't check the values themselves. */
bool sanitize_values_stride_unsafe (hb_sanitize_context_t *c, const void *base, const Value *values, unsigned int count, unsigned int stride) const
bool sanitize_values_stride_unsafe (hb_sanitize_context_t *c, const ValueBase *base, const Value *values, unsigned int count, unsigned int stride) const
{
TRACE_SANITIZE (this);
@ -403,6 +418,20 @@ struct ValueFormat : HBUINT16
*format = *format & ~flag;
}
void update_var_flag (const Value* value, Flags flag,
unsigned int* format, const ValueBase *base,
const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *varidx_delta_map) const
{
if (*value)
{
unsigned varidx = (base + get_device (value)).get_variation_index ();
hb_pair_t<unsigned, int> *varidx_delta;
if (varidx_delta_map->has (varidx, &varidx_delta) &&
varidx_delta->first != HB_OT_LAYOUT_NO_VARIATIONS_INDEX)
return;
}
*format = *format & ~flag;
}
};
}


@ -33,9 +33,11 @@ struct ReverseChainSingleSubstFormat1
TRACE_SANITIZE (this);
if (!(coverage.sanitize (c, this) && backtrack.sanitize (c, this)))
return_trace (false);
hb_barrier ();
const auto &lookahead = StructAfter<decltype (lookaheadX)> (backtrack);
if (!lookahead.sanitize (c, this))
return_trace (false);
hb_barrier ();
const auto &substitute = StructAfter<decltype (substituteX)> (lookahead);
return_trace (substitute.sanitize (c));
}
@ -109,12 +111,12 @@ struct ReverseChainSingleSubstFormat1
bool apply (hb_ot_apply_context_t *c) const
{
TRACE_APPLY (this);
if (unlikely (c->nesting_level_left != HB_MAX_NESTING_LEVEL))
return_trace (false); /* No chaining to this type */
unsigned int index = (this+coverage).get_coverage (c->buffer->cur ().codepoint);
if (likely (index == NOT_COVERED)) return_trace (false);
if (unlikely (c->nesting_level_left != HB_MAX_NESTING_LEVEL))
return_trace (false); /* No chaining to this type */
const auto &lookahead = StructAfter<decltype (lookaheadX)> (backtrack);
const auto &substitute = StructAfter<decltype (substituteX)> (lookahead);


@ -38,8 +38,8 @@ struct SmallTypes {
using HBUINT = HBUINT16;
using HBGlyphID = HBGlyphID16;
using Offset = Offset16;
template <typename Type, bool has_null=true>
using OffsetTo = OT::Offset16To<Type, has_null>;
template <typename Type, typename BaseType=void, bool has_null=true>
using OffsetTo = OT::Offset16To<Type, BaseType, has_null>;
template <typename Type>
using ArrayOf = OT::Array16Of<Type>;
template <typename Type>
@ -52,8 +52,8 @@ struct MediumTypes {
using HBUINT = HBUINT24;
using HBGlyphID = HBGlyphID24;
using Offset = Offset24;
template <typename Type, bool has_null=true>
using OffsetTo = OT::Offset24To<Type, has_null>;
template <typename Type, typename BaseType=void, bool has_null=true>
using OffsetTo = OT::Offset24To<Type, BaseType, has_null>;
template <typename Type>
using ArrayOf = OT::Array24Of<Type>;
template <typename Type>


@ -242,7 +242,9 @@ struct NameRecord
bool sanitize (hb_sanitize_context_t *c, const void *base) const
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) && offset.sanitize (c, base, length));
return_trace (c->check_struct (this) &&
hb_barrier () &&
offset.sanitize (c, base, length));
}
HBUINT16 platformID; /* Platform ID. */
@ -465,6 +467,7 @@ struct name
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
hb_barrier () &&
likely (format == 0 || format == 1) &&
c->check_array (nameRecordZ.arrayZ, count) &&
c->check_range (this, stringOffset) &&


@ -39,6 +39,7 @@ struct ClassDefFormat1 : public OT::ClassDefFormat1_3<SmallTypes>
int64_t vertex_len = vertex.obj.tail - vertex.obj.head;
constexpr unsigned min_size = OT::ClassDefFormat1_3<SmallTypes>::min_size;
if (vertex_len < min_size) return false;
hb_barrier ();
return vertex_len >= min_size + classValue.get_size () - classValue.len.get_size ();
}
};
@ -50,6 +51,7 @@ struct ClassDefFormat2 : public OT::ClassDefFormat2_4<SmallTypes>
int64_t vertex_len = vertex.obj.tail - vertex.obj.head;
constexpr unsigned min_size = OT::ClassDefFormat2_4<SmallTypes>::min_size;
if (vertex_len < min_size) return false;
hb_barrier ();
return vertex_len >= min_size + rangeRecord.get_size () - rangeRecord.len.get_size ();
}
};
@ -114,6 +116,7 @@ struct ClassDef : public OT::ClassDef
{
int64_t vertex_len = vertex.obj.tail - vertex.obj.head;
if (vertex_len < OT::ClassDef::min_size) return false;
hb_barrier ();
switch (u.format)
{
case 1: return ((ClassDefFormat1*)this)->sanitize (vertex);


@ -39,6 +39,7 @@ struct CoverageFormat1 : public OT::Layout::Common::CoverageFormat1_3<SmallTypes
int64_t vertex_len = vertex.obj.tail - vertex.obj.head;
constexpr unsigned min_size = OT::Layout::Common::CoverageFormat1_3<SmallTypes>::min_size;
if (vertex_len < min_size) return false;
hb_barrier ();
return vertex_len >= min_size + glyphArray.get_size () - glyphArray.len.get_size ();
}
};
@ -50,6 +51,7 @@ struct CoverageFormat2 : public OT::Layout::Common::CoverageFormat2_4<SmallTypes
int64_t vertex_len = vertex.obj.tail - vertex.obj.head;
constexpr unsigned min_size = OT::Layout::Common::CoverageFormat2_4<SmallTypes>::min_size;
if (vertex_len < min_size) return false;
hb_barrier ();
return vertex_len >= min_size + rangeRecord.get_size () - rangeRecord.len.get_size ();
}
};
@ -138,6 +140,7 @@ struct Coverage : public OT::Layout::Common::Coverage
{
int64_t vertex_len = vertex.obj.tail - vertex.obj.head;
if (vertex_len < OT::Layout::Common::Coverage::min_size) return false;
hb_barrier ();
switch (u.format)
{
case 1: return ((CoverageFormat1*)this)->sanitize (vertex);


@ -567,6 +567,7 @@ struct graph_t
update_distances ();
hb_priority_queue_t<int64_t> queue;
queue.alloc (vertices_.length);
hb_vector_t<vertex_t> &sorted_graph = vertices_scratch_;
if (unlikely (!check_success (sorted_graph.resize (vertices_.length)))) return;
hb_vector_t<unsigned> id_map;
@ -1370,6 +1371,7 @@ struct graph_t
vertices_.tail ().distance = 0;
hb_priority_queue_t<int64_t> queue;
queue.alloc (count);
queue.insert (0, vertices_.length - 1);
hb_vector_t<bool> visited;


@ -76,6 +76,7 @@ struct Lookup : public OT::Lookup
{
int64_t vertex_len = vertex.obj.tail - vertex.obj.head;
if (vertex_len < OT::Lookup::min_size) return false;
hb_barrier ();
return vertex_len >= this->get_size ();
}
@ -351,6 +352,7 @@ struct LookupList : public OT::LookupList<T>
{
int64_t vertex_len = vertex.obj.tail - vertex.obj.head;
if (vertex_len < OT::LookupList<T>::min_size) return false;
hb_barrier ();
return vertex_len >= OT::LookupList<T>::item_size * this->len;
}
};
@ -364,6 +366,7 @@ struct GSTAR : public OT::GSUBGPOS
GSTAR* gstar = (GSTAR*) r.obj.head;
if (!gstar || !gstar->sanitize (r))
return nullptr;
hb_barrier ();
return gstar;
}
@ -383,6 +386,7 @@ struct GSTAR : public OT::GSUBGPOS
{
int64_t len = vertex.obj.tail - vertex.obj.head;
if (len < OT::GSUBGPOS::min_size) return false;
hb_barrier ();
return len >= get_size ();
}


@ -40,6 +40,7 @@ struct AnchorMatrix : public OT::Layout::GPOS_impl::AnchorMatrix
{
int64_t vertex_len = vertex.obj.tail - vertex.obj.head;
if (vertex_len < AnchorMatrix::min_size) return false;
hb_barrier ();
return vertex_len >= AnchorMatrix::min_size +
OT::Offset16::static_size * class_count * this->rows;
@ -128,6 +129,7 @@ struct MarkArray : public OT::Layout::GPOS_impl::MarkArray
int64_t vertex_len = vertex.obj.tail - vertex.obj.head;
unsigned min_size = MarkArray::min_size;
if (vertex_len < min_size) return false;
hb_barrier ();
return vertex_len >= get_size ();
}
@ -495,6 +497,7 @@ struct MarkBasePos : public OT::Layout::GPOS_impl::MarkBasePos
{
int64_t vertex_len = vertex.obj.tail - vertex.obj.head;
if (vertex_len < u.format.get_size ()) return false;
hb_barrier ();
switch (u.format) {
case 1:


@ -42,6 +42,7 @@ struct PairPosFormat1 : public OT::Layout::GPOS_impl::PairPosFormat1_3<SmallType
int64_t vertex_len = vertex.obj.tail - vertex.obj.head;
unsigned min_size = OT::Layout::GPOS_impl::PairPosFormat1_3<SmallTypes>::min_size;
if (vertex_len < min_size) return false;
hb_barrier ();
return vertex_len >=
min_size + pairSet.get_size () - pairSet.len.get_size();
@ -198,6 +199,7 @@ struct PairPosFormat2 : public OT::Layout::GPOS_impl::PairPosFormat2_4<SmallType
size_t vertex_len = vertex.table_size ();
unsigned min_size = OT::Layout::GPOS_impl::PairPosFormat2_4<SmallTypes>::min_size;
if (vertex_len < min_size) return false;
hb_barrier ();
const unsigned class1_count = class1Count;
return vertex_len >=
@ -625,6 +627,7 @@ struct PairPos : public OT::Layout::GPOS_impl::PairPos
{
int64_t vertex_len = vertex.obj.tail - vertex.obj.head;
if (vertex_len < u.format.get_size ()) return false;
hb_barrier ();
switch (u.format) {
case 1:


@ -75,6 +75,7 @@ struct ankr
{
TRACE_SANITIZE (this);
return_trace (likely (c->check_struct (this) &&
hb_barrier () &&
version == 0 &&
c->check_range (this, anchorData) &&
lookupTable.sanitize (c, this, &(this+anchorData))));


@ -123,6 +123,7 @@ struct bsln
TRACE_SANITIZE (this);
if (unlikely (!(c->check_struct (this) && defaultBaseline < 32)))
return_trace (false);
hb_barrier ();
switch (format)
{


@ -191,6 +191,7 @@ struct LookupSegmentArray
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
hb_barrier () &&
first <= last &&
valuesZ.sanitize (c, base, last - first + 1));
}
@ -199,6 +200,7 @@ struct LookupSegmentArray
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
hb_barrier () &&
first <= last &&
valuesZ.sanitize (c, base, last - first + 1, std::forward<Ts> (ds)...));
}
@ -360,6 +362,7 @@ struct LookupFormat10
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
hb_barrier () &&
valueSize <= 4 &&
valueArrayZ.sanitize (c, glyphCount * valueSize));
}
@ -415,6 +418,7 @@ struct Lookup
{
TRACE_SANITIZE (this);
if (!u.format.sanitize (c)) return_trace (false);
hb_barrier ();
switch (u.format) {
case 0: return_trace (u.format0.sanitize (c));
case 2: return_trace (u.format2.sanitize (c));
@ -429,6 +433,7 @@ struct Lookup
{
TRACE_SANITIZE (this);
if (!u.format.sanitize (c)) return_trace (false);
hb_barrier ();
switch (u.format) {
case 0: return_trace (u.format0.sanitize (c, base));
case 2: return_trace (u.format2.sanitize (c, base));
@ -558,6 +563,7 @@ struct StateTable
{
TRACE_SANITIZE (this);
if (unlikely (!(c->check_struct (this) &&
hb_barrier () &&
nClasses >= 4 /* Ensure pre-defined classes fit. */ &&
classTable.sanitize (c, this)))) return_trace (false);


@ -138,6 +138,7 @@ struct FeatureName
{
TRACE_SANITIZE (this);
return_trace (likely (c->check_struct (this) &&
hb_barrier () &&
(base+settingTableZ).sanitize (c, nSettings)));
}
@ -200,6 +201,7 @@ struct feat
{
TRACE_SANITIZE (this);
return_trace (likely (c->check_struct (this) &&
hb_barrier () &&
version.major == 1 &&
namesZ.sanitize (c, featureNameCount, this)));
}


@ -185,6 +185,7 @@ struct ActionSubrecord
TRACE_SANITIZE (this);
if (unlikely (!c->check_struct (this)))
return_trace (false);
hb_barrier ();
switch (u.header.actionType)
{
@ -220,6 +221,7 @@ struct PostcompensationActionChain
TRACE_SANITIZE (this);
if (unlikely (!c->check_struct (this)))
return_trace (false);
hb_barrier ();
unsigned int offset = min_size;
for (unsigned int i = 0; i < count; i++)
@ -389,6 +391,7 @@ struct just
TRACE_SANITIZE (this);
return_trace (likely (c->check_struct (this) &&
hb_barrier () &&
version.major == 1 &&
horizData.sanitize (c, this, this) &&
vertData.sanitize (c, this, this)));


@ -54,6 +54,7 @@ kerxTupleKern (int value,
unsigned int offset = value;
const FWORD *pv = &StructAtOffset<FWORD> (base, offset);
if (unlikely (!c->sanitizer.check_array (pv, tupleCount))) return 0;
hb_barrier ();
return *pv;
}
@ -259,6 +260,7 @@ struct KerxSubTableFormat1
depth = 0;
return;
}
hb_barrier ();
hb_mask_t kern_mask = c->plan->kern_mask;
@ -389,6 +391,7 @@ struct KerxSubTableFormat2
kern_idx = Types::offsetToIndex (kern_idx, this, arrayZ.arrayZ);
const FWORD *v = &arrayZ[kern_idx];
if (unlikely (!v->sanitize (&c->sanitizer))) return 0;
hb_barrier ();
return kerxTupleKern (*v, header.tuple_count (), this, c);
}
@ -429,6 +432,7 @@ struct KerxSubTableFormat2
return_trace (likely (c->check_struct (this) &&
leftClassTable.sanitize (c, this) &&
rightClassTable.sanitize (c, this) &&
hb_barrier () &&
c->check_range (this, array)));
}
@ -509,6 +513,7 @@ struct KerxSubTableFormat4
double the ankrActionIndex to get the correct offset here. */
const HBUINT16 *data = &ankrData[entry.data.ankrActionIndex * 2];
if (!c->sanitizer.check_array (data, 2)) return;
hb_barrier ();
unsigned int markControlPoint = *data++;
unsigned int currControlPoint = *data++;
hb_position_t markX = 0;
@ -537,6 +542,7 @@ struct KerxSubTableFormat4
double the ankrActionIndex to get the correct offset here. */
const HBUINT16 *data = &ankrData[entry.data.ankrActionIndex * 2];
if (!c->sanitizer.check_array (data, 2)) return;
hb_barrier ();
unsigned int markAnchorPoint = *data++;
unsigned int currAnchorPoint = *data++;
const Anchor &markAnchor = c->ankr_table->get_anchor (c->buffer->info[mark].codepoint,
@ -557,6 +563,7 @@ struct KerxSubTableFormat4
by 4 to get the correct offset for the given action. */
const FWORD *data = (const FWORD *) &ankrData[entry.data.ankrActionIndex * 4];
if (!c->sanitizer.check_array (data, 4)) return;
hb_barrier ();
int markX = *data++;
int markY = *data++;
int currX = *data++;
@ -639,6 +646,7 @@ struct KerxSubTableFormat6
if (unlikely (hb_unsigned_mul_overflows (offset, sizeof (FWORD32)))) return 0;
const FWORD32 *v = &StructAtOffset<FWORD32> (&(this+t.array), offset * sizeof (FWORD32));
if (unlikely (!v->sanitize (&c->sanitizer))) return 0;
hb_barrier ();
return kerxTupleKern (*v, header.tuple_count (), &(this+vector), c);
}
else
@ -649,6 +657,7 @@ struct KerxSubTableFormat6
unsigned int offset = l + r;
const FWORD *v = &StructAtOffset<FWORD> (&(this+t.array), offset * sizeof (FWORD));
if (unlikely (!v->sanitize (&c->sanitizer))) return 0;
hb_barrier ();
return kerxTupleKern (*v, header.tuple_count (), &(this+vector), c);
}
}
@ -674,6 +683,7 @@ struct KerxSubTableFormat6
{
TRACE_SANITIZE (this);
return_trace (likely (c->check_struct (this) &&
hb_barrier () &&
(is_long () ?
(
u.l.rowIndexTable.sanitize (c, this) &&
@ -787,9 +797,10 @@ struct KerxSubTable
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
if (!u.header.sanitize (c) ||
u.header.length <= u.header.static_size ||
!c->check_range (this, u.header.length))
if (!(u.header.sanitize (c) &&
hb_barrier () &&
u.header.length >= u.header.static_size &&
c->check_range (this, u.header.length)))
return_trace (false);
return_trace (dispatch (c));
@ -936,9 +947,10 @@ struct KerxTable
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
if (unlikely (!thiz()->version.sanitize (c) ||
(unsigned) thiz()->version < (unsigned) T::minVersion ||
!thiz()->tableCount.sanitize (c)))
if (unlikely (!(thiz()->version.sanitize (c) &&
hb_barrier () &&
(unsigned) thiz()->version >= (unsigned) T::minVersion &&
thiz()->tableCount.sanitize (c))))
return_trace (false);
typedef typename T::SubTable SubTable;
@ -949,6 +961,7 @@ struct KerxTable
{
if (unlikely (!st->u.header.sanitize (c)))
return_trace (false);
hb_barrier ();
/* OpenType kern table has 2-byte subtable lengths. That's limiting.
* MS implementation also only supports one subtable, of format 0,
* anyway. Certain versions of some fonts, like Calibri, contain


@ -259,7 +259,9 @@ struct ContextualSubtable
unsigned int offset = entry.data.markIndex + buffer->info[mark].codepoint;
const UnsizedArrayOf<HBGlyphID16> &subs_old = (const UnsizedArrayOf<HBGlyphID16> &) subs;
replacement = &subs_old[Types::wordOffsetToIndex (offset, table, subs_old.arrayZ)];
if (!replacement->sanitize (&c->sanitizer) || !*replacement)
if (!(replacement->sanitize (&c->sanitizer) &&
hb_barrier () &&
*replacement))
replacement = nullptr;
}
if (replacement)
@ -287,7 +289,9 @@ struct ContextualSubtable
unsigned int offset = entry.data.currentIndex + buffer->info[idx].codepoint;
const UnsizedArrayOf<HBGlyphID16> &subs_old = (const UnsizedArrayOf<HBGlyphID16> &) subs;
replacement = &subs_old[Types::wordOffsetToIndex (offset, table, subs_old.arrayZ)];
if (!replacement->sanitize (&c->sanitizer) || !*replacement)
if (!(replacement->sanitize (&c->sanitizer) &&
hb_barrier () &&
*replacement))
replacement = nullptr;
}
if (replacement)
@ -315,7 +319,7 @@ struct ContextualSubtable
bool has_glyph_classes;
unsigned int mark;
const ContextualSubtable *table;
const UnsizedListOfOffset16To<Lookup<HBGlyphID16>, HBUINT, false> &subs;
const UnsizedListOfOffset16To<Lookup<HBGlyphID16>, HBUINT, void, false> &subs;
};
bool apply (hb_aat_apply_context_t *c) const
@ -336,6 +340,7 @@ struct ContextualSubtable
unsigned int num_entries = 0;
if (unlikely (!machine.sanitize (c, &num_entries))) return_trace (false);
hb_barrier ();
if (!Types::extended)
return_trace (substitutionTables.sanitize (c, this, 0));
@ -359,7 +364,7 @@ struct ContextualSubtable
protected:
StateTable<Types, EntryData>
machine;
NNOffsetTo<UnsizedListOfOffset16To<Lookup<HBGlyphID16>, HBUINT, false>, HBUINT>
NNOffsetTo<UnsizedListOfOffset16To<Lookup<HBGlyphID16>, HBUINT, void, false>, HBUINT>
substitutionTables;
public:
DEFINE_SIZE_STATIC (20);
@ -513,6 +518,7 @@ struct LigatureSubtable
if (unlikely (!buffer->move_to (match_positions[--cursor % ARRAY_LENGTH (match_positions)]))) return;
if (unlikely (!actionData->sanitize (&c->sanitizer))) break;
hb_barrier ();
action = *actionData;
uint32_t uoffset = action & LigActionOffset;
@ -523,6 +529,7 @@ struct LigatureSubtable
component_idx = Types::wordOffsetToIndex (component_idx, table, component.arrayZ);
const HBUINT16 &componentData = component[component_idx];
if (unlikely (!componentData.sanitize (&c->sanitizer))) break;
hb_barrier ();
ligature_idx += componentData;
DEBUG_MSG (APPLY, nullptr, "Action store %d last %d",
@ -533,6 +540,7 @@ struct LigatureSubtable
ligature_idx = Types::offsetToIndex (ligature_idx, table, ligature.arrayZ);
const HBGlyphID16 &ligatureData = ligature[ligature_idx];
if (unlikely (!ligatureData.sanitize (&c->sanitizer))) break;
hb_barrier ();
hb_codepoint_t lig = ligatureData;
DEBUG_MSG (APPLY, nullptr, "Produced ligature %u", lig);
@ -587,6 +595,7 @@ struct LigatureSubtable
TRACE_SANITIZE (this);
/* The rest of array sanitizations are done at run-time. */
return_trace (c->check_struct (this) && machine.sanitize (c) &&
hb_barrier () &&
ligAction && component && ligature);
}
@ -765,6 +774,7 @@ struct InsertionSubtable
unsigned int start = entry.data.markedInsertIndex;
const HBGlyphID16 *glyphs = &insertionAction[start];
if (unlikely (!c->sanitizer.check_array (glyphs, count))) count = 0;
hb_barrier ();
bool before = flags & MarkedInsertBefore;
@ -793,6 +803,7 @@ struct InsertionSubtable
unsigned int start = entry.data.currentInsertIndex;
const HBGlyphID16 *glyphs = &insertionAction[start];
if (unlikely (!c->sanitizer.check_array (glyphs, count))) count = 0;
hb_barrier ();
bool before = flags & CurrentInsertBefore;
@ -849,6 +860,7 @@ struct InsertionSubtable
TRACE_SANITIZE (this);
/* The rest of array sanitizations are done at run-time. */
return_trace (c->check_struct (this) && machine.sanitize (c) &&
hb_barrier () &&
insertionAction);
}
@ -944,9 +956,10 @@ struct ChainSubtable
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
if (!length.sanitize (c) ||
length <= min_size ||
!c->check_range (this, length))
if (!(length.sanitize (c) &&
hb_barrier () &&
length >= min_size &&
c->check_range (this, length)))
return_trace (false);
hb_sanitize_with_object_t with (c, this);
@ -1089,9 +1102,10 @@ struct Chain
bool sanitize (hb_sanitize_context_t *c, unsigned int version HB_UNUSED) const
{
TRACE_SANITIZE (this);
if (!length.sanitize (c) ||
length < min_size ||
!c->check_range (this, length))
if (!(length.sanitize (c) &&
hb_barrier () &&
length >= min_size &&
c->check_range (this, length)))
return_trace (false);
if (!c->check_array (featureZ.arrayZ, featureCount))
@ -1103,6 +1117,7 @@ struct Chain
{
if (!subtable->sanitize (c))
return_trace (false);
hb_barrier ();
subtable = &StructAfter<ChainSubtable<Types>> (*subtable);
}
@ -1173,7 +1188,10 @@ struct mortmorx
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
if (!version.sanitize (c) || !version || !chainCount.sanitize (c))
if (!(version.sanitize (c) &&
hb_barrier () &&
version &&
chainCount.sanitize (c)))
return_trace (false);
const Chain<Types> *chain = &firstChain;
@ -1182,6 +1200,7 @@ struct mortmorx
{
if (!chain->sanitize (c, version))
return_trace (false);
hb_barrier ();
chain = &StructAfter<Chain<Types>> (*chain);
}


@ -144,6 +144,7 @@ struct opbd
TRACE_SANITIZE (this);
if (unlikely (!c->check_struct (this) || version.major != 1))
return_trace (false);
hb_barrier ();
switch (format)
{


@ -134,6 +134,7 @@ struct TrackData
{
TRACE_SANITIZE (this);
return_trace (likely (c->check_struct (this) &&
hb_barrier () &&
sizeTable.sanitize (c, base, nSizes) &&
trackTable.sanitize (c, nTracks, base, nSizes)));
}
@ -203,6 +204,7 @@ struct trak
TRACE_SANITIZE (this);
return_trace (likely (c->check_struct (this) &&
hb_barrier () &&
version.major == 1 &&
horizData.sanitize (c, this, this) &&
vertData.sanitize (c, this, this)));


@ -40,7 +40,7 @@ HB_BEGIN_DECLS
* @HB_AAT_LAYOUT_FEATURE_TYPE_INVALID: Initial, unset feature type
* @HB_AAT_LAYOUT_FEATURE_TYPE_ALL_TYPOGRAPHIC: [All Typographic Features](https://developer.apple.com/fonts/TrueType-Reference-Manual/RM09/AppendixF.html#Type0)
* @HB_AAT_LAYOUT_FEATURE_TYPE_LIGATURES: [Ligatures](https://developer.apple.com/fonts/TrueType-Reference-Manual/RM09/AppendixF.html#Type1)
* @HB_AAT_LAYOUT_FEATURE_TYPE_CURISVE_CONNECTION: [Cursive Connection](https://developer.apple.com/fonts/TrueType-Reference-Manual/RM09/AppendixF.html#Type2)
* @HB_AAT_LAYOUT_FEATURE_TYPE_CURSIVE_CONNECTION: [Cursive Connection](https://developer.apple.com/fonts/TrueType-Reference-Manual/RM09/AppendixF.html#Type2)
* @HB_AAT_LAYOUT_FEATURE_TYPE_LETTER_CASE: [Letter Case](https://developer.apple.com/fonts/TrueType-Reference-Manual/RM09/AppendixF.html#Type3)
* @HB_AAT_LAYOUT_FEATURE_TYPE_VERTICAL_SUBSTITUTION: [Vertical Substitution](https://developer.apple.com/fonts/TrueType-Reference-Manual/RM09/AppendixF.html#Type4)
* @HB_AAT_LAYOUT_FEATURE_TYPE_LINGUISTIC_REARRANGEMENT: [Linguistic Rearrangement](https://developer.apple.com/fonts/TrueType-Reference-Manual/RM09/AppendixF.html#Type5)
@ -88,7 +88,7 @@ typedef enum
HB_AAT_LAYOUT_FEATURE_TYPE_ALL_TYPOGRAPHIC = 0,
HB_AAT_LAYOUT_FEATURE_TYPE_LIGATURES = 1,
HB_AAT_LAYOUT_FEATURE_TYPE_CURISVE_CONNECTION = 2,
HB_AAT_LAYOUT_FEATURE_TYPE_CURSIVE_CONNECTION = 2,
HB_AAT_LAYOUT_FEATURE_TYPE_LETTER_CASE = 3,
HB_AAT_LAYOUT_FEATURE_TYPE_VERTICAL_SUBSTITUTION = 4,
HB_AAT_LAYOUT_FEATURE_TYPE_LINGUISTIC_REARRANGEMENT = 5,


@ -46,7 +46,9 @@ struct FTStringRange
bool sanitize (hb_sanitize_context_t *c, const void *base) const
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) && (base+tag).sanitize (c, length));
return_trace (c->check_struct (this) &&
hb_barrier () &&
(base+tag).sanitize (c, length));
}
protected:
@ -73,6 +75,7 @@ struct ltag
{
TRACE_SANITIZE (this);
return_trace (likely (c->check_struct (this) &&
hb_barrier () &&
version >= 1 &&
tagRanges.sanitize (c, this)));
}


@ -47,6 +47,8 @@ enum hb_not_found_t
template <typename Type>
struct hb_array_t : hb_iter_with_fallback_t<hb_array_t<Type>, Type&>
{
static constexpr bool realloc_move = true;
/*
* Constructors.
*/


@ -118,12 +118,12 @@ _hb_atomic_ptr_impl_cmplexch (const void **P, const void *O_, const void *N)
*/
#ifndef _hb_compiler_memory_r_barrier
#if defined(__ATOMIC_ACQUIRE) // gcc-like
#define _hb_compiler_memory_r_barrier() asm volatile("": : :"memory")
static inline void _hb_compiler_memory_r_barrier () { asm volatile("": : :"memory"); }
#elif !defined(_MSC_VER)
#include <atomic>
#define _hb_compiler_memory_r_barrier() std::atomic_signal_fence (std::memory_order_acquire)
#else
#define _hb_compiler_memory_r_barrier() do {} while (0)
static inline void _hb_compiler_memory_r_barrier () {}
#endif
#endif
@ -218,5 +218,11 @@ struct hb_atomic_ptr_t
T *v = nullptr;
};
static inline bool hb_barrier ()
{
_hb_compiler_memory_r_barrier ();
return true;
}
#endif /* HB_ATOMIC_HH */
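
This new hb_barrier () helper is what the sanitize chains throughout this commit call: it always returns true, and its only effect is the compiler fence defined above, so field reads chained after it cannot be hoisted before the bounds check that precedes it. A minimal standalone sketch of the idiom (hypothetical Header type and sanitize function, not HarfBuzz code):

#include <atomic>
#include <cstddef>
#include <cstdint>
#include <cstring>

/* Standalone analogue of hb_barrier (): a compiler-only fence that
 * returns true so it composes into '&&' validation chains. */
static inline bool barrier ()
{
  std::atomic_signal_fence (std::memory_order_acquire);
  return true;
}

struct Header { uint16_t version; uint16_t count; }; /* hypothetical */

/* The fence keeps the version/count checks from being speculated ahead
 * of the length test, mirroring check_struct (...) && hb_barrier () && ... */
static bool sanitize (const uint8_t *data, size_t len)
{
  if (len < sizeof (Header)) return false;
  Header h;
  std::memcpy (&h, data, sizeof (h));
  return barrier () &&
         h.version == 1 &&
         len >= sizeof (Header) + h.count * 2u;
}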


@ -359,8 +359,8 @@ struct hb_bit_set_invertible_t
typedef hb_codepoint_t __item_t__;
hb_codepoint_t __item__ () const { return v; }
bool __more__ () const { return v != INVALID; }
void __next__ () { s->next (&v); if (l) l--; }
void __prev__ () { s->previous (&v); }
void __next__ () { s->next (&v); if (likely (l)) l--; }
void __prev__ () { s->previous (&v); l++; }
unsigned __len__ () const { return l; }
iter_t end () const { return iter_t (*s, false); }
bool operator != (const iter_t& o) const
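
The change above is length bookkeeping: __len__ () reports a cached remaining count l, so every cursor move must adjust it in both directions, and __prev__ previously left l untouched. A small hedged sketch of the restored invariant (hypothetical type, not the HarfBuzz iterator):

/* l counts the items remaining at the cursor; next/prev keep it in sync,
 * which is what the __next__/__prev__ change above restores. */
struct length_tracking_iter
{
  unsigned v; /* current item */
  unsigned l; /* remaining length, paired with v */
  void next () { v++; if (l) l--; } /* guard against underflow */
  void prev () { v--; l++; }        /* symmetric update */
};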


@ -56,7 +56,7 @@ HB_BEGIN_DECLS
/**
* HB_SCRIPT_CANADIAN_ABORIGINAL:
*
* Use #HB_SCRIPT_CANADIAN_SYLLABICS instead:
* Use #HB_SCRIPT_CANADIAN_SYLLABICS instead.
*
* Deprecated: 0.9.20
*/
@ -301,6 +301,15 @@ hb_font_get_glyph_shape (hb_font_t *font,
hb_draw_funcs_t *dfuncs, void *draw_data);
/**
* HB_AAT_LAYOUT_FEATURE_TYPE_CURISVE_CONNECTION:
*
* Use #HB_AAT_LAYOUT_FEATURE_TYPE_CURSIVE_CONNECTION instead.
*
* Deprecated: 8.3.0
*/
#define HB_AAT_LAYOUT_FEATURE_TYPE_CURISVE_CONNECTION HB_AAT_LAYOUT_FEATURE_TYPE_CURSIVE_CONNECTION
#endif
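
The misspelled constant stays available through the alias above, so existing callers keep compiling. A hedged compile-time sketch, assuming deprecated symbols are enabled (the default build):

#include <hb.h>
#include <hb-aat.h>

int main ()
{
  /* Both spellings now name AAT feature type 2. */
  return HB_AAT_LAYOUT_FEATURE_TYPE_CURISVE_CONNECTION ==
         HB_AAT_LAYOUT_FEATURE_TYPE_CURSIVE_CONNECTION ? 0 : 1;
}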


@ -225,7 +225,7 @@ _hb_ft_hb_font_check_changed (hb_font_t *font,
* Sets the FT_Load_Glyph load flags for the specified #hb_font_t.
*
* For more information, see
* https://www.freetype.org/freetype2/docs/reference/ft2-base_interface.html#ft_load_xxx
* <https://freetype.org/freetype2/docs/reference/ft2-glyph_retrieval.html#ft_load_xxx>
*
* This function works with #hb_font_t objects created by
* hb_ft_font_create() or hb_ft_font_create_referenced().
@ -253,7 +253,7 @@ hb_ft_font_set_load_flags (hb_font_t *font, int load_flags)
* Fetches the FT_Load_Glyph load flags of the specified #hb_font_t.
*
* For more information, see
* https://www.freetype.org/freetype2/docs/reference/ft2-base_interface.html#ft_load_xxx
* <https://freetype.org/freetype2/docs/reference/ft2-glyph_retrieval.html#ft_load_xxx>
*
* This function works with #hb_font_t objects created by
* hb_ft_font_create() or hb_ft_font_create_referenced().

View file

@ -42,10 +42,34 @@ template <typename K, typename V,
bool minus_one = false>
struct hb_hashmap_t
{
static constexpr bool realloc_move = true;
hb_hashmap_t () { init (); }
~hb_hashmap_t () { fini (); }
hb_hashmap_t (const hb_hashmap_t& o) : hb_hashmap_t () { alloc (o.population); hb_copy (o, *this); }
hb_hashmap_t (const hb_hashmap_t& o) : hb_hashmap_t ()
{
if (unlikely (!o.mask)) return;
if (item_t::is_trivial)
{
items = (item_t *) hb_malloc (sizeof (item_t) * (o.mask + 1));
if (unlikely (!items))
{
successful = false;
return;
}
population = o.population;
occupancy = o.occupancy;
mask = o.mask;
prime = o.prime;
max_chain_length = o.max_chain_length;
memcpy (items, o.items, sizeof (item_t) * (mask + 1));
return;
}
alloc (o.population); hb_copy (o, *this);
}
hb_hashmap_t (hb_hashmap_t&& o) : hb_hashmap_t () { hb_swap (*this, o); }
hb_hashmap_t& operator= (const hb_hashmap_t& o) { reset (); alloc (o.population); hb_copy (o, *this); return *this; }
hb_hashmap_t& operator= (hb_hashmap_t&& o) { hb_swap (*this, o); return *this; }
@ -209,9 +233,10 @@ struct hb_hashmap_t
old_items[i].hash,
std::move (old_items[i].value));
}
if (!item_t::is_trivial)
old_items[i].~item_t ();
}
if (!item_t::is_trivial)
for (unsigned int i = 0; i < old_size; i++)
old_items[i].~item_t ();
hb_free (old_items);
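
Context for the fast path in the new copy constructor: item_t::is_trivial is
HarfBuzz's own trait (roughly "no constructors or destructors to run"), and
when it holds, the whole bucket array can be cloned with one memcpy instead
of per-slot copies. A generic sketch of the same dispatch using the standard
trait (mine, simplified):

    #include <cstdlib>
    #include <cstring>
    #include <new>
    #include <type_traits>

    template <typename Item>
    static Item *clone_slots (const Item *src, unsigned n)
    {
      Item *dst = (Item *) malloc (sizeof (Item) * n);
      if (!dst) return nullptr;
      if (std::is_trivially_copyable<Item>::value)
        memcpy (dst, src, sizeof (Item) * n);  // one bulk copy
      else
        for (unsigned i = 0; i < n; i++)
          new (&dst[i]) Item (src[i]);         // per-slot placement copy
      return dst;
    }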

View file

@ -267,6 +267,7 @@ struct TTCHeader
{
TRACE_SANITIZE (this);
if (unlikely (!u.header.version.sanitize (c))) return_trace (false);
hb_barrier ();
switch (u.header.version.major) {
case 2: /* version 2 is compatible with version 1 */
case 1: return_trace (u.version1.sanitize (c));
@ -302,6 +303,7 @@ struct ResourceRecord
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
offset.sanitize (c, data_base) &&
hb_barrier () &&
get_face (data_base).sanitize (c));
}
@ -337,6 +339,7 @@ struct ResourceTypeRecord
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
hb_barrier () &&
resourcesZ.sanitize (c, type_base,
get_resource_count (),
data_base));
@ -385,6 +388,7 @@ struct ResourceMap
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
hb_barrier () &&
typeList.sanitize (c, this,
&(this+typeList),
data_base));
@ -428,6 +432,7 @@ struct ResourceForkHeader
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
hb_barrier () &&
data.sanitize (c, this, dataLen) &&
map.sanitize (c, this, &(this+data)));
}
@ -508,6 +513,7 @@ struct OpenTypeFontFile
{
TRACE_SANITIZE (this);
if (unlikely (!u.tag.sanitize (c))) return_trace (false);
hb_barrier ();
switch (u.tag) {
case CFFTag: /* All the non-collection tags */
case TrueTag:

View file

@ -309,7 +309,7 @@ struct _hb_has_null<Type, true>
static Type *get_crap () { return &Crap (Type); }
};
template <typename Type, typename OffsetType, bool has_null=true>
template <typename Type, typename OffsetType, typename BaseType=void, bool has_null=true>
struct OffsetTo : Offset<OffsetType, has_null>
{
using target_t = Type;
@ -335,22 +335,22 @@ struct OffsetTo : Offset<OffsetType, has_null>
}
template <typename Base,
hb_enable_if (hb_is_convertible (const Base, const void *))>
hb_enable_if (hb_is_convertible (const Base, const BaseType *))>
friend const Type& operator + (const Base &base, const OffsetTo &offset) { return offset ((const void *) base); }
template <typename Base,
hb_enable_if (hb_is_convertible (const Base, const void *))>
hb_enable_if (hb_is_convertible (const Base, const BaseType *))>
friend const Type& operator + (const OffsetTo &offset, const Base &base) { return offset ((const void *) base); }
template <typename Base,
hb_enable_if (hb_is_convertible (Base, void *))>
hb_enable_if (hb_is_convertible (Base, BaseType *))>
friend Type& operator + (Base &&base, OffsetTo &offset) { return offset ((void *) base); }
template <typename Base,
hb_enable_if (hb_is_convertible (Base, void *))>
hb_enable_if (hb_is_convertible (Base, BaseType *))>
friend Type& operator + (OffsetTo &offset, Base &&base) { return offset ((void *) base); }
template <typename ...Ts>
template <typename Base, typename ...Ts>
bool serialize_subset (hb_subset_context_t *c, const OffsetTo& src,
const void *src_base, Ts&&... ds)
const Base *src_base, Ts&&... ds)
{
*this = 0;
if (src.is_null ())
@ -414,10 +414,11 @@ struct OffsetTo : Offset<OffsetType, has_null>
const void *src_base, unsigned dst_bias = 0)
{ return serialize_copy (c, src, src_base, dst_bias, hb_serialize_context_t::Head); }
bool sanitize_shallow (hb_sanitize_context_t *c, const void *base) const
bool sanitize_shallow (hb_sanitize_context_t *c, const BaseType *base) const
{
TRACE_SANITIZE (this);
if (unlikely (!c->check_struct (this))) return_trace (false);
hb_barrier ();
//if (unlikely (this->is_null ())) return_trace (true);
if (unlikely ((const char *) base + (unsigned) *this < (const char *) base)) return_trace (false);
return_trace (true);
@ -427,10 +428,11 @@ struct OffsetTo : Offset<OffsetType, has_null>
#ifndef HB_OPTIMIZE_SIZE
HB_ALWAYS_INLINE
#endif
bool sanitize (hb_sanitize_context_t *c, const void *base, Ts&&... ds) const
bool sanitize (hb_sanitize_context_t *c, const BaseType *base, Ts&&... ds) const
{
TRACE_SANITIZE (this);
return_trace (sanitize_shallow (c, base) &&
hb_barrier () &&
(this->is_null () ||
c->dispatch (StructAtOffset<Type> (base, *this), std::forward<Ts> (ds)...) ||
neuter (c)));
@ -445,14 +447,14 @@ struct OffsetTo : Offset<OffsetType, has_null>
DEFINE_SIZE_STATIC (sizeof (OffsetType));
};
/* Partial specializations. */
template <typename Type, bool has_null=true> using Offset16To = OffsetTo<Type, HBUINT16, has_null>;
template <typename Type, bool has_null=true> using Offset24To = OffsetTo<Type, HBUINT24, has_null>;
template <typename Type, bool has_null=true> using Offset32To = OffsetTo<Type, HBUINT32, has_null>;
template <typename Type, typename BaseType=void, bool has_null=true> using Offset16To = OffsetTo<Type, HBUINT16, BaseType, has_null>;
template <typename Type, typename BaseType=void, bool has_null=true> using Offset24To = OffsetTo<Type, HBUINT24, BaseType, has_null>;
template <typename Type, typename BaseType=void, bool has_null=true> using Offset32To = OffsetTo<Type, HBUINT32, BaseType, has_null>;
template <typename Type, typename OffsetType> using NNOffsetTo = OffsetTo<Type, OffsetType, false>;
template <typename Type> using NNOffset16To = Offset16To<Type, false>;
template <typename Type> using NNOffset24To = Offset24To<Type, false>;
template <typename Type> using NNOffset32To = Offset32To<Type, false>;
template <typename Type, typename OffsetType, typename BaseType=void> using NNOffsetTo = OffsetTo<Type, OffsetType, BaseType, false>;
template <typename Type, typename BaseType=void> using NNOffset16To = Offset16To<Type, BaseType, false>;
template <typename Type, typename BaseType=void> using NNOffset24To = Offset24To<Type, BaseType, false>;
template <typename Type, typename BaseType=void> using NNOffset32To = Offset32To<Type, BaseType, false>;
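
The point of threading BaseType through these templates: an offset is only
meaningful relative to one specific table, and encoding that table type turns
"resolved against the wrong base" from a silent bad dereference into a
compile error. Simplified illustration (all names mine, not HarfBuzz's):

    struct TableA {};
    struct TableB {};
    struct Coverage {};

    template <typename T, typename Base = void>
    struct offset_to
    {
      unsigned offset;
      const T &resolve (const Base *base) const
      { return *(const T *) ((const char *) base + offset); }
    };

    void demo (const TableA *a, const TableB *b)
    {
      offset_to<Coverage, TableA> o {4};
      (void) o.resolve (a);    // ok: offset declared relative to TableA
      // (void) o.resolve (b); // compile error: TableB* is not TableA*
      (void) b;
    }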
/*
@ -536,6 +538,7 @@ struct UnsizedArrayOf
TRACE_SANITIZE (this);
if (unlikely (!sanitize_shallow (c, count))) return_trace (false);
if (!sizeof... (Ts) && hb_is_trivially_copyable(Type)) return_trace (true);
hb_barrier ();
for (unsigned int i = 0; i < count; i++)
if (unlikely (!c->dispatch (arrayZ[i], std::forward<Ts> (ds)...)))
return_trace (false);
@ -555,17 +558,17 @@ struct UnsizedArrayOf
};
/* Unsized array of offset's */
template <typename Type, typename OffsetType, bool has_null=true>
using UnsizedArray16OfOffsetTo = UnsizedArrayOf<OffsetTo<Type, OffsetType, has_null>>;
template <typename Type, typename OffsetType, typename BaseType=void, bool has_null=true>
using UnsizedArray16OfOffsetTo = UnsizedArrayOf<OffsetTo<Type, OffsetType, BaseType, has_null>>;
/* Unsized array of offsets relative to the beginning of the array itself. */
template <typename Type, typename OffsetType, bool has_null=true>
struct UnsizedListOfOffset16To : UnsizedArray16OfOffsetTo<Type, OffsetType, has_null>
template <typename Type, typename OffsetType, typename BaseType=void, bool has_null=true>
struct UnsizedListOfOffset16To : UnsizedArray16OfOffsetTo<Type, OffsetType, BaseType, has_null>
{
const Type& operator [] (int i_) const
{
unsigned int i = (unsigned int) i_;
const OffsetTo<Type, OffsetType, has_null> *p = &this->arrayZ[i];
const OffsetTo<Type, OffsetType, BaseType, has_null> *p = &this->arrayZ[i];
if (unlikely ((const void *) p < (const void *) this->arrayZ)) return Null (Type); /* Overflowed. */
_hb_compiler_memory_r_barrier ();
return this+*p;
@ -573,7 +576,7 @@ struct UnsizedListOfOffset16To : UnsizedArray16OfOffsetTo<Type, OffsetType, has_
Type& operator [] (int i_)
{
unsigned int i = (unsigned int) i_;
const OffsetTo<Type, OffsetType, has_null> *p = &this->arrayZ[i];
const OffsetTo<Type, OffsetType, BaseType, has_null> *p = &this->arrayZ[i];
if (unlikely ((const void *) p < (const void *) this->arrayZ)) return Crap (Type); /* Overflowed. */
_hb_compiler_memory_r_barrier ();
return this+*p;
@ -583,7 +586,7 @@ struct UnsizedListOfOffset16To : UnsizedArray16OfOffsetTo<Type, OffsetType, has_
bool sanitize (hb_sanitize_context_t *c, unsigned int count, Ts&&... ds) const
{
TRACE_SANITIZE (this);
return_trace ((UnsizedArray16OfOffsetTo<Type, OffsetType, has_null>
return_trace ((UnsizedArray16OfOffsetTo<Type, OffsetType, BaseType, has_null>
::sanitize (c, count, this, std::forward<Ts> (ds)...)));
}
};
@ -725,6 +728,7 @@ struct ArrayOf
TRACE_SANITIZE (this);
if (unlikely (!sanitize_shallow (c))) return_trace (false);
if (!sizeof... (Ts) && hb_is_trivially_copyable(Type)) return_trace (true);
hb_barrier ();
unsigned int count = len;
for (unsigned int i = 0; i < count; i++)
if (unlikely (!c->dispatch (arrayZ[i], std::forward<Ts> (ds)...)))
@ -735,7 +739,9 @@ struct ArrayOf
bool sanitize_shallow (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (len.sanitize (c) && c->check_array_sized (arrayZ, len, sizeof (LenType)));
return_trace (len.sanitize (c) &&
hb_barrier () &&
c->check_array_sized (arrayZ, len, sizeof (LenType)));
}
public:
@ -866,6 +872,7 @@ struct HeadlessArrayOf
TRACE_SANITIZE (this);
if (unlikely (!sanitize_shallow (c))) return_trace (false);
if (!sizeof... (Ts) && hb_is_trivially_copyable(Type)) return_trace (true);
hb_barrier ();
unsigned int count = get_length ();
for (unsigned int i = 0; i < count; i++)
if (unlikely (!c->dispatch (arrayZ[i], std::forward<Ts> (ds)...)))
@ -878,6 +885,7 @@ struct HeadlessArrayOf
{
TRACE_SANITIZE (this);
return_trace (lenP1.sanitize (c) &&
hb_barrier () &&
(!lenP1 || c->check_array_sized (arrayZ, lenP1 - 1, sizeof (LenType))));
}
@ -919,6 +927,7 @@ struct ArrayOfM1
TRACE_SANITIZE (this);
if (unlikely (!sanitize_shallow (c))) return_trace (false);
if (!sizeof... (Ts) && hb_is_trivially_copyable(Type)) return_trace (true);
hb_barrier ();
unsigned int count = lenM1 + 1;
for (unsigned int i = 0; i < count; i++)
if (unlikely (!c->dispatch (arrayZ[i], std::forward<Ts> (ds)...)))
@ -931,6 +940,7 @@ struct ArrayOfM1
{
TRACE_SANITIZE (this);
return_trace (lenM1.sanitize (c) &&
hb_barrier () &&
(c->check_array_sized (arrayZ, lenM1 + 1, sizeof (LenType))));
}
@ -1104,6 +1114,7 @@ struct VarSizedBinSearchArrayOf
TRACE_SANITIZE (this);
if (unlikely (!sanitize_shallow (c))) return_trace (false);
if (!sizeof... (Ts) && hb_is_trivially_copyable(Type)) return_trace (true);
hb_barrier ();
unsigned int count = get_length ();
for (unsigned int i = 0; i < count; i++)
if (unlikely (!(*this)[i].sanitize (c, std::forward<Ts> (ds)...)))
@ -1130,6 +1141,7 @@ struct VarSizedBinSearchArrayOf
{
TRACE_SANITIZE (this);
return_trace (header.sanitize (c) &&
hb_barrier () &&
Type::static_size <= header.unitSize &&
c->check_range (bytesZ.arrayZ,
header.nUnits,

View file

@ -78,7 +78,8 @@ struct CFFIndex
hb_requires (hb_is_iterable (Iterable))>
bool serialize (hb_serialize_context_t *c,
const Iterable &iterable,
const unsigned *p_data_size = nullptr)
const unsigned *p_data_size = nullptr,
unsigned min_off_size = 0)
{
TRACE_SERIALIZE (this);
unsigned data_size;
@ -88,7 +89,7 @@ struct CFFIndex
total_size (iterable, &data_size);
auto it = hb_iter (iterable);
if (unlikely (!serialize_header (c, +it, data_size))) return_trace (false);
if (unlikely (!serialize_header (c, +it, data_size, min_off_size))) return_trace (false);
unsigned char *ret = c->allocate_size<unsigned char> (data_size, false);
if (unlikely (!ret)) return_trace (false);
for (const auto &_ : +it)
@ -111,11 +112,13 @@ struct CFFIndex
hb_requires (hb_is_iterator (Iterator))>
bool serialize_header (hb_serialize_context_t *c,
Iterator it,
unsigned data_size)
unsigned data_size,
unsigned min_off_size = 0)
{
TRACE_SERIALIZE (this);
unsigned off_size = (hb_bit_storage (data_size + 1) + 7) / 8;
off_size = hb_max(min_off_size, off_size);
/* serialize CFFIndex header */
if (unlikely (!c->extend_min (this))) return_trace (false);
@ -195,7 +198,7 @@ struct CFFIndex
template <typename Iterable,
hb_requires (hb_is_iterable (Iterable))>
static unsigned total_size (const Iterable &iterable, unsigned *data_size = nullptr)
static unsigned total_size (const Iterable &iterable, unsigned *data_size = nullptr, unsigned min_off_size = 0)
{
auto it = + hb_iter (iterable);
if (!it)
@ -211,6 +214,7 @@ struct CFFIndex
if (data_size) *data_size = total;
unsigned off_size = (hb_bit_storage (total + 1) + 7) / 8;
off_size = hb_max(min_off_size, off_size);
return min_size + HBUINT8::static_size + (hb_len (it) + 1) * off_size + total;
}
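
Worked numbers for the new min_off_size parameter (example values mine): a
CFF INDEX stores each offset in off_size bytes, normally the smallest width
that can address data_size + 1; min_off_size only raises that floor, which
the HB_SUBSET_FLAGS_IFTB_REQUIREMENTS path later in this commit uses to force
4-byte offsets:

    #include <algorithm>
    #include <cstdio>

    // Like hb_bit_storage: number of bits needed to represent v.
    static unsigned bit_storage (unsigned v)
    { unsigned n = 0; while (v) { n++; v >>= 1; } return n; }

    int main ()
    {
      unsigned data_size = 300;
      unsigned off_size = (bit_storage (data_size + 1) + 7) / 8; // (9+7)/8 = 2
      printf ("%u\n", std::max (4u, off_size)); // min_off_size = 4 -> 4
      return 0;
    }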
@ -274,8 +278,10 @@ struct CFFIndex
{
TRACE_SANITIZE (this);
return_trace (likely (c->check_struct (this) &&
hb_barrier () &&
(count == 0 || /* empty INDEX */
(count < count + 1u &&
hb_barrier () &&
c->check_struct (&offSize) && offSize >= 1 && offSize <= 4 &&
c->check_array (offsets, offSize, count + 1u) &&
c->check_array ((const HBUINT8*) data_base (), 1, offset_at (count))))));
@ -412,6 +418,7 @@ struct FDSelect0 {
TRACE_SANITIZE (this);
if (unlikely (!(c->check_struct (this))))
return_trace (false);
hb_barrier ();
if (unlikely (!c->check_array (fds, c->get_num_glyphs ())))
return_trace (false);
@ -438,7 +445,9 @@ struct FDSelect3_4_Range
bool sanitize (hb_sanitize_context_t *c, const void * /*nullptr*/, unsigned int fdcount) const
{
TRACE_SANITIZE (this);
return_trace (first < c->get_num_glyphs () && (fd < fdcount));
return_trace (c->check_struct (this) &&
hb_barrier () &&
first < c->get_num_glyphs () && (fd < fdcount));
}
GID_TYPE first;
@ -456,15 +465,20 @@ struct FDSelect3_4
bool sanitize (hb_sanitize_context_t *c, unsigned int fdcount) const
{
TRACE_SANITIZE (this);
if (unlikely (!c->check_struct (this) || !ranges.sanitize (c, nullptr, fdcount) ||
(nRanges () == 0) || ranges[0].first != 0))
if (unlikely (!(c->check_struct (this) &&
ranges.sanitize (c, nullptr, fdcount) &&
hb_barrier () &&
(nRanges () != 0) &&
ranges[0].first == 0)))
return_trace (false);
for (unsigned int i = 1; i < nRanges (); i++)
if (unlikely (ranges[i - 1].first >= ranges[i].first))
return_trace (false);
if (unlikely (!sentinel().sanitize (c) || (sentinel() != c->get_num_glyphs ())))
if (unlikely (!(sentinel().sanitize (c) &&
hb_barrier () &&
(sentinel() == c->get_num_glyphs ()))))
return_trace (false);
return_trace (true);
@ -559,6 +573,7 @@ struct FDSelect
TRACE_SANITIZE (this);
if (unlikely (!c->check_struct (this)))
return_trace (false);
hb_barrier ();
switch (format)
{

View file

@ -275,6 +275,7 @@ struct Encoding
TRACE_SANITIZE (this);
if (unlikely (!c->check_struct (this)))
return_trace (false);
hb_barrier ();
switch (table_format ())
{
@ -376,13 +377,13 @@ struct Charset1_2 {
bool sanitize (hb_sanitize_context_t *c, unsigned int num_glyphs, unsigned *num_charset_entries) const
{
TRACE_SANITIZE (this);
if (unlikely (!c->check_struct (this)))
return_trace (false);
num_glyphs--;
unsigned i;
for (i = 0; num_glyphs > 0; i++)
{
if (unlikely (!ranges[i].sanitize (c) || (num_glyphs < ranges[i].nLeft + 1)))
if (unlikely (!(ranges[i].sanitize (c) &&
hb_barrier () &&
(num_glyphs >= ranges[i].nLeft + 1))))
return_trace (false);
num_glyphs -= (ranges[i].nLeft + 1);
}
@ -615,6 +616,7 @@ struct Charset
TRACE_SANITIZE (this);
if (unlikely (!c->check_struct (this)))
return_trace (false);
hb_barrier ();
switch (format)
{
@ -1055,6 +1057,7 @@ struct cff1
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
hb_barrier () &&
likely (version.major == 1));
}
@ -1085,14 +1088,17 @@ struct cff1
nameIndex = &cff->nameIndex (cff);
if ((nameIndex == &Null (CFF1NameIndex)) || !nameIndex->sanitize (&sc))
goto fail;
hb_barrier ();
topDictIndex = &StructAtOffset<CFF1TopDictIndex> (nameIndex, nameIndex->get_size ());
if ((topDictIndex == &Null (CFF1TopDictIndex)) || !topDictIndex->sanitize (&sc) || (topDictIndex->count == 0))
goto fail;
hb_barrier ();
{ /* parse top dict */
const hb_ubytes_t topDictStr = (*topDictIndex)[0];
if (unlikely (!topDictStr.sanitize (&sc))) goto fail;
hb_barrier ();
cff1_top_dict_interp_env_t env (topDictStr);
cff1_top_dict_interpreter_t top_interp (env);
if (unlikely (!top_interp.interpret (topDict))) goto fail;
@ -1104,6 +1110,7 @@ struct cff1
{
charset = &StructAtOffsetOrNull<Charset> (cff, topDict.CharsetOffset);
if (unlikely ((charset == &Null (Charset)) || !charset->sanitize (&sc, &num_charset_entries))) goto fail;
hb_barrier ();
}
fdCount = 1;
@ -1114,6 +1121,7 @@ struct cff1
if (unlikely ((fdArray == &Null (CFF1FDArray)) || !fdArray->sanitize (&sc) ||
(fdSelect == &Null (CFF1FDSelect)) || !fdSelect->sanitize (&sc, fdArray->count)))
goto fail;
hb_barrier ();
fdCount = fdArray->count;
}
@ -1134,21 +1142,25 @@ struct cff1
{
encoding = &StructAtOffsetOrNull<Encoding> (cff, topDict.EncodingOffset);
if (unlikely ((encoding == &Null (Encoding)) || !encoding->sanitize (&sc))) goto fail;
hb_barrier ();
}
}
stringIndex = &StructAtOffset<CFF1StringIndex> (topDictIndex, topDictIndex->get_size ());
if ((stringIndex == &Null (CFF1StringIndex)) || !stringIndex->sanitize (&sc))
goto fail;
hb_barrier ();
globalSubrs = &StructAtOffset<CFF1Subrs> (stringIndex, stringIndex->get_size ());
if ((globalSubrs != &Null (CFF1Subrs)) && !globalSubrs->sanitize (&sc))
goto fail;
hb_barrier ();
charStrings = &StructAtOffsetOrNull<CFF1CharStrings> (cff, topDict.charStringsOffset);
if ((charStrings == &Null (CFF1CharStrings)) || unlikely (!charStrings->sanitize (&sc)))
goto fail;
hb_barrier ();
num_glyphs = charStrings->count;
if (num_glyphs != sc.get_num_glyphs ())
@ -1166,6 +1178,7 @@ struct cff1
{
hb_ubytes_t fontDictStr = (*fdArray)[i];
if (unlikely (!fontDictStr.sanitize (&sc))) goto fail;
hb_barrier ();
cff1_font_dict_values_t *font;
cff1_top_dict_interp_env_t env (fontDictStr);
cff1_font_dict_interpreter_t font_interp (env);
@ -1177,6 +1190,7 @@ struct cff1
PRIVDICTVAL *priv = &privateDicts[i];
const hb_ubytes_t privDictStr = StructAtOffset<UnsizedByteStr> (cff, font->privateDictInfo.offset).as_ubytes (font->privateDictInfo.size);
if (unlikely (!privDictStr.sanitize (&sc))) goto fail;
hb_barrier ();
num_interp_env_t env2 (privDictStr);
dict_interpreter_t<PRIVOPSET, PRIVDICTVAL> priv_interp (env2);
priv->init ();
@ -1186,6 +1200,7 @@ struct cff1
if (priv->localSubrs != &Null (CFF1Subrs) &&
unlikely (!priv->localSubrs->sanitize (&sc)))
goto fail;
hb_barrier ();
}
}
else /* non-CID */
@ -1195,6 +1210,7 @@ struct cff1
const hb_ubytes_t privDictStr = StructAtOffset<UnsizedByteStr> (cff, font->privateDictInfo.offset).as_ubytes (font->privateDictInfo.size);
if (unlikely (!privDictStr.sanitize (&sc))) goto fail;
hb_barrier ();
num_interp_env_t env (privDictStr);
dict_interpreter_t<PRIVOPSET, PRIVDICTVAL> priv_interp (env);
priv->init ();
@ -1204,6 +1220,7 @@ struct cff1
if (priv->localSubrs != &Null (CFF1Subrs) &&
unlikely (!priv->localSubrs->sanitize (&sc)))
goto fail;
hb_barrier ();
}
return;

View file

@ -90,6 +90,7 @@ struct CFF2FDSelect
TRACE_SANITIZE (this);
if (unlikely (!c->check_struct (this)))
return_trace (false);
hb_barrier ();
switch (format)
{
@ -115,7 +116,10 @@ struct CFF2VariationStore
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (likely (c->check_struct (this)) && c->check_range (&varStore, size) && varStore.sanitize (c));
return_trace (c->check_struct (this) &&
hb_barrier () &&
c->check_range (&varStore, size) &&
varStore.sanitize (c));
}
bool serialize (hb_serialize_context_t *c, const CFF2VariationStore *varStore)
@ -384,6 +388,7 @@ struct cff2
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
hb_barrier () &&
likely (version.major == 2));
}
@ -414,6 +419,7 @@ struct cff2
{ /* parse top dict */
hb_ubytes_t topDictStr = (cff2 + cff2->topDict).as_ubytes (cff2->topDictSize);
if (unlikely (!topDictStr.sanitize (&sc))) goto fail;
hb_barrier ();
num_interp_env_t env (topDictStr);
cff2_top_dict_interpreter_t top_interp (env);
topDict.init ();
@ -430,6 +436,7 @@ struct cff2
(charStrings == &Null (CFF2CharStrings)) || unlikely (!charStrings->sanitize (&sc)) ||
(globalSubrs == &Null (CFF2Subrs)) || unlikely (!globalSubrs->sanitize (&sc)) ||
(fdArray == &Null (CFF2FDArray)) || unlikely (!fdArray->sanitize (&sc)) ||
!hb_barrier () ||
(((fdSelect != &Null (CFF2FDSelect)) && unlikely (!fdSelect->sanitize (&sc, fdArray->count)))))
goto fail;
@ -446,6 +453,7 @@ struct cff2
{
const hb_ubytes_t fontDictStr = (*fdArray)[i];
if (unlikely (!fontDictStr.sanitize (&sc))) goto fail;
hb_barrier ();
cff2_font_dict_values_t *font;
num_interp_env_t env (fontDictStr);
cff2_font_dict_interpreter_t font_interp (env);
@ -456,6 +464,7 @@ struct cff2
const hb_ubytes_t privDictStr = StructAtOffsetOrNull<UnsizedByteStr> (cff2, font->privateDictInfo.offset).as_ubytes (font->privateDictInfo.size);
if (unlikely (!privDictStr.sanitize (&sc))) goto fail;
hb_barrier ();
cff2_priv_dict_interp_env_t env2 (privDictStr);
dict_interpreter_t<PRIVOPSET, PRIVDICTVAL, cff2_priv_dict_interp_env_t> priv_interp (env2);
privateDicts[i].init ();
@ -465,6 +474,7 @@ struct cff2
if (privateDicts[i].localSubrs != &Null (CFF2Subrs) &&
unlikely (!privateDicts[i].localSubrs->sanitize (&sc)))
goto fail;
hb_barrier ();
}
return;

View file

@ -556,6 +556,7 @@ struct CmapSubtableFormat4
TRACE_SANITIZE (this);
if (unlikely (!c->check_struct (this)))
return_trace (false);
hb_barrier ();
if (unlikely (!c->check_range (this, length)))
{
@ -742,10 +743,11 @@ struct CmapSubtableLongSegmented
unsigned num_glyphs) const
{
hb_codepoint_t last_end = 0;
for (unsigned i = 0; i < this->groups.len; i++)
unsigned count = this->groups.len;
for (unsigned i = 0; i < count; i++)
{
hb_codepoint_t start = this->groups[i].startCharCode;
hb_codepoint_t end = hb_min ((hb_codepoint_t) this->groups[i].endCharCode,
hb_codepoint_t start = this->groups.arrayZ[i].startCharCode;
hb_codepoint_t end = hb_min ((hb_codepoint_t) this->groups.arrayZ[i].endCharCode,
(hb_codepoint_t) HB_UNICODE_MAX);
if (unlikely (start > end || start < last_end)) {
// Range is not in order and is invalid, skip it.
@ -754,7 +756,7 @@ struct CmapSubtableLongSegmented
last_end = end;
hb_codepoint_t gid = this->groups[i].glyphID;
hb_codepoint_t gid = this->groups.arrayZ[i].glyphID;
if (!gid)
{
if (T::formatNumber == 13) continue;
@ -767,9 +769,9 @@ struct CmapSubtableLongSegmented
mapping->alloc (mapping->get_population () + end - start + 1);
unicodes->add_range (start, end);
for (unsigned cp = start; cp <= end; cp++)
{
unicodes->add (cp);
mapping->set (cp, gid);
gid += T::increment;
}
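
The loop rewrite above is a recurring micro-optimization in this update:
hoist the length out of the loop and index the raw arrayZ storage, skipping
the bounds-checked operator[] on every iteration of a hot loop. Generic
shape (not HarfBuzz API):

    struct int_span_t
    {
      const int *arrayZ;
      unsigned len;
      int operator[] (unsigned i) const { return i < len ? arrayZ[i] : 0; }
    };

    static long sum (const int_span_t &s)
    {
      long total = 0;
      unsigned count = s.len;         // read once, not per iteration
      for (unsigned i = 0; i < count; i++)
        total += s.arrayZ[i];         // raw access; bounds known from count
      return total;
    }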
@ -1427,6 +1429,7 @@ struct CmapSubtable
{
TRACE_SANITIZE (this);
if (!u.format.sanitize (c)) return_trace (false);
hb_barrier ();
switch (u.format) {
case 0: return_trace (u.format0 .sanitize (c));
case 4: return_trace (u.format4 .sanitize (c));
@ -2060,6 +2063,7 @@ struct cmap
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
hb_barrier () &&
likely (version == 0) &&
encodingRecord.sanitize (c, this));
}

View file

@ -71,6 +71,7 @@ struct DeviceRecord
{
TRACE_SANITIZE (this);
return_trace (likely (c->check_struct (this) &&
hb_barrier () &&
c->check_range (this, sizeDeviceRecord)));
}
@ -152,6 +153,7 @@ struct hdmx
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
hb_barrier () &&
!hb_unsigned_mul_overflows (numRecords, sizeDeviceRecord) &&
min_size + numRecords * sizeDeviceRecord > numRecords * sizeDeviceRecord &&
sizeDeviceRecord >= DeviceRecord::min_size &&

View file

@ -103,6 +103,7 @@ struct head
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
hb_barrier () &&
version.major == 1 &&
magicNumber == 0x5F0F3CF5u);
}

View file

@ -50,7 +50,9 @@ struct _hea
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) && likely (version.major == 1));
return_trace (c->check_struct (this) &&
hb_barrier () &&
likely (version.major == 1));
}
public:

View file

@ -79,6 +79,7 @@ struct KernSubTableFormat3
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
hb_barrier () &&
c->check_range (kernValueZ,
kernValueCount * sizeof (FWORD) +
glyphCount * 2 +
@ -147,9 +148,10 @@ struct KernSubTable
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
if (unlikely (!u.header.sanitize (c) ||
u.header.length < u.header.min_size ||
!c->check_range (this, u.header.length))) return_trace (false);
if (unlikely (!(u.header.sanitize (c) &&
hb_barrier () &&
u.header.length >= u.header.min_size &&
c->check_range (this, u.header.length)))) return_trace (false);
return_trace (dispatch (c));
}
@ -337,6 +339,7 @@ struct kern
{
TRACE_SANITIZE (this);
if (!u.version32.sanitize (c)) return_trace (false);
hb_barrier ();
return_trace (dispatch (c));
}

View file

@ -135,6 +135,7 @@ struct BaseCoord
{
TRACE_SANITIZE (this);
if (unlikely (!u.format.sanitize (c))) return_trace (false);
hb_barrier ();
switch (u.format) {
case 1: return_trace (u.format1.sanitize (c));
case 2: return_trace (u.format2.sanitize (c));
@ -496,6 +497,7 @@ struct BASE
{
TRACE_SANITIZE (this);
return_trace (likely (c->check_struct (this) &&
hb_barrier () &&
likely (version.major == 1) &&
hAxis.sanitize (c, this) &&
vAxis.sanitize (c, this) &&

View file

@ -64,7 +64,7 @@ struct hb_collect_feature_substitutes_with_var_context_t
const hb_hashmap_t<hb_tag_t, Triple> *axes_location;
hb_hashmap_t<unsigned, hb::shared_ptr<hb_set_t>> *record_cond_idx_map;
hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map;
bool& insert_catch_all_feature_variation_record;
hb_set_t& catch_all_record_feature_idxes;
// not stored in subset_plan
hb_set_t *feature_indices;
@ -142,6 +142,8 @@ struct hb_subset_layout_context_t :
const hb_map_t *feature_index_map;
const hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map;
hb_hashmap_t<unsigned, hb::shared_ptr<hb_set_t>> *feature_record_cond_idx_map;
const hb_set_t *catch_all_record_feature_idxes;
const hb_hashmap_t<unsigned, hb_pair_t<const void*, const void*>> *feature_idx_tag_map;
unsigned cur_script_index;
unsigned cur_feature_var_record_idx;
@ -164,6 +166,8 @@ struct hb_subset_layout_context_t :
feature_index_map = &c_->plan->gsub_features;
feature_substitutes_map = &c_->plan->gsub_feature_substitutes_map;
feature_record_cond_idx_map = c_->plan->user_axes_location.is_empty () ? nullptr : &c_->plan->gsub_feature_record_cond_idx_map;
catch_all_record_feature_idxes = &c_->plan->gsub_old_features;
feature_idx_tag_map = &c_->plan->gsub_old_feature_idx_tag_map;
}
else
{
@ -172,6 +176,8 @@ struct hb_subset_layout_context_t :
feature_index_map = &c_->plan->gpos_features;
feature_substitutes_map = &c_->plan->gpos_feature_substitutes_map;
feature_record_cond_idx_map = c_->plan->user_axes_location.is_empty () ? nullptr : &c_->plan->gpos_feature_record_cond_idx_map;
catch_all_record_feature_idxes = &c_->plan->gpos_old_features;
feature_idx_tag_map = &c_->plan->gpos_old_feature_idx_tag_map;
}
}
@ -454,6 +460,7 @@ struct FeatureParamsSize
{
TRACE_SANITIZE (this);
if (unlikely (!c->check_struct (this))) return_trace (false);
hb_barrier ();
/* This subtable has some "history", if you will. Some earlier versions of
* Adobe tools calculated the offset of the FeatureParams subtable from the
@ -820,6 +827,7 @@ struct Feature
TRACE_SANITIZE (this);
if (unlikely (!(c->check_struct (this) && lookupIndex.sanitize (c))))
return_trace (false);
hb_barrier ();
/* Some earlier versions of Adobe tools calculated the offset of the
* FeatureParams subtable from the beginning of the FeatureList table!
@ -838,6 +846,7 @@ struct Feature
unsigned int orig_offset = featureParams;
if (unlikely (!featureParams.sanitize (c, this, closure ? closure->tag : HB_TAG_NONE)))
return_trace (false);
hb_barrier ();
if (featureParams == 0 && closure &&
closure->tag == HB_TAG ('s','i','z','e') &&
@ -900,7 +909,8 @@ struct Record
{
TRACE_SANITIZE (this);
const Record_sanitize_closure_t closure = {tag, base};
return_trace (c->check_struct (this) && offset.sanitize (c, base, &closure));
return_trace (c->check_struct (this) &&
offset.sanitize (c, base, &closure));
}
Tag tag; /* 4-byte Tag identifier */
@ -1371,10 +1381,20 @@ struct Lookup
if (lookupFlag & LookupFlag::UseMarkFilteringSet)
{
if (unlikely (!c->serializer->extend (out))) return_trace (false);
const HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
HBUINT16 &outMarkFilteringSet = StructAfter<HBUINT16> (out->subTable);
outMarkFilteringSet = markFilteringSet;
hb_codepoint_t *idx;
if (!c->plan->used_mark_sets_map.has (markFilteringSet, &idx))
{
unsigned new_flag = lookupFlag;
new_flag &= ~LookupFlag::UseMarkFilteringSet;
out->lookupFlag = new_flag;
}
else
{
if (unlikely (!c->serializer->extend (out))) return_trace (false);
HBUINT16 &outMarkFilteringSet = StructAfter<HBUINT16> (out->subTable);
outMarkFilteringSet = *idx;
}
}
// Always keep the lookup even if it's empty. The rest of layout subsetting depends on lookup
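
What the new branch decides, per lookup: if the subset plan retained the
GDEF mark-filtering set this lookup references, write the remapped set
index; otherwise clear UseMarkFilteringSet so the lookup stops pointing at a
set that no longer exists. Condensed standalone sketch (types simplified,
mine):

    #include <unordered_map>

    enum { UseMarkFilteringSet = 0x0010u };

    // Returns the new lookup flag; writes the remapped index when the set is kept.
    static unsigned remap_mark_set (unsigned lookup_flag,
                                    unsigned old_set_index,
                                    const std::unordered_map<unsigned, unsigned> &used_sets,
                                    unsigned *new_set_index /* OUT */)
    {
      if (!(lookup_flag & UseMarkFilteringSet))
        return lookup_flag;
      auto it = used_sets.find (old_set_index);
      if (it == used_sets.end ())
        return lookup_flag & ~UseMarkFilteringSet; // set dropped: clear flag
      *new_set_index = it->second;                 // set kept: remap index
      return lookup_flag;
    }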
@ -1391,6 +1411,7 @@ struct Lookup
{
TRACE_SANITIZE (this);
if (!(c->check_struct (this) && subTable.sanitize (c))) return_trace (false);
hb_barrier ();
unsigned subtables = get_subtable_count ();
if (unlikely (!c->visit_subtables (subtables))) return_trace (false);
@ -1406,6 +1427,8 @@ struct Lookup
if (unlikely (get_type () == TSubTable::Extension && !c->get_edit_count ()))
{
hb_barrier ();
/* The spec says all subtables of an Extension lookup should
* have the same type, which shall not be the Extension type
* itself (but we already checked for that).
@ -2156,6 +2179,7 @@ struct ClassDef
{
TRACE_SANITIZE (this);
if (!u.format.sanitize (c)) return_trace (false);
hb_barrier ();
switch (u.format) {
case 1: return_trace (u.format1.sanitize (c));
case 2: return_trace (u.format2.sanitize (c));
@ -2534,7 +2558,9 @@ struct VarRegionList
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) && axesZ.sanitize (c, axisCount * regionCount));
return_trace (c->check_struct (this) &&
hb_barrier () &&
axesZ.sanitize (c, axisCount * regionCount));
}
bool serialize (hb_serialize_context_t *c,
@ -2728,6 +2754,7 @@ struct VarData
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
regionIndices.sanitize (c) &&
hb_barrier () &&
wordCount () <= regionIndices.len &&
c->check_range (get_delta_bytes (),
itemCount,
@ -3077,6 +3104,7 @@ struct VariationStore
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
hb_barrier () &&
format == 1 &&
regions.sanitize (c, this) &&
dataSets.sanitize (c, this));
@ -3330,8 +3358,12 @@ struct ConditionFormat1
Triple axis_range (-1.f, 0.f, 1.f);
Triple *axis_limit;
bool axis_set_by_user = false;
if (c->axes_location->has (axis_tag, &axis_limit))
{
axis_range = *axis_limit;
axis_set_by_user = true;
}
float axis_min_val = axis_range.minimum;
float axis_default_val = axis_range.middle;
@ -3350,8 +3382,7 @@ struct ConditionFormat1
return DROP_RECORD_WITH_VAR;
//condition met and axis pinned, drop the condition
if (c->axes_location->has (axis_tag) &&
c->axes_location->get (axis_tag).is_point ())
if (axis_set_by_user && axis_range.is_point ())
return DROP_COND_WITH_VAR;
if (filter_max_val != axis_max_val || filter_min_val != axis_min_val)
@ -3365,7 +3396,6 @@ struct ConditionFormat1
condition_map->set (axisIndex, val);
return KEEP_COND_WITH_VAR;
}
return KEEP_RECORD_WITH_VAR;
}
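
A small pattern worth naming in the change above: has (key, &value_ptr)
probes the hashmap once and hands back a pointer into it, replacing the
earlier has () followed by get () double lookup. The equivalent with a
standard container (mine):

    #include <unordered_map>

    // One probe: find () returns the slot, so no second lookup is needed.
    static bool lookup (const std::unordered_map<int, float> &m, int key, float *out)
    {
      auto it = m.find (key);
      if (it == m.end ()) return false;
      *out = it->second;
      return true;
    }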
@ -3424,6 +3454,7 @@ struct Condition
{
TRACE_SANITIZE (this);
if (!u.format.sanitize (c)) return_trace (false);
hb_barrier ();
switch (u.format) {
case 1: return_trace (u.format1.sanitize (c));
default:return_trace (true);
@ -3497,12 +3528,15 @@ struct ConditionSet
}
bool subset (hb_subset_context_t *c,
hb_subset_layout_context_t *l) const
hb_subset_layout_context_t *l,
bool insert_catch_all) const
{
TRACE_SUBSET (this);
auto *out = c->serializer->start_embed (this);
if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
if (insert_catch_all) return_trace (true);
hb_set_t *retained_cond_set = nullptr;
if (l->feature_record_cond_idx_map != nullptr)
retained_cond_set = l->feature_record_cond_idx_map->get (l->cur_feature_var_record_idx);
@ -3548,27 +3582,51 @@ struct FeatureTableSubstitutionRecord
}
void collect_feature_substitutes_with_variations (hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map,
hb_set_t& catch_all_record_feature_idxes,
const hb_set_t *feature_indices,
const void *base) const
{
if (feature_indices->has (featureIndex))
{
feature_substitutes_map->set (featureIndex, &(base+feature));
catch_all_record_feature_idxes.add (featureIndex);
}
}
bool serialize (hb_subset_layout_context_t *c,
unsigned feature_index,
const Feature *f, const Tag *tag)
{
TRACE_SERIALIZE (this);
hb_serialize_context_t *s = c->subset_context->serializer;
if (unlikely (!s->extend_min (this))) return_trace (false);
uint32_t *new_feature_idx;
if (!c->feature_index_map->has (feature_index, &new_feature_idx))
return_trace (false);
if (!s->check_assign (featureIndex, *new_feature_idx, HB_SERIALIZE_ERROR_INT_OVERFLOW))
return_trace (false);
s->push ();
bool ret = f->subset (c->subset_context, c, tag);
if (ret) s->add_link (feature, s->pop_pack ());
else s->pop_discard ();
return_trace (ret);
}
bool subset (hb_subset_layout_context_t *c, const void *base) const
{
TRACE_SUBSET (this);
if (!c->feature_index_map->has (featureIndex) ||
c->feature_substitutes_map->has (featureIndex)) {
// Feature that is being substituted is not being retained, so we don't
// need this.
uint32_t *new_feature_index;
if (!c->feature_index_map->has (featureIndex, &new_feature_index))
return_trace (false);
}
auto *out = c->subset_context->serializer->embed (this);
if (unlikely (!out)) return_trace (false);
out->featureIndex = c->feature_index_map->get (featureIndex);
out->featureIndex = *new_feature_index;
return_trace (out->feature.serialize_subset (c->subset_context, feature, base, c));
}
@ -3600,16 +3658,10 @@ struct FeatureTableSubstitution
}
void collect_lookups (const hb_set_t *feature_indexes,
const hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map,
hb_set_t *lookup_indexes /* OUT */) const
{
+ hb_iter (substitutions)
| hb_filter (feature_indexes, &FeatureTableSubstitutionRecord::featureIndex)
| hb_filter ([feature_substitutes_map] (const FeatureTableSubstitutionRecord& record)
{
if (feature_substitutes_map == nullptr) return true;
return !feature_substitutes_map->has (record.featureIndex);
})
| hb_apply ([this, lookup_indexes] (const FeatureTableSubstitutionRecord& r)
{ r.collect_lookups (this, lookup_indexes); })
;
@ -3634,11 +3686,14 @@ struct FeatureTableSubstitution
void collect_feature_substitutes_with_variations (hb_collect_feature_substitutes_with_var_context_t *c) const
{
for (const FeatureTableSubstitutionRecord& record : substitutions)
record.collect_feature_substitutes_with_variations (c->feature_substitutes_map, c->feature_indices, this);
record.collect_feature_substitutes_with_variations (c->feature_substitutes_map,
c->catch_all_record_feature_idxes,
c->feature_indices, this);
}
bool subset (hb_subset_context_t *c,
hb_subset_layout_context_t *l) const
hb_subset_layout_context_t *l,
bool insert_catch_all) const
{
TRACE_SUBSET (this);
auto *out = c->serializer->start_embed (*this);
@ -3647,6 +3702,22 @@ struct FeatureTableSubstitution
out->version.major = version.major;
out->version.minor = version.minor;
if (insert_catch_all)
{
for (unsigned feature_index : *(l->catch_all_record_feature_idxes))
{
hb_pair_t<const void*, const void*> *p;
if (!l->feature_idx_tag_map->has (feature_index, &p))
return_trace (false);
auto *o = out->substitutions.serialize_append (c->serializer);
if (!o->serialize (l, feature_index,
reinterpret_cast<const Feature*> (p->first),
reinterpret_cast<const Tag*> (p->second)))
return_trace (false);
}
return_trace (true);
}
+ substitutions.iter ()
| hb_apply (subset_record_array (l, &(out->substitutions), this))
;
@ -3658,6 +3729,7 @@ struct FeatureTableSubstitution
{
TRACE_SANITIZE (this);
return_trace (version.sanitize (c) &&
hb_barrier () &&
likely (version.major == 1) &&
substitutions.sanitize (c, this));
}
@ -3676,10 +3748,9 @@ struct FeatureVariationRecord
void collect_lookups (const void *base,
const hb_set_t *feature_indexes,
const hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map,
hb_set_t *lookup_indexes /* OUT */) const
{
return (base+substitutions).collect_lookups (feature_indexes, feature_substitutes_map, lookup_indexes);
return (base+substitutions).collect_lookups (feature_indexes, lookup_indexes);
}
void closure_features (const void *base,
@ -3705,14 +3776,15 @@ struct FeatureVariationRecord
}
}
bool subset (hb_subset_layout_context_t *c, const void *base) const
bool subset (hb_subset_layout_context_t *c, const void *base,
bool insert_catch_all = false) const
{
TRACE_SUBSET (this);
auto *out = c->subset_context->serializer->embed (this);
if (unlikely (!out)) return_trace (false);
out->conditions.serialize_subset (c->subset_context, conditions, base, c);
out->substitutions.serialize_subset (c->subset_context, substitutions, base, c);
out->conditions.serialize_subset (c->subset_context, conditions, base, c, insert_catch_all);
out->substitutions.serialize_subset (c->subset_context, substitutions, base, c, insert_catch_all);
return_trace (true);
}
@ -3771,9 +3843,8 @@ struct FeatureVariations
if (c->universal)
break;
}
if (c->variation_applied && !c->universal &&
!c->record_cond_idx_map->is_empty ())
c->insert_catch_all_feature_variation_record = true;
if (c->universal || c->record_cond_idx_map->is_empty ())
c->catch_all_record_feature_idxes.reset ();
}
FeatureVariations* copy (hb_serialize_context_t *c) const
@ -3783,11 +3854,17 @@ struct FeatureVariations
}
void collect_lookups (const hb_set_t *feature_indexes,
const hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map,
const hb_hashmap_t<unsigned, hb::shared_ptr<hb_set_t>> *feature_record_cond_idx_map,
hb_set_t *lookup_indexes /* OUT */) const
{
for (const FeatureVariationRecord& r : varRecords)
r.collect_lookups (this, feature_indexes, feature_substitutes_map, lookup_indexes);
unsigned count = varRecords.len;
for (unsigned int i = 0; i < count; i++)
{
if (feature_record_cond_idx_map &&
!feature_record_cond_idx_map->has (i))
continue;
varRecords[i].collect_lookups (this, feature_indexes, lookup_indexes);
}
}
void closure_features (const hb_map_t *lookup_indexes,
@ -3832,6 +3909,13 @@ struct FeatureVariations
l->cur_feature_var_record_idx = i;
subset_record_array (l, &(out->varRecords), this) (varRecords[i]);
}
if (out->varRecords.len && !l->catch_all_record_feature_idxes->is_empty ())
{
bool insert_catch_all_record = true;
subset_record_array (l, &(out->varRecords), this, insert_catch_all_record) (varRecords[0]);
}
return_trace (bool (out->varRecords));
}
@ -3839,6 +3923,7 @@ struct FeatureVariations
{
TRACE_SANITIZE (this);
return_trace (version.sanitize (c) &&
hb_barrier () &&
likely (version.major == 1) &&
varRecords.sanitize (c, this));
}

View file

@ -2051,6 +2051,7 @@ struct Rule
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
hb_barrier () &&
c->check_range (inputZ.arrayZ,
inputZ.item_size * (inputCount ? inputCount - 1 : 0) +
LookupRecord::static_size * lookupCount));
@ -2826,6 +2827,7 @@ struct ContextFormat3
{
TRACE_SANITIZE (this);
if (unlikely (!c->check_struct (this))) return_trace (false);
hb_barrier ();
unsigned int count = glyphCount;
if (unlikely (!count)) return_trace (false); /* We want to access coverageZ[0] freely. */
if (unlikely (!c->check_array (coverageZ.arrayZ, count))) return_trace (false);
@ -3219,10 +3221,13 @@ struct ChainRule
TRACE_SANITIZE (this);
/* Hyper-optimized sanitize because this is really hot. */
if (unlikely (!backtrack.len.sanitize (c))) return_trace (false);
hb_barrier ();
const auto &input = StructAfter<decltype (inputX)> (backtrack);
if (unlikely (!input.lenP1.sanitize (c))) return_trace (false);
hb_barrier ();
const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
if (unlikely (!lookahead.len.sanitize (c))) return_trace (false);
hb_barrier ();
const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
return_trace (likely (lookup.sanitize (c)));
}
@ -4121,11 +4126,14 @@ struct ChainContextFormat3
{
TRACE_SANITIZE (this);
if (unlikely (!backtrack.sanitize (c, this))) return_trace (false);
hb_barrier ();
const auto &input = StructAfter<decltype (inputX)> (backtrack);
if (unlikely (!input.sanitize (c, this))) return_trace (false);
hb_barrier ();
if (unlikely (!input.len)) return_trace (false); /* To be consistent with Context. */
const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
if (unlikely (!lookahead.sanitize (c, this))) return_trace (false);
hb_barrier ();
const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
return_trace (likely (lookup.sanitize (c)));
}
@ -4209,6 +4217,7 @@ struct ExtensionFormat1
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
hb_barrier () &&
extensionLookupType != T::SubTable::Extension);
}
@ -4472,13 +4481,6 @@ struct GSUBGPOSVersion1_2
if (!c->subset_context->serializer->extend_min (&out->featureVars))
return_trace (false);
// TODO(qxliu76): the current implementation doesn't correctly handle feature variations
// that are dropped by instancing when the associated conditions don't trigger.
// Since partial instancing isn't yet supported this isn't an issue yet but will
// need to be fixed for partial instancing.
// if all axes are pinned all feature vars are dropped.
bool ret = !c->subset_context->plan->all_axes_pinned
&& out->featureVars.serialize_subset (c->subset_context, featureVars, this, c);
@ -4513,6 +4515,7 @@ struct GSUBGPOS
{
TRACE_SANITIZE (this);
if (unlikely (!u.version.sanitize (c))) return_trace (false);
hb_barrier ();
switch (u.version.major) {
case 1: return_trace (u.version1.sanitize<TLookup> (c));
#ifndef HB_NO_BEYOND_64K
@ -4638,11 +4641,11 @@ struct GSUBGPOS
}
void feature_variation_collect_lookups (const hb_set_t *feature_indexes,
const hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map,
const hb_hashmap_t<unsigned, hb::shared_ptr<hb_set_t>> *feature_record_cond_idx_map,
hb_set_t *lookup_indexes /* OUT */) const
{
#ifndef HB_NO_VAR
get_feature_variations ().collect_lookups (feature_indexes, feature_substitutes_map, lookup_indexes);
get_feature_variations ().collect_lookups (feature_indexes, feature_record_cond_idx_map, lookup_indexes);
#endif
}

View file

@ -214,6 +214,7 @@ struct JSTF
{
TRACE_SANITIZE (this);
return_trace (version.sanitize (c) &&
hb_barrier () &&
likely (version.major == 1) &&
scriptList.sanitize (c, this));
}

View file

@ -333,6 +333,7 @@ struct MathKern
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
hb_barrier () &&
c->check_array (mathValueRecordsZ.arrayZ, 2 * heightCount + 1) &&
sanitize_math_value_records (c));
}
@ -984,6 +985,7 @@ struct MathVariants
return_trace (c->check_struct (this) &&
vertGlyphCoverage.sanitize (c, this) &&
horizGlyphCoverage.sanitize (c, this) &&
hb_barrier () &&
c->check_array (glyphConstruction.arrayZ, vertGlyphCount + horizGlyphCount) &&
sanitize_offsets (c));
}
@ -1103,6 +1105,7 @@ struct MATH
TRACE_SANITIZE (this);
return_trace (version.sanitize (c) &&
likely (version.major == 1) &&
hb_barrier () &&
mathConstants.sanitize (c, this) &&
mathGlyphInfo.sanitize (c, this) &&
mathVariants.sanitize (c, this));

View file

@ -85,7 +85,7 @@ struct maxp
TRACE_SANITIZE (this);
if (unlikely (!c->check_struct (this)))
return_trace (false);
hb_barrier ();
if (version.major == 1)
{
const maxpV1Tail &v1 = StructAfter<maxpV1Tail> (*this);
@ -103,6 +103,7 @@ struct maxp
maxp_prime->numGlyphs = hb_min (c->plan->num_output_glyphs (), 0xFFFFu);
if (maxp_prime->version.major == 1)
{
hb_barrier ();
const maxpV1Tail *src_v1 = &StructAfter<maxpV1Tail> (*this);
maxpV1Tail *dest_v1 = c->serializer->embed<maxpV1Tail> (src_v1);
if (unlikely (!dest_v1)) return_trace (false);

View file

@ -51,6 +51,7 @@ struct DataMap
{
TRACE_SANITIZE (this);
return_trace (likely (c->check_struct (this) &&
hb_barrier () &&
dataZ.sanitize (c, base, dataLength)));
}
@ -101,6 +102,7 @@ struct meta
{
TRACE_SANITIZE (this);
return_trace (likely (c->check_struct (this) &&
hb_barrier () &&
version == 1 &&
dataMaps.sanitize (c, this)));
}

View file

@ -209,6 +209,23 @@ struct OS2
return ret;
}
static unsigned calc_avg_char_width (const hb_hashmap_t<hb_codepoint_t, hb_pair_t<unsigned, int>>& hmtx_map)
{
unsigned num = 0;
unsigned total_width = 0;
for (const auto& _ : hmtx_map.values_ref ())
{
unsigned width = _.first;
if (width)
{
total_width += width;
num++;
}
}
return num ? (unsigned) roundf (total_width / num) : 0;
}
bool subset (hb_subset_context_t *c) const
{
TRACE_SUBSET (this);
@ -239,10 +256,16 @@ struct OS2
if (os2_prime->version >= 2)
{
hb_barrier ();
auto *table = & const_cast<OS2V2Tail &> (os2_prime->v2 ());
HB_ADD_MVAR_VAR (HB_OT_METRICS_TAG_X_HEIGHT, sxHeight);
HB_ADD_MVAR_VAR (HB_OT_METRICS_TAG_CAP_HEIGHT, sCapHeight);
}
unsigned avg_char_width = calc_avg_char_width (c->plan->hmtx_map);
if (!c->serializer->check_assign (os2_prime->xAvgCharWidth, avg_char_width,
HB_SERIALIZE_ERROR_INT_OVERFLOW))
return_trace (false);
}
#endif
@ -334,6 +357,7 @@ struct OS2
{
TRACE_SANITIZE (this);
if (unlikely (!c->check_struct (this))) return_trace (false);
hb_barrier ();
if (unlikely (version >= 1 && !v1X.sanitize (c))) return_trace (false);
if (unlikely (version >= 2 && !v2X.sanitize (c))) return_trace (false);
if (unlikely (version >= 5 && !v5X.sanitize (c))) return_trace (false);
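
Worked example for the calc_avg_char_width helper added above (numbers
mine): OS/2's xAvgCharWidth is the rounded mean of the nonzero advance
widths, so after subsetting it is recomputed from the subset's hmtx map
rather than carried over from the full font:

    #include <cmath>
    #include <cstdio>
    #include <vector>

    // Same idea as calc_avg_char_width, over plain advance widths.
    static unsigned avg_nonzero_width (const std::vector<unsigned> &widths)
    {
      unsigned num = 0, total = 0;
      for (unsigned w : widths)
        if (w) { total += w; num++; }
      return num ? (unsigned) roundf ((float) total / num) : 0;
    }

    int main ()
    {
      // {600, 0, 500, 700}: the zero is skipped, (600+500+700)/3 = 600.
      printf ("%u\n", avg_nonzero_width ({600, 0, 500, 700}));
      return 0;
    }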

View file

@ -122,7 +122,10 @@ struct post
}
if (glyph_names && version.major == 2)
{
hb_barrier ();
return_trace (v2X.subset (c));
}
return_trace (true);
}
@ -138,6 +141,7 @@ struct post
version = table->version.to_int ();
if (version != 0x00020000) return;
hb_barrier ();
const postV2Tail &v2 = table->v2X;
@ -217,10 +221,16 @@ struct post
unsigned int get_glyph_count () const
{
if (version == 0x00010000)
{
hb_barrier ();
return format1_names_length;
}
if (version == 0x00020000)
{
hb_barrier ();
return glyphNameIndex->len;
}
return 0;
}
@ -245,13 +255,18 @@ struct post
{
if (version == 0x00010000)
{
hb_barrier ();
if (glyph >= format1_names_length)
return hb_bytes_t ();
return format1_names (glyph);
}
if (version != 0x00020000 || glyph >= glyphNameIndex->len)
if (version != 0x00020000)
return hb_bytes_t ();
hb_barrier ();
if (glyph >= glyphNameIndex->len)
return hb_bytes_t ();
unsigned int index = glyphNameIndex->arrayZ[glyph];
@ -284,6 +299,7 @@ struct post
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
hb_barrier () &&
(version.to_int () == 0x00010000 ||
(version.to_int () == 0x00020000 && v2X.sanitize (c)) ||
version.to_int () == 0x00030000));

View file

@ -327,6 +327,7 @@ struct AxisValueFormat4
{
TRACE_SANITIZE (this);
return_trace (likely (c->check_struct (this) &&
hb_barrier () &&
axisValues.sanitize (c, axisCount)));
}
@ -416,6 +417,7 @@ struct AxisValue
TRACE_SANITIZE (this);
if (unlikely (!c->check_struct (this)))
return_trace (false);
hb_barrier ();
switch (u.format)
{
@ -560,6 +562,7 @@ struct STAT
{
TRACE_SANITIZE (this);
return_trace (likely (c->check_struct (this) &&
hb_barrier () &&
version.major == 1 &&
version.minor > 0 &&
designAxesOffset.sanitize (c, this, designAxisCount) &&

View file

@ -273,6 +273,7 @@ struct avar
{
TRACE_SANITIZE (this);
if (!(version.sanitize (c) &&
hb_barrier () &&
(version.major == 1
#ifndef HB_NO_AVAR2
|| version.major == 2
@ -293,6 +294,7 @@ struct avar
#ifndef HB_NO_AVAR2
if (version.major < 2)
return_trace (true);
hb_barrier ();
const auto &v2 = * (const avarV2Tail *) map;
if (unlikely (!v2.sanitize (c, this)))
@ -316,6 +318,7 @@ struct avar
#ifndef HB_NO_AVAR2
if (version.major < 2)
return;
hb_barrier ();
for (; count < axisCount; count++)
map = &StructAfter<SegmentMaps> (*map);

View file

@ -119,6 +119,7 @@ struct DeltaSetIndexMapFormat01
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
hb_barrier () &&
c->check_range (mapDataZ.arrayZ,
mapCount,
get_width ()));
@ -191,6 +192,7 @@ struct DeltaSetIndexMap
{
TRACE_SANITIZE (this);
if (!u.format.sanitize (c)) return_trace (false);
hb_barrier ();
switch (u.format) {
case 0: return_trace (u.format0.sanitize (c));
case 1: return_trace (u.format1.sanitize (c));
@ -434,6 +436,8 @@ enum packed_delta_flag_t
struct tuple_delta_t
{
static constexpr bool realloc_move = true; // Watch out when adding new members!
public:
hb_hashmap_t<hb_tag_t, Triple> axis_tuples;
@ -514,14 +518,19 @@ struct tuple_delta_t
return *this;
unsigned num = indices.length;
for (unsigned i = 0; i < num; i++)
{
if (!indices.arrayZ[i]) continue;
deltas_x[i] *= scalar;
if (deltas_y)
deltas_y[i] *= scalar;
}
if (deltas_y)
for (unsigned i = 0; i < num; i++)
{
if (!indices.arrayZ[i]) continue;
deltas_x[i] *= scalar;
deltas_y[i] *= scalar;
}
else
for (unsigned i = 0; i < num; i++)
{
if (!indices.arrayZ[i]) continue;
deltas_x[i] *= scalar;
}
return *this;
}
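
The operator*= rewrite above hoists the "is there a y array?" test out of
the per-delta loop: one branch total instead of one per element. Shape of
the transform (generic, mine; the real code also skips points not marked in
indices):

    static void scale (float *xs, float *ys /* may be null */, unsigned n, float k)
    {
      if (ys)
        for (unsigned i = 0; i < n; i++) { xs[i] *= k; ys[i] *= k; }
      else
        for (unsigned i = 0; i < n; i++) xs[i] *= k;
    }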
@ -767,7 +776,7 @@ struct tuple_delta_t
unsigned encoded_len = 0;
while (i < num_deltas)
{
int val = deltas[i];
int val = deltas.arrayZ[i];
if (val == 0)
encoded_len += encode_delta_run_as_zeroes (i, encoded_bytes.sub_array (encoded_len), deltas);
else if (val >= -128 && val <= 127)
@ -786,7 +795,7 @@ struct tuple_delta_t
unsigned run_length = 0;
auto it = encoded_bytes.iter ();
unsigned encoded_len = 0;
while (i < num_deltas && deltas[i] == 0)
while (i < num_deltas && deltas.arrayZ[i] == 0)
{
i++;
run_length++;
@ -815,13 +824,13 @@ struct tuple_delta_t
unsigned num_deltas = deltas.length;
while (i < num_deltas)
{
int val = deltas[i];
int val = deltas.arrayZ[i];
if (val > 127 || val < -128)
break;
/* from fonttools: if there are 2 or more zeros in a sequence,
* it is better to start a new run to save bytes. */
if (val == 0 && i + 1 < num_deltas && deltas[i+1] == 0)
if (val == 0 && i + 1 < num_deltas && deltas.arrayZ[i+1] == 0)
break;
i++;
@ -838,7 +847,7 @@ struct tuple_delta_t
for (unsigned j = 0; j < 64; j++)
{
*it++ = static_cast<char> (deltas[start + j]);
*it++ = static_cast<char> (deltas.arrayZ[start + j]);
encoded_len++;
}
@ -853,7 +862,7 @@ struct tuple_delta_t
while (start < i)
{
*it++ = static_cast<char> (deltas[start++]);
*it++ = static_cast<char> (deltas.arrayZ[start++]);
encoded_len++;
}
}
@ -869,8 +878,8 @@ struct tuple_delta_t
unsigned num_deltas = deltas.length;
while (i < num_deltas)
{
int val = deltas[i];
int val = deltas.arrayZ[i];
/* start a new run for a single zero value */
if (val == 0) break;
@ -879,7 +888,7 @@ struct tuple_delta_t
* Only start a new run when there are 2 consecutive such values. */
if (val >= -128 && val <= 127 &&
i + 1 < num_deltas &&
deltas[i+1] >= -128 && deltas[i+1] <= 127)
deltas.arrayZ[i+1] >= -128 && deltas.arrayZ[i+1] <= 127)
break;
i++;
@ -895,7 +904,7 @@ struct tuple_delta_t
for (unsigned j = 0; j < 64; j++)
{
int16_t delta_val = deltas[start + j];
int16_t delta_val = deltas.arrayZ[start + j];
*it++ = static_cast<char> (delta_val >> 8);
*it++ = static_cast<char> (delta_val & 0xFF);
@ -912,7 +921,7 @@ struct tuple_delta_t
encoded_len++;
while (start < i)
{
int16_t delta_val = deltas[start++];
int16_t delta_val = deltas.arrayZ[start++];
*it++ = static_cast<char> (delta_val >> 8);
*it++ = static_cast<char> (delta_val & 0xFF);
@ -1175,6 +1184,7 @@ struct TupleVariationData
bool create_from_item_var_data (const VarData &var_data,
const hb_vector_t<hb_hashmap_t<hb_tag_t, Triple>>& regions,
const hb_map_t& axes_old_index_tag_map,
unsigned& item_count,
const hb_inc_bimap_t* inner_map = nullptr)
{
/* NULL offset, to keep original varidx valid, just return */
@ -1184,7 +1194,8 @@ struct TupleVariationData
unsigned num_regions = var_data.get_region_index_count ();
if (!tuple_vars.alloc (num_regions)) return false;
unsigned item_count = inner_map ? inner_map->get_population () : var_data.get_item_count ();
item_count = inner_map ? inner_map->get_population () : var_data.get_item_count ();
if (!item_count) return true;
unsigned row_size = var_data.get_row_size ();
const HBUINT8 *delta_bytes = var_data.get_delta_bytes ();
@ -1775,6 +1786,14 @@ struct item_variations_t
* have the same num of deltas (rows) */
hb_vector_t<tuple_variations_t> vars;
/* number of retained rows for each subtable; there are 2 cases when var_data is empty:
* 1. the retained item_count is zero
* 2. regions is empty and item_count is non-zero.
* When converting to tuples, both will be dropped because the tuple is empty;
* however, case 2. must be retained as all-zero rows to keep the original varidx
* valid, so we need a way to remember the number of rows for each subtable */
hb_vector_t<unsigned> var_data_num_rows;
/* original region list, decompiled from item varstore, used when rebuilding
* region list after instantiation */
hb_vector_t<hb_hashmap_t<hb_tag_t, Triple>> orig_region_list;
@ -1836,22 +1855,26 @@ struct item_variations_t
unsigned num_var_data = varStore.get_sub_table_count ();
if (inner_maps && inner_maps.length != num_var_data) return false;
if (!vars.alloc (num_var_data)) return false;
if (!vars.alloc (num_var_data) ||
!var_data_num_rows.alloc (num_var_data)) return false;
for (unsigned i = 0; i < num_var_data; i++)
{
if (inner_maps && !inner_maps.arrayZ[i].get_population ())
continue;
tuple_variations_t var_data_tuples;
unsigned item_count = 0;
if (!var_data_tuples.create_from_item_var_data (varStore.get_sub_table (i),
orig_region_list,
axes_old_index_tag_map,
item_count,
inner_maps ? &(inner_maps.arrayZ[i]) : nullptr))
return false;
var_data_num_rows.push (item_count);
vars.push (std::move (var_data_tuples));
}
return !vars.in_error ();
return !vars.in_error () && !var_data_num_rows.in_error () && vars.length == var_data_num_rows.length;
}
bool instantiate_tuple_vars (const hb_hashmap_t<hb_tag_t, Triple>& normalized_axes_location,
@ -1973,12 +1996,8 @@ struct item_variations_t
unsigned num_cols = region_list.length;
/* pre-alloc a 2D vector for all sub_table's VarData rows */
unsigned total_rows = 0;
for (unsigned major = 0; major < vars.length; major++)
{
const tuple_variations_t& tuples = vars[major];
/* all tuples in each sub_table should have same num of deltas(num rows) */
total_rows += tuples.tuple_vars[0].deltas_x.length;
}
for (unsigned major = 0; major < var_data_num_rows.length; major++)
total_rows += var_data_num_rows[major];
if (!delta_rows.resize (total_rows)) return false;
/* init all rows to [0]*num_cols */
@ -1998,7 +2017,7 @@ struct item_variations_t
/* deltas are stored in tuples(column based), convert them back into items
* (row based) delta */
const tuple_variations_t& tuples = vars[major];
unsigned num_rows = tuples.tuple_vars[0].deltas_x.length;
unsigned num_rows = var_data_num_rows[major];
for (const tuple_delta_t& tuple: tuples.tuple_vars)
{
if (tuple.deltas_x.length != num_rows)

View file

@ -45,7 +45,8 @@ struct cvar
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
version.sanitize (c) && likely (version.major == 1) &&
hb_barrier () &&
likely (version.major == 1) &&
tupleVariationData.sanitize (c));
}

View file

@ -131,6 +131,7 @@ struct InstanceRecord
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
hb_barrier () &&
c->check_array (coordinatesZ.arrayZ, axis_count));
}
@ -277,8 +278,10 @@ struct fvar
{
TRACE_SANITIZE (this);
return_trace (version.sanitize (c) &&
hb_barrier () &&
likely (version.major == 1) &&
c->check_struct (this) &&
hb_barrier () &&
axisSize == 20 && /* Assumed in our code. */
instanceSize >= axisCount * 4 + 4 &&
get_axes ().sanitize (c) &&

View file

@ -296,7 +296,9 @@ struct gvar
bool sanitize_shallow (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) && (version.major == 1) &&
return_trace (c->check_struct (this) &&
hb_barrier () &&
(version.major == 1) &&
sharedTuples.sanitize (c, this, axisCount * sharedTupleCount) &&
(is_long_offset () ?
c->check_array (get_long_offset_array (), c->get_num_glyphs () + 1) :
@ -426,7 +428,10 @@ struct gvar
subset_data_size += get_glyph_var_data_bytes (c->source_blob, glyph_count, old_gid).length;
}
bool long_offset = subset_data_size & ~0xFFFFu;
bool long_offset = (subset_data_size & ~0xFFFFu);
#ifdef HB_EXPERIMENTAL_API
long_offset = long_offset || (c->plan->flags & HB_SUBSET_FLAGS_IFTB_REQUIREMENTS);
#endif
out->flags = long_offset ? 1 : 0;
HBUINT8 *subset_offsets = c->serializer->allocate_size<HBUINT8> ((long_offset ? 4 : 2) * (num_glyphs + 1), false);
@ -444,6 +449,8 @@ struct gvar
hb_memcpy (tuples, this+sharedTuples, shared_tuple_size);
}
/* This ordering relative to the shared tuples array, which puts the glyphVariationData
last in the table, is required when HB_SUBSET_FLAGS_IFTB_REQUIREMENTS is set */
char *subset_data = c->serializer->allocate_size<char> (subset_data_size, false);
if (!subset_data) return_trace (false);
out->dataZ = subset_data - (char *) out;
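
For context on the offset-width choice above: (subset_data_size & ~0xFFFFu) is non-zero exactly when the serialized variation data is 0x10000 bytes or more, i.e. when a 16-bit offset could no longer address it, and the experimental IFTB flag forces the long form regardless of size. A hedged sketch with invented names:

#include <cstdint>

// Illustrative: pick 16-bit vs 32-bit offsets for the subset table.
static bool needs_long_offsets (uint32_t subset_data_size,
                                bool iftb_requirements /* hypothetical */)
{
  bool long_offset = (subset_data_size & ~0xFFFFu) != 0;
  // IFTB patching wants 32-bit offsets regardless of size.
  return long_offset || iftb_requirements;
}
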

View file

@ -288,6 +288,7 @@ struct HVARVVAR
{
TRACE_SANITIZE (this);
return_trace (version.sanitize (c) &&
hb_barrier () &&
likely (version.major == 1) &&
varStore.sanitize (c, this) &&
advMap.sanitize (c, this) &&

View file

@ -77,8 +77,10 @@ struct MVAR
{
TRACE_SANITIZE (this);
return_trace (version.sanitize (c) &&
hb_barrier () &&
likely (version.major == 1) &&
c->check_struct (this) &&
hb_barrier () &&
valueRecordSize >= VariationValueRecord::static_size &&
varStore.sanitize (c, this) &&
c->check_range (valuesZ.arrayZ,

View file

@ -117,6 +117,7 @@ struct VORG
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
hb_barrier () &&
version.major == 1 &&
vertYOrigins.sanitize (c));
}

View file

@ -55,6 +55,9 @@ struct hb_priority_queue_t
bool in_error () const { return heap.in_error (); }
bool alloc (unsigned size)
{ return heap.alloc (size); }
#ifndef HB_OPTIMIZE_SIZE
HB_ALWAYS_INLINE
#endif

View file

@ -134,7 +134,10 @@ struct hb_sanitize_context_t :
const char *get_name () { return "SANITIZE"; }
template <typename T, typename F>
bool may_dispatch (const T *obj HB_UNUSED, const F *format)
{ return format->sanitize (this); }
{
return format->sanitize (this) &&
hb_barrier ();
}
static return_t default_return_value () { return true; }
static return_t no_dispatch_return_value () { return false; }
bool stop_sublookup_iteration (const return_t r) const { return !r; }

View file

@ -35,6 +35,8 @@
template <typename impl_t>
struct hb_sparseset_t
{
static constexpr bool realloc_move = true;
hb_object_header_t header;
impl_t s;

View file

@ -620,6 +620,12 @@ struct cff1_subset_plan
drop_hints = plan->flags & HB_SUBSET_FLAGS_NO_HINTING;
desubroutinize = plan->flags & HB_SUBSET_FLAGS_DESUBROUTINIZE;
#ifdef HB_EXPERIMENTAL_API
min_charstrings_off_size = (plan->flags & HB_SUBSET_FLAGS_IFTB_REQUIREMENTS) ? 4 : 0;
#else
min_charstrings_off_size = 0;
#endif
subset_charset = !acc.is_predef_charset ();
if (!subset_charset)
/* check whether the subset renumbers any glyph IDs */
@ -778,13 +784,43 @@ struct cff1_subset_plan
unsigned int topDictModSIDs[name_dict_values_t::ValCount];
bool desubroutinize = false;
unsigned min_charstrings_off_size = 0;
};
} // namespace OT
static bool _serialize_cff1_charstrings (hb_serialize_context_t *c,
struct OT::cff1_subset_plan &plan,
const OT::cff1::accelerator_subset_t &acc)
{
c->push<CFF1CharStrings> ();
unsigned data_size = 0;
unsigned total_size = CFF1CharStrings::total_size (plan.subset_charstrings, &data_size, plan.min_charstrings_off_size);
if (unlikely (!c->start_zerocopy (total_size)))
return false;
auto *cs = c->start_embed<CFF1CharStrings> ();
if (unlikely (!cs->serialize (c, plan.subset_charstrings, &data_size, plan.min_charstrings_off_size))) {
c->pop_discard ();
return false;
}
plan.info.char_strings_link = c->pop_pack (false);
return true;
}
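
The helper above leans on the serializer's object stack: as far as I can tell the table is assembled back to front, so the first object pushed and packed lands physically last, which is exactly what IFTB wants for the CharStrings (CFF2 below uses the same trick). A toy model of that packing order, all names invented:

#include <string>
#include <vector>

// Toy model: objects packed earlier end up later in the final table.
struct toy_serializer
{
  std::vector<std::string> packed;   // final physical order
  std::vector<std::string> stack;

  void push (std::string obj) { stack.push_back (std::move (obj)); }
  void pop_pack ()
  {
    packed.insert (packed.begin (), stack.back ());
    stack.pop_back ();
  }
};

// toy_serializer s;
// s.push ("CharStrings"); s.pop_pack ();  // packed first -> table end
// s.push ("FDArray");     s.pop_pack ();  // packed later -> placed before it
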
bool
OT::cff1::accelerator_subset_t::serialize (hb_serialize_context_t *c,
struct OT::cff1_subset_plan &plan) const
{
/* Push the charstrings onto the object stack first, which ensures they pack as the last
object in the table. Keeping the charstrings last satisfies the requirements for patching
via IFTB. If this ordering needs to be changed in the future, charstrings should be left
at the end whenever HB_SUBSET_FLAGS_IFTB_REQUIREMENTS is enabled. */
if (!_serialize_cff1_charstrings(c, plan, *this))
return false;
/* private dicts & local subrs */
for (int i = (int) privateDicts.length; --i >= 0 ;)
{
@ -823,25 +859,6 @@ OT::cff1::accelerator_subset_t::serialize (hb_serialize_context_t *c,
if (!is_CID ())
plan.info.privateDictInfo = plan.fontdicts_mod[0].privateDictInfo;
/* CharStrings */
{
c->push<CFF1CharStrings> ();
unsigned data_size = 0;
unsigned total_size = CFF1CharStrings::total_size (plan.subset_charstrings, &data_size);
if (unlikely (!c->start_zerocopy (total_size)))
return false;
auto *cs = c->start_embed<CFF1CharStrings> ();
if (likely (cs->serialize (c, plan.subset_charstrings, &data_size)))
plan.info.char_strings_link = c->pop_pack (false);
else
{
c->pop_discard ();
return false;
}
}
/* FDArray (FD Index) */
if (fdArray != &Null (CFF1FDArray))
{

View file

@ -439,6 +439,12 @@ struct cff2_subset_plan
desubroutinize = plan->flags & HB_SUBSET_FLAGS_DESUBROUTINIZE ||
pinned; // For instancing we need this path
#ifdef HB_EXPERIMENTAL_API
min_charstrings_off_size = (plan->flags & HB_SUBSET_FLAGS_IFTB_REQUIREMENTS) ? 4 : 0;
#else
min_charstrings_off_size = 0;
#endif
if (desubroutinize)
{
/* Flatten global & local subrs */
@ -510,14 +516,45 @@ struct cff2_subset_plan
bool drop_hints = false;
bool desubroutinize = false;
unsigned min_charstrings_off_size = 0;
};
} // namespace OT
static bool _serialize_cff2_charstrings (hb_serialize_context_t *c,
cff2_subset_plan &plan,
const OT::cff2::accelerator_subset_t &acc)
{
c->push ();
unsigned data_size = 0;
unsigned total_size = CFF2CharStrings::total_size (plan.subset_charstrings, &data_size, plan.min_charstrings_off_size);
if (unlikely (!c->start_zerocopy (total_size)))
return false;
auto *cs = c->start_embed<CFF2CharStrings> ();
if (unlikely (!cs->serialize (c, plan.subset_charstrings, &data_size, plan.min_charstrings_off_size)))
{
c->pop_discard ();
return false;
}
plan.info.char_strings_link = c->pop_pack (false);
return true;
}
bool
OT::cff2::accelerator_subset_t::serialize (hb_serialize_context_t *c,
struct cff2_subset_plan &plan,
hb_array_t<int> normalized_coords) const
{
/* Push the charstrings onto the object stack first, which ensures they pack as the last
object in the table. Keeping the charstrings last satisfies the requirements for patching
via IFTB. If this ordering needs to be changed in the future, charstrings should be left
at the end whenever HB_SUBSET_FLAGS_IFTB_REQUIREMENTS is enabled. */
if (!_serialize_cff2_charstrings(c, plan, *this))
return false;
/* private dicts & local subrs */
hb_vector_t<table_info_t> private_dict_infos;
if (unlikely (!private_dict_infos.resize (plan.subset_fdcount))) return false;
@ -556,25 +593,6 @@ OT::cff2::accelerator_subset_t::serialize (hb_serialize_context_t *c,
}
}
/* CharStrings */
{
c->push ();
unsigned data_size = 0;
unsigned total_size = CFF2CharStrings::total_size (plan.subset_charstrings, &data_size);
if (unlikely (!c->start_zerocopy (total_size)))
return false;
auto *cs = c->start_embed<CFF2CharStrings> ();
if (likely (cs->serialize (c, plan.subset_charstrings, &data_size)))
plan.info.char_strings_link = c->pop_pack (false);
else
{
c->pop_discard ();
return false;
}
}
/* FDSelect */
if (fdSelect != &Null (CFF2FDSelect))
{

View file

@ -123,6 +123,12 @@ hb_subset_input_t::hb_subset_input_t ()
//justify
HB_TAG ('j', 'a', 'l', 't'), // HarfBuzz doesn't use; others might
//East Asian spacing
HB_TAG ('c', 'h', 'w', 's'),
HB_TAG ('v', 'c', 'h', 'w'),
HB_TAG ('h', 'a', 'l', 't'),
HB_TAG ('v', 'h', 'a', 'l'),
//private
HB_TAG ('H', 'a', 'r', 'f'),
HB_TAG ('H', 'A', 'R', 'F'),

View file

@ -168,12 +168,14 @@ _solve (Triple tent, Triple axisLimit, bool negative = false)
* |
* crossing
*/
if (gain > outGain)
if (gain >= outGain)
{
// Note that this is the branch taken if both gain and outGain are 0.
// Crossing point on the axis.
float crossing = peak + (1 - gain) * (upper - peak);
Triple loc{axisDef, peak, crossing};
Triple loc{hb_max (lower, axisDef), peak, crossing};
float scalar = 1.f;
// The part before the crossing point.
@ -253,7 +255,7 @@ _solve (Triple tent, Triple axisLimit, bool negative = false)
* axisDef axisMax
*/
float newUpper = peak + (1 - gain) * (upper - peak);
assert (axisMax <= newUpper); // Because outGain >= gain
assert (axisMax <= newUpper); // Because outGain > gain
if (newUpper <= axisDef + (axisMax - axisDef) * 2)
{
upper = newUpper;
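
A worked instance of the boundary change above: with gain == outGain the `>=` now routes through this branch, including the case where both are 0 because the default location lies outside the tent; clamping the start of loc to lower then keeps the triple inside the tent. Illustrative numbers only:

#include <algorithm>
#include <cstdio>

int main ()
{
  // Hypothetical tent sitting entirely above the default location:
  // axisDef < lower, hence gain == outGain == 0.
  float lower = 0.2f, peak = 0.5f, upper = 1.0f;
  float axisDef = 0.0f, gain = 0.0f;

  float crossing = peak + (1 - gain) * (upper - peak);  // == upper == 1.0

  // Before this change the triple started at axisDef (0), poking below
  // the tent; clamping to lower keeps it within [lower, upper].
  float start = std::max (lower, axisDef);              // 0.2
  printf ("loc = {%g, %g, %g}\n", start, peak, crossing);
  return 0;
}
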

View file

@ -70,6 +70,9 @@ HB_SUBSET_PLAN_MEMBER (hb_set_t, _glyphset_colred)
HB_SUBSET_PLAN_MEMBER (hb_map_t, gsub_lookups)
HB_SUBSET_PLAN_MEMBER (hb_map_t, gpos_lookups)
//use_mark_sets mapping: old->new
HB_SUBSET_PLAN_MEMBER (hb_map_t, used_mark_sets_map)
//active langsys we'd like to retain
HB_SUBSET_PLAN_MEMBER (hb_hashmap_t E(<unsigned, hb::unique_ptr<hb_set_t>>), gsub_langsys)
HB_SUBSET_PLAN_MEMBER (hb_hashmap_t E(<unsigned, hb::unique_ptr<hb_set_t>>), gpos_langsys)
@ -87,6 +90,15 @@ HB_SUBSET_PLAN_MEMBER (hb_hashmap_t E(<unsigned, hb::shared_ptr<hb_set_t>>), gpo
HB_SUBSET_PLAN_MEMBER (hb_hashmap_t E(<unsigned, const OT::Feature*>), gsub_feature_substitutes_map)
HB_SUBSET_PLAN_MEMBER (hb_hashmap_t E(<unsigned, const OT::Feature*>), gpos_feature_substitutes_map)
// old feature_indexes set, used to reinstate the old features
HB_SUBSET_PLAN_MEMBER (hb_set_t, gsub_old_features)
HB_SUBSET_PLAN_MEMBER (hb_set_t, gpos_old_features)
//feature_index->pair of (address of old feature, feature tag), used for inserting a catch all record
//if necessary
HB_SUBSET_PLAN_MEMBER (hb_hashmap_t E(<unsigned, hb_pair_t E(<const void*, const void*>)>), gsub_old_feature_idx_tag_map)
HB_SUBSET_PLAN_MEMBER (hb_hashmap_t E(<unsigned, hb_pair_t E(<const void*, const void*>)>), gpos_old_feature_idx_tag_map)
//active layers/palettes we'd like to retain
HB_SUBSET_PLAN_MEMBER (hb_map_t, colrv1_layers)
HB_SUBSET_PLAN_MEMBER (hb_map_t, colr_palettes)

View file

@ -150,7 +150,8 @@ static void _collect_layout_indices (hb_subset_plan_t *plan,
hb_set_t *feature_indices, /* OUT */
hb_hashmap_t<unsigned, hb::shared_ptr<hb_set_t>> *feature_record_cond_idx_map, /* OUT */
hb_hashmap_t<unsigned, const OT::Feature*> *feature_substitutes_map, /* OUT */
bool& insert_catch_all_feature_variation_record)
hb_set_t& catch_all_record_feature_idxes, /* OUT */
hb_hashmap_t<unsigned, hb_pair_t<const void*, const void*>>& catch_all_record_idx_feature_map /* OUT */)
{
unsigned num_features = table.get_feature_count ();
hb_vector_t<hb_tag_t> features;
@ -186,7 +187,7 @@ static void _collect_layout_indices (hb_subset_plan_t *plan,
&plan->axes_location,
feature_record_cond_idx_map,
feature_substitutes_map,
insert_catch_all_feature_variation_record,
catch_all_record_feature_idxes,
feature_indices,
false,
false,
@ -208,17 +209,25 @@ static void _collect_layout_indices (hb_subset_plan_t *plan,
f->add_lookup_indexes_to (lookup_indices);
}
#ifndef HB_NO_VAR
if (catch_all_record_feature_idxes)
{
for (unsigned feature_index : catch_all_record_feature_idxes)
{
const OT::Feature& f = table.get_feature (feature_index);
f.add_lookup_indexes_to (lookup_indices);
const void *tag = reinterpret_cast<const void*> (&(table.get_feature_list ().get_tag (feature_index)));
catch_all_record_idx_feature_map.set (feature_index, hb_pair (&f, tag));
}
}
// If all axes are pinned then all feature variations will be dropped so there's no need
// to collect lookups from them.
if (!plan->all_axes_pinned)
{
// TODO(qxliu76): this collection doesn't work correctly for feature variations that are dropped
// but not applied. The collection will collect and retain the lookup indices
// associated with those dropped but not activated rules. Since partial instancing
// isn't yet supported this isn't an issue yet but will need to be fixed for
// partial instancing.
table.feature_variation_collect_lookups (feature_indices, feature_substitutes_map, lookup_indices);
}
table.feature_variation_collect_lookups (feature_indices,
plan->user_axes_location.is_empty () ? nullptr: feature_record_cond_idx_map,
lookup_indices);
#endif
}
@ -302,7 +311,8 @@ _closure_glyphs_lookups_features (hb_subset_plan_t *plan,
script_langsys_map *langsys_map,
hb_hashmap_t<unsigned, hb::shared_ptr<hb_set_t>> *feature_record_cond_idx_map,
hb_hashmap_t<unsigned, const OT::Feature*> *feature_substitutes_map,
bool& insert_catch_all_feature_variation_record)
hb_set_t &catch_all_record_feature_idxes,
hb_hashmap_t<unsigned, hb_pair_t<const void*, const void*>>& catch_all_record_idx_feature_map)
{
hb_blob_ptr_t<T> table = plan->source_table<T> ();
hb_tag_t table_tag = table->tableTag;
@ -313,7 +323,8 @@ _closure_glyphs_lookups_features (hb_subset_plan_t *plan,
&feature_indices,
feature_record_cond_idx_map,
feature_substitutes_map,
insert_catch_all_feature_variation_record);
catch_all_record_feature_idxes,
catch_all_record_idx_feature_map);
if (table_tag == HB_OT_TAG_GSUB && !(plan->flags & HB_SUBSET_FLAGS_NO_LAYOUT_CLOSURE))
hb_ot_layout_lookups_substitute_closure (plan->source,
@ -465,6 +476,24 @@ _math_closure (hb_subset_plan_t *plan,
math.destroy ();
}
static inline void
_remap_used_mark_sets (hb_subset_plan_t *plan,
hb_map_t& used_mark_sets_map)
{
hb_blob_ptr_t<OT::GDEF> gdef = plan->source_table<OT::GDEF> ();
if (!gdef->has_data () || !gdef->has_mark_glyph_sets ())
{
gdef.destroy ();
return;
}
hb_set_t used_mark_sets;
gdef->get_mark_glyph_sets ().collect_used_mark_sets (plan->_glyphset_gsub, used_mark_sets);
gdef.destroy ();
_remap_indexes (&used_mark_sets, &used_mark_sets_map);
}
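
_remap_indexes compacts the surviving mark-set indices into a dense 0..n-1 numbering; a hypothetical equivalent of that compaction (not the actual helper):

#include <map>
#include <set>

// Old indices that survive subsetting are renumbered densely,
// preserving relative order, e.g. {1, 4, 7} -> {0, 1, 2}.
static std::map<unsigned, unsigned>
remap_indexes (const std::set<unsigned> &retained)
{
  std::map<unsigned, unsigned> old_to_new;
  unsigned next = 0;
  for (unsigned old_idx : retained)
    old_to_new[old_idx] = next++;
  return old_to_new;
}
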
static inline void
_remove_invalid_gids (hb_set_t *glyphs,
@ -578,14 +607,18 @@ _populate_unicodes_to_retain (const hb_set_t *unicodes,
else
{
plan->codepoint_to_glyph->alloc (cmap_unicodes->get_population ());
for (hb_codepoint_t cp : *cmap_unicodes)
hb_codepoint_t first = HB_SET_VALUE_INVALID, last = HB_SET_VALUE_INVALID;
for (; cmap_unicodes->next_range (&first, &last); )
{
hb_codepoint_t gid = (*unicode_glyphid_map)[cp];
if (!unicodes->has (cp) && !glyphs->has (gid))
continue;
for (unsigned cp = first; cp <= last; cp++)
{
hb_codepoint_t gid = (*unicode_glyphid_map)[cp];
if (!unicodes->has (cp) && !glyphs->has (gid))
continue;
plan->codepoint_to_glyph->set (cp, gid);
plan->unicode_to_new_gid_list.push (hb_pair (cp, gid));
plan->codepoint_to_glyph->set (cp, gid);
plan->unicode_to_new_gid_list.push (hb_pair (cp, gid));
}
}
}
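
The rewrite above iterates the cmap set range by range rather than codepoint by codepoint, skipping per-element set probes across dense ranges. The same pattern is available through the public set API:

#include <hb.h>
#include <cstdio>

// Range-wise iteration over an hb_set_t.
static void dump_ranges (const hb_set_t *set)
{
  hb_codepoint_t first = HB_SET_VALUE_INVALID;
  hb_codepoint_t last  = HB_SET_VALUE_INVALID;
  while (hb_set_next_range (set, &first, &last))
    printf ("U+%04X..U+%04X\n", (unsigned) first, (unsigned) last);
}
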
@ -714,7 +747,8 @@ _populate_gids_to_retain (hb_subset_plan_t* plan,
&plan->gsub_langsys,
&plan->gsub_feature_record_cond_idx_map,
&plan->gsub_feature_substitutes_map,
plan->gsub_insert_catch_all_feature_variation_rec);
plan->gsub_old_features,
plan->gsub_old_feature_idx_tag_map);
if (!drop_tables->has (HB_OT_TAG_GPOS))
_closure_glyphs_lookups_features<GPOS> (
@ -725,7 +759,8 @@ _populate_gids_to_retain (hb_subset_plan_t* plan,
&plan->gpos_langsys,
&plan->gpos_feature_record_cond_idx_map,
&plan->gpos_feature_substitutes_map,
plan->gpos_insert_catch_all_feature_variation_rec);
plan->gpos_old_features,
plan->gpos_old_feature_idx_tag_map);
#endif
_remove_invalid_gids (&plan->_glyphset_gsub, plan->source->get_num_glyphs ());
@ -814,12 +849,12 @@ _create_old_gid_to_new_gid_map (const hb_face_t *face,
if (retain_gids)
{
DEBUG_MSG (SUBSET, nullptr,
DEBUG_MSG (SUBSET, nullptr,
"HB_SUBSET_FLAGS_RETAIN_GIDS cannot be set if "
"a custom glyph mapping has been provided.");
return false;
}
hb_codepoint_t max_glyph = 0;
hb_set_t remaining;
for (auto old_gid : all_gids_to_retain->iter ())
@ -871,9 +906,11 @@ _create_old_gid_to_new_gid_map (const hb_face_t *face,
*num_glyphs = max_glyph + 1;
}
reverse_glyph_map->alloc (reverse_glyph_map->get_population () + new_to_old_gid_list->length);
+ hb_iter (new_to_old_gid_list)
| hb_sink (reverse_glyph_map)
;
glyph_map->alloc (glyph_map->get_population () + new_to_old_gid_list->length);
+ hb_iter (new_to_old_gid_list)
| hb_map (&hb_codepoint_pair_t::reverse)
| hb_sink (glyph_map)
@ -969,7 +1006,7 @@ _update_instance_metrics_map_from_cff2 (hb_subset_plan_t *plan)
float *hvar_store_cache = nullptr;
if (_hmtx.has_data () && _hmtx.var_table.get_length ())
hvar_store_cache = _hmtx.var_table->get_var_store ().create_cache ();
OT::vmtx_accelerator_t _vmtx (plan->source);
float *vvar_store_cache = nullptr;
if (_vmtx.has_data () && _vmtx.var_table.get_length ())
@ -1093,6 +1130,7 @@ hb_subset_plan_t::hb_subset_plan_t (hb_face_t *face,
user_axes_location = input->axes_location;
all_axes_pinned = false;
pinned_at_default = true;
has_gdef_varstore = false;
#ifdef HB_EXPERIMENTAL_API
for (auto _ : input->name_table_overrides)
@ -1112,6 +1150,10 @@ hb_subset_plan_t::hb_subset_plan_t (hb_face_t *face,
attach_accelerator_data = input->attach_accelerator_data;
force_long_loca = input->force_long_loca;
#ifdef HB_EXPERIMENTAL_API
force_long_loca = force_long_loca || (flags & HB_SUBSET_FLAGS_IFTB_REQUIREMENTS);
#endif
if (accel)
accelerator = (hb_subset_accelerator_t*) accel;
@ -1160,6 +1202,9 @@ hb_subset_plan_t::hb_subset_plan_t (hb_face_t *face,
for (auto &v : bounds_height_vec)
v = 0xFFFFFFFF;
if (!drop_tables.has (HB_OT_TAG_GDEF))
_remap_used_mark_sets (this, used_mark_sets_map);
if (unlikely (in_error ()))
return;

View file

@ -147,6 +147,9 @@ struct hb_subset_plan_t
bool gsub_insert_catch_all_feature_variation_rec;
bool gpos_insert_catch_all_feature_variation_rec;
// whether GDEF VarStore is retained
mutable bool has_gdef_varstore;
#define HB_SUBSET_PLAN_MEMBER(Type, Name) Type Name;
#include "hb-subset-plan-member-list.hh"
#undef HB_SUBSET_PLAN_MEMBER
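
The new plan members above slot into this X-macro: hb-subset-plan-member-list.hh is included repeatedly, with HB_SUBSET_PLAN_MEMBER redefined per consumer. A self-contained sketch of the technique, with made-up member names:

// One list, many expansions.
#define MEMBER_LIST \
  MEMBER (int,   width) \
  MEMBER (float, scale)

struct plan_t
{
#define MEMBER(Type, Name) Type Name;
  MEMBER_LIST              // declares: int width; float scale;
#undef MEMBER

  void reset ()
  {
#define MEMBER(Type, Name) Name = Type ();
    MEMBER_LIST            // resets every member from the same list
#undef MEMBER
  }
};
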

View file

@ -460,9 +460,10 @@ _dependencies_satisfied (hb_subset_plan_t *plan, hb_tag_t tag,
case HB_OT_TAG_hmtx:
case HB_OT_TAG_vmtx:
case HB_OT_TAG_maxp:
case HB_OT_TAG_OS2:
return !plan->normalized_coords || !pending_subset_tags.has (HB_OT_TAG_glyf);
case HB_OT_TAG_GPOS:
return !plan->normalized_coords || plan->all_axes_pinned || !pending_subset_tags.has (HB_OT_TAG_GDEF);
return plan->all_axes_pinned || !pending_subset_tags.has (HB_OT_TAG_GDEF);
default:
return true;
}

View file

@ -73,6 +73,9 @@ typedef struct hb_subset_plan_t hb_subset_plan_t;
* OS/2 will not be recalculated.
* @HB_SUBSET_FLAGS_NO_LAYOUT_CLOSURE: If set don't perform glyph closure on layout
* substitution rules (GSUB). Since: 7.2.0.
* @HB_SUBSET_FLAGS_IFTB_REQUIREMENTS: If set, enforce requirements on the output subset
* that allow it to be used with incremental font transfer (IFTB) patches. Primarily,
* this forces all outline data to use long (32-bit) offsets. Since: EXPERIMENTAL
*
* List of boolean properties that can be configured on the subset input.
*
@ -90,6 +93,9 @@ typedef enum { /*< flags >*/
HB_SUBSET_FLAGS_GLYPH_NAMES = 0x00000080u,
HB_SUBSET_FLAGS_NO_PRUNE_UNICODE_RANGES = 0x00000100u,
HB_SUBSET_FLAGS_NO_LAYOUT_CLOSURE = 0x00000200u,
#ifdef HB_EXPERIMENTAL_API
HB_SUBSET_FLAGS_IFTB_REQUIREMENTS = 0x00000400u,
#endif
} hb_subset_flags_t;
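
Enabling the new flag goes through the usual subset-input API and is only compiled in when HB_EXPERIMENTAL_API is defined; a hedged usage sketch:

#include <hb-subset.h>

// Subset `face` with IFTB-friendly output (long offsets, charstrings
// packed last). Returns nullptr on failure.
static hb_face_t *subset_for_iftb (hb_face_t *face)
{
  hb_subset_input_t *input = hb_subset_input_create_or_fail ();
  if (!input) return nullptr;
#ifdef HB_EXPERIMENTAL_API
  hb_subset_input_set_flags (input, HB_SUBSET_FLAGS_IFTB_REQUIREMENTS);
#endif
  hb_set_add (hb_subset_input_unicode_set (input), 0x41);  // keep 'A'
  hb_face_t *result = hb_subset_or_fail (face, input);
  hb_subset_input_destroy (input);
  return result;
}
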
/**

View file

@ -37,6 +37,8 @@ template <typename Type,
bool sorted=false>
struct hb_vector_t
{
static constexpr bool realloc_move = true;
typedef Type item_t;
static constexpr unsigned item_size = hb_static_size (Type);
using array_t = typename std::conditional<sorted, hb_sorted_array_t<Type>, hb_array_t<Type>>::type;
@ -268,10 +270,9 @@ struct hb_vector_t
}
return new_array;
}
/* Specialization for hb_vector_t<hb_{vector,array}_t<U>> to speed up. */
/* Specialization for types that can be moved using realloc(). */
template <typename T = Type,
hb_enable_if (hb_is_same (T, hb_vector_t<typename T::item_t>) ||
hb_is_same (T, hb_array_t <typename T::item_t>))>
hb_enable_if (T::realloc_move)>
Type *
realloc_vector (unsigned new_allocated, hb_priority<1>)
{
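
This replaces the old hard-coded whitelist of vector/array types: any type declaring `static constexpr bool realloc_move = true` (as hb_vector_t and hb_sparseset_t now do) opts into being relocated by raw realloc instead of element-wise moves. A stripped-down sketch of the SFINAE dispatch, names invented:

#include <cstddef>
#include <cstdlib>
#include <type_traits>

struct trivially_relocatable { static constexpr bool realloc_move = true; };

// Selected only when T::realloc_move exists and is true; other types
// would fall back to allocate-and-move-construct (omitted here).
template <typename T,
          typename std::enable_if<T::realloc_move, int>::type = 0>
T *grow (T *array, size_t new_count)
{
  return (T *) realloc (array, new_count * sizeof (T));
}
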

View file

@ -47,20 +47,20 @@ HB_BEGIN_DECLS
*
* The minor component of the library version available at compile-time.
*/
#define HB_VERSION_MINOR 2
#define HB_VERSION_MINOR 3
/**
* HB_VERSION_MICRO:
*
* The micro component of the library version available at compile-time.
*/
#define HB_VERSION_MICRO 2
#define HB_VERSION_MICRO 0
/**
* HB_VERSION_STRING:
*
* A string literal containing the library version available at compile-time.
*/
#define HB_VERSION_STRING "8.2.2"
#define HB_VERSION_STRING "8.3.0"
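
Both compile-time and runtime checks key off these numbers; for example:

#include <hb.h>
#include <cstdio>

int main ()
{
#if HB_VERSION_ATLEAST (8, 3, 0)
  // Compiled against 8.3.0 headers or newer.
#endif
  if (hb_version_atleast (8, 3, 0))   // the library actually loaded
    printf ("running HarfBuzz %s\n", hb_version_string ());
  return 0;
}
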
/**
* HB_VERSION_ATLEAST: