Merge pull request #70173 from bruvzg/hb600

HarfBuzz: Update to version 6.0.0
Rémi Verschelde 2022-12-17 12:23:09 +01:00
commit 1e2297f13a
152 changed files with 6779 additions and 4547 deletions
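
Most of the churn in this import is mechanical: upstream renamed the HB_NO_BORING_EXPANSION configuration macro to HB_NO_BEYOND_64K, the compile-time guard for the 24-bit "medium" table formats that support fonts with more than 65,535 glyphs. Below is a minimal standalone sketch of that guarded-format pattern; the types are simplified stand-ins, not HarfBuzz's actual structs.

#include <cstdint>
#include <cstdio>

struct SmallTypes  { using gid_t = uint16_t; };  // classic 16-bit glyph IDs
struct MediumTypes { using gid_t = uint32_t; };  // stand-in for the wider 24-bit IDs

template <typename Types>
struct CoverageFormatSketch
{
  typename Types::gid_t first = 0;
  bool covers (uint32_t gid) const { return gid == first; }
};

struct CoverageSketch
{
  uint16_t format = 1;
  CoverageFormatSketch<SmallTypes>  format1;
#ifndef HB_NO_BEYOND_64K
  CoverageFormatSketch<MediumTypes> format3;  // compiled out when beyond-64K support is disabled
#endif

  bool covers (uint32_t gid) const
  {
    switch (format)
    {
    case 1: return format1.covers (gid);
#ifndef HB_NO_BEYOND_64K
    case 3: return format3.covers (gid);
#endif
    default: return false;
    }
  }
};

int main () { printf ("%d\n", (int) CoverageSketch ().covers (0)); }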

View File

@ -230,7 +230,7 @@ Files extracted from upstream source:
## harfbuzz
- Upstream: https://github.com/harfbuzz/harfbuzz
- Version: 5.3.1 (970321db7bddbe8c579b73751fc655a924ea3ce6, 2022)
- Version: 6.0.0 (afcae83a064843d71d47624bc162e121cc56c08b, 2022)
- License: MIT
Files extracted from upstream source:

View File

@ -49,7 +49,7 @@ struct Coverage
HBUINT16 format; /* Format identifier */
CoverageFormat1_3<SmallTypes> format1;
CoverageFormat2_4<SmallTypes> format2;
#ifndef HB_NO_BORING_EXPANSION
#ifndef HB_NO_BEYOND_64K
CoverageFormat1_3<MediumTypes>format3;
CoverageFormat2_4<MediumTypes>format4;
#endif
@ -65,7 +65,7 @@ struct Coverage
{
case 1: return_trace (u.format1.sanitize (c));
case 2: return_trace (u.format2.sanitize (c));
#ifndef HB_NO_BORING_EXPANSION
#ifndef HB_NO_BEYOND_64K
case 3: return_trace (u.format3.sanitize (c));
case 4: return_trace (u.format4.sanitize (c));
#endif
@ -74,10 +74,8 @@ struct Coverage
}
/* Has interface. */
static constexpr unsigned SENTINEL = NOT_COVERED;
typedef unsigned int value_t;
value_t operator [] (hb_codepoint_t k) const { return get (k); }
bool has (hb_codepoint_t k) const { return (*this)[k] != SENTINEL; }
unsigned operator [] (hb_codepoint_t k) const { return get (k); }
bool has (hb_codepoint_t k) const { return (*this)[k] != NOT_COVERED; }
/* Predicate. */
bool operator () (hb_codepoint_t k) const { return has (k); }
@ -87,7 +85,7 @@ struct Coverage
switch (u.format) {
case 1: return u.format1.get_coverage (glyph_id);
case 2: return u.format2.get_coverage (glyph_id);
#ifndef HB_NO_BORING_EXPANSION
#ifndef HB_NO_BEYOND_64K
case 3: return u.format3.get_coverage (glyph_id);
case 4: return u.format4.get_coverage (glyph_id);
#endif
@ -100,7 +98,7 @@ struct Coverage
switch (u.format) {
case 1: return u.format1.get_population ();
case 2: return u.format2.get_population ();
#ifndef HB_NO_BORING_EXPANSION
#ifndef HB_NO_BEYOND_64K
case 3: return u.format3.get_population ();
case 4: return u.format4.get_population ();
#endif
@ -127,7 +125,7 @@ struct Coverage
}
u.format = count <= num_ranges * 3 ? 1 : 2;
#ifndef HB_NO_BORING_EXPANSION
#ifndef HB_NO_BEYOND_64K
if (count && last > 0xFFFFu)
u.format += 2;
#endif
@ -136,7 +134,7 @@ struct Coverage
{
case 1: return_trace (u.format1.serialize (c, glyphs));
case 2: return_trace (u.format2.serialize (c, glyphs));
#ifndef HB_NO_BORING_EXPANSION
#ifndef HB_NO_BEYOND_64K
case 3: return_trace (u.format3.serialize (c, glyphs));
case 4: return_trace (u.format4.serialize (c, glyphs));
#endif
@ -166,7 +164,7 @@ struct Coverage
{
case 1: return u.format1.intersects (glyphs);
case 2: return u.format2.intersects (glyphs);
#ifndef HB_NO_BORING_EXPANSION
#ifndef HB_NO_BEYOND_64K
case 3: return u.format3.intersects (glyphs);
case 4: return u.format4.intersects (glyphs);
#endif
@ -179,7 +177,7 @@ struct Coverage
{
case 1: return u.format1.intersects_coverage (glyphs, index);
case 2: return u.format2.intersects_coverage (glyphs, index);
#ifndef HB_NO_BORING_EXPANSION
#ifndef HB_NO_BEYOND_64K
case 3: return u.format3.intersects_coverage (glyphs, index);
case 4: return u.format4.intersects_coverage (glyphs, index);
#endif
@ -196,7 +194,7 @@ struct Coverage
{
case 1: return u.format1.collect_coverage (glyphs);
case 2: return u.format2.collect_coverage (glyphs);
#ifndef HB_NO_BORING_EXPANSION
#ifndef HB_NO_BEYOND_64K
case 3: return u.format3.collect_coverage (glyphs);
case 4: return u.format4.collect_coverage (glyphs);
#endif
@ -212,7 +210,7 @@ struct Coverage
{
case 1: return u.format1.intersect_set (glyphs, intersect_glyphs);
case 2: return u.format2.intersect_set (glyphs, intersect_glyphs);
#ifndef HB_NO_BORING_EXPANSION
#ifndef HB_NO_BEYOND_64K
case 3: return u.format3.intersect_set (glyphs, intersect_glyphs);
case 4: return u.format4.intersect_set (glyphs, intersect_glyphs);
#endif
@ -225,13 +223,13 @@ struct Coverage
static constexpr bool is_sorted_iterator = true;
iter_t (const Coverage &c_ = Null (Coverage))
{
memset (this, 0, sizeof (*this));
hb_memset (this, 0, sizeof (*this));
format = c_.u.format;
switch (format)
{
case 1: u.format1.init (c_.u.format1); return;
case 2: u.format2.init (c_.u.format2); return;
#ifndef HB_NO_BORING_EXPANSION
#ifndef HB_NO_BEYOND_64K
case 3: u.format3.init (c_.u.format3); return;
case 4: u.format4.init (c_.u.format4); return;
#endif
@ -244,7 +242,7 @@ struct Coverage
{
case 1: return u.format1.__more__ ();
case 2: return u.format2.__more__ ();
#ifndef HB_NO_BORING_EXPANSION
#ifndef HB_NO_BEYOND_64K
case 3: return u.format3.__more__ ();
case 4: return u.format4.__more__ ();
#endif
@ -257,7 +255,7 @@ struct Coverage
{
case 1: u.format1.__next__ (); break;
case 2: u.format2.__next__ (); break;
#ifndef HB_NO_BORING_EXPANSION
#ifndef HB_NO_BEYOND_64K
case 3: u.format3.__next__ (); break;
case 4: u.format4.__next__ (); break;
#endif
@ -273,7 +271,7 @@ struct Coverage
{
case 1: return u.format1.get_glyph ();
case 2: return u.format2.get_glyph ();
#ifndef HB_NO_BORING_EXPANSION
#ifndef HB_NO_BEYOND_64K
case 3: return u.format3.get_glyph ();
case 4: return u.format4.get_glyph ();
#endif
@ -287,7 +285,7 @@ struct Coverage
{
case 1: return u.format1 != o.u.format1;
case 2: return u.format2 != o.u.format2;
#ifndef HB_NO_BORING_EXPANSION
#ifndef HB_NO_BEYOND_64K
case 3: return u.format3 != o.u.format3;
case 4: return u.format4 != o.u.format4;
#endif
@ -302,7 +300,7 @@ struct Coverage
{
case 1: it.u.format1 = u.format1.__end__ (); break;
case 2: it.u.format2 = u.format2.__end__ (); break;
#ifndef HB_NO_BORING_EXPANSION
#ifndef HB_NO_BEYOND_64K
case 3: it.u.format3 = u.format3.__end__ (); break;
case 4: it.u.format4 = u.format4.__end__ (); break;
#endif
@ -314,7 +312,7 @@ struct Coverage
private:
unsigned int format;
union {
#ifndef HB_NO_BORING_EXPANSION
#ifndef HB_NO_BEYOND_64K
CoverageFormat2_4<MediumTypes>::iter_t format4; /* Put this one first since it's larger; helps shut up compiler. */
CoverageFormat1_3<MediumTypes>::iter_t format3;
#endif

View File

@ -77,7 +77,14 @@ struct CoverageFormat1_3
bool intersects (const hb_set_t *glyphs) const
{
/* TODO Speed up, using hb_set_next() and bsearch()? */
if (glyphArray.len > glyphs->get_population () * hb_bit_storage ((unsigned) glyphArray.len) / 2)
{
for (hb_codepoint_t g = HB_SET_VALUE_INVALID; glyphs->next (&g);)
if (get_coverage (g) != NOT_COVERED)
return true;
return false;
}
for (const auto& g : glyphArray.as_array ())
if (glyphs->has (g))
return true;
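
The new guard above picks an iteration strategy from the relative container sizes: walking the hb_set_t costs roughly one binary search per element (hence the hb_bit_storage factor, about log2 of the array length), while walking the glyph array costs one O(1) set probe per entry. A standalone sketch of the heuristic, with std::set and a sorted std::vector standing in for hb_set_t and the coverage glyph array:

#include <algorithm>
#include <cstdint>
#include <cstdio>
#include <set>
#include <vector>

// Like hb_bit_storage: number of bits needed to store v, i.e. floor(log2(v)) + 1.
static unsigned bit_storage (unsigned v)
{
  unsigned n = 0;
  while (v) { n++; v >>= 1; }
  return n;
}

static bool intersects (const std::vector<uint32_t> &sorted_glyphs,
                        const std::set<uint32_t> &glyphs)
{
  if (sorted_glyphs.size () > glyphs.size () * bit_storage ((unsigned) sorted_glyphs.size ()) / 2)
  {
    // The set is comparatively small: walk it and binary-search the array.
    for (uint32_t g : glyphs)
      if (std::binary_search (sorted_glyphs.begin (), sorted_glyphs.end (), g))
        return true;
    return false;
  }
  // The array is comparatively small: walk it and probe the set.
  for (uint32_t g : sorted_glyphs)
    if (glyphs.count (g))
      return true;
  return false;
}

int main ()
{
  std::vector<uint32_t> coverage = { 2, 40, 41, 900 };
  std::set<uint32_t> glyphs = { 41 };
  printf ("%d\n", (int) intersects (coverage, glyphs));
}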

View File

@ -80,8 +80,6 @@ struct CoverageFormat2_4
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (this))) return_trace (false);
/* TODO(iter) Write more efficiently? */
unsigned num_ranges = 0;
hb_codepoint_t last = (hb_codepoint_t) -2;
for (auto g: glyphs)
@ -115,26 +113,22 @@ struct CoverageFormat2_4
bool intersects (const hb_set_t *glyphs) const
{
if (rangeRecord.len > glyphs->get_population () * hb_bit_storage ((unsigned) rangeRecord.len) / 2)
{
for (hb_codepoint_t g = HB_SET_VALUE_INVALID; glyphs->next (&g);)
if (get_coverage (g) != NOT_COVERED)
return true;
return false;
}
return hb_any (+ hb_iter (rangeRecord)
| hb_map ([glyphs] (const RangeRecord<Types> &range) { return range.intersects (*glyphs); }));
}
bool intersects_coverage (const hb_set_t *glyphs, unsigned int index) const
{
auto cmp = [] (const void *pk, const void *pr) -> int
{
unsigned index = * (const unsigned *) pk;
const RangeRecord<Types> &range = * (const RangeRecord<Types> *) pr;
if (index < range.value) return -1;
if (index > (unsigned int) range.value + (range.last - range.first)) return +1;
return 0;
};
auto arr = rangeRecord.as_array ();
unsigned idx;
if (hb_bsearch_impl (&idx, index,
arr.arrayZ, arr.length, sizeof (arr[0]),
(int (*)(const void *_key, const void *_item)) cmp))
return arr.arrayZ[idx].intersects (*glyphs);
auto *range = rangeRecord.as_array ().bsearch (index);
if (range)
return range->intersects (*glyphs);
return false;
}
@ -142,9 +136,14 @@ struct CoverageFormat2_4
hb_requires (hb_is_sink_of (IterableOut, hb_codepoint_t))>
void intersect_set (const hb_set_t &glyphs, IterableOut&& intersect_glyphs) const
{
/* Break out of the loop for overlapping or broken tables,
* to avoid fuzzer timeouts. */
hb_codepoint_t last = 0;
for (const auto& range : rangeRecord)
{
hb_codepoint_t last = range.last;
if (unlikely (range.first < last))
break;
last = range.last;
for (hb_codepoint_t g = range.first - 1;
glyphs.next (&g) && g <= last;)
intersect_glyphs << g;

View File

@ -13,7 +13,7 @@ struct MarkBasePos
union {
HBUINT16 format; /* Format identifier */
MarkBasePosFormat1_2<SmallTypes> format1;
#ifndef HB_NO_BORING_EXPANSION
#ifndef HB_NO_BEYOND_64K
MarkBasePosFormat1_2<MediumTypes> format2;
#endif
} u;
@ -26,7 +26,7 @@ struct MarkBasePos
if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
switch (u.format) {
case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
#ifndef HB_NO_BORING_EXPANSION
#ifndef HB_NO_BEYOND_64K
case 2: return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
#endif
default:return_trace (c->default_return_value ());

View File

@ -13,7 +13,7 @@ struct MarkLigPos
union {
HBUINT16 format; /* Format identifier */
MarkLigPosFormat1_2<SmallTypes> format1;
#ifndef HB_NO_BORING_EXPANSION
#ifndef HB_NO_BEYOND_64K
MarkLigPosFormat1_2<MediumTypes> format2;
#endif
} u;
@ -26,7 +26,7 @@ struct MarkLigPos
if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
switch (u.format) {
case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
#ifndef HB_NO_BORING_EXPANSION
#ifndef HB_NO_BEYOND_64K
case 2: return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
#endif
default:return_trace (c->default_return_value ());

View File

@ -13,7 +13,7 @@ struct MarkMarkPos
union {
HBUINT16 format; /* Format identifier */
MarkMarkPosFormat1_2<SmallTypes> format1;
#ifndef HB_NO_BORING_EXPANSION
#ifndef HB_NO_BEYOND_64K
MarkMarkPosFormat1_2<MediumTypes> format2;
#endif
} u;
@ -26,7 +26,7 @@ struct MarkMarkPos
if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
switch (u.format) {
case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
#ifndef HB_NO_BORING_EXPANSION
#ifndef HB_NO_BEYOND_64K
case 2: return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
#endif
default:return_trace (c->default_return_value ());

View File

@ -15,7 +15,7 @@ struct PairPos
HBUINT16 format; /* Format identifier */
PairPosFormat1_3<SmallTypes> format1;
PairPosFormat2_4<SmallTypes> format2;
#ifndef HB_NO_BORING_EXPANSION
#ifndef HB_NO_BEYOND_64K
PairPosFormat1_3<MediumTypes> format3;
PairPosFormat2_4<MediumTypes> format4;
#endif
@ -30,7 +30,7 @@ struct PairPos
switch (u.format) {
case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
case 2: return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
#ifndef HB_NO_BORING_EXPANSION
#ifndef HB_NO_BEYOND_64K
case 3: return_trace (c->dispatch (u.format3, std::forward<Ts> (ds)...));
case 4: return_trace (c->dispatch (u.format4, std::forward<Ts> (ds)...));
#endif

View File

@ -51,8 +51,21 @@ struct PairPosFormat1_3
bool intersects (const hb_set_t *glyphs) const
{
auto &cov = this+coverage;
if (pairSet.len > glyphs->get_population () * hb_bit_storage ((unsigned) pairSet.len) / 4)
{
for (hb_codepoint_t g = HB_SET_VALUE_INVALID; glyphs->next (&g);)
{
unsigned i = cov.get_coverage (g);
if ((this+pairSet[i]).intersects (glyphs, valueFormat))
return true;
}
return false;
}
return
+ hb_zip (this+coverage, pairSet)
+ hb_zip (cov, pairSet)
| hb_filter (*glyphs, hb_first)
| hb_map (hb_second)
| hb_map ([glyphs, this] (const typename Types::template OffsetTo<PairSet> &_)
@ -171,12 +184,16 @@ struct PairPosFormat1_3
unsigned format1 = 0;
unsigned format2 = 0;
for (const auto & _ :
+ hb_zip (this+coverage, pairSet) | hb_filter (glyphset, hb_first) | hb_map (hb_second))
+ hb_zip (this+coverage, pairSet)
| hb_filter (glyphset, hb_first)
| hb_map (hb_second)
)
{
const PairSet& set = (this + _);
const PairValueRecord *record = &set.firstPairValueRecord;
for (unsigned i = 0; i < set.len; i++)
unsigned count = set.len;
for (unsigned i = 0; i < count; i++)
{
if (record->intersects (glyphset))
{
@ -185,6 +202,9 @@ struct PairPosFormat1_3
}
record = &StructAtOffset<const PairValueRecord> (record, record_size);
}
if (format1 == valueFormat[0] && format2 == valueFormat[1])
break;
}
return hb_pair (format1, format2);

View File

@ -220,17 +220,25 @@ struct PairPosFormat2_4
if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
{
c->buffer->message (c->font,
"kerning glyphs at %d,%d",
"try kerning glyphs at %d,%d",
c->buffer->idx, skippy_iter.idx);
}
applied_first = valueFormat1.apply_value (c, this, v, buffer->cur_pos());
applied_second = valueFormat2.apply_value (c, this, v + len1, buffer->pos[skippy_iter.idx]);
if (applied_first || applied_second)
if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
{
c->buffer->message (c->font,
"kerned glyphs at %d,%d",
c->buffer->idx, skippy_iter.idx);
}
if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
{
c->buffer->message (c->font,
"kerned glyphs at %d,%d",
"tried kerning glyphs at %d,%d",
c->buffer->idx, skippy_iter.idx);
}
@ -241,10 +249,15 @@ struct PairPosFormat2_4
boring:
buffer->unsafe_to_concat (buffer->idx, skippy_iter.idx + 1);
if (len2)
{
skippy_iter.idx++;
// https://github.com/harfbuzz/harfbuzz/issues/3824
// https://github.com/harfbuzz/harfbuzz/issues/3888#issuecomment-1326781116
buffer->unsafe_to_break (buffer->idx, skippy_iter.idx + 1);
}
buffer->idx = skippy_iter.idx;
if (len2)
buffer->idx++;
return_trace (true);
}
@ -309,6 +322,7 @@ struct PairPosFormat2_4
{
unsigned len1 = valueFormat1.get_len ();
unsigned len2 = valueFormat2.get_len ();
unsigned record_size = len1 + len2;
unsigned format1 = 0;
unsigned format2 = 0;
@ -317,10 +331,13 @@ struct PairPosFormat2_4
{
for (unsigned class2_idx : + hb_range ((unsigned) class2Count) | hb_filter (klass2_map))
{
unsigned idx = (class1_idx * (unsigned) class2Count + class2_idx) * (len1 + len2);
unsigned idx = (class1_idx * (unsigned) class2Count + class2_idx) * record_size;
format1 = format1 | valueFormat1.get_effective_format (&values[idx]);
format2 = format2 | valueFormat2.get_effective_format (&values[idx + len1]);
}
if (format1 == valueFormat1 && format2 == valueFormat2)
break;
}
return hb_pair (format1, format2);
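
The break added above is sound because the effective formats only accumulate bits via OR, and every per-record format is a subset of the source format; once the accumulated bits equal the source formats, no later class record can change the result. A standalone sketch of that saturation argument:

#include <cstdio>

int main ()
{
  unsigned source_format = 0x00F5;                 // stand-in for valueFormat1
  unsigned record_formats[] = { 0x0001, 0x00F4, 0x0010 };
  unsigned effective = 0;
  for (unsigned f : record_formats)
  {
    effective |= f;
    if (effective == source_format) break;         // saturated: remaining records can't add bits
  }
  printf ("effective=0x%04X\n", effective);
}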

View File

@ -112,24 +112,38 @@ struct PairSet
if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
{
c->buffer->message (c->font,
"kerning glyphs at %d,%d",
"try kerning glyphs at %d,%d",
c->buffer->idx, pos);
}
bool applied_first = valueFormats[0].apply_value (c, this, &record->values[0], buffer->cur_pos());
bool applied_second = valueFormats[1].apply_value (c, this, &record->values[len1], buffer->pos[pos]);
if (applied_first || applied_second)
if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
{
c->buffer->message (c->font,
"kerned glyphs at %d,%d",
c->buffer->idx, pos);
}
if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
{
c->buffer->message (c->font,
"kerned glyphs at %d,%d",
"tried kerning glyphs at %d,%d",
c->buffer->idx, pos);
}
if (applied_first || applied_second)
buffer->unsafe_to_break (buffer->idx, pos + 1);
if (len2)
pos++;
{
pos++;
// https://github.com/harfbuzz/harfbuzz/issues/3824
// https://github.com/harfbuzz/harfbuzz/issues/3888#issuecomment-1326781116
buffer->unsafe_to_break (buffer->idx, pos + 1);
}
buffer->idx = pos;
return_trace (true);

View File

@ -84,7 +84,7 @@ struct AlternateSet
{
if (alternates.len && alternate_count)
{
+ alternates.sub_array (start_offset, alternate_count)
+ alternates.as_array ().sub_array (start_offset, alternate_count)
| hb_sink (hb_array (alternate_glyphs, *alternate_count))
;
}

View File

@ -14,7 +14,7 @@ struct AlternateSubst
union {
HBUINT16 format; /* Format identifier */
AlternateSubstFormat1_2<SmallTypes> format1;
#ifndef HB_NO_BORING_EXPANSION
#ifndef HB_NO_BEYOND_64K
AlternateSubstFormat1_2<MediumTypes> format2;
#endif
} u;
@ -27,7 +27,7 @@ struct AlternateSubst
if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
switch (u.format) {
case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
#ifndef HB_NO_BORING_EXPANSION
#ifndef HB_NO_BEYOND_64K
case 2: return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
#endif
default:return_trace (c->default_return_value ());

View File

@ -118,7 +118,7 @@ struct Ligature
match_positions[i] += delta;
if (i)
*p++ = ',';
snprintf (p, sizeof(buf), "%u", match_positions[i]);
snprintf (p, sizeof(buf) - (p - buf), "%u", match_positions[i]);
p += strlen(p);
}
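
The fix above (applied again in Sequence below) replaces the constant sizeof(buf) size argument with the space remaining after the write cursor p: once p has advanced into buf, passing the full buffer size lets snprintf write past the end of the buffer. A standalone sketch of the corrected pattern:

#include <cstdio>
#include <cstring>

int main ()
{
  char buf[16];
  char *p = buf;
  unsigned match_positions[] = { 1, 23, 456 };
  for (unsigned i = 0; i < 3; i++)
  {
    if (i) *p++ = ',';
    // Remaining space after p, not sizeof (buf): snprintf never overruns buf.
    snprintf (p, sizeof (buf) - (p - buf), "%u", match_positions[i]);
    p += strlen (p);
  }
  printf ("%s\n", buf);  // "1,23,456"
}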

View File

@ -14,7 +14,7 @@ struct LigatureSubst
union {
HBUINT16 format; /* Format identifier */
LigatureSubstFormat1_2<SmallTypes> format1;
#ifndef HB_NO_BORING_EXPANSION
#ifndef HB_NO_BEYOND_64K
LigatureSubstFormat1_2<MediumTypes> format2;
#endif
} u;
@ -27,7 +27,7 @@ struct LigatureSubst
if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
switch (u.format) {
case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
#ifndef HB_NO_BORING_EXPANSION
#ifndef HB_NO_BEYOND_64K
case 2: return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
#endif
default:return_trace (c->default_return_value ());

View File

@ -14,7 +14,7 @@ struct MultipleSubst
union {
HBUINT16 format; /* Format identifier */
MultipleSubstFormat1_2<SmallTypes> format1;
#ifndef HB_NO_BORING_EXPANSION
#ifndef HB_NO_BEYOND_64K
MultipleSubstFormat1_2<MediumTypes> format2;
#endif
} u;
@ -28,7 +28,7 @@ struct MultipleSubst
if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
switch (u.format) {
case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
#ifndef HB_NO_BORING_EXPANSION
#ifndef HB_NO_BEYOND_64K
case 2: return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
#endif
default:return_trace (c->default_return_value ());

View File

@ -117,7 +117,7 @@ struct Sequence
{
if (buf < p)
*p++ = ',';
snprintf (p, sizeof(buf), "%u", i);
snprintf (p, sizeof(buf) - (p - buf), "%u", i);
p += strlen(p);
}

View File

@ -16,7 +16,7 @@ struct SingleSubst
HBUINT16 format; /* Format identifier */
SingleSubstFormat1_3<SmallTypes> format1;
SingleSubstFormat2_4<SmallTypes> format2;
#ifndef HB_NO_BORING_EXPANSION
#ifndef HB_NO_BEYOND_64K
SingleSubstFormat1_3<MediumTypes> format3;
SingleSubstFormat2_4<MediumTypes> format4;
#endif
@ -32,7 +32,7 @@ struct SingleSubst
switch (u.format) {
case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
case 2: return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
#ifndef HB_NO_BORING_EXPANSION
#ifndef HB_NO_BEYOND_64K
case 3: return_trace (c->dispatch (u.format3, std::forward<Ts> (ds)...));
case 4: return_trace (c->dispatch (u.format4, std::forward<Ts> (ds)...));
#endif
@ -55,7 +55,7 @@ struct SingleSubst
format = 1;
hb_codepoint_t mask = 0xFFFFu;
#ifndef HB_NO_BORING_EXPANSION
#ifndef HB_NO_BEYOND_64K
if (+ glyphs
| hb_map_retains_sorting (hb_first)
| hb_filter ([] (hb_codepoint_t gid) { return gid > 0xFFFFu; }))
@ -78,7 +78,7 @@ struct SingleSubst
| hb_map_retains_sorting (hb_first),
delta));
case 2: return_trace (u.format2.serialize (c, glyphs));
#ifndef HB_NO_BORING_EXPANSION
#ifndef HB_NO_BEYOND_64K
case 3: return_trace (u.format3.serialize (c,
+ glyphs
| hb_map_retains_sorting (hb_first),

View File

@ -57,7 +57,7 @@ struct SingleSubstFormat1_3
hb_codepoint_t max_before = intersection.get_max ();
hb_codepoint_t min_after = (min_before + d) & mask;
hb_codepoint_t max_after = (max_before + d) & mask;
if (pop >= max_before - min_before &&
if (intersection.get_population () == max_before - min_before + 1 &&
((min_before <= min_after && min_after <= max_before) ||
(min_before <= max_after && max_after <= max_before)))
return;
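
The corrected condition above tests exact contiguity: a set whose minimum is a, maximum is b, and population is exactly b - a + 1 must contain every value in [a, b]. The old "pop >= max_before - min_before" comparison also passed for sets with a gap, since such a set can still have population max - min. A standalone sketch:

#include <cstdio>
#include <set>

int main ()
{
  std::set<unsigned> s = { 10, 11, 13 };        // gap at 12
  unsigned min = *s.begin (), max = *s.rbegin ();
  bool new_test = s.size () == max - min + 1;   // false: population 3 != 4
  bool old_test = s.size () >= max - min;       // true, despite the gap
  printf ("new=%d old=%d\n", (int) new_test, (int) old_test);
}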

View File

@ -36,8 +36,24 @@ struct SingleSubstFormat2_4
void closure (hb_closure_context_t *c) const
{
+ hb_zip (this+coverage, substitute)
| hb_filter (c->parent_active_glyphs (), hb_first)
auto &cov = this+coverage;
auto &glyph_set = c->parent_active_glyphs ();
if (substitute.len > glyph_set.get_population () * 4)
{
for (auto g : glyph_set)
{
unsigned i = cov.get_coverage (g);
if (i == NOT_COVERED || i >= substitute.len)
continue;
c->output->add (substitute.arrayZ[i]);
}
return;
}
+ hb_zip (cov, substitute)
| hb_filter (glyph_set, hb_first)
| hb_map (hb_second)
| hb_sink (c->output)
;

View File

@ -3,6 +3,7 @@
#include "../../hb-open-type.hh"
#include "composite-iter.hh"
namespace OT {
@ -121,7 +122,7 @@ struct CompositeGlyphRecord
if (flags & ARG_1_AND_2_ARE_WORDS)
{
// no overflow, copy and update value with deltas
memcpy (out, this, len);
hb_memcpy (out, this, len);
const HBINT16 *px = reinterpret_cast<const HBINT16 *> (p);
HBINT16 *o = reinterpret_cast<HBINT16 *> (out + len_before_val);
@ -135,7 +136,7 @@ struct CompositeGlyphRecord
if (new_x <= 127 && new_x >= -128 &&
new_y <= 127 && new_y >= -128)
{
memcpy (out, this, len);
hb_memcpy (out, this, len);
HBINT8 *o = reinterpret_cast<HBINT8 *> (out + len_before_val);
o[0] = new_x;
o[1] = new_y;
@ -143,7 +144,7 @@ struct CompositeGlyphRecord
else
{
// int8 overflows after deltas applied
memcpy (out, this, len_before_val);
hb_memcpy (out, this, len_before_val);
//update flags
CompositeGlyphRecord *o = reinterpret_cast<CompositeGlyphRecord *> (out);
@ -152,14 +153,14 @@ struct CompositeGlyphRecord
HBINT16 new_value;
new_value = new_x;
memcpy (out, &new_value, HBINT16::static_size);
hb_memcpy (out, &new_value, HBINT16::static_size);
out += HBINT16::static_size;
new_value = new_y;
memcpy (out, &new_value, HBINT16::static_size);
hb_memcpy (out, &new_value, HBINT16::static_size);
out += HBINT16::static_size;
memcpy (out, p+2, len - len_before_val - 2);
hb_memcpy (out, p+2, len - len_before_val - 2);
len += 2;
}
}
@ -252,55 +253,7 @@ struct CompositeGlyphRecord
DEFINE_SIZE_MIN (4);
};
struct composite_iter_t : hb_iter_with_fallback_t<composite_iter_t, const CompositeGlyphRecord &>
{
typedef const CompositeGlyphRecord *__item_t__;
composite_iter_t (hb_bytes_t glyph_, __item_t__ current_) :
glyph (glyph_), current (nullptr), current_size (0)
{
set_current (current_);
}
composite_iter_t () : glyph (hb_bytes_t ()), current (nullptr), current_size (0) {}
item_t __item__ () const { return *current; }
bool __more__ () const { return current; }
void __next__ ()
{
if (!current->has_more ()) { current = nullptr; return; }
set_current (&StructAtOffset<CompositeGlyphRecord> (current, current_size));
}
composite_iter_t __end__ () const { return composite_iter_t (); }
bool operator != (const composite_iter_t& o) const
{ return current != o.current; }
void set_current (__item_t__ current_)
{
if (!glyph.check_range (current_, CompositeGlyphRecord::min_size))
{
current = nullptr;
current_size = 0;
return;
}
unsigned size = current_->get_size ();
if (!glyph.check_range (current_, size))
{
current = nullptr;
current_size = 0;
return;
}
current = current_;
current_size = size;
}
private:
hb_bytes_t glyph;
__item_t__ current;
unsigned current_size;
};
using composite_iter_t = composite_iter_tmpl<CompositeGlyphRecord>;
struct CompositeGlyph
{
@ -382,7 +335,7 @@ struct CompositeGlyph
unsigned comp_len = component.get_size ();
if (component.is_anchored ())
{
memcpy (p, &component, comp_len);
hb_memcpy (p, &component, comp_len);
p += comp_len;
}
else
@ -398,7 +351,7 @@ struct CompositeGlyph
if (source_len > source_comp_len)
{
unsigned instr_len = source_len - source_comp_len;
memcpy (p, (const char *)c + source_comp_len, instr_len);
hb_memcpy (p, (const char *)c + source_comp_len, instr_len);
p += instr_len;
}

View File

@ -7,6 +7,8 @@
#include "GlyphHeader.hh"
#include "SimpleGlyph.hh"
#include "CompositeGlyph.hh"
#include "VarCompositeGlyph.hh"
#include "coord-setter.hh"
namespace OT {
@ -16,6 +18,11 @@ struct glyf_accelerator_t;
namespace glyf_impl {
#ifndef HB_GLYF_MAX_POINTS
#define HB_GLYF_MAX_POINTS 10000
#endif
enum phantom_point_index_t
{
PHANTOM_LEFT = 0,
@ -27,7 +34,7 @@ enum phantom_point_index_t
struct Glyph
{
enum glyph_type_t { EMPTY, SIMPLE, COMPOSITE };
enum glyph_type_t { EMPTY, SIMPLE, COMPOSITE, VAR_COMPOSITE };
public:
composite_iter_t get_composite_iterator () const
@ -35,6 +42,11 @@ struct Glyph
if (type != COMPOSITE) return composite_iter_t ();
return CompositeGlyph (*header, bytes).iter ();
}
var_composite_iter_t get_var_composite_iterator () const
{
if (type != VAR_COMPOSITE) return var_composite_iter_t ();
return VarCompositeGlyph (*header, bytes).iter ();
}
const hb_bytes_t trim_padding () const
{
@ -108,25 +120,25 @@ struct Glyph
if (unlikely (!glyph_header)) return false;
}
int xMin = 0, xMax = 0;
int yMin = 0, yMax = 0;
float xMin = 0, xMax = 0;
float yMin = 0, yMax = 0;
if (all_points.length > 4)
{
xMin = xMax = roundf (all_points[0].x);
yMin = yMax = roundf (all_points[0].y);
xMin = xMax = all_points[0].x;
yMin = yMax = all_points[0].y;
}
for (unsigned i = 1; i < all_points.length - 4; i++)
{
float rounded_x = roundf (all_points[i].x);
float rounded_y = roundf (all_points[i].y);
xMin = hb_min (xMin, rounded_x);
xMax = hb_max (xMax, rounded_x);
yMin = hb_min (yMin, rounded_y);
yMax = hb_max (yMax, rounded_y);
float x = all_points[i].x;
float y = all_points[i].y;
xMin = hb_min (xMin, x);
xMax = hb_max (xMax, x);
yMin = hb_min (yMin, y);
yMax = hb_max (yMax, y);
}
update_mtx (plan, xMin, yMax, all_points);
update_mtx (plan, roundf (xMin), roundf (yMax), all_points);
/* For empty glyphs: all_points only includes phantom points.
* Just update metrics and then return. */
@ -134,10 +146,10 @@ struct Glyph
return true;
glyph_header->numberOfContours = header->numberOfContours;
glyph_header->xMin = xMin;
glyph_header->yMin = yMin;
glyph_header->xMax = xMax;
glyph_header->yMax = yMax;
glyph_header->xMin = roundf (xMin);
glyph_header->yMin = roundf (yMin);
glyph_header->xMax = roundf (xMax);
glyph_header->yMax = roundf (yMax);
dest_bytes = hb_bytes_t ((const char *)glyph_header, GlyphHeader::static_size);
return true;
@ -199,15 +211,24 @@ struct Glyph
bool shift_points_hori = true,
bool use_my_metrics = true,
bool phantom_only = false,
hb_array_t<int> coords = hb_array_t<int> (),
unsigned int depth = 0) const
{
if (unlikely (depth > HB_MAX_NESTING_LEVEL)) return false;
if (!coords)
coords = hb_array (font->coords, font->num_coords);
contour_point_vector_t stack_points;
bool inplace = type == SIMPLE && all_points.length == 0;
/* Load into all_points if it's empty, as an optimization. */
contour_point_vector_t &points = inplace ? all_points : stack_points;
switch (type) {
case SIMPLE:
if (unlikely (!SimpleGlyph (*header, bytes).get_contour_points (points, phantom_only)))
return false;
break;
case COMPOSITE:
{
/* pseudo component points for each component in composite glyph */
@ -215,20 +236,25 @@ struct Glyph
if (unlikely (!points.resize (num_points))) return false;
break;
}
case SIMPLE:
if (unlikely (!SimpleGlyph (*header, bytes).get_contour_points (points, phantom_only)))
return false;
#ifndef HB_NO_VAR_COMPOSITES
case VAR_COMPOSITE:
{
for (auto &item : get_var_composite_iterator ())
if (unlikely (!item.get_points (points))) return false;
}
#endif
default:
break;
}
/* Init phantom points */
if (unlikely (!points.resize (points.length + PHANTOM_COUNT))) return false;
hb_array_t<contour_point_t> phantoms = points.sub_array (points.length - PHANTOM_COUNT, PHANTOM_COUNT);
hb_array_t<contour_point_t> phantoms = points.as_array ().sub_array (points.length - PHANTOM_COUNT, PHANTOM_COUNT);
{
int lsb = 0;
int h_delta = glyf_accelerator.hmtx->get_leading_bearing_without_var_unscaled (gid, &lsb) ?
(int) header->xMin - lsb : 0;
int tsb = 0;
HB_UNUSED int tsb = 0;
int v_orig = (int) header->yMax +
#ifndef HB_NO_VERTICAL
((void) glyf_accelerator.vmtx->get_leading_bearing_without_var_unscaled (gid, &tsb), tsb)
@ -257,7 +283,9 @@ struct Glyph
}
#ifndef HB_NO_VAR
glyf_accelerator.gvar->apply_deltas_to_points (gid, font, points.as_array ());
glyf_accelerator.gvar->apply_deltas_to_points (gid,
coords,
points.as_array ());
#endif
// Mainly used by CompositeGlyph when calculating new X/Y offset values, so no need to extend it
@ -283,11 +311,24 @@ struct Glyph
for (auto &item : get_composite_iterator ())
{
comp_points.reset ();
if (unlikely (!glyf_accelerator.glyph_for_gid (item.get_gid ())
.get_points (font, glyf_accelerator, comp_points,
deltas, shift_points_hori, use_my_metrics, phantom_only, depth + 1)))
.get_points (font,
glyf_accelerator,
comp_points,
deltas,
shift_points_hori,
use_my_metrics,
phantom_only,
coords,
depth + 1)))
return false;
/* Copy phantom points from component if USE_MY_METRICS flag set */
if (use_my_metrics && item.is_use_my_metrics ())
for (unsigned int i = 0; i < PHANTOM_COUNT; i++)
phantoms[i] = comp_points[comp_points.length - PHANTOM_COUNT + i];
/* Apply component transformation & translation */
item.transform_points (comp_points);
@ -308,20 +349,63 @@ struct Glyph
}
}
/* Copy phantom points from component if USE_MY_METRICS flag set */
if (use_my_metrics && item.is_use_my_metrics ())
for (unsigned int i = 0; i < PHANTOM_COUNT; i++)
phantoms[i] = comp_points[comp_points.length - PHANTOM_COUNT + i];
all_points.extend (comp_points.as_array ().sub_array (0, comp_points.length - PHANTOM_COUNT));
all_points.extend (comp_points.sub_array (0, comp_points.length - PHANTOM_COUNT));
if (all_points.length > HB_GLYF_MAX_POINTS)
return false;
comp_index++;
}
all_points.extend (phantoms);
} break;
#ifndef HB_NO_VAR_COMPOSITES
case VAR_COMPOSITE:
{
contour_point_vector_t comp_points;
hb_array_t<contour_point_t> points_left = points.as_array ();
for (auto &item : get_var_composite_iterator ())
{
hb_array_t<contour_point_t> record_points = points_left.sub_array (0, item.get_num_points ());
comp_points.reset ();
coord_setter_t coord_setter (coords);
item.set_variations (coord_setter, record_points);
if (unlikely (!glyf_accelerator.glyph_for_gid (item.get_gid ())
.get_points (font,
glyf_accelerator,
comp_points,
deltas,
shift_points_hori,
use_my_metrics,
phantom_only,
coord_setter.get_coords (),
depth + 1)))
return false;
/* Apply component transformation */
item.transform_points (record_points, comp_points);
/* Copy phantom points from component if USE_MY_METRICS flag set */
if (use_my_metrics && item.is_use_my_metrics ())
for (unsigned int i = 0; i < PHANTOM_COUNT; i++)
phantoms[i] = comp_points[comp_points.length - PHANTOM_COUNT + i];
all_points.extend (comp_points.as_array ().sub_array (0, comp_points.length - PHANTOM_COUNT));
if (all_points.length > HB_GLYF_MAX_POINTS)
return false;
points_left += item.get_num_points ();
}
all_points.extend (phantoms);
} break;
#endif
default:
all_points.extend (phantoms);
break;
}
if (depth == 0 && shift_points_hori) /* Apply at top level */
@ -360,6 +444,7 @@ struct Glyph
int num_contours = header->numberOfContours;
if (unlikely (num_contours == 0)) type = EMPTY;
else if (num_contours > 0) type = SIMPLE;
else if (num_contours == -2) type = VAR_COMPOSITE;
else type = COMPOSITE; /* negative numbers */
}
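
This file also extends glyph-type detection: as the hunk above shows, a numberOfContours value of -2 now marks a variable-composite glyph, while other negative values remain ordinary composites. A standalone sketch of the classification:

#include <cstdio>

enum glyph_type_t { EMPTY, SIMPLE, COMPOSITE, VAR_COMPOSITE };

static glyph_type_t classify (int num_contours)
{
  if (num_contours == 0)  return EMPTY;
  if (num_contours > 0)   return SIMPLE;
  if (num_contours == -2) return VAR_COMPOSITE;
  return COMPOSITE;                              // any other negative value
}

int main () { printf ("%d\n", (int) classify (-2)); }  // 3, i.e. VAR_COMPOSITE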

View File

@ -132,8 +132,8 @@ struct SimpleGlyph
if (unlikely (p + 1 > end)) return false;
unsigned int repeat_count = *p++;
unsigned stop = hb_min (i + repeat_count, count);
for (; i < stop;)
points_.arrayZ[i++].flag = flag;
for (; i < stop; i++)
points_.arrayZ[i].flag = flag;
}
}
return true;
@ -223,33 +223,34 @@ struct SimpleGlyph
if (value > 0) flag |= same_flag;
else value = -value;
coords.push ((uint8_t)value);
coords.arrayZ[coords.length++] = (uint8_t) value;
}
else
{
int16_t val = value;
coords.push (val >> 8);
coords.push (val & 0xff);
coords.arrayZ[coords.length++] = val >> 8;
coords.arrayZ[coords.length++] = val & 0xff;
}
}
static void encode_flag (uint8_t &flag,
uint8_t &repeat,
uint8_t &lastflag,
uint8_t lastflag,
hb_vector_t<uint8_t> &flags /* OUT */)
{
if (flag == lastflag && repeat != 255)
{
repeat = repeat + 1;
repeat++;
if (repeat == 1)
{
flags.push(flag);
/* We know there's room. */
flags.arrayZ[flags.length++] = flag;
}
else
{
unsigned len = flags.length;
flags[len-2] = flag | FLAG_REPEAT;
flags[len-1] = repeat;
flags.arrayZ[len-2] = flag | FLAG_REPEAT;
flags.arrayZ[len-1] = repeat;
}
}
else
@ -257,7 +258,6 @@ struct SimpleGlyph
repeat = 0;
flags.push (flag);
}
lastflag = flag;
}
bool compile_bytes_with_deltas (const contour_point_vector_t &all_points,
@ -269,7 +269,6 @@ struct SimpleGlyph
dest_bytes = hb_bytes_t ();
return true;
}
//convert absolute values to relative values
unsigned num_points = all_points.length - 4;
hb_vector_t<uint8_t> flags, x_coords, y_coords;
@ -277,23 +276,23 @@ struct SimpleGlyph
if (unlikely (!x_coords.alloc (2*num_points))) return false;
if (unlikely (!y_coords.alloc (2*num_points))) return false;
uint8_t lastflag = 0, repeat = 0;
int prev_x = 0.f, prev_y = 0.f;
uint8_t lastflag = 255, repeat = 0;
int prev_x = 0, prev_y = 0;
for (unsigned i = 0; i < num_points; i++)
{
uint8_t flag = all_points[i].flag;
uint8_t flag = all_points.arrayZ[i].flag;
flag &= FLAG_ON_CURVE + FLAG_OVERLAP_SIMPLE;
float cur_x = roundf (all_points[i].x);
float cur_y = roundf (all_points[i].y);
int cur_x = roundf (all_points.arrayZ[i].x);
int cur_y = roundf (all_points.arrayZ[i].y);
encode_coord (cur_x - prev_x, flag, FLAG_X_SHORT, FLAG_X_SAME, x_coords);
encode_coord (cur_y - prev_y, flag, FLAG_Y_SHORT, FLAG_Y_SAME, y_coords);
if (i == 0) lastflag = flag + 1; //make lastflag != flag for the first point
encode_flag (flag, repeat, lastflag, flags);
prev_x = cur_x;
prev_y = cur_y;
lastflag = flag;
}
unsigned len_before_instrs = 2 * header.numberOfContours + 2;
@ -303,29 +302,29 @@ struct SimpleGlyph
if (!no_hinting)
total_len += len_instrs;
char *p = (char *) hb_calloc (total_len, sizeof (char));
char *p = (char *) hb_malloc (total_len);
if (unlikely (!p)) return false;
const char *src = bytes.arrayZ + GlyphHeader::static_size;
char *cur = p;
memcpy (p, src, len_before_instrs);
hb_memcpy (p, src, len_before_instrs);
cur += len_before_instrs;
src += len_before_instrs;
if (!no_hinting)
{
memcpy (cur, src, len_instrs);
hb_memcpy (cur, src, len_instrs);
cur += len_instrs;
}
memcpy (cur, flags.arrayZ, flags.length);
hb_memcpy (cur, flags.arrayZ, flags.length);
cur += flags.length;
memcpy (cur, x_coords.arrayZ, x_coords.length);
hb_memcpy (cur, x_coords.arrayZ, x_coords.length);
cur += x_coords.length;
memcpy (cur, y_coords.arrayZ, y_coords.length);
hb_memcpy (cur, y_coords.arrayZ, y_coords.length);
dest_bytes = hb_bytes_t (p, total_len);
return true;

View File

@ -21,10 +21,18 @@ struct SubsetGlyph
bool serialize (hb_serialize_context_t *c,
bool use_short_loca,
const hb_subset_plan_t *plan) const
const hb_subset_plan_t *plan,
hb_font_t *font)
{
TRACE_SERIALIZE (this);
if (font)
{
const OT::glyf_accelerator_t &glyf = *font->face->table.glyf;
if (!this->compile_bytes_with_deltas (plan, font, glyf))
return_trace (false);
}
hb_bytes_t dest_glyph = dest_start.copy (c);
dest_glyph = hb_bytes_t (&dest_glyph, dest_glyph.length + dest_end.copy (c).length);
unsigned int pad_length = use_short_loca ? padding () : 0;

View File

@ -0,0 +1,353 @@
#ifndef OT_GLYF_VARCOMPOSITEGLYPH_HH
#define OT_GLYF_VARCOMPOSITEGLYPH_HH
#include "../../hb-open-type.hh"
#include "coord-setter.hh"
namespace OT {
namespace glyf_impl {
struct VarCompositeGlyphRecord
{
protected:
enum var_composite_glyph_flag_t
{
USE_MY_METRICS = 0x0001,
AXIS_INDICES_ARE_SHORT = 0x0002,
UNIFORM_SCALE = 0x0004,
HAVE_TRANSLATE_X = 0x0008,
HAVE_TRANSLATE_Y = 0x0010,
HAVE_ROTATION = 0x0020,
HAVE_SCALE_X = 0x0040,
HAVE_SCALE_Y = 0x0080,
HAVE_SKEW_X = 0x0100,
HAVE_SKEW_Y = 0x0200,
HAVE_TCENTER_X = 0x0400,
HAVE_TCENTER_Y = 0x0800,
GID_IS_24 = 0x1000,
AXES_HAVE_VARIATION = 0x2000,
};
public:
unsigned int get_size () const
{
unsigned int size = min_size;
unsigned axis_width = (flags & AXIS_INDICES_ARE_SHORT) ? 4 : 3;
size += numAxes * axis_width;
// gid
size += 2;
if (flags & GID_IS_24) size += 1;
if (flags & HAVE_TRANSLATE_X) size += 2;
if (flags & HAVE_TRANSLATE_Y) size += 2;
if (flags & HAVE_ROTATION) size += 2;
if (flags & HAVE_SCALE_X) size += 2;
if (flags & HAVE_SCALE_Y) size += 2;
if (flags & HAVE_SKEW_X) size += 2;
if (flags & HAVE_SKEW_Y) size += 2;
if (flags & HAVE_TCENTER_X) size += 2;
if (flags & HAVE_TCENTER_Y) size += 2;
return size;
}
bool has_more () const { return true; }
bool is_use_my_metrics () const { return flags & USE_MY_METRICS; }
hb_codepoint_t get_gid () const
{
if (flags & GID_IS_24)
return StructAfter<const HBGlyphID24> (numAxes);
else
return StructAfter<const HBGlyphID16> (numAxes);
}
unsigned get_numAxes () const
{
return numAxes;
}
unsigned get_num_points () const
{
unsigned num = 0;
if (flags & AXES_HAVE_VARIATION) num += numAxes;
if (flags & (HAVE_TRANSLATE_X | HAVE_TRANSLATE_Y)) num++;
if (flags & HAVE_ROTATION) num++;
if (flags & (HAVE_SCALE_X | HAVE_SCALE_Y)) num++;
if (flags & (HAVE_SKEW_X | HAVE_SKEW_Y)) num++;
if (flags & (HAVE_TCENTER_X | HAVE_TCENTER_Y)) num++;
return num;
}
void transform_points (hb_array_t<contour_point_t> record_points,
contour_point_vector_t &points) const
{
float matrix[4];
contour_point_t trans;
get_transformation_from_points (record_points, matrix, trans);
points.transform (matrix);
points.translate (trans);
}
static inline void transform (float (&matrix)[4], contour_point_t &trans,
float (other)[6])
{
// https://github.com/fonttools/fonttools/blob/f66ee05f71c8b57b5f519ee975e95edcd1466e14/Lib/fontTools/misc/transform.py#L268
float xx1 = other[0];
float xy1 = other[1];
float yx1 = other[2];
float yy1 = other[3];
float dx1 = other[4];
float dy1 = other[5];
float xx2 = matrix[0];
float xy2 = matrix[1];
float yx2 = matrix[2];
float yy2 = matrix[3];
float dx2 = trans.x;
float dy2 = trans.y;
matrix[0] = xx1*xx2 + xy1*yx2;
matrix[1] = xx1*xy2 + xy1*yy2;
matrix[2] = yx1*xx2 + yy1*yx2;
matrix[3] = yx1*xy2 + yy1*yy2;
trans.x = xx2*dx1 + yx2*dy1 + dx2;
trans.y = xy2*dx1 + yy2*dy1 + dy2;
}
static void translate (float (&matrix)[4], contour_point_t &trans,
float translateX, float translateY)
{
// https://github.com/fonttools/fonttools/blob/f66ee05f71c8b57b5f519ee975e95edcd1466e14/Lib/fontTools/misc/transform.py#L213
float other[6] = {1.f, 0.f, 0.f, 1.f, translateX, translateY};
transform (matrix, trans, other);
}
static void scale (float (&matrix)[4], contour_point_t &trans,
float scaleX, float scaleY)
{
// https://github.com/fonttools/fonttools/blob/f66ee05f71c8b57b5f519ee975e95edcd1466e14/Lib/fontTools/misc/transform.py#L224
float other[6] = {scaleX, 0.f, 0.f, scaleY, 0.f, 0.f};
transform (matrix, trans, other);
}
static void rotate (float (&matrix)[4], contour_point_t &trans,
float rotation)
{
// https://github.com/fonttools/fonttools/blob/f66ee05f71c8b57b5f519ee975e95edcd1466e14/Lib/fontTools/misc/transform.py#L240
rotation = rotation * float (M_PI);
float c = cosf (rotation);
float s = sinf (rotation);
float other[6] = {c, s, -s, c, 0.f, 0.f};
transform (matrix, trans, other);
}
static void skew (float (&matrix)[4], contour_point_t &trans,
float skewX, float skewY)
{
// https://github.com/fonttools/fonttools/blob/f66ee05f71c8b57b5f519ee975e95edcd1466e14/Lib/fontTools/misc/transform.py#L255
skewX = skewX * float (M_PI);
skewY = skewY * float (M_PI);
float other[6] = {1.f, tanf (skewY), tanf (skewX), 1.f, 0.f, 0.f};
transform (matrix, trans, other);
}
bool get_points (contour_point_vector_t &points) const
{
float translateX = 0.f;
float translateY = 0.f;
float rotation = 0.f;
float scaleX = 1.f * (1 << 12);
float scaleY = 1.f * (1 << 12);
float skewX = 0.f;
float skewY = 0.f;
float tCenterX = 0.f;
float tCenterY = 0.f;
if (unlikely (!points.resize (points.length + get_num_points ()))) return false;
unsigned axis_width = (flags & AXIS_INDICES_ARE_SHORT) ? 2 : 1;
unsigned axes_size = numAxes * axis_width;
const F2DOT14 *q = (const F2DOT14 *) (axes_size +
(flags & GID_IS_24 ? 3 : 2) +
&StructAfter<const HBUINT8> (numAxes));
hb_array_t<contour_point_t> rec_points = points.as_array ().sub_array (points.length - get_num_points ());
unsigned count = numAxes;
if (flags & AXES_HAVE_VARIATION)
{
for (unsigned i = 0; i < count; i++)
rec_points[i].x = *q++;
rec_points += count;
}
else
q += count;
const HBUINT16 *p = (const HBUINT16 *) q;
if (flags & HAVE_TRANSLATE_X) translateX = * (const FWORD *) p++;
if (flags & HAVE_TRANSLATE_Y) translateY = * (const FWORD *) p++;
if (flags & HAVE_ROTATION) rotation = * (const F2DOT14 *) p++;
if (flags & HAVE_SCALE_X) scaleX = * (const F4DOT12 *) p++;
if (flags & HAVE_SCALE_Y) scaleY = * (const F4DOT12 *) p++;
if (flags & HAVE_SKEW_X) skewX = * (const F2DOT14 *) p++;
if (flags & HAVE_SKEW_Y) skewY = * (const F2DOT14 *) p++;
if (flags & HAVE_TCENTER_X) tCenterX = * (const FWORD *) p++;
if (flags & HAVE_TCENTER_Y) tCenterY = * (const FWORD *) p++;
if ((flags & UNIFORM_SCALE) && !(flags & HAVE_SCALE_Y))
scaleY = scaleX;
if (flags & (HAVE_TRANSLATE_X | HAVE_TRANSLATE_Y))
{
rec_points[0].x = translateX;
rec_points[0].y = translateY;
rec_points++;
}
if (flags & HAVE_ROTATION)
{
rec_points[0].x = rotation;
rec_points++;
}
if (flags & (HAVE_SCALE_X | HAVE_SCALE_Y))
{
rec_points[0].x = scaleX;
rec_points[0].y = scaleY;
rec_points++;
}
if (flags & (HAVE_SKEW_X | HAVE_SKEW_Y))
{
rec_points[0].x = skewX;
rec_points[0].y = skewY;
rec_points++;
}
if (flags & (HAVE_TCENTER_X | HAVE_TCENTER_Y))
{
rec_points[0].x = tCenterX;
rec_points[0].y = tCenterY;
rec_points++;
}
assert (!rec_points);
return true;
}
void get_transformation_from_points (hb_array_t<contour_point_t> rec_points,
float (&matrix)[4], contour_point_t &trans) const
{
if (flags & AXES_HAVE_VARIATION)
rec_points += numAxes;
matrix[0] = matrix[3] = 1.f;
matrix[1] = matrix[2] = 0.f;
trans.init (0.f, 0.f);
float translateX = 0.f;
float translateY = 0.f;
float rotation = 0.f;
float scaleX = 1.f;
float scaleY = 1.f;
float skewX = 0.f;
float skewY = 0.f;
float tCenterX = 0.f;
float tCenterY = 0.f;
if (flags & (HAVE_TRANSLATE_X | HAVE_TRANSLATE_Y))
{
translateX = rec_points[0].x;
translateY = rec_points[0].y;
rec_points++;
}
if (flags & HAVE_ROTATION)
{
rotation = rec_points[0].x / (1 << 14);
rec_points++;
}
if (flags & (HAVE_SCALE_X | HAVE_SCALE_Y))
{
scaleX = rec_points[0].x / (1 << 12);
scaleY = rec_points[0].y / (1 << 12);
rec_points++;
}
if (flags & (HAVE_SKEW_X | HAVE_SKEW_Y))
{
skewX = rec_points[0].x / (1 << 14);
skewY = rec_points[0].y / (1 << 14);
rec_points++;
}
if (flags & (HAVE_TCENTER_X | HAVE_TCENTER_Y))
{
tCenterX = rec_points[0].x;
tCenterY = rec_points[0].y;
rec_points++;
}
assert (!rec_points);
translate (matrix, trans, translateX + tCenterX, translateY + tCenterY);
rotate (matrix, trans, rotation);
scale (matrix, trans, scaleX, scaleY);
skew (matrix, trans, -skewX, skewY);
translate (matrix, trans, -tCenterX, -tCenterY);
}
void set_variations (coord_setter_t &setter,
hb_array_t<contour_point_t> rec_points) const
{
bool have_variations = flags & AXES_HAVE_VARIATION;
unsigned axis_width = (flags & AXIS_INDICES_ARE_SHORT) ? 2 : 1;
const HBUINT8 *p = (const HBUINT8 *) (((HBUINT8 *) &numAxes) + numAxes.static_size + (flags & GID_IS_24 ? 3 : 2));
const HBUINT16 *q = (const HBUINT16 *) (((HBUINT8 *) &numAxes) + numAxes.static_size + (flags & GID_IS_24 ? 3 : 2));
const F2DOT14 *a = (const F2DOT14 *) ((HBUINT8 *) (axis_width == 1 ? (p + numAxes) : (HBUINT8 *) (q + numAxes)));
unsigned count = numAxes;
for (unsigned i = 0; i < count; i++)
{
unsigned axis_index = axis_width == 1 ? (unsigned) *p++ : (unsigned) *q++;
signed v = have_variations ? rec_points[i].x : *a++;
v += setter[axis_index];
v = hb_clamp (v, -(1<<14), (1<<14));
setter[axis_index] = v;
}
}
protected:
HBUINT16 flags;
HBUINT8 numAxes;
public:
DEFINE_SIZE_MIN (3);
};
using var_composite_iter_t = composite_iter_tmpl<VarCompositeGlyphRecord>;
struct VarCompositeGlyph
{
const GlyphHeader &header;
hb_bytes_t bytes;
VarCompositeGlyph (const GlyphHeader &header_, hb_bytes_t bytes_) :
header (header_), bytes (bytes_) {}
var_composite_iter_t iter () const
{ return var_composite_iter_t (bytes, &StructAfter<VarCompositeGlyphRecord, GlyphHeader> (header)); }
};
} /* namespace glyf_impl */
} /* namespace OT */
#endif /* OT_GLYF_VARCOMPOSITEGLYPH_HH */
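
A note on the fixed-point conventions the record above decodes, inferred from the shifts and M_PI factors in the code: rotation and skew are F2DOT14 values in units of pi (so 1.0 means 180 degrees), and scale is F4DOT12 with 1.0 stored as 1 << 12. A standalone sketch of the conversions:

#include <cmath>
#include <cstdint>
#include <cstdio>

#ifndef M_PI
#define M_PI 3.14159265358979323846
#endif

int main ()
{
  int16_t raw_rotation = 1 << 13;                    // 0.5 in F2DOT14
  int16_t raw_scale    = 1 << 12;                    // 1.0 in F4DOT12 (the default)

  float rotation = raw_rotation / float (1 << 14);   // 0.5 turns of pi
  float radians  = rotation * float (M_PI);          // pi/2, i.e. 90 degrees
  float scale    = raw_scale / float (1 << 12);      // 1.0

  printf ("rotation=%g (%g rad), scale=%g\n", rotation, radians, scale);
}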

View File

@ -0,0 +1,68 @@
#ifndef OT_GLYF_COMPOSITE_ITER_HH
#define OT_GLYF_COMPOSITE_ITER_HH
#include "../../hb.hh"
namespace OT {
namespace glyf_impl {
template <typename CompositeGlyphRecord>
struct composite_iter_tmpl : hb_iter_with_fallback_t<composite_iter_tmpl<CompositeGlyphRecord>,
const CompositeGlyphRecord &>
{
typedef const CompositeGlyphRecord *__item_t__;
composite_iter_tmpl (hb_bytes_t glyph_, __item_t__ current_) :
glyph (glyph_), current (nullptr), current_size (0)
{
set_current (current_);
}
composite_iter_tmpl () : glyph (hb_bytes_t ()), current (nullptr), current_size (0) {}
const CompositeGlyphRecord & __item__ () const { return *current; }
bool __more__ () const { return current; }
void __next__ ()
{
if (!current->has_more ()) { current = nullptr; return; }
set_current (&StructAtOffset<CompositeGlyphRecord> (current, current_size));
}
composite_iter_tmpl __end__ () const { return composite_iter_tmpl (); }
bool operator != (const composite_iter_tmpl& o) const
{ return current != o.current; }
void set_current (__item_t__ current_)
{
if (!glyph.check_range (current_, CompositeGlyphRecord::min_size))
{
current = nullptr;
current_size = 0;
return;
}
unsigned size = current_->get_size ();
if (!glyph.check_range (current_, size))
{
current = nullptr;
current_size = 0;
return;
}
current = current_;
current_size = size;
}
private:
hb_bytes_t glyph;
__item_t__ current;
unsigned current_size;
};
} /* namespace glyf_impl */
} /* namespace OT */
#endif /* OT_GLYF_COMPOSITE_ITER_HH */

View File

@ -0,0 +1,34 @@
#ifndef OT_GLYF_COORD_SETTER_HH
#define OT_GLYF_COORD_SETTER_HH
#include "../../hb.hh"
namespace OT {
namespace glyf_impl {
struct coord_setter_t
{
coord_setter_t (hb_array_t<int> coords) :
coords (coords) {}
int& operator [] (unsigned idx)
{
if (coords.length < idx + 1)
coords.resize (idx + 1);
return coords[idx];
}
hb_array_t<int> get_coords ()
{ return coords.as_array (); }
hb_vector_t<int> coords;
};
} /* namespace glyf_impl */
} /* namespace OT */
#endif /* OT_GLYF_COORD_SETTER_HH */
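
coord_setter_t's operator[] grows the coordinate vector on demand, so a variable-composite component can assign an axis index that the incoming coords array never covered; missing entries default to 0, the axis default. A standalone sketch of that grow-on-write behavior, with std::vector standing in for hb_vector_t:

#include <cstdio>
#include <vector>

struct coord_setter_sketch
{
  std::vector<int> coords;
  int &operator[] (unsigned idx)
  {
    if (coords.size () < idx + 1)
      coords.resize (idx + 1);                 // new entries are zero-initialized
    return coords[idx];
  }
};

int main ()
{
  coord_setter_sketch setter;                  // start with no coords at all
  setter[2] = 4096;                            // grows to three entries: {0, 0, 4096}
  printf ("%zu axes, coords[2]=%d\n", setter.coords.size (), setter[2]);
}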

View File

@ -42,11 +42,15 @@ struct glyf
bool serialize (hb_serialize_context_t *c,
Iterator it,
bool use_short_loca,
const hb_subset_plan_t *plan)
const hb_subset_plan_t *plan,
hb_font_t *font)
{
TRACE_SERIALIZE (this);
unsigned init_len = c->length ();
for (const auto &_ : it) _.serialize (c, use_short_loca, plan);
for (auto &_ : it)
if (unlikely (!_.serialize (c, use_short_loca, plan, font)))
return false;
/* As a special case when all glyphs in the font are empty, add a zero byte
* to the table, so that OTS doesn't reject it, and to make the table work
@ -74,10 +78,11 @@ struct glyf
hb_vector_t<glyf_impl::SubsetGlyph> glyphs;
_populate_subset_glyphs (c->plan, glyphs);
hb_font_t *font = nullptr;
if (!c->plan->pinned_at_default)
{
if (!_compile_subset_glyphs_with_deltas (c->plan, &glyphs))
return_trace (false);
font = _create_font_for_instancing (c->plan);
if (unlikely (!font)) return false;
}
auto padded_offsets =
@ -85,11 +90,14 @@ struct glyf
| hb_map (&glyf_impl::SubsetGlyph::padded_size)
;
unsigned max_offset = + padded_offsets | hb_reduce (hb_add, 0);
bool use_short_loca = max_offset < 0x1FFFF;
bool use_short_loca = false;
if (likely (!c->plan->force_long_loca))
{
unsigned max_offset = + padded_offsets | hb_reduce (hb_add, 0);
use_short_loca = max_offset < 0x1FFFF;
}
glyf_prime->serialize (c->serializer, hb_iter (glyphs), use_short_loca, c->plan);
glyf_prime->serialize (c->serializer, glyphs.writer (), use_short_loca, c->plan, font);
if (!use_short_loca) {
padded_offsets =
+ hb_iter (glyphs)
@ -97,9 +105,12 @@ struct glyf
;
}
if (!c->plan->pinned_at_default)
if (font)
{
_free_compiled_subset_glyphs (&glyphs);
hb_font_destroy (font);
}
if (unlikely (c->serializer->in_error ())) return_trace (false);
return_trace (c->serializer->check_success (glyf_impl::_add_loca_and_head (c->plan,
padded_offsets,
@ -110,9 +121,8 @@ struct glyf
_populate_subset_glyphs (const hb_subset_plan_t *plan,
hb_vector_t<glyf_impl::SubsetGlyph> &glyphs /* OUT */) const;
bool
_compile_subset_glyphs_with_deltas (const hb_subset_plan_t *plan,
hb_vector_t<glyf_impl::SubsetGlyph> *glyphs /* OUT */) const;
hb_font_t *
_create_font_for_instancing (const hb_subset_plan_t *plan) const;
void _free_compiled_subset_glyphs (hb_vector_t<glyf_impl::SubsetGlyph> *glyphs) const
{
@ -394,7 +404,11 @@ glyf::_populate_subset_glyphs (const hb_subset_plan_t *plan,
plan->pinned_at_default)
subset_glyph.source_glyph = glyf_impl::Glyph ();
else
subset_glyph.source_glyph = glyf.glyph_for_gid (subset_glyph.old_gid, true);
{
/* If plan has an accelerator, the preprocessing step already trimmed glyphs.
* Don't trim them again! */
subset_glyph.source_glyph = glyf.glyph_for_gid (subset_glyph.old_gid, !plan->accelerator);
}
if (plan->flags & HB_SUBSET_FLAGS_NO_HINTING)
subset_glyph.drop_hints_bytes ();
@ -403,17 +417,15 @@ glyf::_populate_subset_glyphs (const hb_subset_plan_t *plan,
}
}
inline bool
glyf::_compile_subset_glyphs_with_deltas (const hb_subset_plan_t *plan,
hb_vector_t<glyf_impl::SubsetGlyph> *glyphs /* OUT */) const
inline hb_font_t *
glyf::_create_font_for_instancing (const hb_subset_plan_t *plan) const
{
OT::glyf_accelerator_t glyf (plan->source);
hb_font_t *font = hb_font_create (plan->source);
if (unlikely (!font)) return false;
if (unlikely (font == hb_font_get_empty ())) return nullptr;
hb_vector_t<hb_variation_t> vars;
if (unlikely (!vars.alloc (plan->user_axes_location->get_population ())))
return false;
return nullptr;
for (auto _ : *plan->user_axes_location)
{
@ -423,18 +435,10 @@ glyf::_compile_subset_glyphs_with_deltas (const hb_subset_plan_t *plan,
vars.push (var);
}
#ifndef HB_NO_VAR
hb_font_set_variations (font, vars.arrayZ, plan->user_axes_location->get_population ());
for (auto& subset_glyph : *glyphs)
{
if (!const_cast<glyf_impl::SubsetGlyph &> (subset_glyph).compile_bytes_with_deltas (plan, font, glyf))
{
hb_font_destroy (font);
return false;
}
}
hb_font_destroy (font);
return true;
#endif
return font;
}

View File

@ -112,7 +112,7 @@ struct ClassDef : public OT::ClassDef
{
case 1: return ((ClassDefFormat1*)this)->sanitize (vertex);
case 2: return ((ClassDefFormat2*)this)->sanitize (vertex);
#ifndef HB_NO_BORING_EXPANSION
#ifndef HB_NO_BEYOND_64K
// Not currently supported
case 3:
case 4:

View File

@ -136,7 +136,7 @@ struct Coverage : public OT::Layout::Common::Coverage
{
case 1: return ((CoverageFormat1*)this)->sanitize (vertex);
case 2: return ((CoverageFormat2*)this)->sanitize (vertex);
#ifndef HB_NO_BORING_EXPANSION
#ifndef HB_NO_BEYOND_64K
// Not currently supported
case 3:
case 4:

View File

@ -49,6 +49,50 @@ struct graph_t
unsigned end = 0;
unsigned priority = 0;
bool link_positions_valid (unsigned num_objects, bool removed_nil)
{
hb_set_t assigned_bytes;
for (const auto& l : obj.real_links)
{
if (l.objidx >= num_objects
|| (removed_nil && !l.objidx))
{
DEBUG_MSG (SUBSET_REPACK, nullptr,
"Invalid graph. Invalid object index.");
return false;
}
unsigned start = l.position;
unsigned end = start + l.width - 1;
if (unlikely (l.width < 2 || l.width > 4))
{
DEBUG_MSG (SUBSET_REPACK, nullptr,
"Invalid graph. Invalid link width.");
return false;
}
if (unlikely (end >= table_size ()))
{
DEBUG_MSG (SUBSET_REPACK, nullptr,
"Invalid graph. Link position is out of bounds.");
return false;
}
if (unlikely (assigned_bytes.intersects (start, end)))
{
DEBUG_MSG (SUBSET_REPACK, nullptr,
"Invalid graph. Found offsets whose positions overlap.");
return false;
}
assigned_bytes.add_range (start, end);
}
return !assigned_bytes.in_error ();
}
void normalize ()
{
obj.real_links.qsort ();
@ -132,7 +176,7 @@ struct graph_t
for (unsigned i = 0; i < parents.length; i++)
{
if (parents[i] != parent_index) continue;
parents.remove (i);
parents.remove_unordered (i);
break;
}
}
@ -148,7 +192,7 @@ struct graph_t
if ((obj.head + link.position) != offset)
continue;
obj.real_links.remove (i);
obj.real_links.remove_unordered (i);
return;
}
}
@ -286,8 +330,6 @@ struct graph_t
vertices_scratch_.alloc (objects.length);
for (unsigned i = 0; i < objects.length; i++)
{
// TODO(grieger): check all links point to valid objects.
// If this graph came from a serialization buffer object 0 is the
// nil object. We don't need it for our purposes here so drop it.
if (i == 0 && !objects[i])
@ -299,6 +341,9 @@ struct graph_t
vertex_t* v = vertices_.push ();
if (check_success (!vertices_.in_error ()))
v->obj = *objects[i];
check_success (v->link_positions_valid (objects.length, removed_nil));
if (!removed_nil) continue;
// Fix indices to account for removed nil object.
for (auto& l : v->obj.all_links_writer ()) {
@ -418,6 +463,13 @@ struct graph_t
hb_swap (sorted_graph[new_id], vertices_[next_id]);
const vertex_t& next = sorted_graph[new_id];
if (unlikely (!check_success(new_id >= 0))) {
// We are out of ids, which means we've visited a node more than once.
// This graph contains a cycle, which is not allowed.
DEBUG_MSG (SUBSET_REPACK, nullptr, "Invalid graph. Contains cycle.");
return;
}
id_map[next_id] = new_id--;
for (const auto& link : next.obj.all_links ()) {
@ -580,7 +632,7 @@ struct graph_t
while (roots)
{
unsigned next = HB_SET_VALUE_INVALID;
uint32_t next = HB_SET_VALUE_INVALID;
if (unlikely (!check_success (!roots.in_error ()))) break;
if (!roots.next (&next)) break;
@ -661,8 +713,8 @@ struct graph_t
auto new_subgraph =
+ subgraph.keys ()
| hb_map([&] (unsigned node_idx) {
const unsigned *v;
| hb_map([&] (uint32_t node_idx) {
const uint32_t *v;
if (index_map.has (node_idx, &v)) return *v;
return node_idx;
})
@ -672,10 +724,10 @@ struct graph_t
remap_obj_indices (index_map, parents.iter (), true);
// Update roots set with new indices as needed.
unsigned next = HB_SET_VALUE_INVALID;
uint32_t next = HB_SET_VALUE_INVALID;
while (roots.next (&next))
{
const unsigned *v;
const uint32_t *v;
if (index_map.has (next, &v))
{
roots.del (next);
@ -690,7 +742,7 @@ struct graph_t
{
for (const auto& link : vertices_[node_idx].obj.all_links ())
{
const unsigned *v;
const uint32_t *v;
if (subgraph.has (link.objidx, &v))
{
subgraph.set (link.objidx, *v + 1);
@ -941,6 +993,72 @@ struct graph_t
return made_change;
}
bool is_fully_connected ()
{
update_parents();
if (root().parents)
// Root cannot have parents.
return false;
for (unsigned i = 0; i < root_idx (); i++)
{
if (!vertices_[i].parents)
return false;
}
return true;
}
#if 0
/*
* Saves the current graph to a packed binary format which the repacker fuzzer takes
* as a seed.
*/
void save_fuzzer_seed (hb_tag_t tag) const
{
FILE* f = fopen ("./repacker_fuzzer_seed", "w");
fwrite ((void*) &tag, sizeof (tag), 1, f);
uint16_t num_objects = vertices_.length;
fwrite ((void*) &num_objects, sizeof (num_objects), 1, f);
for (const auto& v : vertices_)
{
uint16_t blob_size = v.table_size ();
fwrite ((void*) &blob_size, sizeof (blob_size), 1, f);
fwrite ((const void*) v.obj.head, blob_size, 1, f);
}
uint16_t link_count = 0;
for (const auto& v : vertices_)
link_count += v.obj.real_links.length;
fwrite ((void*) &link_count, sizeof (link_count), 1, f);
typedef struct
{
uint16_t parent;
uint16_t child;
uint16_t position;
uint8_t width;
} link_t;
for (unsigned i = 0; i < vertices_.length; i++)
{
for (const auto& l : vertices_[i].obj.real_links)
{
link_t link {
(uint16_t) i, (uint16_t) l.objidx,
(uint16_t) l.position, (uint8_t) l.width
};
fwrite ((void*) &link, sizeof (link), 1, f);
}
}
fclose (f);
}
#endif
void print_orphaned_nodes ()
{
if (!DEBUG_ENABLED(SUBSET_REPACK)) return;
@ -949,6 +1067,10 @@ struct graph_t
parents_invalid = true;
update_parents();
if (root().parents) {
DEBUG_MSG (SUBSET_REPACK, nullptr, "Root node has incoming edges.");
}
for (unsigned i = 0; i < root_idx (); i++)
{
const auto& v = vertices_[i];
@ -1065,6 +1187,11 @@ struct graph_t
}
}
for (unsigned i = 0; i < vertices_.length; i++)
// parents arrays must be accurate or downstream operations like cycle detection
// and sorting won't work correctly.
check_success (!vertices_[i].parents.in_error ());
parents_invalid = false;
}
@ -1183,7 +1310,7 @@ struct graph_t
{
for (auto& link : vertices_[i].obj.all_links_writer ())
{
const unsigned *v;
const uint32_t *v;
if (!id_map.has (link.objidx, &v)) continue;
if (only_wide && !(link.width == 4 && !link.is_signed)) continue;

View File

@ -201,7 +201,7 @@ struct Lookup : public OT::Lookup
+ new_subtable_count * OT::Offset16::static_size;
char* buffer = (char*) hb_calloc (1, new_size);
c.add_buffer (buffer);
memcpy (buffer, v.obj.head, v.table_size());
hb_memcpy (buffer, v.obj.head, v.table_size());
v.obj.head = buffer;
v.obj.tail = buffer + new_size;
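
The memcpy → hb_memcpy (and memset → hb_memset) renames that recur through the rest of this change swap in thin wrappers that skip the libc call when the length is zero; passing an invalid (e.g. null) pointer to memcpy is undefined behavior even with size 0. Presumably along these lines (a sketch, not upstream's exact definition):

#include <cstring>

static inline void *
checked_memcpy (void *dst, const void *src, size_t len)
{
  if (len) std::memcpy (dst, src, len);   // avoids UB for (nullptr, nullptr, 0)
  return dst;
}
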
@ -355,7 +355,7 @@ struct GSTAR : public OT::GSUBGPOS
{
switch (u.version.major) {
case 1: return u.version1.get_lookup_list_offset ();
#ifndef HB_NO_BORING_EXPANSION
#ifndef HB_NO_BEYOND_64K
case 2: return u.version2.get_lookup_list_offset ();
#endif
default: return 0;
@ -374,7 +374,7 @@ struct GSTAR : public OT::GSUBGPOS
{
switch (u.version.major) {
case 1: find_lookups<SmallTypes> (graph, lookups); break;
#ifndef HB_NO_BORING_EXPANSION
#ifndef HB_NO_BEYOND_64K
case 2: find_lookups<MediumTypes> (graph, lookups); break;
#endif
}

View File

@ -112,7 +112,7 @@ struct AnchorMatrix : public OT::Layout::GPOS_impl::AnchorMatrix
auto& child = c.graph.vertices_[child_idx];
child.remove_parent (this_index);
o.real_links.remove (i);
o.real_links.remove_unordered (i);
num_links--;
i--;
}
@ -372,7 +372,7 @@ struct MarkBasePosFormat1 : public OT::Layout::GPOS_impl::MarkBasePosFormat1_2<S
if (!mark_coverage) return false;
hb_set_t marks = sc.marks_for (0, count);
auto new_coverage =
+ hb_zip (hb_range (), mark_coverage.table->iter ())
+ hb_enumerate (mark_coverage.table->iter ())
| hb_filter (marks, hb_first)
| hb_map_retains_sorting (hb_second)
;
@ -431,7 +431,7 @@ struct MarkBasePosFormat1 : public OT::Layout::GPOS_impl::MarkBasePosFormat1_2<S
if (!mark_coverage) return false;
hb_set_t marks = sc.marks_for (start, end);
auto new_coverage =
+ hb_zip (hb_range (), mark_coverage.table->iter ())
+ hb_enumerate (mark_coverage.table->iter ())
| hb_filter (marks, hb_first)
| hb_map_retains_sorting (hb_second)
;
@ -477,7 +477,7 @@ struct MarkBasePos : public OT::Layout::GPOS_impl::MarkBasePos
switch (u.format) {
case 1:
return ((MarkBasePosFormat1*)(&u.format1))->split_subtables (c, parent_index, this_index);
#ifndef HB_NO_BORING_EXPANSION
#ifndef HB_NO_BEYOND_64K
case 2: HB_FALLTHROUGH;
// Don't split 24bit MarkBasePos's.
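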
#endif
@ -494,7 +494,7 @@ struct MarkBasePos : public OT::Layout::GPOS_impl::MarkBasePos
switch (u.format) {
case 1:
return ((MarkBasePosFormat1*)(&u.format1))->sanitize (vertex);
#ifndef HB_NO_BORING_EXPANSION
#ifndef HB_NO_BEYOND_64K
case 2: HB_FALLTHROUGH;
#endif
default:

View File

@ -434,7 +434,7 @@ struct PairPosFormat2 : public OT::Layout::GPOS_impl::PairPosFormat2_4<SmallType
char* start_addr = ((char*)&values[0]) + start * split_context.class1_record_size;
unsigned num_records = end - start;
memcpy (&pair_pos_prime->values[0],
hb_memcpy (&pair_pos_prime->values[0],
start_addr,
num_records * split_context.class1_record_size);
@ -611,7 +611,7 @@ struct PairPos : public OT::Layout::GPOS_impl::PairPos
return ((PairPosFormat1*)(&u.format1))->split_subtables (c, parent_index, this_index);
case 2:
return ((PairPosFormat2*)(&u.format2))->split_subtables (c, parent_index, this_index);
#ifndef HB_NO_BORING_EXPANSION
#ifndef HB_NO_BEYOND_64K
case 3: HB_FALLTHROUGH;
case 4: HB_FALLTHROUGH;
// Don't split 24bit PairPos's.
@ -631,7 +631,7 @@ struct PairPos : public OT::Layout::GPOS_impl::PairPos
return ((PairPosFormat1*)(&u.format1))->sanitize (vertex);
case 2:
return ((PairPosFormat2*)(&u.format2))->sanitize (vertex);
#ifndef HB_NO_BORING_EXPANSION
#ifndef HB_NO_BEYOND_64K
case 3: HB_FALLTHROUGH;
case 4: HB_FALLTHROUGH;
#endif

View File

@ -33,6 +33,23 @@ struct overflow_record_t
{
unsigned parent;
unsigned child;
bool operator != (const overflow_record_t o) const
{ return !(*this == o); }
inline bool operator == (const overflow_record_t& o) const
{
return parent == o.parent &&
child == o.child;
}
inline uint32_t hash () const
{
uint32_t current = 0;
current = current * 31 + hb_hash (parent);
current = current * 31 + hb_hash (child);
return current;
}
};
inline
@ -94,6 +111,7 @@ will_overflow (graph_t& graph,
if (overflows) overflows->resize (0);
graph.update_positions ();
hb_hashmap_t<overflow_record_t*, bool> record_set;
const auto& vertices = graph.vertices_;
for (int parent_idx = vertices.length - 1; parent_idx >= 0; parent_idx--)
{
@ -109,7 +127,10 @@ will_overflow (graph_t& graph,
overflow_record_t r;
r.parent = parent_idx;
r.child = link.objidx;
if (record_set.has(&r)) continue; // don't keep duplicate overflows.
overflows->push (r);
record_set.set(&r, true);
}
}
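
The equality and hash members added to overflow_record_t exist so duplicate (parent, child) overflow pairs can be filtered through a hash set before being reported, as the record_set logic above does. A rough standalone equivalent of that deduplication, with standard-library containers standing in for hb_hashmap_t and the same 31-based mixing:

#include <cstdint>
#include <unordered_set>
#include <vector>

struct overflow_record_t
{
  unsigned parent, child;
  bool operator == (const overflow_record_t &o) const
  { return parent == o.parent && child == o.child; }
};

struct overflow_hash
{
  size_t operator () (const overflow_record_t &r) const
  {
    uint32_t h = 0;
    h = h * 31 + r.parent;   // same shape as the hash () above
    h = h * 31 + r.child;
    return h;
  }
};

static void record_overflow (std::vector<overflow_record_t> &out,
                             const overflow_record_t &r,
                             std::unordered_set<overflow_record_t, overflow_hash> &seen)
{
  if (!seen.insert (r).second) return;   // duplicate: already reported
  out.push_back (r);
}
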
@ -223,7 +244,7 @@ inline hb_blob_t* serialize (const graph_t& graph)
return nullptr;
}
memcpy (start, vertices[i].obj.head, size);
hb_memcpy (start, vertices[i].obj.head, size);
// Only real links need to be serialized.
for (const auto& link : vertices[i].obj.real_links)

View File

@ -131,14 +131,14 @@ struct RearrangementSubtable
hb_glyph_info_t *info = buffer->info;
hb_glyph_info_t buf[4];
memcpy (buf, info + start, l * sizeof (buf[0]));
memcpy (buf + 2, info + end - r, r * sizeof (buf[0]));
hb_memcpy (buf, info + start, l * sizeof (buf[0]));
hb_memcpy (buf + 2, info + end - r, r * sizeof (buf[0]));
if (l != r)
memmove (info + start + r, info + start + l, (end - start - l - r) * sizeof (buf[0]));
memcpy (info + start, buf + 2, r * sizeof (buf[0]));
memcpy (info + end - l, buf, l * sizeof (buf[0]));
hb_memcpy (info + start, buf + 2, r * sizeof (buf[0]));
hb_memcpy (info + end - l, buf, l * sizeof (buf[0]));
if (reverse_l)
{
buf[0] = info[end - 1];

View File

@ -289,7 +289,7 @@ is_deleted_glyph (const hb_glyph_info_t *info)
void
hb_aat_layout_remove_deleted_glyphs (hb_buffer_t *buffer)
{
hb_ot_layout_delete_glyphs_inplace (buffer, is_deleted_glyph);
buffer->delete_glyphs_inplace (is_deleted_glyph);
}
/**

View File

@ -38,7 +38,7 @@ struct hb_aat_map_t
void init ()
{
memset (this, 0, sizeof (*this));
hb_memset (this, 0, sizeof (*this));
chain_flags.init ();
}
void fini () { chain_flags.fini (); }

View File

@ -236,17 +236,6 @@ struct
template <typename T> constexpr auto
impl (const T& v, hb_priority<1>) const HB_RETURN (uint32_t, hb_deref (v).hash ())
template <typename T> constexpr uint32_t
impl (const hb::shared_ptr<T>& v, hb_priority<1>) const
{
return v.get () ? v.get ()->hash () : 0;
}
template <typename T> constexpr uint32_t
impl (const hb::unique_ptr<T>& v, hb_priority<1>) const
{
return v.get () ? v.get ()->hash () : 0;
}
template <typename T> constexpr auto
impl (const T& v, hb_priority<0>) const HB_RETURN (uint32_t, std::hash<hb_decay<decltype (hb_deref (v))>>{} (hb_deref (v)))
@ -495,6 +484,17 @@ struct
}
HB_FUNCOBJ (hb_equal);
struct
{
template <typename T> void
operator () (T& a, T& b) const
{
using std::swap; // allow ADL
swap (a, b);
}
}
HB_FUNCOBJ (hb_swap);
template <typename T1, typename T2>
struct hb_pair_t
@ -507,7 +507,7 @@ struct hb_pair_t
hb_enable_if (std::is_default_constructible<U1>::value &&
std::is_default_constructible<U2>::value)>
hb_pair_t () : first (), second () {}
hb_pair_t (T1 a, T2 b) : first (a), second (b) {}
hb_pair_t (T1 a, T2 b) : first (std::forward<T1> (a)), second (std::forward<T2> (b)) {}
template <typename Q1, typename Q2,
hb_enable_if (hb_is_convertible (T1, Q1) &&
@ -524,6 +524,25 @@ struct hb_pair_t
bool operator > (const pair_t& o) const { return first > o.first || (first == o.first && second > o.second); }
bool operator <= (const pair_t& o) const { return !(*this > o); }
static int cmp (const void *pa, const void *pb)
{
pair_t *a = (pair_t *) pa;
pair_t *b = (pair_t *) pb;
if (a->first < b->first) return -1;
if (a->first > b->first) return +1;
if (a->second < b->second) return -1;
if (a->second > b->second) return +1;
return 0;
}
friend void swap (hb_pair_t& a, hb_pair_t& b)
{
hb_swap (a.first, b.first);
hb_swap (a.second, b.second);
}
T1 first;
T2 second;
};
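
hb_swap was moved up so hb_pair_t's friend swap (just above) can use it: the functor performs the standard "using std::swap; swap (a, b);" dance, letting a member type's own swap overload win via argument-dependent lookup, so a pair is swapped member-wise rather than through three whole-pair copies. A standalone illustration with a hypothetical pair type:

#include <utility>
#include <vector>

struct my_pair_t
{
  std::vector<int> first;   // heavy member: member-wise swap is O(1)
  int second;

  friend void swap (my_pair_t &a, my_pair_t &b)
  {
    using std::swap;              // enable ADL; std::swap is the fallback
    swap (a.first, b.first);      // picks std::vector's O(1) swap
    swap (a.second, b.second);
  }
};
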
@ -570,17 +589,6 @@ struct
}
HB_FUNCOBJ (hb_clamp);
struct
{
template <typename T> void
operator () (T& a, T& b) const
{
using std::swap; // allow ADL
swap (a, b);
}
}
HB_FUNCOBJ (hb_swap);
/*
* Bithacks.
*/
@ -849,19 +857,14 @@ hb_in_range (T u, T lo, T hi)
return (T)(u - lo) <= (T)(hi - lo);
}
template <typename T> static inline bool
hb_in_ranges (T u, T lo1, T hi1, T lo2, T hi2)
hb_in_ranges (T u, T lo1, T hi1)
{
return hb_in_range (u, lo1, hi1) || hb_in_range (u, lo2, hi2);
return hb_in_range (u, lo1, hi1);
}
template <typename T> static inline bool
hb_in_ranges (T u, T lo1, T hi1, T lo2, T hi2, T lo3, T hi3)
template <typename T, typename ...Ts> static inline bool
hb_in_ranges (T u, T lo1, T hi1, Ts... ds)
{
return hb_in_range (u, lo1, hi1) || hb_in_range (u, lo2, hi2) || hb_in_range (u, lo3, hi3);
}
template <typename T> static inline bool
hb_in_ranges (T u, T lo1, T hi1, T lo2, T hi2, T lo3, T hi3, T lo4, T hi4)
{
return hb_in_range (u, lo1, hi1) || hb_in_range (u, lo2, hi2) || hb_in_range (u, lo3, hi3) || hb_in_range (u, lo4, hi4);
return hb_in_range<T> (u, lo1, hi1) || hb_in_ranges<T> (u, ds...);
}
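
The four fixed-arity overloads collapse into one variadic template: each recursion peels off one (lo, hi) pair until the single-range base case is reached, so any even number of bounds now works. A standalone sketch of the same pattern (renamed, so as not to claim HarfBuzz's exact definitions), including the unsigned-wrap trick that tests both bounds with a single comparison:

#include <cassert>

template <typename T> static inline bool
in_range (T u, T lo, T hi)
{ return (T)(u - lo) <= (T)(hi - lo); }   // valid for unsigned T: wraps when u < lo

template <typename T> static inline bool
in_ranges (T u, T lo1, T hi1)
{ return in_range (u, lo1, hi1); }        // base case

template <typename T, typename ...Ts> static inline bool
in_ranges (T u, T lo1, T hi1, Ts... ds)
{ return in_range<T> (u, lo1, hi1) || in_ranges<T> (u, ds...); }

int main ()
{
  assert ( in_ranges (5u, 1u, 3u, 4u, 6u));   // 5 falls in [4, 6]
  assert (!in_ranges (9u, 1u, 3u, 4u, 6u));   // 9 is in neither range
}
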
@ -869,10 +872,18 @@ hb_in_ranges (T u, T lo1, T hi1, T lo2, T hi2, T lo3, T hi3, T lo4, T hi4)
* Overflow checking.
*/
/* Consider __builtin_mul_overflow use here also */
static inline bool
hb_unsigned_mul_overflows (unsigned int count, unsigned int size)
hb_unsigned_mul_overflows (unsigned int count, unsigned int size, unsigned *result = nullptr)
{
#if (defined(__GNUC__) && (__GNUC__ >= 4)) || defined(__clang__)
unsigned stack_result;
if (!result)
result = &stack_result;
return __builtin_mul_overflow (count, size, result);
#endif
if (result)
*result = count * size;
return (size > 0) && (count >= ((unsigned int) -1) / size);
}
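
The new signature lets callers receive the product from the same call that checks it, instead of multiplying a second time once the check passes. A portable sketch of the idiom; the intrinsic path is GCC/Clang-specific, and the fallback comparison is exact for unsigned arithmetic:

#include <climits>

static inline bool
mul_overflows (unsigned count, unsigned size, unsigned *result)
{
#if defined(__GNUC__) || defined(__clang__)
  // Computes count * size into *result and reports wraparound in one step.
  return __builtin_mul_overflow (count, size, result);
#else
  *result = count * size;              // unsigned wraparound is well defined
  return size != 0 && count > UINT_MAX / size;
#endif
}
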
@ -1164,9 +1175,12 @@ hb_qsort (void *base, size_t nel, size_t width,
}
template <typename T, typename T2, typename T3> static inline void
hb_stable_sort (T *array, unsigned int len, int(*compar)(const T2 *, const T2 *), T3 *array2)
template <typename T, typename T2, typename T3 = int> static inline void
hb_stable_sort (T *array, unsigned int len, int(*compar)(const T2 *, const T2 *), T3 *array2 = nullptr)
{
static_assert (hb_is_trivially_copy_assignable (T), "");
static_assert (hb_is_trivially_copy_assignable (T3), "");
for (unsigned int i = 1; i < len; i++)
{
unsigned int j = i;
@ -1189,12 +1203,6 @@ hb_stable_sort (T *array, unsigned int len, int(*compar)(const T2 *, const T2 *)
}
}
template <typename T> static inline void
hb_stable_sort (T *array, unsigned int len, int(*compar)(const T *, const T *))
{
hb_stable_sort (array, len, compar, (int *) nullptr);
}
static inline hb_bool_t
hb_codepoint_parse (const char *s, unsigned int len, int base, hb_codepoint_t *out)
{
@ -1322,47 +1330,4 @@ struct
HB_FUNCOBJ (hb_dec);
/* Compiler-assisted vectorization. */
/* Type behaving similar to vectorized vars defined using __attribute__((vector_size(...))),
* basically a fixed-size bitset. */
template <typename elt_t, unsigned int byte_size>
struct hb_vector_size_t
{
elt_t& operator [] (unsigned int i) { return v[i]; }
const elt_t& operator [] (unsigned int i) const { return v[i]; }
void clear (unsigned char v = 0) { memset (this, v, sizeof (*this)); }
template <typename Op>
hb_vector_size_t process (const Op& op) const
{
hb_vector_size_t r;
for (unsigned int i = 0; i < ARRAY_LENGTH (v); i++)
r.v[i] = op (v[i]);
return r;
}
template <typename Op>
hb_vector_size_t process (const Op& op, const hb_vector_size_t &o) const
{
hb_vector_size_t r;
for (unsigned int i = 0; i < ARRAY_LENGTH (v); i++)
r.v[i] = op (v[i], o.v[i]);
return r;
}
hb_vector_size_t operator | (const hb_vector_size_t &o) const
{ return process (hb_bitwise_or, o); }
hb_vector_size_t operator & (const hb_vector_size_t &o) const
{ return process (hb_bitwise_and, o); }
hb_vector_size_t operator ^ (const hb_vector_size_t &o) const
{ return process (hb_bitwise_xor, o); }
hb_vector_size_t operator ~ () const
{ return process (hb_bitwise_neg); }
private:
static_assert (0 == byte_size % sizeof (elt_t), "");
elt_t v[byte_size / sizeof (elt_t)];
};
#endif /* HB_ALGS_HH */

View File

@ -100,10 +100,18 @@ struct hb_array_t : hb_iter_with_fallback_t<hb_array_t<Type>, Type&>
/* Ouch. The operator== compares the contents of the array. For range-based for loops,
* it's best if we can just compare arrayZ, though comparing contents is still fast,
* but also would require that Type has operator==. As such, we optimize this operator
* for range-based for loop and just compare arrayZ and length. */
* for range-based for loop and just compare arrayZ and length.
*
* The above comment is now outdated, because the objects that previously used
* hb_array_t for range-based loops have since been given their own begin/end. */
bool operator != (const hb_array_t& o) const
{ return this->arrayZ != o.arrayZ || this->length != o.length; }
/* Faster range-based for loop without bounds-check. */
Type *begin () const { return arrayZ; }
Type *end () const { return arrayZ + length; }
/* Extra operators.
*/
Type * operator & () const { return arrayZ; }
@ -112,11 +120,11 @@ struct hb_array_t : hb_iter_with_fallback_t<hb_array_t<Type>, Type&>
HB_INTERNAL bool operator == (const hb_array_t &o) const;
uint32_t hash () const {
uint32_t hash () const
{
uint32_t current = 0;
for (unsigned int i = 0; i < this->length; i++) {
current = current * 31 + hb_hash (this->arrayZ[i]);
}
for (auto &v : *this)
current = current * 31 + hb_hash (v);
return current;
}
@ -184,23 +192,18 @@ struct hb_array_t : hb_iter_with_fallback_t<hb_array_t<Type>, Type&>
hb_sorted_array_t<Type> qsort (int (*cmp_)(const void*, const void*))
{
//static_assert (hb_enable_if (hb_is_trivially_copy_assignable(Type)), "");
if (likely (length))
hb_qsort (arrayZ, length, this->get_item_size (), cmp_);
return hb_sorted_array_t<Type> (*this);
}
hb_sorted_array_t<Type> qsort ()
{
//static_assert (hb_enable_if (hb_is_trivially_copy_assignable(Type)), "");
if (likely (length))
hb_qsort (arrayZ, length, this->get_item_size (), Type::cmp);
return hb_sorted_array_t<Type> (*this);
}
void qsort (unsigned int start, unsigned int end)
{
end = hb_min (end, length);
assert (start <= end);
if (likely (start < end))
hb_qsort (arrayZ + start, end - start, this->get_item_size (), Type::cmp);
}
/*
* Other methods.
@ -262,17 +265,31 @@ struct hb_array_t : hb_iter_with_fallback_t<hb_array_t<Type>, Type&>
void fini ()
{ hb_free ((void *) arrayZ); arrayZ = nullptr; length = 0; }
template <typename hb_serialize_context_t>
template <typename hb_serialize_context_t,
typename U = Type,
hb_enable_if (!(sizeof (U) < sizeof (long long) && hb_is_trivially_copy_assignable(hb_decay<Type>)))>
hb_array_t copy (hb_serialize_context_t *c) const
{
TRACE_SERIALIZE (this);
auto* out = c->start_embed (arrayZ);
if (unlikely (!c->extend_size (out, get_size ()))) return_trace (hb_array_t ());
if (unlikely (!c->extend_size (out, get_size (), false))) return_trace (hb_array_t ());
for (unsigned i = 0; i < length; i++)
out[i] = arrayZ[i]; /* TODO: add version that calls c->copy() */
return_trace (hb_array_t (out, length));
}
template <typename hb_serialize_context_t,
typename U = Type,
hb_enable_if (sizeof (U) < sizeof (long long) && hb_is_trivially_copy_assignable(hb_decay<Type>))>
hb_array_t copy (hb_serialize_context_t *c) const
{
TRACE_SERIALIZE (this);
auto* out = c->start_embed (arrayZ);
if (unlikely (!c->extend_size (out, get_size (), false))) return_trace (hb_array_t ());
hb_memcpy (out, arrayZ, get_size ());
return_trace (hb_array_t (out, length));
}
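
The single copy () splits into two SFINAE-selected overloads: small, trivially-copy-assignable element types take one hb_memcpy, while everything else keeps the element-wise loop (which may run user-defined assignment). The same dispatch in standalone form, using C++17 if constexpr where upstream needs hb_enable_if for older-standard compatibility:

#include <cstring>
#include <type_traits>

template <typename T>
static void copy_elements (T *dst, const T *src, unsigned n)
{
  if constexpr (std::is_trivially_copy_assignable_v<T> &&
                sizeof (T) < sizeof (long long))
    std::memcpy (dst, src, n * sizeof (T));   // bulk fast path
  else
    for (unsigned i = 0; i < n; i++)
      dst[i] = src[i];                        // may invoke operator=
}
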
template <typename hb_sanitize_context_t>
bool sanitize (hb_sanitize_context_t *c) const
{ return c->check_array (arrayZ, length); }
@ -295,8 +312,8 @@ hb_array (T (&array_)[length_])
template <typename Type>
struct hb_sorted_array_t :
hb_iter_t<hb_sorted_array_t<Type>, Type&>,
hb_array_t<Type>
hb_array_t<Type>,
hb_iter_t<hb_sorted_array_t<Type>, Type&>
{
typedef hb_iter_t<hb_sorted_array_t, Type&> iter_base_t;
HB_ITER_USING (iter_base_t);
@ -316,8 +333,8 @@ struct hb_sorted_array_t :
template <typename U,
hb_enable_if (hb_is_cr_convertible(U, Type))>
constexpr hb_sorted_array_t (const hb_array_t<U> &o) :
hb_iter_t<hb_sorted_array_t, Type&> (),
hb_array_t<Type> (o) {}
hb_array_t<Type> (o),
hb_iter_t<hb_sorted_array_t, Type&> () {}
template <typename U,
hb_enable_if (hb_is_cr_convertible(U, Type))>
hb_sorted_array_t& operator = (const hb_array_t<U> &o)
@ -329,6 +346,11 @@ struct hb_sorted_array_t :
bool operator != (const hb_sorted_array_t& o) const
{ return this->arrayZ != o.arrayZ || this->length != o.length; }
/* Faster range-based for loop without bounds-check. */
Type *begin () const { return this->arrayZ; }
Type *end () const { return this->arrayZ + this->length; }
hb_sorted_array_t sub_array (unsigned int start_offset, unsigned int *seg_count /* IN/OUT */) const
{ return hb_sorted_array_t (((const hb_array_t<Type> *) (this))->sub_array (start_offset, seg_count)); }
hb_sorted_array_t sub_array (unsigned int start_offset, unsigned int seg_count) const
@ -421,18 +443,42 @@ inline bool hb_array_t<const unsigned char>::operator == (const hb_array_t<const
return 0 == hb_memcmp (arrayZ, o.arrayZ, length);
}
/* Specialize hash() for byte arrays. */
template <>
inline uint32_t hb_array_t<const char>::hash () const {
inline uint32_t hb_array_t<const char>::hash () const
{
uint32_t current = 0;
for (unsigned int i = 0; i < this->length; i++)
current = current * 31 + (uint32_t) (this->arrayZ[i] * 2654435761u);
unsigned i = 0;
#if defined(__OPTIMIZE__) && !defined(HB_NO_PACKED) && \
((defined(__GNUC__) && __GNUC__ >= 5) || defined(__clang__))
struct __attribute__((packed)) packed_uint32_t { uint32_t v; };
for (; i + 4 <= this->length; i += 4)
current = current * 31 + hb_hash ((uint32_t) ((packed_uint32_t *) &this->arrayZ[i])->v);
#endif
for (; i < this->length; i++)
current = current * 31 + hb_hash (this->arrayZ[i]);
return current;
}
template <>
inline uint32_t hb_array_t<const unsigned char>::hash () const {
inline uint32_t hb_array_t<const unsigned char>::hash () const
{
uint32_t current = 0;
for (unsigned int i = 0; i < this->length; i++)
current = current * 31 + (uint32_t) (this->arrayZ[i] * 2654435761u);
unsigned i = 0;
#if defined(__OPTIMIZE__) && !defined(HB_NO_PACKED) && \
((defined(__GNUC__) && __GNUC__ >= 5) || defined(__clang__))
struct __attribute__((packed)) packed_uint32_t { uint32_t v; };
for (; i + 4 <= this->length; i += 4)
current = current * 31 + hb_hash ((uint32_t) ((packed_uint32_t *) &this->arrayZ[i])->v);
#endif
for (; i < this->length; i++)
current = current * 31 + hb_hash (this->arrayZ[i]);
return current;
}
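
Both byte-array specializations now hash four bytes per step: on GCC/Clang a packed struct makes the unaligned 32-bit load legal, with a byte-at-a-time loop for the tail. A standalone sketch using memcpy as the portable stand-in for the packed read (compilers lower it to the same unaligned load); 2654435761 is the golden-ratio constant the previous byte loop used, while hb_hash's integer mixing differs in detail:

#include <cstdint>
#include <cstring>

static uint32_t hash_bytes (const unsigned char *p, unsigned len)
{
  uint32_t h = 0;
  unsigned i = 0;
  for (; i + 4 <= len; i += 4)        // word-at-a-time main loop
  {
    uint32_t w;
    std::memcpy (&w, p + i, 4);       // portable unaligned 32-bit read
    h = h * 31 + w * 2654435761u;     // multiplicative mix
  }
  for (; i < len; i++)                // byte-at-a-time tail
    h = h * 31 + p[i] * 2654435761u;
  return h;
}
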

View File

@ -30,6 +30,53 @@
#include "hb.hh"
/* Compiler-assisted vectorization. */
/* Type behaving similar to vectorized vars defined using __attribute__((vector_size(...))),
* basically a fixed-size bitset. */
template <typename elt_t, unsigned int byte_size>
struct hb_vector_size_t
{
elt_t& operator [] (unsigned int i) { return v[i]; }
const elt_t& operator [] (unsigned int i) const { return v[i]; }
void clear (unsigned char v = 0) { hb_memset (this, v, sizeof (*this)); }
template <typename Op>
hb_vector_size_t process (const Op& op) const
{
hb_vector_size_t r;
for (unsigned int i = 0; i < ARRAY_LENGTH (v); i++)
r.v[i] = op (v[i]);
return r;
}
template <typename Op>
hb_vector_size_t process (const Op& op, const hb_vector_size_t &o) const
{
hb_vector_size_t r;
for (unsigned int i = 0; i < ARRAY_LENGTH (v); i++)
r.v[i] = op (v[i], o.v[i]);
return r;
}
hb_vector_size_t operator | (const hb_vector_size_t &o) const
{ return process (hb_bitwise_or, o); }
hb_vector_size_t operator & (const hb_vector_size_t &o) const
{ return process (hb_bitwise_and, o); }
hb_vector_size_t operator ^ (const hb_vector_size_t &o) const
{ return process (hb_bitwise_xor, o); }
hb_vector_size_t operator ~ () const
{ return process (hb_bitwise_neg); }
hb_array_t<const elt_t> iter () const
{ return hb_array (v); }
private:
static_assert (0 == byte_size % sizeof (elt_t), "");
elt_t v[byte_size / sizeof (elt_t)];
};
struct hb_bit_page_t
{
void init0 () { v.clear (); }
@ -40,17 +87,17 @@ struct hb_bit_page_t
bool is_empty () const
{
for (unsigned i = 0; i < len (); i++)
if (v[i])
return false;
return true;
return
+ hb_iter (v)
| hb_none
;
}
uint32_t hash () const
{
uint32_t h = 0;
for (unsigned i = 0; i < len (); i++)
h = h * 31 + hb_hash (v[i]);
return h;
return
+ hb_iter (v)
| hb_reduce ([] (uint32_t h, const elt_t &_) { return h * 31 + hb_hash (_); }, (uint32_t) 0u)
;
}
void add (hb_codepoint_t g) { elt (g) |= mask (g); }
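
Both rewrites above trade explicit index loops for hb's iterator pipeline: "| hb_none" asks whether no element is truthy, and "| hb_reduce (f, init)" is a left fold. Rough standard-library equivalents, with element hashing simplified to the raw value:

#include <algorithm>
#include <cstdint>
#include <numeric>
#include <vector>

static bool is_empty (const std::vector<uint64_t> &v)
{
  return std::all_of (v.begin (), v.end (),
                      [] (uint64_t e) { return !e; });   // ~ hb_none
}

static uint32_t hash (const std::vector<uint64_t> &v)
{
  return std::accumulate (v.begin (), v.end (), 0u,      // ~ hb_reduce
                          [] (uint32_t h, uint64_t e)
                          { return h * 31 + (uint32_t) e; });
}
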
@ -69,7 +116,7 @@ struct hb_bit_page_t
*la |= ~(mask (a) - 1);
la++;
memset (la, 0xff, (char *) lb - (char *) la);
hb_memset (la, 0xff, (char *) lb - (char *) la);
*lb |= ((mask (b) << 1) - 1);
}
@ -85,7 +132,7 @@ struct hb_bit_page_t
*la &= mask (a) - 1;
la++;
memset (la, 0, (char *) lb - (char *) la);
hb_memset (la, 0, (char *) lb - (char *) la);
*lb &= ~((mask (b) << 1) - 1);
}
@ -101,13 +148,13 @@ struct hb_bit_page_t
hb_codepoint_t *p,
unsigned int size) const
{
unsigned int start_v = start_value >> ELT_BITS_LOG_2;
unsigned int start_v = start_value / ELT_BITS;
unsigned int start_bit = start_value & ELT_MASK;
unsigned int count = 0;
for (unsigned i = start_v; i < len () && count < size; i++)
{
elt_t bits = v[i];
uint32_t v_base = base | (i << ELT_BITS_LOG_2);
uint32_t v_base = base | (i * ELT_BITS);
for (unsigned int j = start_bit; j < ELT_BITS && count < size; j++)
{
if ((elt_t(1) << j) & bits) {
@ -132,13 +179,13 @@ struct hb_bit_page_t
unsigned int size,
hb_codepoint_t *next_value) const
{
unsigned int start_v = start_value >> ELT_BITS_LOG_2;
unsigned int start_v = start_value / ELT_BITS;
unsigned int start_bit = start_value & ELT_MASK;
unsigned int count = 0;
for (unsigned i = start_v; i < len () && count < size; i++)
{
elt_t bits = v[i];
uint32_t v_offset = i << ELT_BITS_LOG_2;
uint32_t v_offset = i * ELT_BITS;
for (unsigned int j = start_bit; j < ELT_BITS && count < size; j++)
{
if ((elt_t(1) << j) & bits)
@ -161,7 +208,10 @@ struct hb_bit_page_t
bool is_equal (const hb_bit_page_t &other) const
{
return 0 == hb_memcmp (&v, &other.v, sizeof (v));
for (unsigned i = 0; i < len (); i++)
if (v[i] != other.v[i])
return false;
return true;
}
bool is_subset (const hb_bit_page_t &larger_page) const
{
@ -173,10 +223,10 @@ struct hb_bit_page_t
unsigned int get_population () const
{
unsigned int pop = 0;
for (unsigned int i = 0; i < len (); i++)
pop += hb_popcount (v[i]);
return pop;
return
+ hb_iter (v)
| hb_reduce ([] (unsigned pop, const elt_t &_) { return pop + hb_popcount (_); }, 0u)
;
}
bool next (hb_codepoint_t *codepoint) const
@ -262,8 +312,6 @@ struct hb_bit_page_t
typedef hb_vector_size_t<elt_t, PAGE_BITS / 8> vector_t;
static constexpr unsigned ELT_BITS = sizeof (elt_t) * 8;
static constexpr unsigned ELT_BITS_LOG_2 = 6;
static_assert (1 << ELT_BITS_LOG_2 == ELT_BITS, "");
static constexpr unsigned ELT_MASK = ELT_BITS - 1;
static constexpr unsigned BITS = sizeof (vector_t) * 8;

View File

@ -123,10 +123,8 @@ struct hb_bit_set_invertible_t
bool get (hb_codepoint_t g) const { return s.get (g) ^ inverted; }
/* Has interface. */
static constexpr bool SENTINEL = false;
typedef bool value_t;
value_t operator [] (hb_codepoint_t k) const { return get (k); }
bool has (hb_codepoint_t k) const { return (*this)[k] != SENTINEL; }
bool operator [] (hb_codepoint_t k) const { return get (k); }
bool has (hb_codepoint_t k) const { return (*this)[k]; }
/* Predicate. */
bool operator () (hb_codepoint_t k) const { return has (k); }

View File

@ -85,10 +85,10 @@ struct hb_bit_set_t
void err () { if (successful) successful = false; } /* TODO Remove */
bool in_error () const { return !successful; }
bool resize (unsigned int count)
bool resize (unsigned int count, bool clear = true)
{
if (unlikely (!successful)) return false;
if (unlikely (!pages.resize (count) || !page_map.resize (count)))
if (unlikely (!pages.resize (count, clear) || !page_map.resize (count, clear)))
{
pages.resize (page_map.length);
successful = false;
@ -330,10 +330,8 @@ struct hb_bit_set_t
}
/* Has interface. */
static constexpr bool SENTINEL = false;
typedef bool value_t;
value_t operator [] (hb_codepoint_t k) const { return get (k); }
bool has (hb_codepoint_t k) const { return (*this)[k] != SENTINEL; }
bool operator [] (hb_codepoint_t k) const { return get (k); }
bool has (hb_codepoint_t k) const { return (*this)[k]; }
/* Predicate. */
bool operator () (hb_codepoint_t k) const { return has (k); }
@ -352,7 +350,7 @@ struct hb_bit_set_t
{
if (unlikely (!successful)) return;
unsigned int count = other.pages.length;
if (unlikely (!resize (count)))
if (unlikely (!resize (count, false)))
return;
population = other.population;
@ -391,7 +389,7 @@ struct hb_bit_set_t
bool is_subset (const hb_bit_set_t &larger_set) const
{
if (has_population () && larger_set.has_population () &&
population != larger_set.population)
population > larger_set.population)
return false;
uint32_t spi = 0;
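
The early-out above was loosened from != to >: a subset may well have a strictly smaller population than its superset, so only a larger population can rule subsetting out, and the old check wrongly rejected genuine subsets. A tiny worked example:

#include <algorithm>
#include <cassert>
#include <vector>

int main ()
{
  std::vector<int> small {1, 2}, large {1, 2, 3};
  // Populations differ (2 vs 3), yet small really is a subset of large;
  // the old != early-out would have reported false here.
  assert (std::includes (large.begin (), large.end (),
                         small.begin (), small.end ()));
  return 0;
}
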
@ -540,21 +538,21 @@ struct hb_bit_set_t
b = nb;
for (; a && b; )
{
if (page_map[a - 1].major == other.page_map[b - 1].major)
if (page_map.arrayZ[a - 1].major == other.page_map.arrayZ[b - 1].major)
{
a--;
b--;
count--;
page_map[count] = page_map[a];
page_map.arrayZ[count] = page_map.arrayZ[a];
page_at (count).v = op (page_at (a).v, other.page_at (b).v);
}
else if (page_map[a - 1].major > other.page_map[b - 1].major)
else if (page_map.arrayZ[a - 1].major > other.page_map.arrayZ[b - 1].major)
{
a--;
if (passthru_left)
{
count--;
page_map[count] = page_map[a];
page_map.arrayZ[count] = page_map.arrayZ[a];
}
}
else
@ -563,8 +561,8 @@ struct hb_bit_set_t
if (passthru_right)
{
count--;
page_map[count].major = other.page_map[b].major;
page_map[count].index = next_page++;
page_map.arrayZ[count].major = other.page_map.arrayZ[b].major;
page_map.arrayZ[count].index = next_page++;
page_at (count).v = other.page_at (b).v;
}
}
@ -574,15 +572,15 @@ struct hb_bit_set_t
{
a--;
count--;
page_map[count] = page_map [a];
page_map.arrayZ[count] = page_map.arrayZ[a];
}
if (passthru_right)
while (b)
{
b--;
count--;
page_map[count].major = other.page_map[b].major;
page_map[count].index = next_page++;
page_map.arrayZ[count].major = other.page_map.arrayZ[b].major;
page_map.arrayZ[count].index = next_page++;
page_at (count).v = other.page_at (b).v;
}
assert (!count);
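
The page_map → page_map.arrayZ rewrites throughout this hunk share one motive: arrayZ is the vector's raw backing pointer, so indexing through it bypasses the checked operator [] inside these hot set-operation loops, where the indices have already been validated. Illustrative shape of the distinction (not hb_vector_t's actual definition):

#include <cassert>

template <typename T>
struct vector_like_t
{
  T *arrayZ = nullptr;
  unsigned length = 0;

  T &operator [] (unsigned i)
  {
    assert (i < length);   // checked access for general use
    return arrayZ[i];
  }
  // Hot loops read arrayZ[i] directly once i is known to be in range.
};
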
@ -605,8 +603,6 @@ struct hb_bit_set_t
bool next (hb_codepoint_t *codepoint) const
{
// TODO: this should be merged with prev() as both implementations
// are very similar.
if (unlikely (*codepoint == INVALID)) {
*codepoint = get_min ();
return *codepoint != INVALID;
@ -640,7 +636,7 @@ struct hb_bit_set_t
for (; i < page_map.length; i++)
{
const page_map_t &current = page_map.arrayZ[i];
const page_map_t &current = page_map_array[i];
hb_codepoint_t m = pages_array[current.index].get_min ();
if (m != INVALID)
{
@ -663,21 +659,21 @@ struct hb_bit_set_t
page_map_t map = {get_major (*codepoint), 0};
unsigned int i;
page_map.bfind (map, &i, HB_NOT_FOUND_STORE_CLOSEST);
if (i < page_map.length && page_map[i].major == map.major)
if (i < page_map.length && page_map.arrayZ[i].major == map.major)
{
if (pages[page_map[i].index].previous (codepoint))
if (pages[page_map.arrayZ[i].index].previous (codepoint))
{
*codepoint += page_map[i].major * page_t::PAGE_BITS;
*codepoint += page_map.arrayZ[i].major * page_t::PAGE_BITS;
return true;
}
}
i--;
for (; (int) i >= 0; i--)
{
hb_codepoint_t m = pages[page_map[i].index].get_max ();
hb_codepoint_t m = pages.arrayZ[page_map.arrayZ[i].index].get_max ();
if (m != INVALID)
{
*codepoint = page_map[i].major * page_t::PAGE_BITS + m;
*codepoint = page_map.arrayZ[i].major * page_t::PAGE_BITS + m;
return true;
}
}
@ -905,7 +901,7 @@ struct hb_bit_set_t
{
auto &cached_page = page_map.arrayZ[i];
if (cached_page.major == major)
return &pages[cached_page.index];
return &pages.arrayZ[cached_page.index];
}
page_map_t map = {major, pages.length};
@ -917,15 +913,15 @@ struct hb_bit_set_t
if (unlikely (!resize (pages.length + 1)))
return nullptr;
pages[map.index].init0 ();
memmove (page_map + i + 1,
page_map + i,
pages.arrayZ[map.index].init0 ();
memmove (page_map.arrayZ + i + 1,
page_map.arrayZ + i,
(page_map.length - 1 - i) * page_map.item_size);
page_map[i] = map;
}
last_page_lookup = i;
return &pages[page_map[i].index];
return &pages.arrayZ[page_map.arrayZ[i].index];
}
const page_t *page_for (hb_codepoint_t g) const
{
@ -939,7 +935,7 @@ struct hb_bit_set_t
{
auto &cached_page = page_map.arrayZ[i];
if (cached_page.major == major)
return &pages[cached_page.index];
return &pages.arrayZ[cached_page.index];
}
page_map_t key = {major};
@ -947,10 +943,18 @@ struct hb_bit_set_t
return nullptr;
last_page_lookup = i;
return &pages[page_map[i].index];
return &pages.arrayZ[page_map[i].index];
}
page_t &page_at (unsigned int i)
{
assert (i < page_map.length);
return pages.arrayZ[page_map.arrayZ[i].index];
}
const page_t &page_at (unsigned int i) const
{
assert (i < page_map.length);
return pages.arrayZ[page_map.arrayZ[i].index];
}
page_t &page_at (unsigned int i) { return pages[page_map[i].index]; }
const page_t &page_at (unsigned int i) const { return pages[page_map[i].index]; }
unsigned int get_major (hb_codepoint_t g) const { return g >> page_t::PAGE_BITS_LOG_2; }
unsigned int page_remainder (hb_codepoint_t g) const { return g & page_t::PAGE_BITMASK; }
hb_codepoint_t major_start (unsigned int major) const { return major << page_t::PAGE_BITS_LOG_2; }
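
page_for () keeps a one-entry memo: last_page_lookup remembers where the previous query landed in page_map, so consecutive lookups that hit the same page skip the binary search entirely. A compressed standalone sketch of that fast path, with illustrative types:

#include <algorithm>
#include <cstdint>
#include <vector>

struct page_map_entry_t { uint32_t major, index; };

struct bit_set_like_t
{
  std::vector<page_map_entry_t> page_map;   // sorted by major
  mutable uint32_t last_page_lookup = 0;

  const page_map_entry_t *page_for (uint32_t major) const
  {
    uint32_t i = last_page_lookup;
    if (i < page_map.size () && page_map[i].major == major)
      return &page_map[i];                  // fast path: repeat of last query
    auto it = std::lower_bound (page_map.begin (), page_map.end (), major,
                                [] (const page_map_entry_t &e, uint32_t m)
                                { return e.major < m; });
    if (it == page_map.end () || it->major != major)
      return nullptr;                       // no page allocated for this range
    last_page_lookup = uint32_t (it - page_map.begin ());
    return &*it;
  }
};
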

View File

@ -495,7 +495,7 @@ hb_blob_t::try_make_writable ()
DEBUG_MSG_FUNC (BLOB, this, "dupped successfully -> %p\n", this->data);
memcpy (new_data, this->data, this->length);
hb_memcpy (new_data, this->data, this->length);
this->destroy_user_data ();
this->mode = HB_MEMORY_MODE_WRITABLE;
this->data = new_data;

View File

@ -32,7 +32,7 @@
#include "hb.hh"
#line 36 "hb-buffer-deserialize-json.hh"
#line 33 "hb-buffer-deserialize-json.hh"
static const unsigned char _deserialize_json_trans_keys[] = {
0u, 0u, 9u, 123u, 9u, 34u, 97u, 117u, 120u, 121u, 34u, 34u, 9u, 58u, 9u, 57u,
48u, 57u, 9u, 125u, 9u, 125u, 9u, 125u, 34u, 34u, 9u, 58u, 9u, 57u, 48u, 57u,
@ -557,12 +557,12 @@ _hb_buffer_deserialize_json (hb_buffer_t *buffer,
hb_glyph_info_t info = {0};
hb_glyph_position_t pos = {0};
#line 561 "hb-buffer-deserialize-json.hh"
#line 554 "hb-buffer-deserialize-json.hh"
{
cs = deserialize_json_start;
}
#line 566 "hb-buffer-deserialize-json.hh"
#line 557 "hb-buffer-deserialize-json.hh"
{
int _slen;
int _trans;
@ -590,8 +590,8 @@ _resume:
case 1:
#line 38 "hb-buffer-deserialize-json.rl"
{
memset (&info, 0, sizeof (info));
memset (&pos , 0, sizeof (pos ));
hb_memset (&info, 0, sizeof (info));
hb_memset (&pos , 0, sizeof (pos ));
}
break;
case 5:
@ -774,7 +774,7 @@ _resume:
*end_ptr = p;
}
break;
#line 778 "hb-buffer-deserialize-json.hh"
#line 735 "hb-buffer-deserialize-json.hh"
}
_again:

View File

@ -32,32 +32,30 @@
#include "hb.hh"
#line 36 "hb-buffer-deserialize-text.hh"
#line 33 "hb-buffer-deserialize-text.hh"
static const unsigned char _deserialize_text_trans_keys[] = {
0u, 0u, 9u, 91u, 85u, 85u, 43u, 43u, 48u, 102u, 9u, 85u, 48u, 57u, 45u, 57u,
48u, 57u, 48u, 57u, 48u, 57u, 45u, 57u, 48u, 57u, 44u, 44u, 45u, 57u, 48u, 57u,
44u, 57u, 43u, 124u, 45u, 57u, 48u, 57u, 9u, 124u, 9u, 124u, 0u, 0u, 9u, 85u,
0u, 0u, 9u, 91u, 85u, 85u, 43u, 43u, 48u, 102u, 9u, 85u, 48u, 57u, 48u, 57u,
45u, 57u, 48u, 57u, 45u, 57u, 48u, 57u, 48u, 57u, 45u, 57u, 48u, 57u, 44u, 44u,
45u, 57u, 48u, 57u, 44u, 57u, 43u, 124u, 9u, 124u, 9u, 124u, 0u, 0u, 9u, 85u,
9u, 124u, 9u, 124u, 9u, 124u, 9u, 124u, 9u, 124u, 9u, 124u, 9u, 124u, 9u, 124u,
9u, 124u, 9u, 124u, 9u, 124u, 9u, 124u, 9u, 124u, 9u, 124u, 9u, 124u, 9u, 124u,
9u, 124u, 9u, 124u, 9u, 124u, 0
0
};
static const char _deserialize_text_key_spans[] = {
0, 83, 1, 1, 55, 77, 10, 13,
10, 10, 10, 13, 10, 1, 13, 10,
14, 82, 13, 10, 116, 116, 0, 77,
0, 83, 1, 1, 55, 77, 10, 10,
13, 10, 13, 10, 10, 13, 10, 1,
13, 10, 14, 82, 116, 116, 0, 77,
116, 116, 116, 116, 116, 116, 116, 116,
116, 116, 116, 116, 116, 116, 116, 116,
116, 116, 116
116, 116, 116, 116, 116, 116, 116, 116
};
static const short _deserialize_text_index_offsets[] = {
0, 0, 84, 86, 88, 144, 222, 233,
247, 258, 269, 280, 294, 305, 307, 321,
332, 347, 430, 444, 455, 572, 689, 690,
244, 258, 269, 283, 294, 305, 319, 330,
332, 346, 357, 372, 455, 572, 689, 690,
768, 885, 1002, 1119, 1236, 1353, 1470, 1587,
1704, 1821, 1938, 2055, 2172, 2289, 2406, 2523,
2640, 2757, 2874
1704, 1821, 1938, 2055, 2172, 2289, 2406, 2523
};
static const char _deserialize_text_indicies[] = {
@ -90,34 +88,34 @@ static const char _deserialize_text_indicies[] = {
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 4, 1, 8,
9, 9, 9, 9, 9, 9, 9, 9,
9, 1, 10, 1, 1, 11, 12, 12,
12, 12, 12, 12, 12, 12, 12, 1,
9, 1, 10, 11, 11, 11, 11, 11,
11, 11, 11, 11, 1, 12, 1, 1,
13, 14, 14, 14, 14, 14, 14, 14,
14, 14, 1, 15, 16, 16, 16, 16,
16, 16, 16, 16, 16, 1, 17, 18,
18, 18, 18, 18, 18, 18, 18, 18,
1, 19, 1, 1, 20, 21, 21, 21,
16, 16, 16, 16, 16, 1, 17, 1,
1, 18, 19, 19, 19, 19, 19, 19,
19, 19, 19, 1, 20, 21, 21, 21,
21, 21, 21, 21, 21, 21, 1, 22,
23, 23, 23, 23, 23, 23, 23, 23,
23, 1, 24, 1, 25, 1, 1, 26,
27, 27, 27, 27, 27, 27, 27, 27,
27, 1, 28, 29, 29, 29, 29, 29,
29, 29, 29, 29, 1, 24, 1, 1,
1, 23, 23, 23, 23, 23, 23, 23,
23, 23, 23, 1, 30, 30, 1, 1,
23, 1, 24, 1, 1, 25, 26, 26,
26, 26, 26, 26, 26, 26, 26, 1,
27, 28, 28, 28, 28, 28, 28, 28,
28, 28, 1, 29, 1, 30, 1, 1,
31, 32, 32, 32, 32, 32, 32, 32,
32, 32, 1, 33, 34, 34, 34, 34,
34, 34, 34, 34, 34, 1, 29, 1,
1, 1, 28, 28, 28, 28, 28, 28,
28, 28, 28, 28, 1, 35, 35, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 30, 1,
1, 30, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 35,
1, 1, 35, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 30, 30, 1,
1, 1, 1, 1, 1, 1, 35, 35,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 30, 1, 31,
1, 1, 32, 33, 33, 33, 33, 33,
33, 33, 33, 33, 1, 34, 35, 35,
35, 35, 35, 35, 35, 35, 35, 1,
1, 1, 1, 1, 1, 1, 35, 1,
36, 36, 36, 36, 36, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 36,
@ -201,53 +199,24 @@ static const char _deserialize_text_indicies[] = {
48, 48, 48, 48, 48, 48, 48, 48,
48, 48, 48, 48, 48, 48, 48, 48,
48, 48, 48, 48, 48, 48, 56, 48,
57, 57, 57, 57, 57, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 57,
30, 30, 58, 30, 30, 30, 30, 30,
30, 30, 59, 1, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 30, 30, 60, 30, 30, 61,
30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 30, 62, 63, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 30, 64, 30, 57, 57, 57,
57, 57, 30, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 30, 30, 57, 30, 30, 58,
30, 30, 30, 30, 30, 30, 30, 59,
1, 30, 30, 30, 65, 66, 66, 66,
66, 66, 66, 66, 66, 66, 30, 30,
30, 60, 30, 30, 61, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30,
62, 63, 30, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30,
64, 30, 67, 67, 67, 67, 67, 1,
57, 57, 57, 57, 57, 35, 35, 35,
35, 35, 35, 35, 35, 35, 35, 35,
35, 35, 35, 35, 35, 35, 35, 57,
35, 35, 58, 35, 35, 35, 35, 35,
35, 35, 59, 1, 35, 35, 35, 35,
35, 35, 35, 35, 35, 35, 35, 35,
35, 35, 35, 35, 60, 35, 35, 61,
35, 35, 35, 35, 35, 35, 35, 35,
35, 35, 35, 35, 35, 35, 35, 35,
35, 35, 35, 35, 35, 35, 35, 35,
35, 35, 35, 62, 63, 35, 35, 35,
35, 35, 35, 35, 35, 35, 35, 35,
35, 35, 35, 35, 35, 35, 35, 35,
35, 35, 35, 35, 35, 35, 35, 35,
35, 35, 35, 64, 35, 65, 65, 65,
65, 65, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 67, 1, 1, 68, 1, 1, 1,
1, 1, 1, 1, 1, 69, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 70, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 71, 1, 72,
72, 72, 72, 72, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 72, 1,
1, 1, 1, 1, 65, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
@ -255,228 +224,210 @@ static const char _deserialize_text_indicies[] = {
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 42, 1, 1, 1, 1,
1, 66, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 73, 1, 74, 74, 74, 74,
74, 48, 48, 48, 48, 48, 48, 48,
67, 1, 68, 68, 68, 68, 68, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 68, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 42, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 69, 1, 70,
70, 70, 70, 70, 48, 48, 48, 48,
48, 48, 48, 48, 48, 48, 48, 48,
48, 48, 48, 74, 48, 48, 50, 48,
48, 48, 48, 48, 48, 48, 51, 1,
48, 48, 48, 48, 48, 48, 70, 48,
48, 50, 48, 48, 48, 48, 48, 48,
48, 51, 1, 48, 48, 48, 48, 48,
48, 48, 48, 48, 48, 48, 48, 48,
48, 48, 48, 52, 48, 48, 53, 48,
48, 48, 48, 48, 48, 48, 48, 48,
48, 48, 48, 48, 48, 48, 48, 48,
52, 48, 48, 53, 48, 48, 48, 48,
48, 48, 48, 48, 48, 48, 48, 48,
48, 48, 54, 55, 48, 48, 48, 48,
48, 48, 48, 48, 48, 48, 48, 48,
48, 48, 48, 48, 48, 48, 48, 48,
48, 48, 48, 48, 48, 48, 48, 54,
55, 48, 48, 48, 48, 48, 48, 48,
48, 48, 48, 48, 48, 48, 48, 48,
48, 48, 48, 48, 48, 48, 48, 48,
48, 48, 48, 48, 48, 48, 48, 56,
48, 75, 75, 75, 75, 75, 1, 1,
48, 48, 56, 48, 71, 71, 71, 71,
71, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
75, 1, 1, 76, 1, 1, 1, 1,
1, 1, 1, 77, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
78, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 45, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 79, 1, 80, 80,
80, 80, 80, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 80, 1, 1,
1, 1, 1, 71, 1, 1, 72, 1,
1, 1, 1, 1, 1, 1, 1, 73,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
74, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 75,
1, 76, 76, 76, 76, 76, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
76, 1, 1, 77, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 78, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 79, 1, 76, 76,
76, 76, 76, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 76, 1, 1,
77, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 21, 21, 21,
21, 21, 21, 21, 21, 21, 21, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 78, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 79, 1, 71, 71, 71, 71, 71,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 71, 1, 1, 72, 1, 1,
1, 1, 1, 1, 1, 1, 73, 1,
1, 1, 16, 16, 16, 16, 16, 16,
16, 16, 16, 16, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 74,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 75, 1,
80, 80, 80, 80, 80, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 80,
1, 1, 81, 1, 1, 1, 1, 1,
1, 1, 82, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 83,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 82, 1, 80, 80, 80, 80, 80,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 80, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 83, 83, 83, 83, 83, 83,
83, 83, 83, 83, 1, 1, 1, 1,
1, 1, 1, 1, 45, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 81,
1, 1, 1, 84, 1, 85, 85, 85,
85, 85, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 82, 1,
84, 84, 84, 84, 84, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 84,
1, 1, 85, 1, 1, 1, 1, 1,
1, 1, 86, 1, 1, 1, 1, 1,
1, 1, 1, 1, 85, 1, 1, 86,
1, 1, 1, 1, 1, 1, 1, 87,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 88, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
89, 1, 85, 85, 85, 85, 85, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 85, 1, 1, 86, 1, 1, 1,
1, 1, 1, 1, 87, 1, 1, 1,
1, 34, 34, 34, 34, 34, 34, 34,
34, 34, 34, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 88, 1, 84, 84, 84,
84, 84, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 88, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 84, 1, 1, 85,
1, 1, 1, 1, 1, 1, 1, 86,
1, 1, 1, 1, 29, 29, 29, 29,
29, 29, 29, 29, 29, 29, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 89, 1, 80,
80, 80, 80, 80, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 80, 1,
1, 81, 1, 1, 1, 1, 1, 1,
1, 82, 1, 1, 1, 1, 90, 90,
90, 90, 90, 90, 90, 90, 90, 90,
1, 1, 1, 1, 1, 1, 83, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 45, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 84, 1, 65, 65, 65, 65,
65, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 65, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 91, 91, 91, 91, 91,
91, 91, 91, 91, 91, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 87, 1, 1, 1, 1, 1, 1,
66, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
88, 1, 75, 75, 75, 75, 75, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 75, 1, 1, 76, 1, 1, 1,
1, 1, 1, 1, 77, 1, 1, 1,
1, 89, 89, 89, 89, 89, 89, 89,
89, 89, 89, 1, 1, 1, 1, 1,
1, 78, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 45, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 79, 1, 90,
90, 90, 90, 90, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 90, 1,
1, 91, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 92, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 93, 1, 90, 90, 90, 90,
90, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 90, 1, 1, 91, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 35, 35, 35, 35, 35,
35, 35, 35, 35, 35, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
92, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 93,
1, 67, 67, 67, 67, 67, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
67, 1, 1, 68, 1, 1, 1, 1,
1, 1, 1, 1, 69, 1, 1, 1,
14, 14, 14, 14, 14, 14, 14, 14,
14, 14, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 70, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 71, 1, 94, 94,
94, 94, 94, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 94, 30, 30,
58, 30, 30, 30, 30, 30, 30, 30,
59, 1, 30, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 60, 30, 30, 61, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30,
30, 62, 95, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30,
30, 96, 30, 94, 94, 94, 94, 94,
30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 94, 30, 30, 58, 30, 30,
30, 30, 30, 30, 30, 59, 1, 30,
30, 30, 97, 97, 97, 97, 97, 97,
97, 97, 97, 97, 30, 30, 30, 60,
30, 30, 61, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 62, 95,
30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 96, 30,
0
1, 1, 1, 1, 1, 1, 1, 67,
1, 0
};
static const char _deserialize_text_trans_targs[] = {
1, 0, 2, 26, 3, 4, 20, 5,
24, 25, 8, 29, 40, 29, 40, 32,
37, 33, 34, 12, 13, 16, 13, 16,
14, 15, 35, 36, 35, 36, 27, 19,
38, 39, 38, 39, 21, 20, 6, 22,
24, 25, 28, 39, 9, 31, 34, 31,
34, 11, 32, 33, 32, 33, 35, 38,
14, 15, 18, 15, 18, 16, 17, 36,
37, 36, 37, 27, 21, 20, 6, 22,
23, 21, 22, 23, 21, 22, 23, 25,
27, 27, 28, 7, 9, 11, 17, 22,
31, 27, 28, 7, 9, 11, 17, 22,
31, 41, 42, 30, 10, 18, 22, 31,
30, 31, 31, 30, 10, 7, 11, 31,
30, 22, 31, 34, 30, 10, 7, 22,
31, 37, 30, 10, 22, 31, 27, 22,
31, 42
27, 27, 7, 8, 12, 13, 19, 22,
30, 27, 7, 8, 12, 13, 19, 22,
30, 29, 22, 30, 29, 30, 30, 29,
7, 10, 22, 30, 29, 7, 22, 30,
29, 7, 8, 13, 30, 29, 7, 8,
22, 30, 38, 39
};
static const char _deserialize_text_trans_actions[] = {
0, 0, 0, 0, 1, 0, 2, 0,
2, 2, 3, 4, 4, 5, 5, 4,
4, 4, 4, 3, 3, 3, 0, 0,
6, 3, 4, 4, 5, 5, 5, 3,
4, 4, 5, 5, 7, 8, 9, 7,
2, 2, 3, 3, 4, 3, 3, 5,
5, 4, 3, 3, 5, 5, 3, 3,
4, 4, 4, 0, 0, 6, 4, 3,
3, 5, 5, 5, 7, 8, 9, 7,
7, 0, 0, 0, 10, 10, 10, 8,
12, 13, 14, 15, 15, 15, 16, 11,
11, 18, 19, 20, 20, 20, 0, 17,
17, 4, 4, 21, 22, 22, 21, 21,
0, 0, 13, 10, 23, 23, 23, 10,
24, 24, 24, 5, 25, 26, 26, 25,
25, 5, 27, 28, 27, 27, 30, 29,
29, 5
12, 13, 14, 14, 14, 14, 15, 11,
11, 17, 18, 18, 18, 18, 0, 16,
16, 19, 19, 19, 0, 0, 13, 20,
21, 21, 20, 20, 22, 23, 22, 22,
10, 24, 24, 24, 10, 25, 26, 26,
25, 25, 5, 5
};
static const char _deserialize_text_eof_actions[] = {
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 7, 0, 0, 0,
10, 10, 11, 17, 17, 21, 0, 11,
10, 24, 24, 25, 25, 10, 27, 27,
21, 29, 29
10, 10, 11, 16, 19, 0, 11, 20,
22, 22, 20, 10, 25, 25, 10, 19
};
static const int deserialize_text_start = 1;
@ -509,12 +460,12 @@ _hb_buffer_deserialize_text (hb_buffer_t *buffer,
hb_glyph_info_t info = {0};
hb_glyph_position_t pos = {0};
#line 513 "hb-buffer-deserialize-text.hh"
#line 457 "hb-buffer-deserialize-text.hh"
{
cs = deserialize_text_start;
}
#line 518 "hb-buffer-deserialize-text.hh"
#line 460 "hb-buffer-deserialize-text.hh"
{
int _slen;
int _trans;
@ -542,11 +493,11 @@ _resume:
case 1:
#line 38 "hb-buffer-deserialize-text.rl"
{
memset (&info, 0, sizeof (info));
memset (&pos , 0, sizeof (pos ));
hb_memset (&info, 0, sizeof (info));
hb_memset (&pos , 0, sizeof (pos ));
}
break;
case 3:
case 4:
#line 51 "hb-buffer-deserialize-text.rl"
{
tok = p;
@ -560,7 +511,7 @@ _resume:
#line 56 "hb-buffer-deserialize-text.rl"
{ if (unlikely (!buffer->ensure_unicode ())) return false; }
break;
case 20:
case 18:
#line 58 "hb-buffer-deserialize-text.rl"
{
/* TODO Unescape delimiters. */
@ -574,7 +525,7 @@ _resume:
#line 66 "hb-buffer-deserialize-text.rl"
{if (!parse_hex (tok, p, &info.codepoint )) return false; }
break;
case 23:
case 24:
#line 68 "hb-buffer-deserialize-text.rl"
{ if (!parse_uint (tok, p, &info.cluster )) return false; }
break;
@ -586,26 +537,26 @@ _resume:
#line 70 "hb-buffer-deserialize-text.rl"
{ if (!parse_int (tok, p, &pos.y_offset )) return false; }
break;
case 22:
case 21:
#line 71 "hb-buffer-deserialize-text.rl"
{ if (!parse_int (tok, p, &pos.x_advance)) return false; }
break;
case 28:
case 23:
#line 72 "hb-buffer-deserialize-text.rl"
{ if (!parse_int (tok, p, &pos.y_advance)) return false; }
break;
case 16:
case 15:
#line 38 "hb-buffer-deserialize-text.rl"
{
memset (&info, 0, sizeof (info));
memset (&pos , 0, sizeof (pos ));
hb_memset (&info, 0, sizeof (info));
hb_memset (&pos , 0, sizeof (pos ));
}
#line 51 "hb-buffer-deserialize-text.rl"
{
tok = p;
}
break;
case 4:
case 3:
#line 51 "hb-buffer-deserialize-text.rl"
{
tok = p;
@ -621,7 +572,7 @@ _resume:
#line 56 "hb-buffer-deserialize-text.rl"
{ if (unlikely (!buffer->ensure_unicode ())) return false; }
break;
case 17:
case 16:
#line 58 "hb-buffer-deserialize-text.rl"
{
/* TODO Unescape delimiters. */
@ -638,18 +589,6 @@ _resume:
buffer->pos[buffer->len - 1] = pos;
*end_ptr = p;
}
break;
case 19:
#line 58 "hb-buffer-deserialize-text.rl"
{
/* TODO Unescape delimiters. */
if (!hb_font_glyph_from_string (font,
tok, p - tok,
&info.codepoint))
return false;
}
#line 55 "hb-buffer-deserialize-text.rl"
{ if (unlikely (!buffer->ensure_glyphs ())) return false; }
break;
case 7:
#line 66 "hb-buffer-deserialize-text.rl"
@ -687,7 +626,7 @@ _resume:
*end_ptr = p;
}
break;
case 21:
case 20:
#line 71 "hb-buffer-deserialize-text.rl"
{ if (!parse_int (tok, p, &pos.x_advance)) return false; }
#line 43 "hb-buffer-deserialize-text.rl"
@ -699,7 +638,7 @@ _resume:
*end_ptr = p;
}
break;
case 27:
case 22:
#line 72 "hb-buffer-deserialize-text.rl"
{ if (!parse_int (tok, p, &pos.y_advance)) return false; }
#line 43 "hb-buffer-deserialize-text.rl"
@ -711,7 +650,7 @@ _resume:
*end_ptr = p;
}
break;
case 24:
case 19:
#line 73 "hb-buffer-deserialize-text.rl"
{ if (!parse_uint (tok, p, &info.mask )) return false; }
#line 43 "hb-buffer-deserialize-text.rl"
@ -726,8 +665,8 @@ _resume:
case 12:
#line 38 "hb-buffer-deserialize-text.rl"
{
memset (&info, 0, sizeof (info));
memset (&pos , 0, sizeof (pos ));
hb_memset (&info, 0, sizeof (info));
hb_memset (&pos , 0, sizeof (pos ));
}
#line 51 "hb-buffer-deserialize-text.rl"
{
@ -736,11 +675,11 @@ _resume:
#line 55 "hb-buffer-deserialize-text.rl"
{ if (unlikely (!buffer->ensure_glyphs ())) return false; }
break;
case 15:
case 14:
#line 38 "hb-buffer-deserialize-text.rl"
{
memset (&info, 0, sizeof (info));
memset (&pos , 0, sizeof (pos ));
hb_memset (&info, 0, sizeof (info));
hb_memset (&pos , 0, sizeof (pos ));
}
#line 51 "hb-buffer-deserialize-text.rl"
{
@ -755,7 +694,7 @@ _resume:
return false;
}
break;
case 18:
case 17:
#line 58 "hb-buffer-deserialize-text.rl"
{
/* TODO Unescape delimiters. */
@ -773,33 +712,13 @@ _resume:
return false;
buffer->pos[buffer->len - 1] = pos;
*end_ptr = p;
}
break;
case 29:
#line 58 "hb-buffer-deserialize-text.rl"
{
/* TODO Unescape delimiters. */
if (!hb_font_glyph_from_string (font,
tok, p - tok,
&info.codepoint))
return false;
}
#line 73 "hb-buffer-deserialize-text.rl"
{ if (!parse_uint (tok, p, &info.mask )) return false; }
#line 43 "hb-buffer-deserialize-text.rl"
{
buffer->add_info (info);
if (unlikely (!buffer->successful))
return false;
buffer->pos[buffer->len - 1] = pos;
*end_ptr = p;
}
break;
case 11:
#line 38 "hb-buffer-deserialize-text.rl"
{
memset (&info, 0, sizeof (info));
memset (&pos , 0, sizeof (pos ));
hb_memset (&info, 0, sizeof (info));
hb_memset (&pos , 0, sizeof (pos ));
}
#line 51 "hb-buffer-deserialize-text.rl"
{
@ -813,49 +732,6 @@ _resume:
&info.codepoint))
return false;
}
#line 43 "hb-buffer-deserialize-text.rl"
{
buffer->add_info (info);
if (unlikely (!buffer->successful))
return false;
buffer->pos[buffer->len - 1] = pos;
*end_ptr = p;
}
break;
case 14:
#line 38 "hb-buffer-deserialize-text.rl"
{
memset (&info, 0, sizeof (info));
memset (&pos , 0, sizeof (pos ));
}
#line 51 "hb-buffer-deserialize-text.rl"
{
tok = p;
}
#line 58 "hb-buffer-deserialize-text.rl"
{
/* TODO Unescape delimiters. */
if (!hb_font_glyph_from_string (font,
tok, p - tok,
&info.codepoint))
return false;
}
#line 55 "hb-buffer-deserialize-text.rl"
{ if (unlikely (!buffer->ensure_glyphs ())) return false; }
break;
case 30:
#line 58 "hb-buffer-deserialize-text.rl"
{
/* TODO Unescape delimiters. */
if (!hb_font_glyph_from_string (font,
tok, p - tok,
&info.codepoint))
return false;
}
#line 73 "hb-buffer-deserialize-text.rl"
{ if (!parse_uint (tok, p, &info.mask )) return false; }
#line 55 "hb-buffer-deserialize-text.rl"
{ if (unlikely (!buffer->ensure_glyphs ())) return false; }
#line 43 "hb-buffer-deserialize-text.rl"
{
buffer->add_info (info);
@ -868,8 +744,8 @@ _resume:
case 13:
#line 38 "hb-buffer-deserialize-text.rl"
{
memset (&info, 0, sizeof (info));
memset (&pos , 0, sizeof (pos ));
hb_memset (&info, 0, sizeof (info));
hb_memset (&pos , 0, sizeof (pos ));
}
#line 51 "hb-buffer-deserialize-text.rl"
{
@ -894,7 +770,7 @@ _resume:
*end_ptr = p;
}
break;
#line 898 "hb-buffer-deserialize-text.hh"
#line 715 "hb-buffer-deserialize-text.hh"
}
_again:
@ -906,7 +782,7 @@ _again:
if ( p == eof )
{
switch ( _deserialize_text_eof_actions[cs] ) {
case 17:
case 16:
#line 58 "hb-buffer-deserialize-text.rl"
{
/* TODO Unescape delimiters. */
@ -960,7 +836,7 @@ _again:
*end_ptr = p;
}
break;
case 21:
case 20:
#line 71 "hb-buffer-deserialize-text.rl"
{ if (!parse_int (tok, p, &pos.x_advance)) return false; }
#line 43 "hb-buffer-deserialize-text.rl"
@ -972,7 +848,7 @@ _again:
*end_ptr = p;
}
break;
case 27:
case 22:
#line 72 "hb-buffer-deserialize-text.rl"
{ if (!parse_int (tok, p, &pos.y_advance)) return false; }
#line 43 "hb-buffer-deserialize-text.rl"
@ -984,27 +860,7 @@ _again:
*end_ptr = p;
}
break;
case 24:
#line 73 "hb-buffer-deserialize-text.rl"
{ if (!parse_uint (tok, p, &info.mask )) return false; }
#line 43 "hb-buffer-deserialize-text.rl"
{
buffer->add_info (info);
if (unlikely (!buffer->successful))
return false;
buffer->pos[buffer->len - 1] = pos;
*end_ptr = p;
}
break;
case 29:
#line 58 "hb-buffer-deserialize-text.rl"
{
/* TODO Unescape delimiters. */
if (!hb_font_glyph_from_string (font,
tok, p - tok,
&info.codepoint))
return false;
}
case 19:
#line 73 "hb-buffer-deserialize-text.rl"
{ if (!parse_uint (tok, p, &info.mask )) return false; }
#line 43 "hb-buffer-deserialize-text.rl"
@ -1019,8 +875,8 @@ _again:
case 11:
#line 38 "hb-buffer-deserialize-text.rl"
{
memset (&info, 0, sizeof (info));
memset (&pos , 0, sizeof (pos ));
hb_memset (&info, 0, sizeof (info));
hb_memset (&pos , 0, sizeof (pos ));
}
#line 51 "hb-buffer-deserialize-text.rl"
{
@ -1043,7 +899,7 @@ _again:
*end_ptr = p;
}
break;
#line 1047 "hb-buffer-deserialize-text.hh"
#line 825 "hb-buffer-deserialize-text.hh"
}
}

View File

@ -183,7 +183,7 @@ _hb_buffer_serialize_glyphs_json (hb_buffer_t *buffer,
unsigned int l = p - b;
if (buf_size > l)
{
memcpy (buf, b, l);
hb_memcpy (buf, b, l);
buf += l;
buf_size -= l;
*buf_consumed += l;
@ -241,7 +241,7 @@ _hb_buffer_serialize_unicode_json (hb_buffer_t *buffer,
unsigned int l = p - b;
if (buf_size > l)
{
memcpy (buf, b, l);
hb_memcpy (buf, b, l);
buf += l;
buf_size -= l;
*buf_consumed += l;
@ -329,7 +329,7 @@ _hb_buffer_serialize_glyphs_text (hb_buffer_t *buffer,
unsigned int l = p - b;
if (buf_size > l)
{
memcpy (buf, b, l);
hb_memcpy (buf, b, l);
buf += l;
buf_size -= l;
*buf_consumed += l;
@ -381,7 +381,7 @@ _hb_buffer_serialize_unicode_text (hb_buffer_t *buffer,
unsigned int l = p - b;
if (buf_size > l)
{
memcpy (buf, b, l);
hb_memcpy (buf, b, l);
buf += l;
buf_size -= l;
*buf_consumed += l;

View File

@ -186,7 +186,7 @@ buffer_verify_unsafe_to_break (hb_buffer_t *buffer,
bool ret = true;
hb_buffer_diff_flags_t diff = hb_buffer_diff (reconstruction, buffer, (hb_codepoint_t) -1, 0);
if (diff)
if (diff & ~HB_BUFFER_DIFF_FLAG_GLYPH_FLAGS_MISMATCH)
{
buffer_verify_error (buffer, font, BUFFER_VERIFY_ERROR "unsafe-to-break test failed.");
ret = false;
@ -313,7 +313,6 @@ buffer_verify_unsafe_to_concat (hb_buffer_t *buffer,
bool ret = true;
hb_buffer_diff_flags_t diff;
/*
* Shape the two fragment streams.
*/
@ -382,7 +381,7 @@ buffer_verify_unsafe_to_concat (hb_buffer_t *buffer,
* Diff results.
*/
diff = hb_buffer_diff (reconstruction, buffer, (hb_codepoint_t) -1, 0);
if (diff)
if (diff & ~HB_BUFFER_DIFF_FLAG_GLYPH_FLAGS_MISMATCH)
{
buffer_verify_error (buffer, font, BUFFER_VERIFY_ERROR "unsafe-to-concat test failed.");
ret = false;
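
Both verify paths now mask the diff bitmask before testing it: AND-ing with the complement of HB_BUFFER_DIFF_FLAG_GLYPH_FLAGS_MISMATCH tolerates glyph-flag differences, presumably because re-shaping the fragments can legitimately produce different unsafe-to-break/concat flags, while any other divergence still fails. The masking idiom in isolation, with illustrative flag values rather than hb's real ones:

#include <cstdint>

enum : uint32_t
{
  DIFF_LENGTH_MISMATCH      = 1u << 0,   // illustrative values
  DIFF_GLYPH_FLAGS_MISMATCH = 1u << 1,
};

static bool verify_ok (uint32_t diff)
{
  // Clear the tolerated bit, then ask whether anything else differs.
  return !(diff & ~DIFF_GLYPH_FLAGS_MISMATCH);
}
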

View File

@ -172,12 +172,13 @@ hb_buffer_t::enlarge (unsigned int size)
while (size >= new_allocated)
new_allocated += (new_allocated >> 1) + 32;
static_assert ((sizeof (info[0]) == sizeof (pos[0])), "");
if (unlikely (hb_unsigned_mul_overflows (new_allocated, sizeof (info[0]))))
unsigned new_bytes;
if (unlikely (hb_unsigned_mul_overflows (new_allocated, sizeof (info[0]), &new_bytes)))
goto done;
new_pos = (hb_glyph_position_t *) hb_realloc (pos, new_allocated * sizeof (pos[0]));
new_info = (hb_glyph_info_t *) hb_realloc (info, new_allocated * sizeof (info[0]));
static_assert (sizeof (info[0]) == sizeof (pos[0]), "");
new_pos = (hb_glyph_position_t *) hb_realloc (pos, new_bytes);
new_info = (hb_glyph_info_t *) hb_realloc (info, new_bytes);
done:
if (unlikely (!new_pos || !new_info))
@ -208,7 +209,7 @@ hb_buffer_t::make_room_for (unsigned int num_in,
assert (have_output);
out_info = (hb_glyph_info_t *) pos;
memcpy (out_info, info, out_len * sizeof (out_info[0]));
hb_memcpy (out_info, info, out_len * sizeof (out_info[0]));
}
return true;
@ -229,7 +230,7 @@ hb_buffer_t::shift_forward (unsigned int count)
* Ideally, we should at least set Default_Ignorable bits on
* these, as well as consistent cluster values. But the former
* is a layering violation... */
memset (info + len, 0, (idx + count - len) * sizeof (info[0]));
hb_memset (info + len, 0, (idx + count - len) * sizeof (info[0]));
}
len += count;
idx += count;
@ -298,8 +299,8 @@ hb_buffer_t::clear ()
out_len = 0;
out_info = info;
memset (context, 0, sizeof context);
memset (context_len, 0, sizeof context_len);
hb_memset (context, 0, sizeof context);
hb_memset (context_len, 0, sizeof context_len);
deallocate_var_all ();
serial = 0;
@ -313,15 +314,14 @@ hb_buffer_t::enter ()
serial = 0;
shaping_failed = false;
scratch_flags = HB_BUFFER_SCRATCH_FLAG_DEFAULT;
if (likely (!hb_unsigned_mul_overflows (len, HB_BUFFER_MAX_LEN_FACTOR)))
unsigned mul;
if (likely (!hb_unsigned_mul_overflows (len, HB_BUFFER_MAX_LEN_FACTOR, &mul)))
{
max_len = hb_max (len * HB_BUFFER_MAX_LEN_FACTOR,
(unsigned) HB_BUFFER_MAX_LEN_MIN);
max_len = hb_max (mul, (unsigned) HB_BUFFER_MAX_LEN_MIN);
}
if (likely (!hb_unsigned_mul_overflows (len, HB_BUFFER_MAX_OPS_FACTOR)))
if (likely (!hb_unsigned_mul_overflows (len, HB_BUFFER_MAX_OPS_FACTOR, &mul)))
{
max_ops = hb_max (len * HB_BUFFER_MAX_OPS_FACTOR,
(unsigned) HB_BUFFER_MAX_OPS_MIN);
max_ops = hb_max (mul, (unsigned) HB_BUFFER_MAX_OPS_MIN);
}
}
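
Editor's note: a minimal standalone sketch (not HarfBuzz's actual implementation) of the new three-argument idiom above, where the overflow check and the product come from one call so `len * FACTOR` is never recomputed after the check. The helper name is hypothetical.

#include <climits>
#include <cstdio>

/* Sketch: check for overflow and yield the product in one call,
 * mirroring the new hb_unsigned_mul_overflows (a, b, &result) usage.
 * Assumes unsigned is at most 32 bits wide. */
static inline bool mul_overflows (unsigned a, unsigned b, unsigned *result)
{
  unsigned long long wide = 1ULL * a * b;
  *result = (unsigned) wide;
  return wide > UINT_MAX;
}

int main ()
{
  unsigned bytes;
  if (!mul_overflows (1000u, 16u, &bytes))
    printf ("need %u bytes\n", bytes); /* 16000 */
  return 0;
}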
void
@ -345,7 +345,7 @@ hb_buffer_t::add (hb_codepoint_t codepoint,
glyph = &info[len];
memset (glyph, 0, sizeof (*glyph));
hb_memset (glyph, 0, sizeof (*glyph));
glyph->codepoint = codepoint;
glyph->mask = 0;
glyph->cluster = cluster;
@ -605,6 +605,53 @@ done:
skip_glyph ();
}
void
hb_buffer_t::delete_glyphs_inplace (bool (*filter) (const hb_glyph_info_t *info))
{
/* Merge clusters and delete filtered glyphs.
* NOTE! We can't use out-buffer as we have positioning data. */
unsigned int j = 0;
unsigned int count = len;
for (unsigned int i = 0; i < count; i++)
{
if (filter (&info[i]))
{
/* Merge clusters.
* Same logic as delete_glyph(), but for in-place removal. */
unsigned int cluster = info[i].cluster;
if (i + 1 < count && cluster == info[i + 1].cluster)
continue; /* Cluster survives; do nothing. */
if (j)
{
/* Merge cluster backward. */
if (cluster < info[j - 1].cluster)
{
unsigned int mask = info[i].mask;
unsigned int old_cluster = info[j - 1].cluster;
for (unsigned k = j; k && info[k - 1].cluster == old_cluster; k--)
set_cluster (info[k - 1], cluster, mask);
}
continue;
}
if (i + 1 < count)
merge_clusters (i, i + 2); /* Merge cluster forward. */
continue;
}
if (j != i)
{
info[j] = info[i];
pos[j] = pos[i];
}
j++;
}
len = j;
}
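
Editor's note: a self-contained sketch of the compaction pattern delete_glyphs_inplace uses above, with one read cursor i and one write cursor j keeping two parallel arrays in sync (the reason the out-buffer cannot be used once positioning data exists). Cluster merging is elided; the types are stand-ins.

#include <cstdio>

struct Info { unsigned codepoint; };
struct Pos  { int x_advance; };

/* Remove every element matching `filter`, compacting info and pos together;
 * returns the new length. */
template <typename Filter>
static unsigned delete_inplace (Info *info, Pos *pos, unsigned len, Filter filter)
{
  unsigned j = 0;
  for (unsigned i = 0; i < len; i++)
  {
    if (filter (info[i]))
      continue; /* dropped (cluster merging elided in this sketch) */
    if (j != i)
    {
      info[j] = info[i];
      pos[j]  = pos[i];
    }
    j++;
  }
  return j;
}

int main ()
{
  Info info[] = {{'a'}, {0x200B}, {'b'}};
  Pos  pos[]  = {{10}, {0}, {12}};
  unsigned len = delete_inplace (info, pos, 3,
                                 [] (const Info &i) { return i.codepoint == 0x200B; });
  printf ("%u glyphs remain\n", len); /* 2 */
  return 0;
}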
void
hb_buffer_t::guess_segment_properties ()
{
@ -933,7 +980,6 @@ hb_buffer_get_unicode_funcs (const hb_buffer_t *buffer)
void
hb_buffer_set_direction (hb_buffer_t *buffer,
hb_direction_t direction)
{
if (unlikely (hb_object_is_immutable (buffer)))
return;
@ -1385,9 +1431,9 @@ hb_buffer_set_length (hb_buffer_t *buffer,
/* Wipe the new space */
if (length > buffer->len) {
memset (buffer->info + buffer->len, 0, sizeof (buffer->info[0]) * (length - buffer->len));
hb_memset (buffer->info + buffer->len, 0, sizeof (buffer->info[0]) * (length - buffer->len));
if (buffer->have_positions)
memset (buffer->pos + buffer->len, 0, sizeof (buffer->pos[0]) * (length - buffer->len));
hb_memset (buffer->pos + buffer->len, 0, sizeof (buffer->pos[0]) * (length - buffer->len));
}
buffer->len = length;
@ -1795,7 +1841,9 @@ hb_buffer_add_latin1 (hb_buffer_t *buffer,
* marks at start of run.
*
* This function does not check the validity of @text, it is up to the caller
* to ensure it contains a valid Unicode code points.
* to ensure it contains valid Unicode scalar values. In contrast,
* hb_buffer_add_utf32() can be used instead; it takes similar input but
* performs a sanity check on it.
*
* Since: 0.9.31
**/
@ -1858,9 +1906,9 @@ hb_buffer_append (hb_buffer_t *buffer,
hb_segment_properties_overlay (&buffer->props, &source->props);
memcpy (buffer->info + orig_len, source->info + start, (end - start) * sizeof (buffer->info[0]));
hb_memcpy (buffer->info + orig_len, source->info + start, (end - start) * sizeof (buffer->info[0]));
if (buffer->have_positions)
memcpy (buffer->pos + orig_len, source->pos + start, (end - start) * sizeof (buffer->pos[0]));
hb_memcpy (buffer->pos + orig_len, source->pos + start, (end - start) * sizeof (buffer->pos[0]));
if (source->content_type == HB_BUFFER_CONTENT_TYPE_UNICODE)
{
@ -2048,7 +2096,7 @@ hb_buffer_diff (hb_buffer_t *buffer,
result |= HB_BUFFER_DIFF_FLAG_CODEPOINT_MISMATCH;
if (buf_info->cluster != ref_info->cluster)
result |= HB_BUFFER_DIFF_FLAG_CLUSTER_MISMATCH;
if ((buf_info->mask & ~ref_info->mask & HB_GLYPH_FLAG_DEFINED))
if ((buf_info->mask ^ ref_info->mask) & HB_GLYPH_FLAG_DEFINED)
result |= HB_BUFFER_DIFF_FLAG_GLYPH_FLAGS_MISMATCH;
if (contains && ref_info->codepoint == dottedcircle_glyph)
result |= HB_BUFFER_DIFF_FLAG_DOTTED_CIRCLE_PRESENT;
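
Editor's note: a tiny demonstration of why the XOR above is the right test. A flag set only in the reference buffer (a = 0, b = 1) slips past the old one-sided check, while the symmetric XOR catches mismatches in either direction.

#include <cstdio>

int main ()
{
  unsigned a = 0x0, b = 0x1;
  printf ("one-sided: %d, symmetric: %d\n",
          (a & ~b) != 0,  /* 0: misses the mismatch */
          (a ^ b)  != 0); /* 1: catches it */
  return 0;
}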
@ -2103,6 +2151,13 @@ hb_buffer_set_message_func (hb_buffer_t *buffer,
hb_buffer_message_func_t func,
void *user_data, hb_destroy_func_t destroy)
{
if (unlikely (hb_object_is_immutable (buffer)))
{
if (destroy)
destroy (user_data);
return;
}
if (buffer->message_destroy)
buffer->message_destroy (buffer->message_data);

View File

@ -32,6 +32,7 @@
#include "hb.hh"
#include "hb-unicode.hh"
#include "hb-set-digest.hh"
#ifndef HB_BUFFER_MAX_LEN_FACTOR
@ -207,6 +208,14 @@ struct hb_buffer_t
hb_glyph_info_t &prev () { return out_info[out_len ? out_len - 1 : 0]; }
hb_glyph_info_t prev () const { return out_info[out_len ? out_len - 1 : 0]; }
hb_set_digest_t digest () const
{
hb_set_digest_t d;
d.init ();
d.add_array (&info[0].codepoint, len, sizeof (info[0]));
return d;
}
HB_INTERNAL void similar (const hb_buffer_t &src);
HB_INTERNAL void reset ();
HB_INTERNAL void clear ();
@ -402,6 +411,8 @@ struct hb_buffer_t
HB_INTERNAL void merge_out_clusters (unsigned int start, unsigned int end);
/* Merge clusters for deleting current glyph, and skip it. */
HB_INTERNAL void delete_glyph ();
HB_INTERNAL void delete_glyphs_inplace (bool (*filter) (const hb_glyph_info_t *info));
/* Adds glyph flags in mask to infos with clusters between start and end.

View File

@ -46,7 +46,7 @@ struct hb_cache_t
>::type;
static_assert ((key_bits >= cache_bits), "");
static_assert ((key_bits + value_bits - cache_bits <= 8 * sizeof (item_t)), "");
static_assert ((key_bits + value_bits <= cache_bits + 8 * sizeof (item_t)), "");
void init () { clear (); }
void fini () {}

View File

@ -284,65 +284,56 @@ struct UnsizedByteStr : UnsizedArrayOf <HBUINT8>
/* A byte string associated with the current offset and an error condition */
struct byte_str_ref_t
{
byte_str_ref_t () { init (); }
void init ()
{
str = hb_ubytes_t ();
offset = 0;
error = false;
}
void fini () {}
byte_str_ref_t ()
: str () {}
byte_str_ref_t (const hb_ubytes_t &str_, unsigned int offset_ = 0)
: str (str_), offset (offset_), error (false) {}
: str (str_) { set_offset (offset_); }
void reset (const hb_ubytes_t &str_, unsigned int offset_ = 0)
{
str = str_;
offset = offset_;
error = false;
set_offset (offset_);
}
const unsigned char& operator [] (int i) {
if (unlikely ((unsigned int) (offset + i) >= str.length))
if (unlikely ((unsigned int) (get_offset () + i) >= str.length))
{
set_error ();
return Null (unsigned char);
}
return str[offset + i];
return str.arrayZ[get_offset () + i];
}
unsigned char head_unchecked () const { return str.arrayZ[get_offset ()]; }
/* Conversion to hb_ubytes_t */
operator hb_ubytes_t () const { return str.sub_array (offset, str.length - offset); }
operator hb_ubytes_t () const { return str.sub_array (get_offset ()); }
hb_ubytes_t sub_array (unsigned int offset_, unsigned int len_) const
{ return str.sub_array (offset_, len_); }
bool avail (unsigned int count=1) const
{ return (!in_error () && offset + count <= str.length); }
{ return get_offset () + count <= str.length; }
void inc (unsigned int count=1)
{
if (likely (!in_error () && (offset <= str.length) && (offset + count <= str.length)))
{
offset += count;
}
else
{
offset = str.length;
set_error ();
}
/* Automatically puts us in error if count is out-of-range. */
set_offset (get_offset () + count);
}
void set_error () { error = true; }
bool in_error () const { return error; }
/* We (ab)use ubytes backwards_length as a cursor (called offset),
* as well as to store the error condition. */
hb_ubytes_t str;
unsigned int offset; /* beginning of the sub-string within str */
unsigned get_offset () const { return str.backwards_length; }
void set_offset (unsigned offset) { str.backwards_length = offset; }
void set_error () { str.backwards_length = str.length + 1; }
bool in_error () const { return str.backwards_length > str.length; }
unsigned total_size () const { return str.length; }
protected:
bool error;
hb_ubytes_t str;
};
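
Editor's note: a standalone sketch of the encoding described above; one spare unsigned (HarfBuzz reuses hb_ubytes_t::backwards_length) serves as both cursor and error flag, with "error" defined as cursor > length. The struct below is a simplified stand-in, not the real byte_str_ref_t.

#include <cstdio>

struct cursor_bytes
{
  const unsigned char *data;
  unsigned length;
  unsigned cursor; /* doubles as the error state */

  void set_offset (unsigned o) { cursor = o; } /* out-of-range => in_error() */
  void set_error () { cursor = length + 1; }
  bool in_error () const { return cursor > length; }
  bool avail (unsigned n = 1) const { return cursor + n <= length; }
};

int main ()
{
  const unsigned char buf[4] = {1, 2, 3, 4};
  cursor_bytes b = {buf, 4, 0};
  b.set_offset (3);
  printf ("avail(1)=%d avail(2)=%d\n", b.avail (1), b.avail (2)); /* 1 0 */
  b.set_offset (9); /* past the end: automatically an error */
  printf ("in_error=%d\n", b.in_error ()); /* 1 */
  return 0;
}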
using byte_str_array_t = hb_vector_t<hb_ubytes_t>;
@ -491,8 +482,15 @@ struct arg_stack_t : cff_stack_t<ARG, 513>
/* an operator prefixed by its operands in a byte string */
struct op_str_t
{
hb_ubytes_t str;
/* This used to have a hb_ubytes_t. Using a pointer and length
* in a particular order saves 8 bytes in this struct and more
* in our parsed_cs_op_t subclass. */
const unsigned char *ptr = nullptr;
op_code_t op;
uint8_t length = 0;
};
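
Editor's note: an illustration of the 8 bytes the comment above refers to, assuming a typical 64-bit (LP64) ABI. An hb_ubytes_t-style member (pointer plus two unsigneds) forces padding after the op field, whereas a raw pointer plus a one-byte length packs next to it.

#include <cstdio>
#include <cstdint>

struct bytes_like { const unsigned char *p; unsigned len; unsigned backwards_len; };
struct old_style  { bytes_like str; unsigned op; };                           /* 24 bytes */
struct new_style  { const unsigned char *ptr; unsigned op; uint8_t length; }; /* 16 bytes */

int main ()
{
  printf ("old: %zu, new: %zu\n", sizeof (old_style), sizeof (new_style));
  return 0;
}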
/* base of OP_SERIALIZER */
@ -503,9 +501,11 @@ struct op_serializer_t
{
TRACE_SERIALIZE (this);
HBUINT8 *d = c->allocate_size<HBUINT8> (opstr.str.length);
unsigned char *d = c->allocate_size<unsigned char> (opstr.length);
if (unlikely (!d)) return_trace (false);
memcpy (d, &opstr.str[0], opstr.str.length);
/* Faster than hb_memcpy for small strings. */
for (unsigned i = 0; i < opstr.length; i++)
d[i] = opstr.ptr[i];
return_trace (true);
}
};
@ -529,16 +529,20 @@ struct parsed_values_t
{
VAL *val = values.push ();
val->op = op;
val->str = str_ref.str.sub_array (opStart, str_ref.offset - opStart);
opStart = str_ref.offset;
auto arr = str_ref.sub_array (opStart, str_ref.get_offset () - opStart);
val->ptr = arr.arrayZ;
val->length = arr.length;
opStart = str_ref.get_offset ();
}
void add_op (op_code_t op, const byte_str_ref_t& str_ref, const VAL &v)
{
VAL *val = values.push (v);
val->op = op;
val->str = str_ref.sub_array ( opStart, str_ref.offset - opStart);
opStart = str_ref.offset;
auto arr = str_ref.sub_array (opStart, str_ref.get_offset () - opStart);
val->ptr = arr.arrayZ;
val->length = arr.length;
opStart = str_ref.get_offset ();
}
bool has_op (op_code_t op) const
@ -549,8 +553,7 @@ struct parsed_values_t
}
unsigned get_count () const { return values.length; }
const VAL &get_value (unsigned int i) const { return values[i]; }
const VAL &operator [] (unsigned int i) const { return get_value (i); }
const VAL &operator [] (unsigned int i) const { return values[i]; }
unsigned int opStart;
hb_vector_t<VAL> values;
@ -565,23 +568,23 @@ struct interp_env_t
str_ref.reset (str_);
}
bool in_error () const
{ return error || str_ref.in_error () || argStack.in_error (); }
{ return str_ref.in_error () || argStack.in_error (); }
void set_error () { error = true; }
void set_error () { str_ref.set_error (); }
op_code_t fetch_op ()
{
op_code_t op = OpCode_Invalid;
if (unlikely (!str_ref.avail ()))
return OpCode_Invalid;
op = (op_code_t)(unsigned char)str_ref[0];
op = (op_code_t) str_ref.head_unchecked ();
str_ref.inc ();
if (op == OpCode_escape) {
if (unlikely (!str_ref.avail ()))
return OpCode_Invalid;
op = Make_OpCode_ESC(str_ref[1]);
op = Make_OpCode_ESC (str_ref.head_unchecked ());
str_ref.inc ();
}
str_ref.inc ();
return op;
}
@ -596,8 +599,6 @@ struct interp_env_t
str_ref;
arg_stack_t<ARG>
argStack;
protected:
bool error = false;
};
using num_interp_env_t = interp_env_t<>;

View File

@ -40,13 +40,15 @@ struct blend_arg_t : number_t
void set_real (double v) { reset_blends (); number_t::set_real (v); }
void set_blends (unsigned int numValues_, unsigned int valueIndex_,
unsigned int numBlends, hb_array_t<const blend_arg_t> blends_)
hb_array_t<const blend_arg_t> blends_)
{
numValues = numValues_;
valueIndex = valueIndex_;
deltas.resize (numBlends);
unsigned numBlends = blends_.length;
if (unlikely (!deltas.resize (numBlends)))
return;
for (unsigned int i = 0; i < numBlends; i++)
deltas[i] = blends_[i];
deltas.arrayZ[i] = blends_.arrayZ[i];
}
bool blending () const { return deltas.length > 0; }
@ -61,7 +63,6 @@ struct blend_arg_t : number_t
hb_vector_t<number_t> deltas;
};
typedef interp_env_t<blend_arg_t> BlendInterpEnv;
typedef biased_subrs_t<CFF2Subrs> cff2_biased_subrs_t;
template <typename ELEM>
@ -154,8 +155,9 @@ struct cff2_cs_interp_env_t : cs_interp_env_t<ELEM, CFF2Subrs>
{
if (likely (scalars.length == deltas.length))
{
for (unsigned int i = 0; i < scalars.length; i++)
v += (double) scalars[i] * deltas[i].to_real ();
unsigned count = scalars.length;
for (unsigned i = 0; i < count; i++)
v += (double) scalars.arrayZ[i] * deltas.arrayZ[i].to_real ();
}
}
return v;
@ -220,7 +222,7 @@ struct cff2_cs_opset_t : cs_opset_t<ELEM, OPSET, cff2_cs_interp_env_t<ELEM>, PAR
const hb_array_t<const ELEM> blends,
unsigned n, unsigned i)
{
arg.set_blends (n, i, blends.length, blends);
arg.set_blends (n, i, blends);
}
template <typename T = ELEM,
hb_enable_if (!hb_is_same (T, blend_arg_t))>

View File

@ -285,7 +285,7 @@ struct hb_language_item_t {
lang = (hb_language_t) hb_malloc(len);
if (likely (lang))
{
memcpy((unsigned char *) lang, s, len);
hb_memcpy((unsigned char *) lang, s, len);
for (unsigned char *p = (unsigned char *) lang; *p; p++)
*p = canon_map[*p];
}
@ -379,7 +379,7 @@ hb_language_from_string (const char *str, int len)
/* NUL-terminate it. */
char strbuf[64];
len = hb_min (len, (int) sizeof (strbuf) - 1);
memcpy (strbuf, str, len);
hb_memcpy (strbuf, str, len);
strbuf[len] = '\0';
item = lang_find_or_insert (strbuf);
}
@ -976,7 +976,7 @@ hb_feature_from_string (const char *str, int len,
}
if (feature)
memset (feature, 0, sizeof (*feature));
hb_memset (feature, 0, sizeof (*feature));
return false;
}
@ -1025,7 +1025,7 @@ hb_feature_to_string (hb_feature_t *feature,
}
assert (len < ARRAY_LENGTH (s));
len = hb_min (len, size - 1);
memcpy (buf, s, len);
hb_memcpy (buf, s, len);
buf[len] = '\0';
}
@ -1088,7 +1088,7 @@ hb_variation_from_string (const char *str, int len,
}
if (variation)
memset (variation, 0, sizeof (*variation));
hb_memset (variation, 0, sizeof (*variation));
return false;
}
@ -1136,7 +1136,7 @@ get_C_locale ()
/**
* hb_variation_to_string:
* @variation: an #hb_variation_t to convert
* @buf: (array length=size) (out): output string
* @buf: (array length=size) (out caller-allocates): output string
* @size: the allocated size of @buf
*
* Converts an #hb_variation_t into a `NULL`-terminated string in the format
@ -1166,7 +1166,7 @@ hb_variation_to_string (hb_variation_t *variation,
assert (len < ARRAY_LENGTH (s));
len = hb_min (len, size - 1);
memcpy (buf, s, len);
hb_memcpy (buf, s, len);
buf[len] = '\0';
}

View File

@ -35,8 +35,9 @@
#include "config.h"
#endif
#ifndef HB_BORING_EXPANSION
#define HB_NO_BORING_EXPANSION
#ifndef HB_EXPERIMENTAL_API
#define HB_NO_BEYOND_64K
#define HB_NO_VAR_COMPOSITES
#endif
#ifdef HB_TINY
@ -84,6 +85,7 @@
#define HB_NO_OT_SHAPE_FRACTIONS
#define HB_NO_STYLE
#define HB_NO_SUBSET_LAYOUT
#define HB_NO_VERTICAL
#define HB_NO_VAR
#endif
@ -104,7 +106,7 @@
#ifdef HB_NO_BORING_EXPANSION
#define HB_NO_BEYOND_64K
#define HB_NO_VARIATIONS2
#define HB_NO_AVAR2
#endif
#ifdef HB_DISABLE_DEPRECATED
@ -113,6 +115,11 @@
#define HB_IF_NOT_DEPRECATED(x) x
#endif
#ifdef HB_NO_SHAPER
#define HB_NO_OT_SHAPE
#define HB_NO_AAT_SHAPE
#endif
#ifdef HB_NO_AAT
#define HB_NO_OT_NAME_LANGUAGE_AAT
#define HB_NO_AAT_SHAPE
@ -159,6 +166,7 @@
#define HB_NO_OT_SHAPER_HEBREW_FALLBACK
#define HB_NO_OT_SHAPER_THAI_FALLBACK
#define HB_NO_OT_SHAPER_VOWEL_CONSTRAINTS
#define HB_NO_OT_SHAPER_MYANMAR_ZAWGYI
#endif
#ifdef NDEBUG

View File

@ -347,10 +347,13 @@ _hb_coretext_shaper_font_data_create (hb_font_t *font)
hb_ot_var_axis_info_t info;
unsigned int c = 1;
hb_ot_var_get_axis_infos (font->face, i, &c, &info);
CFDictionarySetValue (variations,
CFNumberCreate (kCFAllocatorDefault, kCFNumberIntType, &info.tag),
CFNumberCreate (kCFAllocatorDefault, kCFNumberFloatType, &font->design_coords[i])
);
float v = hb_clamp (font->design_coords[i], info.min_value, info.max_value);
CFNumberRef tag_number = CFNumberCreate (kCFAllocatorDefault, kCFNumberIntType, &info.tag);
CFNumberRef value_number = CFNumberCreate (kCFAllocatorDefault, kCFNumberFloatType, &v);
CFDictionarySetValue (variations, tag_number, value_number);
CFRelease (tag_number);
CFRelease (value_number);
}
CFDictionaryRef attributes =
@ -648,7 +651,7 @@ _hb_coretext_shape (hb_shape_plan_t *shape_plan,
} else {
active_feature_t *feature = active_features.lsearch (event->feature);
if (feature)
active_features.remove (feature - active_features.arrayZ);
active_features.remove_ordered (feature - active_features.arrayZ);
}
}
}

View File

@ -80,6 +80,56 @@ hb_draw_close_path_nil (hb_draw_funcs_t *dfuncs HB_UNUSED, void *draw_data HB_UN
void *user_data HB_UNUSED) {}
static bool
_hb_draw_funcs_set_preamble (hb_draw_funcs_t *dfuncs,
bool func_is_null,
void **user_data,
hb_destroy_func_t *destroy)
{
if (hb_object_is_immutable (dfuncs))
{
if (*destroy)
(*destroy) (*user_data);
return false;
}
if (func_is_null)
{
if (*destroy)
(*destroy) (*user_data);
*destroy = nullptr;
*user_data = nullptr;
}
return true;
}
static bool
_hb_draw_funcs_set_middle (hb_draw_funcs_t *dfuncs,
void *user_data,
hb_destroy_func_t destroy)
{
if (user_data && !dfuncs->user_data)
{
dfuncs->user_data = (decltype (dfuncs->user_data)) hb_calloc (1, sizeof (*dfuncs->user_data));
if (unlikely (!dfuncs->user_data))
goto fail;
}
if (destroy && !dfuncs->destroy)
{
dfuncs->destroy = (decltype (dfuncs->destroy)) hb_calloc (1, sizeof (*dfuncs->destroy));
if (unlikely (!dfuncs->destroy))
goto fail;
}
return true;
fail:
if (destroy)
(destroy) (user_data);
return false;
}
#define HB_DRAW_FUNC_IMPLEMENT(name) \
\
void \
@ -88,43 +138,24 @@ hb_draw_funcs_set_##name##_func (hb_draw_funcs_t *dfuncs, \
void *user_data, \
hb_destroy_func_t destroy) \
{ \
if (hb_object_is_immutable (dfuncs)) \
return; \
if (!_hb_draw_funcs_set_preamble (dfuncs, !func, &user_data, &destroy))\
return; \
\
if (dfuncs->destroy && dfuncs->destroy->name) \
dfuncs->destroy->name (!dfuncs->user_data ? nullptr : dfuncs->user_data->name); \
\
if (user_data && !dfuncs->user_data) \
{ \
dfuncs->user_data = (decltype (dfuncs->user_data)) hb_calloc (1, sizeof (*dfuncs->user_data)); \
if (unlikely (!dfuncs->user_data)) \
goto fail; \
} \
if (destroy && !dfuncs->destroy) \
{ \
dfuncs->destroy = (decltype (dfuncs->destroy)) hb_calloc (1, sizeof (*dfuncs->destroy)); \
if (unlikely (!dfuncs->destroy)) \
goto fail; \
} \
if (!_hb_draw_funcs_set_middle (dfuncs, user_data, destroy)) \
return; \
\
if (func) { \
if (func) \
dfuncs->func.name = func; \
if (dfuncs->user_data) \
dfuncs->user_data->name = user_data; \
if (dfuncs->destroy) \
dfuncs->destroy->name = destroy; \
} else { \
else \
dfuncs->func.name = hb_draw_##name##_nil; \
if (dfuncs->user_data) \
dfuncs->user_data->name = nullptr; \
if (dfuncs->destroy) \
dfuncs->destroy->name = nullptr; \
} \
return; \
\
fail: \
if (destroy) \
destroy (user_data); \
\
if (dfuncs->user_data) \
dfuncs->user_data->name = user_data; \
if (dfuncs->destroy) \
dfuncs->destroy->name = destroy; \
}
HB_DRAW_FUNCS_IMPLEMENT_CALLBACKS
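
Editor's note: a hedged usage sketch of the public setters the macro above generates. The behavior change worth noting is in the preamble: even when the funcs object is immutable, the destroy callback is still invoked on user_data instead of leaking it.

#include <hb.h>
#include <cstdio>

static void my_move_to (hb_draw_funcs_t *, void *, hb_draw_state_t *,
                        float to_x, float to_y, void *)
{ printf ("M %g %g\n", to_x, to_y); }

static void free_ud (void *ud) { printf ("destroy %p\n", ud); }

int main ()
{
  hb_draw_funcs_t *dfuncs = hb_draw_funcs_create ();
  hb_draw_funcs_set_move_to_func (dfuncs, my_move_to, nullptr, free_ud);
  hb_draw_funcs_make_immutable (dfuncs);
  /* Setting on an immutable object is refused, but free_ud still runs: */
  hb_draw_funcs_set_move_to_func (dfuncs, my_move_to, (void *) 0x1, free_ud);
  hb_draw_funcs_destroy (dfuncs);
  return 0;
}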

View File

@ -288,6 +288,7 @@ hb_face_destroy (hb_face_t *face)
{
if (!hb_object_destroy (face)) return;
#ifndef HB_NO_SHAPER
for (hb_face_t::plan_node_t *node = face->shape_plans; node; )
{
hb_face_t::plan_node_t *next = node->next;
@ -295,6 +296,7 @@ hb_face_destroy (hb_face_t *face)
hb_free (node);
node = next;
}
#endif
face->data.fini ();
face->table.fini ();
@ -636,7 +638,7 @@ hb_face_collect_variation_unicodes (hb_face_t *face,
struct face_table_info_t
{
hb_blob_t* data;
unsigned order;
signed order;
};
struct hb_face_builder_data_t
@ -784,16 +786,16 @@ hb_face_builder_create ()
hb_bool_t
hb_face_builder_add_table (hb_face_t *face, hb_tag_t tag, hb_blob_t *blob)
{
if (tag == HB_MAP_VALUE_INVALID)
if (unlikely (face->destroy != (hb_destroy_func_t) _hb_face_builder_data_destroy))
return false;
if (unlikely (face->destroy != (hb_destroy_func_t) _hb_face_builder_data_destroy))
if (tag == HB_MAP_VALUE_INVALID)
return false;
hb_face_builder_data_t *data = (hb_face_builder_data_t *) face->user_data;
hb_blob_t* previous = data->tables.get (tag).data;
if (!data->tables.set (tag, face_table_info_t {hb_blob_reference (blob), 0}))
if (!data->tables.set (tag, face_table_info_t {hb_blob_reference (blob), -1}))
{
hb_blob_destroy (blob);
return false;
@ -819,13 +821,16 @@ void
hb_face_builder_sort_tables (hb_face_t *face,
const hb_tag_t *tags)
{
if (unlikely (face->destroy != (hb_destroy_func_t) _hb_face_builder_data_destroy))
return;
hb_face_builder_data_t *data = (hb_face_builder_data_t *) face->user_data;
// Sort all unspecified tables after any specified tables.
for (auto& info : data->tables.values_ref())
info.order = -1;
info.order = (unsigned) -1;
unsigned order = 0;
signed order = 0;
for (const hb_tag_t* tag = tags;
*tag;
tag++)

View File

@ -65,7 +65,9 @@ struct hb_face_t
hb_shape_plan_t *shape_plan;
plan_node_t *next;
};
#ifndef HB_NO_SHAPER
hb_atomic_ptr_t<plan_node_t> shape_plans;
#endif
hb_blob_t *reference_table (hb_tag_t tag) const
{

View File

@ -75,16 +75,6 @@ _hb_fallback_shape (hb_shape_plan_t *shape_plan HB_UNUSED,
const hb_feature_t *features HB_UNUSED,
unsigned int num_features HB_UNUSED)
{
/* TODO
*
* - Apply fallback kern.
* - Handle Variation Selectors?
* - Apply normalization?
*
* This will make the fallback shaper into a dumb "TrueType"
* shaper which many people unfortunately still request.
*/
hb_codepoint_t space;
bool has_space = (bool) font->get_nominal_glyph (' ', &space);

View File

@ -71,7 +71,7 @@ hb_font_get_font_h_extents_nil (hb_font_t *font HB_UNUSED,
hb_font_extents_t *extents,
void *user_data HB_UNUSED)
{
memset (extents, 0, sizeof (*extents));
hb_memset (extents, 0, sizeof (*extents));
return false;
}
@ -96,7 +96,7 @@ hb_font_get_font_v_extents_nil (hb_font_t *font HB_UNUSED,
hb_font_extents_t *extents,
void *user_data HB_UNUSED)
{
memset (extents, 0, sizeof (*extents));
hb_memset (extents, 0, sizeof (*extents));
return false;
}
@ -409,7 +409,7 @@ hb_font_get_glyph_extents_nil (hb_font_t *font HB_UNUSED,
hb_glyph_extents_t *extents,
void *user_data HB_UNUSED)
{
memset (extents, 0, sizeof (*extents));
hb_memset (extents, 0, sizeof (*extents));
return false;
}
@ -518,6 +518,7 @@ typedef struct hb_font_get_glyph_shape_default_adaptor_t {
void *draw_data;
float x_scale;
float y_scale;
float slant;
} hb_font_get_glyph_shape_default_adaptor_t;
static void
@ -530,9 +531,10 @@ hb_draw_move_to_default (hb_draw_funcs_t *dfuncs HB_UNUSED,
hb_font_get_glyph_shape_default_adaptor_t *adaptor = (hb_font_get_glyph_shape_default_adaptor_t *) draw_data;
float x_scale = adaptor->x_scale;
float y_scale = adaptor->y_scale;
float slant = adaptor->slant;
adaptor->draw_funcs->emit_move_to (adaptor->draw_data, *st,
x_scale * to_x, y_scale * to_y);
x_scale * to_x + slant * to_y, y_scale * to_y);
}
static void
@ -544,12 +546,13 @@ hb_draw_line_to_default (hb_draw_funcs_t *dfuncs HB_UNUSED, void *draw_data,
hb_font_get_glyph_shape_default_adaptor_t *adaptor = (hb_font_get_glyph_shape_default_adaptor_t *) draw_data;
float x_scale = adaptor->x_scale;
float y_scale = adaptor->y_scale;
float slant = adaptor->slant;
st->current_x *= x_scale;
st->current_y *= y_scale;
st->current_x = st->current_x * x_scale + st->current_y * slant;
st->current_y = st->current_y * y_scale;
adaptor->draw_funcs->emit_line_to (adaptor->draw_data, *st,
x_scale * to_x, y_scale * to_y);
x_scale * to_x + slant * to_y, y_scale * to_y);
}
static void
@ -562,13 +565,14 @@ hb_draw_quadratic_to_default (hb_draw_funcs_t *dfuncs HB_UNUSED, void *draw_data
hb_font_get_glyph_shape_default_adaptor_t *adaptor = (hb_font_get_glyph_shape_default_adaptor_t *) draw_data;
float x_scale = adaptor->x_scale;
float y_scale = adaptor->y_scale;
float slant = adaptor->slant;
st->current_x *= x_scale;
st->current_y *= y_scale;
st->current_x = st->current_x * x_scale + st->current_y * slant;
st->current_y = st->current_y * y_scale;
adaptor->draw_funcs->emit_quadratic_to (adaptor->draw_data, *st,
x_scale * control_x, y_scale * control_y,
x_scale * to_x, y_scale * to_y);
x_scale * control_x + slant * control_y, y_scale * control_y,
x_scale * to_x + slant * to_y, y_scale * to_y);
}
static void
@ -582,14 +586,15 @@ hb_draw_cubic_to_default (hb_draw_funcs_t *dfuncs HB_UNUSED, void *draw_data,
hb_font_get_glyph_shape_default_adaptor_t *adaptor = (hb_font_get_glyph_shape_default_adaptor_t *) draw_data;
float x_scale = adaptor->x_scale;
float y_scale = adaptor->y_scale;
float slant = adaptor->slant;
st->current_x *= x_scale;
st->current_y *= y_scale;
st->current_x = st->current_x * x_scale + st->current_y * slant;
st->current_y = st->current_y * y_scale;
adaptor->draw_funcs->emit_cubic_to (adaptor->draw_data, *st,
x_scale * control1_x, y_scale * control1_y,
x_scale * control2_x, y_scale * control2_y,
x_scale * to_x, y_scale * to_y);
x_scale * control1_x + slant * control1_y, y_scale * control1_y,
x_scale * control2_x + slant * control2_y, y_scale * control2_y,
x_scale * to_x + slant * to_y, y_scale * to_y);
}
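
Editor's note: a worked example (standalone sketch, not HarfBuzz code) of the shear the adaptor applies above. Slanting by 0.2 shifts a point at height 100 by 20 units in x, while y is only scaled; this is why every x coordinate becomes x * x_scale + y * slant.

#include <cstdio>

int main ()
{
  float x = 50.f, y = 100.f;
  float x_scale = 1.f, y_scale = 1.f, slant = 0.2f;
  float xt = x * x_scale + y * slant; /* 70 */
  float yt = y * y_scale;             /* 100 */
  printf ("(%g, %g) -> (%g, %g)\n", x, y, xt, yt);
  return 0;
}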
static void
@ -623,8 +628,10 @@ hb_font_get_glyph_shape_default (hb_font_t *font,
hb_font_get_glyph_shape_default_adaptor_t adaptor = {
draw_funcs,
draw_data,
(float) font->x_scale / (float) font->parent->x_scale,
(float) font->y_scale / (float) font->parent->y_scale
font->parent->x_scale ? (float) font->x_scale / (float) font->parent->x_scale : 0.f,
font->parent->y_scale ? (float) font->y_scale / (float) font->parent->y_scale : 0.f,
font->parent->y_scale ? (font->slant - font->parent->slant) *
(float) font->x_scale / (float) font->parent->y_scale : 0.f
};
font->parent->get_glyph_shape (glyph,
@ -822,6 +829,56 @@ hb_font_funcs_is_immutable (hb_font_funcs_t *ffuncs)
}
static bool
_hb_font_funcs_set_preamble (hb_font_funcs_t *ffuncs,
bool func_is_null,
void **user_data,
hb_destroy_func_t *destroy)
{
if (hb_object_is_immutable (ffuncs))
{
if (*destroy)
(*destroy) (*user_data);
return false;
}
if (func_is_null)
{
if (*destroy)
(*destroy) (*user_data);
*destroy = nullptr;
*user_data = nullptr;
}
return true;
}
static bool
_hb_font_funcs_set_middle (hb_font_funcs_t *ffuncs,
void *user_data,
hb_destroy_func_t destroy)
{
if (user_data && !ffuncs->user_data)
{
ffuncs->user_data = (decltype (ffuncs->user_data)) hb_calloc (1, sizeof (*ffuncs->user_data));
if (unlikely (!ffuncs->user_data))
goto fail;
}
if (destroy && !ffuncs->destroy)
{
ffuncs->destroy = (decltype (ffuncs->destroy)) hb_calloc (1, sizeof (*ffuncs->destroy));
if (unlikely (!ffuncs->destroy))
goto fail;
}
return true;
fail:
if (destroy)
(destroy) (user_data);
return false;
}
#define HB_FONT_FUNC_IMPLEMENT(name) \
\
void \
@ -830,51 +887,24 @@ hb_font_funcs_set_##name##_func (hb_font_funcs_t *ffuncs, \
void *user_data, \
hb_destroy_func_t destroy) \
{ \
if (hb_object_is_immutable (ffuncs)) \
goto fail; \
\
if (!func) \
{ \
if (destroy) \
destroy (user_data); \
destroy = nullptr; \
user_data = nullptr; \
} \
if (!_hb_font_funcs_set_preamble (ffuncs, !func, &user_data, &destroy))\
return; \
\
if (ffuncs->destroy && ffuncs->destroy->name) \
ffuncs->destroy->name (!ffuncs->user_data ? nullptr : ffuncs->user_data->name); \
\
if (user_data && !ffuncs->user_data) \
{ \
ffuncs->user_data = (decltype (ffuncs->user_data)) hb_calloc (1, sizeof (*ffuncs->user_data)); \
if (unlikely (!ffuncs->user_data)) \
goto fail; \
} \
if (destroy && !ffuncs->destroy) \
{ \
ffuncs->destroy = (decltype (ffuncs->destroy)) hb_calloc (1, sizeof (*ffuncs->destroy)); \
if (unlikely (!ffuncs->destroy)) \
goto fail; \
} \
if (!_hb_font_funcs_set_middle (ffuncs, user_data, destroy)) \
return; \
\
if (func) { \
if (func) \
ffuncs->get.f.name = func; \
if (ffuncs->user_data) \
ffuncs->user_data->name = user_data; \
if (ffuncs->destroy) \
ffuncs->destroy->name = destroy; \
} else { \
else \
ffuncs->get.f.name = hb_font_get_##name##_default; \
if (ffuncs->user_data) \
ffuncs->user_data->name = nullptr; \
if (ffuncs->destroy) \
ffuncs->destroy->name = nullptr; \
} \
return; \
\
fail: \
if (destroy) \
destroy (user_data); \
\
if (ffuncs->user_data) \
ffuncs->user_data->name = user_data; \
if (ffuncs->destroy) \
ffuncs->destroy->name = destroy; \
}
HB_FONT_FUNCS_IMPLEMENT_CALLBACKS
@ -1323,7 +1353,7 @@ hb_font_get_glyph_from_name (hb_font_t *font,
* @draw_data: User data to pass to draw callbacks
*
* Fetches the glyph shape that corresponds to a glyph in the specified @font.
* The shape is returned by way of calls to the callsbacks of the @dfuncs
* The shape is returned by way of calls to the callbacks of the @dfuncs
* objects, with @draw_data passed to them.
*
* Since: 4.0.0
@ -1780,8 +1810,8 @@ hb_font_create_sub_font (hb_font_t *parent)
float *design_coords = (float *) hb_calloc (num_coords, sizeof (parent->design_coords[0]));
if (likely (coords && design_coords))
{
memcpy (coords, parent->coords, num_coords * sizeof (parent->coords[0]));
memcpy (design_coords, parent->design_coords, num_coords * sizeof (parent->design_coords[0]));
hb_memcpy (coords, parent->coords, num_coords * sizeof (parent->coords[0]));
hb_memcpy (design_coords, parent->design_coords, num_coords * sizeof (parent->design_coords[0]));
_hb_font_adopt_var_coords (font, coords, design_coords, num_coords);
}
else
@ -2443,7 +2473,7 @@ hb_font_set_var_coords_design (hb_font_t *font,
}
if (coords_length)
memcpy (design_coords, coords, coords_length * sizeof (font->design_coords[0]));
hb_memcpy (design_coords, coords, coords_length * sizeof (font->design_coords[0]));
hb_ot_var_normalize_coords (font->face, coords_length, coords, normalized);
_hb_font_adopt_var_coords (font, normalized, design_coords, coords_length);
@ -2519,8 +2549,8 @@ hb_font_set_var_coords_normalized (hb_font_t *font,
if (coords_length)
{
memcpy (copy, coords, coords_length * sizeof (coords[0]));
memcpy (unmapped, coords, coords_length * sizeof (coords[0]));
hb_memcpy (copy, coords, coords_length * sizeof (coords[0]));
hb_memcpy (unmapped, coords, coords_length * sizeof (coords[0]));
}
/* Best effort design coords simulation */

View File

@ -206,14 +206,14 @@ struct hb_font_t
hb_bool_t get_font_h_extents (hb_font_extents_t *extents)
{
memset (extents, 0, sizeof (*extents));
hb_memset (extents, 0, sizeof (*extents));
return klass->get.f.font_h_extents (this, user_data,
extents,
!klass->user_data ? nullptr : klass->user_data->font_h_extents);
}
hb_bool_t get_font_v_extents (hb_font_extents_t *extents)
{
memset (extents, 0, sizeof (*extents));
hb_memset (extents, 0, sizeof (*extents));
return klass->get.f.font_v_extents (this, user_data,
extents,
!klass->user_data ? nullptr : klass->user_data->font_v_extents);
@ -342,7 +342,7 @@ struct hb_font_t
hb_bool_t get_glyph_extents (hb_codepoint_t glyph,
hb_glyph_extents_t *extents)
{
memset (extents, 0, sizeof (*extents));
hb_memset (extents, 0, sizeof (*extents));
return klass->get.f.glyph_extents (this, user_data,
glyph,
extents,

View File

@ -89,7 +89,7 @@ struct hb_ft_font_t
bool unref; /* Whether to destroy ft_face when done. */
bool transform; /* Whether to apply FT_Face's transform. */
mutable hb_mutex_t lock;
mutable hb_mutex_t lock; /* Protects members below. */
FT_Face ft_face;
mutable unsigned cached_serial;
mutable hb_ft_advance_cache_t advance_cache;
@ -732,16 +732,18 @@ hb_ft_get_font_h_extents (hb_font_t *font HB_UNUSED,
static int
_hb_ft_move_to (const FT_Vector *to,
hb_draw_session_t *drawing)
void *arg)
{
hb_draw_session_t *drawing = (hb_draw_session_t *) arg;
drawing->move_to (to->x, to->y);
return FT_Err_Ok;
}
static int
_hb_ft_line_to (const FT_Vector *to,
hb_draw_session_t *drawing)
void *arg)
{
hb_draw_session_t *drawing = (hb_draw_session_t *) arg;
drawing->line_to (to->x, to->y);
return FT_Err_Ok;
}
@ -749,8 +751,9 @@ _hb_ft_line_to (const FT_Vector *to,
static int
_hb_ft_conic_to (const FT_Vector *control,
const FT_Vector *to,
hb_draw_session_t *drawing)
void *arg)
{
hb_draw_session_t *drawing = (hb_draw_session_t *) arg;
drawing->quadratic_to (control->x, control->y,
to->x, to->y);
return FT_Err_Ok;
@ -760,8 +763,9 @@ static int
_hb_ft_cubic_to (const FT_Vector *control1,
const FT_Vector *control2,
const FT_Vector *to,
hb_draw_session_t *drawing)
void *arg)
{
hb_draw_session_t *drawing = (hb_draw_session_t *) arg;
drawing->cubic_to (control1->x, control1->y,
control2->x, control2->y,
to->x, to->y);
@ -787,10 +791,10 @@ hb_ft_get_glyph_shape (hb_font_t *font HB_UNUSED,
return;
const FT_Outline_Funcs outline_funcs = {
(FT_Outline_MoveToFunc) _hb_ft_move_to,
(FT_Outline_LineToFunc) _hb_ft_line_to,
(FT_Outline_ConicToFunc) _hb_ft_conic_to,
(FT_Outline_CubicToFunc) _hb_ft_cubic_to,
_hb_ft_move_to,
_hb_ft_line_to,
_hb_ft_conic_to,
_hb_ft_cubic_to,
0, /* shift */
0, /* delta */
};
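
Editor's note: the generic form of the fix above, as a self-contained sketch. FT_Outline_Funcs declares its callbacks as taking void *; calling a function through a pointer cast to an incompatible type is undefined behavior, so the callbacks now match the declared signature and cast the context pointer inside instead.

#include <cstdio>

typedef int (*callback_t) (int value, void *ctx);

static int typed_cb (int value, void *ctx)
{
  double *accum = (double *) ctx; /* cast the data pointer, not the function */
  *accum += value;
  return 0;
}

static void run (callback_t cb, void *ctx) { cb (1, ctx); cb (2, ctx); }

int main ()
{
  double accum = 0.0;
  run (typed_cb, &accum);
  printf ("%g\n", accum); /* 3 */
  return 0;
}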
@ -975,8 +979,9 @@ hb_ft_face_create_referenced (FT_Face ft_face)
}
static void
hb_ft_face_finalize (FT_Face ft_face)
hb_ft_face_finalize (void *arg)
{
FT_Face ft_face = (FT_Face) arg;
hb_face_destroy ((hb_face_t *) ft_face->generic.data);
}
@ -1008,7 +1013,7 @@ hb_ft_face_create_cached (FT_Face ft_face)
ft_face->generic.finalizer (ft_face);
ft_face->generic.data = hb_ft_face_create (ft_face, nullptr);
ft_face->generic.finalizer = (FT_Generic_Finalizer) hb_ft_face_finalize;
ft_face->generic.finalizer = hb_ft_face_finalize;
}
return hb_face_reference ((hb_face_t *) ft_face->generic.data);
@ -1217,8 +1222,9 @@ get_ft_library ()
}
static void
_release_blob (FT_Face ft_face)
_release_blob (void *arg)
{
FT_Face ft_face = (FT_Face) arg;
hb_blob_destroy ((hb_blob_t *) ft_face->generic.data);
}
@ -1271,7 +1277,7 @@ hb_ft_font_set_funcs (hb_font_t *font)
ft_face->generic.data = blob;
ft_face->generic.finalizer = (FT_Generic_Finalizer) _release_blob;
ft_face->generic.finalizer = _release_blob;
_hb_ft_font_set_funcs (font, ft_face, true);
hb_ft_font_set_load_flags (font, FT_LOAD_DEFAULT | FT_LOAD_NO_HINTING);

View File

@ -129,32 +129,9 @@ hb_glib_unicode_compose (hb_unicode_funcs_t *ufuncs HB_UNUSED,
{
#if GLIB_CHECK_VERSION(2,29,12)
return g_unichar_compose (a, b, ab);
#else
return false;
#endif
/* We don't ifdef-out the fallback code such that compiler always
* sees it and makes sure it's compilable. */
gchar utf8[12];
gchar *normalized;
int len;
hb_bool_t ret;
len = g_unichar_to_utf8 (a, utf8);
len += g_unichar_to_utf8 (b, utf8 + len);
normalized = g_utf8_normalize (utf8, len, G_NORMALIZE_NFC);
len = g_utf8_strlen (normalized, -1);
if (unlikely (!len))
return false;
if (len == 1) {
*ab = g_utf8_get_char (normalized);
ret = true;
} else {
ret = false;
}
g_free (normalized);
return ret;
}
static hb_bool_t
@ -166,55 +143,9 @@ hb_glib_unicode_decompose (hb_unicode_funcs_t *ufuncs HB_UNUSED,
{
#if GLIB_CHECK_VERSION(2,29,12)
return g_unichar_decompose (ab, a, b);
#else
return false;
#endif
/* We don't ifdef-out the fallback code such that compiler always
* sees it and makes sure it's compilable. */
gchar utf8[6];
gchar *normalized;
int len;
hb_bool_t ret;
len = g_unichar_to_utf8 (ab, utf8);
normalized = g_utf8_normalize (utf8, len, G_NORMALIZE_NFD);
len = g_utf8_strlen (normalized, -1);
if (unlikely (!len))
return false;
if (len == 1) {
*a = g_utf8_get_char (normalized);
*b = 0;
ret = *a != ab;
} else if (len == 2) {
*a = g_utf8_get_char (normalized);
*b = g_utf8_get_char (g_utf8_next_char (normalized));
/* Here's the ugly part: if ab decomposes to a single character and
* that character decomposes again, we have to detect that and undo
* the second part :-(. */
gchar *recomposed = g_utf8_normalize (normalized, -1, G_NORMALIZE_NFC);
hb_codepoint_t c = g_utf8_get_char (recomposed);
if (c != ab && c != *a) {
*a = c;
*b = 0;
}
g_free (recomposed);
ret = true;
} else {
/* If decomposed to more than two characters, take the last one,
* and recompose the rest to get the first component. */
gchar *end = g_utf8_offset_to_pointer (normalized, len - 1);
gchar *recomposed;
*b = g_utf8_get_char (end);
recomposed = g_utf8_normalize (normalized, end - normalized, G_NORMALIZE_NFC);
/* We expect that recomposed has exactly one character now. */
*a = g_utf8_get_char (recomposed);
g_free (recomposed);
ret = true;
}
g_free (normalized);
return ret;
}

View File

@ -318,7 +318,7 @@ _hb_graphite2_shape (hb_shape_plan_t *shape_plan HB_UNUSED,
#undef ALLOCATE_ARRAY
memset (clusters, 0, sizeof (clusters[0]) * buffer->len);
hb_memset (clusters, 0, sizeof (clusters[0]) * buffer->len);
hb_codepoint_t *pg = gids;
clusters[0].cluster = buffer->info[0].cluster;

View File

@ -73,8 +73,10 @@ struct hb_iter_t
/* Operators. */
iter_t iter () const { return *thiz(); }
iter_t operator + () const { return *thiz(); }
iter_t begin () const { return *thiz(); }
iter_t end () const { return thiz()->__end__ (); }
iter_t _begin () const { return *thiz(); }
iter_t begin () const { return _begin (); }
iter_t _end () const { return thiz()->__end__ (); }
iter_t end () const { return _end (); }
explicit operator bool () const { return thiz()->__more__ (); }
unsigned len () const { return thiz()->__len__ (); }
/* The following can only be enabled if item_t is reference type. Otherwise
@ -118,7 +120,9 @@ struct hb_iter_t
#define HB_ITER_USING(Name) \
using item_t = typename Name::item_t; \
using Name::_begin; \
using Name::begin; \
using Name::_end; \
using Name::end; \
using Name::get_item_size; \
using Name::is_iterator; \
@ -377,7 +381,7 @@ struct hb_map_iter_t :
void __forward__ (unsigned n) { it += n; }
void __prev__ () { --it; }
void __rewind__ (unsigned n) { it -= n; }
hb_map_iter_t __end__ () const { return hb_map_iter_t (it.end (), f); }
hb_map_iter_t __end__ () const { return hb_map_iter_t (it._end (), f); }
bool operator != (const hb_map_iter_t& o) const
{ return it != o.it; }
@ -440,7 +444,7 @@ struct hb_filter_iter_t :
bool __more__ () const { return bool (it); }
void __next__ () { do ++it; while (it && !hb_has (p.get (), hb_get (f.get (), *it))); }
void __prev__ () { do --it; while (it && !hb_has (p.get (), hb_get (f.get (), *it))); }
hb_filter_iter_t __end__ () const { return hb_filter_iter_t (it.end (), p, f); }
hb_filter_iter_t __end__ () const { return hb_filter_iter_t (it._end (), p, f); }
bool operator != (const hb_filter_iter_t& o) const
{ return it != o.it; }
@ -553,7 +557,7 @@ struct hb_zip_iter_t :
void __forward__ (unsigned n) { a += n; b += n; }
void __prev__ () { --a; --b; }
void __rewind__ (unsigned n) { a -= n; b -= n; }
hb_zip_iter_t __end__ () const { return hb_zip_iter_t (a.end (), b.end ()); }
hb_zip_iter_t __end__ () const { return hb_zip_iter_t (a._end (), b._end ()); }
/* Note, we should stop if ANY of the iters reaches end. As such two compare
* unequal if both items are unequal, NOT if either is unequal. */
bool operator != (const hb_zip_iter_t& o) const
@ -637,7 +641,7 @@ struct hb_concat_iter_t :
}
}
hb_concat_iter_t __end__ () const { return hb_concat_iter_t (a.end (), b.end ()); }
hb_concat_iter_t __end__ () const { return hb_concat_iter_t (a._end (), b._end ()); }
bool operator != (const hb_concat_iter_t& o) const
{
return a != o.a

View File

@ -136,6 +136,13 @@ static inline Type& StructAfter(TObject &X)
/*
* Lazy loaders.
*
* The lazy-loaders are thread-safe pointer-like objects that create their
* instance on-demand. They also support access to a "data" object that is
* necessary for creating their instance. The data object, if specified,
* is accessed via pointer math, located at a location before the position
* of the loader itself. This avoids having to store a pointer to data
* for every lazy-loader. Multiple lazy-loaders can access the same data.
*/
template <typename Data, unsigned int WheresData>
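
Editor's note: a self-contained sketch of the pointer-math layout described in the comment above, simplified from hb_data_wrapper_t and assuming a typical ABI. A data pointer is stored WheresData pointer-slots before the loader, so the loader reaches it by walking back from its own address rather than storing a back-pointer per loader.

#include <cstdio>

struct thing { int id; };

template <unsigned WheresData>
struct data_wrapper
{
  thing *get_data () const
  { return * (((thing **) (void *) this) - WheresData); }
};

struct owner
{
  thing *data;            /* pointer stored one slot before the loader */
  data_wrapper<1> loader; /* finds `data` by walking back one pointer slot */
};

int main ()
{
  thing t = {42};
  owner o = {&t, {}};
  printf ("%d\n", o.loader.get_data ()->id); /* 42 */
  return 0;
}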
@ -228,7 +235,8 @@ struct hb_lazy_loader_t : hb_data_wrapper_t<Data, WheresData>
bool cmpexch (Stored *current, Stored *value) const
{
/* This *must* be called when there are no other threads accessing. */
/* This function can only be safely called directly if no
* other thread is accessing. */
return this->instance.cmpexch (current, value);
}
@ -261,7 +269,7 @@ struct hb_lazy_loader_t : hb_data_wrapper_t<Data, WheresData>
hb_free (p);
}
// private:
private:
/* Must only have one pointer. */
hb_atomic_ptr_t<Stored *> instance;
};
@ -283,7 +291,7 @@ struct hb_table_lazy_loader_t : hb_lazy_loader_t<T,
{
auto c = hb_sanitize_context_t ();
if (core)
c.set_num_glyphs (0); // So we don't recurse ad infinitum...
c.set_num_glyphs (0); // So we don't recurse ad infinitum (core tables don't need num_glyphs)
return c.reference_table<T> (face);
}
static void destroy (hb_blob_t *p) { hb_blob_destroy (p); }

View File

@ -43,9 +43,9 @@ struct hb_hashmap_t
hb_hashmap_t () { init (); }
~hb_hashmap_t () { fini (); }
hb_hashmap_t (const hb_hashmap_t& o) : hb_hashmap_t () { resize (population); hb_copy (o, *this); }
hb_hashmap_t (const hb_hashmap_t& o) : hb_hashmap_t () { resize (o.population); hb_copy (o, *this); }
hb_hashmap_t (hb_hashmap_t&& o) : hb_hashmap_t () { hb_swap (*this, o); }
hb_hashmap_t& operator= (const hb_hashmap_t& o) { resize (population); hb_copy (o, *this); return *this; }
hb_hashmap_t& operator= (const hb_hashmap_t& o) { reset (); resize (o.population); hb_copy (o, *this); return *this; }
hb_hashmap_t& operator= (hb_hashmap_t&& o) { hb_swap (*this, o); return *this; }
hb_hashmap_t (std::initializer_list<hb_pair_t<K, V>> lst) : hb_hashmap_t ()
@ -71,6 +71,11 @@ struct hb_hashmap_t
uint32_t is_tombstone_ : 1;
V value;
item_t () : key (),
hash (0),
is_used_ (false), is_tombstone_ (false),
value () {}
bool is_used () const { return is_used_; }
void set_used (bool is_used) { is_used_ = is_used; }
bool is_tombstone () const { return is_tombstone_; }
@ -88,17 +93,8 @@ struct hb_hashmap_t
return minus_1;
};
void clear ()
{
new (std::addressof (key)) K ();
new (std::addressof (value)) V ();
hash = 0;
is_used_ = false;
is_tombstone_ = false;
}
bool operator == (const K &o) { return hb_deref (key) == hb_deref (o); }
bool operator == (const item_t &o) { return *this == o.key; }
bool operator == (const K &o) const { return hb_deref (key) == hb_deref (o); }
bool operator == (const item_t &o) const { return *this == o.key; }
hb_pair_t<K, V> get_pair() const { return hb_pair_t<K, V> (key, value); }
hb_pair_t<const K &, const V &> get_pair_ref() const { return hb_pair_t<const K &, const V &> (key, value); }
@ -107,8 +103,8 @@ struct hb_hashmap_t
};
hb_object_header_t header;
bool successful; /* Allocations successful */
unsigned int population; /* Not including tombstones. */
unsigned int successful : 1; /* Allocations successful */
unsigned int population : 31; /* Not including tombstones. */
unsigned int occupancy; /* Including tombstones. */
unsigned int mask;
unsigned int prime;
@ -118,7 +114,10 @@ struct hb_hashmap_t
{
if (unlikely (!a.successful || !b.successful))
return;
hb_swap (a.population, b.population);
unsigned tmp = a.population;
a.population = b.population;
b.population = tmp;
//hb_swap (a.population, b.population);
hb_swap (a.occupancy, b.occupancy);
hb_swap (a.mask, b.mask);
hb_swap (a.prime, b.prime);
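
Editor's note: why the manual swap above replaces hb_swap. C++ forbids binding a non-const reference to a bit-field, so a swap that takes T& cannot operate on the new 31-bit population member; a plain temporary works fine.

#include <cstdio>

struct s { unsigned successful : 1; unsigned population : 31; };

int main ()
{
  s x = {1, 5}, y = {1, 7};
  /* hb_swap (x.population, y.population);  -- ill-formed: reference to bit-field */
  unsigned tmp = x.population; x.population = y.population; y.population = tmp;
  printf ("%u %u\n", x.population, y.population); /* 7 5 */
  return 0;
}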
@ -160,7 +159,9 @@ struct hb_hashmap_t
{
if (unlikely (!successful)) return false;
unsigned int power = hb_bit_storage (hb_max (population, new_population) * 2 + 8);
if (new_population != 0 && (new_population + new_population / 2) < mask) return true;
unsigned int power = hb_bit_storage (hb_max ((unsigned) population, new_population) * 2 + 8);
unsigned int new_size = 1u << power;
item_t *new_items = (item_t *) hb_malloc ((size_t) new_size * sizeof (item_t));
if (unlikely (!new_items))
@ -169,9 +170,9 @@ struct hb_hashmap_t
return false;
}
for (auto &_ : hb_iter (new_items, new_size))
_.clear ();
new (&_) item_t ();
unsigned int old_size = mask + 1;
unsigned int old_size = size ();
item_t *old_items = items;
/* Switch to new, empty, array. */
@ -181,47 +182,82 @@ struct hb_hashmap_t
items = new_items;
/* Insert back old items. */
if (old_items)
for (unsigned int i = 0; i < old_size; i++)
for (unsigned int i = 0; i < old_size; i++)
{
if (old_items[i].is_real ())
{
if (old_items[i].is_real ())
{
set_with_hash (old_items[i].key,
old_items[i].hash,
std::move (old_items[i].value));
}
old_items[i].~item_t ();
set_with_hash (std::move (old_items[i].key),
old_items[i].hash,
std::move (old_items[i].value));
}
old_items[i].~item_t ();
}
hb_free (old_items);
return true;
}
template <typename VV>
bool set (K key, VV&& value) { return set_with_hash (key, hb_hash (key), std::forward<VV> (value)); }
const V& get (K key) const
template <typename KK, typename VV>
bool set_with_hash (KK&& key, uint32_t hash, VV&& value, bool is_delete=false)
{
if (unlikely (!items)) return item_t::default_value ();
unsigned int i = bucket_for (key);
return items[i].is_real () && items[i] == key ? items[i].value : item_t::default_value ();
if (unlikely (!successful)) return false;
if (unlikely ((occupancy + occupancy / 2) >= mask && !resize ())) return false;
item_t &item = item_for_hash (key, hash);
if (is_delete && !(item == key))
return true; /* Trying to delete non-existent key. */
if (item.is_used ())
{
occupancy--;
if (!item.is_tombstone ())
population--;
}
item.key = std::forward<KK> (key);
item.value = std::forward<VV> (value);
item.hash = hash;
item.set_used (true);
item.set_tombstone (is_delete);
occupancy++;
if (!is_delete)
population++;
return true;
}
void del (K key) { set_with_hash (key, hb_hash (key), item_t::default_value (), true); }
template <typename VV>
bool set (const K &key, VV&& value) { return set_with_hash (key, hb_hash (key), std::forward<VV> (value)); }
template <typename VV>
bool set (K &&key, VV&& value) { return set_with_hash (std::move (key), hb_hash (key), std::forward<VV> (value)); }
const V& get_with_hash (const K &key, uint32_t hash) const
{
if (unlikely (!items)) return item_t::default_value ();
auto &item = item_for_hash (key, hash);
return item.is_real () && item == key ? item.value : item_t::default_value ();
}
const V& get (const K &key) const
{
if (unlikely (!items)) return item_t::default_value ();
return get_with_hash (key, hb_hash (key));
}
void del (const K &key) { set_with_hash (key, hb_hash (key), item_t::default_value (), true); }
/* Has interface. */
typedef const V& value_t;
value_t operator [] (K k) const { return get (k); }
const V& operator [] (K k) const { return get (k); }
template <typename VV=V>
bool has (K key, VV **vp = nullptr) const
{
if (unlikely (!items))
return false;
unsigned int i = bucket_for (key);
if (items[i].is_real () && items[i] == key)
auto &item = item_for_hash (key, hb_hash (key));
if (item.is_real () && item == key)
{
if (vp) *vp = &items[i].value;
if (vp) *vp = std::addressof (item.value);
return true;
}
else
@ -230,13 +266,18 @@ struct hb_hashmap_t
/* Projection. */
V operator () (K k) const { return get (k); }
unsigned size () const { return mask ? mask + 1 : 0; }
void clear ()
{
if (unlikely (!successful)) return;
if (items)
for (auto &_ : hb_iter (items, mask + 1))
_.clear ();
for (auto &_ : hb_iter (items, size ()))
{
/* Reconstruct items. */
_.~item_t ();
new (&_) item_t ();
}
population = occupancy = 0;
}
@ -246,11 +287,10 @@ struct hb_hashmap_t
uint32_t hash () const
{
uint32_t h = 0;
for (const auto &item : + hb_array (items, mask ? mask + 1 : 0)
| hb_filter (&item_t::is_real))
h ^= item.total_hash ();
return h;
return
+ iter_items ()
| hb_reduce ([] (uint32_t h, const item_t &_) { return h ^ _.total_hash (); }, (uint32_t) 0u)
;
}
bool is_equal (const hb_hashmap_t &other) const
@ -258,7 +298,7 @@ struct hb_hashmap_t
if (population != other.population) return false;
for (auto pair : iter ())
if (get (pair.first) != pair.second)
if (other.get (pair.first) != pair.second)
return false;
return true;
@ -271,87 +311,54 @@ struct hb_hashmap_t
/*
* Iterator
*/
auto iter () const HB_AUTO_RETURN
auto iter_items () const HB_AUTO_RETURN
(
+ hb_array (items, mask ? mask + 1 : 0)
+ hb_iter (items, size ())
| hb_filter (&item_t::is_real)
| hb_map (&item_t::get_pair)
)
auto iter_ref () const HB_AUTO_RETURN
(
+ hb_array (items, mask ? mask + 1 : 0)
| hb_filter (&item_t::is_real)
+ iter_items ()
| hb_map (&item_t::get_pair_ref)
)
auto keys () const HB_AUTO_RETURN
auto iter () const HB_AUTO_RETURN
(
+ hb_array (items, mask ? mask + 1 : 0)
| hb_filter (&item_t::is_real)
| hb_map (&item_t::key)
| hb_map (hb_ridentity)
+ iter_items ()
| hb_map (&item_t::get_pair)
)
auto keys_ref () const HB_AUTO_RETURN
(
+ hb_array (items, mask ? mask + 1 : 0)
| hb_filter (&item_t::is_real)
+ iter_items ()
| hb_map (&item_t::key)
)
auto values () const HB_AUTO_RETURN
auto keys () const HB_AUTO_RETURN
(
+ hb_array (items, mask ? mask + 1 : 0)
| hb_filter (&item_t::is_real)
| hb_map (&item_t::value)
+ keys_ref ()
| hb_map (hb_ridentity)
)
auto values_ref () const HB_AUTO_RETURN
(
+ hb_array (items, mask ? mask + 1 : 0)
| hb_filter (&item_t::is_real)
+ iter_items ()
| hb_map (&item_t::value)
)
auto values () const HB_AUTO_RETURN
(
+ values_ref ()
| hb_map (hb_ridentity)
)
/* Sink interface. */
hb_hashmap_t& operator << (const hb_pair_t<K, V>& v)
{ set (v.first, v.second); return *this; }
hb_hashmap_t& operator << (const hb_pair_t<K, V&&>& v)
{ set (v.first, std::move (v.second)); return *this; }
hb_hashmap_t& operator << (const hb_pair_t<K&&, V>& v)
{ set (std::move (v.first), v.second); return *this; }
hb_hashmap_t& operator << (const hb_pair_t<K&&, V&&>& v)
{ set (std::move (v.first), std::move (v.second)); return *this; }
protected:
template <typename VV>
bool set_with_hash (K key, uint32_t hash, VV&& value, bool is_delete=false)
{
if (unlikely (!successful)) return false;
if (unlikely ((occupancy + occupancy / 2) >= mask && !resize ())) return false;
unsigned int i = bucket_for_hash (key, hash);
if (is_delete && items[i].key != key)
return true; /* Trying to delete non-existent key. */
if (items[i].is_used ())
{
occupancy--;
if (!items[i].is_tombstone ())
population--;
}
items[i].key = key;
items[i].value = std::forward<VV> (value);
items[i].hash = hash;
items[i].set_used (true);
items[i].set_tombstone (is_delete);
occupancy++;
if (!is_delete)
population++;
return true;
}
unsigned int bucket_for (const K &key) const
{
return bucket_for_hash (key, hb_hash (key));
}
unsigned int bucket_for_hash (const K &key, uint32_t hash) const
item_t& item_for_hash (const K &key, uint32_t hash) const
{
hash &= 0x3FFFFFFF; // We only store lower 30bit of hash
unsigned int i = hash % prime;
@ -360,12 +367,12 @@ struct hb_hashmap_t
while (items[i].is_used ())
{
if (items[i].hash == hash && items[i] == key)
return i;
return items[i];
if (tombstone == (unsigned) -1 && items[i].is_tombstone ())
tombstone = i;
i = (i + ++step) & mask;
}
return tombstone == (unsigned) -1 ? i : tombstone;
return items[tombstone == (unsigned) -1 ? i : tombstone];
}
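
Editor's note: a minimal sketch of the probe sequence in item_for_hash above. The step grows triangularly (+1, +2, +3, ...) and is masked to the power-of-two table size; the real code additionally remembers the first tombstone so deleted slots get reused on insertion.

#include <cstdio>

int main ()
{
  const unsigned mask = 7; /* table of 8 slots */
  unsigned i = 13 % 7;     /* initial bucket from hash % prime: slot 6 */
  unsigned step = 0;
  for (unsigned probes = 0; probes < 4; probes++)
  {
    printf ("probe %u -> slot %u\n", probes, i); /* 6, 7, 1, 4 */
    i = (i + ++step) & mask;
  }
  return 0;
}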
static unsigned int prime_for (unsigned int shift)

View File

@ -112,8 +112,7 @@ template <typename T> auto _hb_try_add_pointer (hb_priority<1>) -> hb_type_ident
template <typename T> using hb_add_pointer = decltype (_hb_try_add_pointer<T> (hb_prioritize));
/* TODO Add feature-parity to std::decay. */
template <typename T> using hb_decay = hb_remove_const<hb_remove_reference<T>>;
template <typename T> using hb_decay = typename std::decay<T>::type;
#define hb_is_convertible(From,To) std::is_convertible<From, To>::value

View File

@ -166,7 +166,7 @@ hb_ms_setup_features (const hb_feature_t *features,
{
auto *feature = active_features.lsearch (event->feature);
if (feature)
active_features.remove (feature - active_features.arrayZ);
active_features.remove_ordered (feature - active_features.arrayZ);
}
}

thirdparty/harfbuzz/src/hb-multimap.hh vendored Normal file
View File

@ -0,0 +1,92 @@
/*
* Copyright © 2022 Behdad Esfahbod
*
* This is part of HarfBuzz, a text shaping library.
*
* Permission is hereby granted, without written agreement and without
* license or royalty fees, to use, copy, modify, and distribute this
* software and its documentation for any purpose, provided that the
* above copyright notice and the following two paragraphs appear in
* all copies of this software.
*
* IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
* DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
* ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
* IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
* DAMAGE.
*
* THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
* ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
* PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
*/
#ifndef HB_MULTIMAP_HH
#define HB_MULTIMAP_HH
#include "hb.hh"
#include "hb-map.hh"
#include "hb-vector.hh"
/*
* hb_multimap_t
*/
struct hb_multimap_t
{
void add (hb_codepoint_t k, hb_codepoint_t v)
{
hb_codepoint_t *i;
if (multiples_indices.has (k, &i))
{
multiples_values[*i].push (v);
return;
}
hb_codepoint_t *old_v;
if (singulars.has (k, &old_v))
{
hb_codepoint_t old = *old_v;
singulars.del (k);
multiples_indices.set (k, multiples_values.length);
auto *vec = multiples_values.push ();
vec->push (old);
vec->push (v);
return;
}
singulars.set (k, v);
}
hb_array_t<const hb_codepoint_t> get (hb_codepoint_t k) const
{
hb_codepoint_t *v;
if (singulars.has (k, &v))
return hb_array (v, 1);
hb_codepoint_t *i;
if (multiples_indices.has (k, &i))
return multiples_values[*i].as_array ();
return hb_array_t<hb_codepoint_t> ();
}
bool in_error () const
{
return singulars.in_error () || multiples_indices.in_error () || multiples_values.in_error ();
}
protected:
hb_map_t singulars;
hb_map_t multiples_indices;
hb_vector_t<hb_vector_t<hb_codepoint_t>> multiples_values;
};
#endif /* HB_MULTIMAP_HH */
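
Editor's note: a hypothetical usage sketch of this new internal helper (it compiles only inside the HarfBuzz tree), illustrating the singular/multiple split visible in the source above: the first value for a key stays in a plain map, and a second add migrates both values into a vector.

#include "hb-multimap.hh"
#include <cassert>

static void demo ()
{
  hb_multimap_t m;
  m.add (10, 1); /* first value for key 10: stored in `singulars` */
  m.add (10, 2); /* second value: both migrate into `multiples_values` */
  m.add (20, 5);

  assert (m.get (10).length == 2);
  assert (m.get (20).length == 1);
  assert (m.get (99).length == 0);
}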

View File

@ -108,10 +108,11 @@ struct hb_mutex_t
struct hb_lock_t
{
hb_lock_t (hb_mutex_t &mutex_) : mutex (mutex_) { mutex.lock (); }
~hb_lock_t () { mutex.unlock (); }
hb_lock_t (hb_mutex_t &mutex_) : mutex (&mutex_) { mutex->lock (); }
hb_lock_t (hb_mutex_t *mutex_) : mutex (mutex_) { if (mutex) mutex->lock (); }
~hb_lock_t () { if (mutex) mutex->unlock (); }
private:
hb_mutex_t &mutex;
hb_mutex_t *mutex;
};

View File

@ -31,7 +31,7 @@
#include "hb.hh"
#line 35 "hb-number-parser.hh"
#line 32 "hb-number-parser.hh"
static const unsigned char _double_parser_trans_keys[] = {
0u, 0u, 43u, 57u, 46u, 57u, 48u, 57u, 43u, 57u, 48u, 57u, 48u, 101u, 48u, 57u,
46u, 101u, 0
@ -135,12 +135,12 @@ strtod_rl (const char *p, const char **end_ptr /* IN/OUT */)
int cs;
#line 139 "hb-number-parser.hh"
#line 132 "hb-number-parser.hh"
{
cs = double_parser_start;
}
#line 144 "hb-number-parser.hh"
#line 135 "hb-number-parser.hh"
{
int _slen;
int _trans;
@ -198,7 +198,7 @@ _resume:
exp_overflow = true;
}
break;
#line 202 "hb-number-parser.hh"
#line 187 "hb-number-parser.hh"
}
_again:

View File

@ -80,7 +80,7 @@ struct hb_lockable_set_t
if (item)
{
item_t old = *item;
*item = items[items.length - 1];
*item = std::move (items.tail ());
items.pop ();
l.unlock ();
old.fini ();
@ -123,7 +123,7 @@ struct hb_lockable_set_t
l.lock ();
while (items.length)
{
item_t old = items[items.length - 1];
item_t old = items.tail ();
items.pop ();
l.unlock ();
old.fini ();

View File

@ -90,7 +90,7 @@ typedef struct OpenTypeOffsetTable
{
if (table_count)
{
+ tables.sub_array (start_offset, table_count)
+ tables.as_array ().sub_array (start_offset, table_count)
| hb_map (&TableRecord::tag)
| hb_sink (hb_array (table_tags, *table_count))
;
@ -158,7 +158,7 @@ typedef struct OpenTypeOffsetTable
return_trace (false);
if (likely (len))
memcpy (start, blob->data, len);
hb_memcpy (start, blob->data, len);
/* 4-byte alignment. */
c->align (4);

View File

@ -148,7 +148,7 @@ struct HBFixed : Type
static_assert (Type::static_size * 8 > fraction_bits, "");
HBFixed& operator = (typename Type::type i ) { Type::operator= (i); return *this; }
float to_float () const { return ((int32_t) Type::v) / shift; }
float to_float (float offset = 0) const { return ((int32_t) Type::v + offset) / shift; }
void set_float (float f) { Type::v = roundf (f * shift); }
public:
DEFINE_SIZE_STATIC (Type::static_size);
@ -157,6 +157,9 @@ struct HBFixed : Type
/* 16-bit signed fixed number with the low 14 bits of fraction (2.14). */
using F2DOT14 = HBFixed<HBINT16, 14>;
/* 16-bit signed fixed number with the low 12 bits of fraction (4.12). */
using F4DOT12 = HBFixed<HBINT16, 12>;
/* 32-bit signed fixed-point number (16.16). */
using F16DOT16 = HBFixed<HBINT32, 16>;
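
Editor's note: a worked example (standalone sketch) of the fixed-point formats above. The real value is raw / 2^fraction_bits, so the same bit pattern 0x3000 decodes differently in 2.14 and 4.12.

#include <cstdio>

int main ()
{
  short raw = 0x3000;
  printf ("F2DOT14: %g\n",  raw / 16384.0);      /* 0.75 (2^14) */
  printf ("F4DOT12: %g\n",  raw / 4096.0);       /* 3.0  (2^12) */
  printf ("F16DOT16: %g\n", 0x18000 / 65536.0);  /* 1.5  (2^16) */
  return 0;
}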
@ -209,6 +212,12 @@ typedef Index NameID;
struct VarIdx : HBUINT32 {
static constexpr unsigned NO_VARIATION = 0xFFFFFFFFu;
static_assert (NO_VARIATION == HB_OT_LAYOUT_NO_VARIATIONS_INDEX, "");
static uint32_t add (uint32_t i, unsigned short v)
{
if (i == NO_VARIATION) return i;
return i + v;
}
VarIdx& operator = (uint32_t i) { HBUINT32::operator= (i); return *this; }
};
DECLARE_NULL_NAMESPACE_BYTES (OT, VarIdx);
@ -493,10 +502,10 @@ struct UnsizedArrayOf
void qsort (unsigned int len, unsigned int start = 0, unsigned int end = (unsigned int) -1)
{ as_array (len).qsort (start, end); }
bool serialize (hb_serialize_context_t *c, unsigned int items_len)
bool serialize (hb_serialize_context_t *c, unsigned int items_len, bool clear = true)
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend (this, items_len))) return_trace (false);
if (unlikely (!c->extend_size (this, get_size (items_len), clear))) return_trace (false);
return_trace (true);
}
template <typename Iterator,
@ -504,8 +513,8 @@ struct UnsizedArrayOf
bool serialize (hb_serialize_context_t *c, Iterator items)
{
TRACE_SERIALIZE (this);
unsigned count = items.len ();
if (unlikely (!serialize (c, count))) return_trace (false);
unsigned count = hb_len (items);
if (unlikely (!serialize (c, count, false))) return_trace (false);
/* TODO Umm. Just exhaust the iterator instead? Being extra
* cautious right now.. */
for (unsigned i = 0; i < count; i++, ++items)
@ -646,14 +655,9 @@ struct ArrayOf
operator iter_t () const { return iter (); }
operator writer_t () { return writer (); }
hb_array_t<const Type> sub_array (unsigned int start_offset, unsigned int count) const
{ return as_array ().sub_array (start_offset, count); }
hb_array_t<const Type> sub_array (unsigned int start_offset, unsigned int *count = nullptr /* IN/OUT */) const
{ return as_array ().sub_array (start_offset, count); }
hb_array_t<Type> sub_array (unsigned int start_offset, unsigned int count)
{ return as_array ().sub_array (start_offset, count); }
hb_array_t<Type> sub_array (unsigned int start_offset, unsigned int *count = nullptr /* IN/OUT */)
{ return as_array ().sub_array (start_offset, count); }
/* Faster range-based for loop. */
const Type *begin () const { return arrayZ; }
const Type *end () const { return arrayZ + len; }
template <typename T>
Type &lsearch (const T &x, Type &not_found = Crap (Type))
@ -667,15 +671,15 @@ struct ArrayOf
unsigned int to_store = (unsigned int) -1) const
{ return as_array ().lfind (x, i, not_found, to_store); }
void qsort (unsigned int start = 0, unsigned int end = (unsigned int) -1)
{ as_array ().qsort (start, end); }
void qsort ()
{ as_array ().qsort (); }
HB_NODISCARD bool serialize (hb_serialize_context_t *c, unsigned items_len)
HB_NODISCARD bool serialize (hb_serialize_context_t *c, unsigned items_len, bool clear = true)
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (this))) return_trace (false);
c->check_assign (len, items_len, HB_SERIALIZE_ERROR_ARRAY_OVERFLOW);
if (unlikely (!c->extend (this))) return_trace (false);
if (unlikely (!c->extend_size (this, get_size (), clear))) return_trace (false);
return_trace (true);
}
template <typename Iterator,
@ -683,8 +687,8 @@ struct ArrayOf
HB_NODISCARD bool serialize (hb_serialize_context_t *c, Iterator items)
{
TRACE_SERIALIZE (this);
unsigned count = items.len ();
if (unlikely (!serialize (c, count))) return_trace (false);
unsigned count = hb_len (items);
if (unlikely (!serialize (c, count, false))) return_trace (false);
/* TODO Umm. Just exhaust the iterator instead? Being extra
* cautious right now.. */
for (unsigned i = 0; i < count; i++, ++items)
@ -828,21 +832,25 @@ struct HeadlessArrayOf
operator iter_t () const { return iter (); }
operator writer_t () { return writer (); }
bool serialize (hb_serialize_context_t *c, unsigned int items_len)
/* Faster range-based for loop. */
const Type *begin () const { return arrayZ; }
const Type *end () const { return arrayZ + get_length (); }
HB_NODISCARD bool serialize (hb_serialize_context_t *c, unsigned int items_len, bool clear = true)
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (this))) return_trace (false);
c->check_assign (lenP1, items_len + 1, HB_SERIALIZE_ERROR_ARRAY_OVERFLOW);
if (unlikely (!c->extend (this))) return_trace (false);
if (unlikely (!c->extend_size (this, get_size (), clear))) return_trace (false);
return_trace (true);
}
template <typename Iterator,
hb_requires (hb_is_source_of (Iterator, Type))>
bool serialize (hb_serialize_context_t *c, Iterator items)
HB_NODISCARD bool serialize (hb_serialize_context_t *c, Iterator items)
{
TRACE_SERIALIZE (this);
unsigned count = items.len ();
if (unlikely (!serialize (c, count))) return_trace (false);
unsigned count = hb_len (items);
if (unlikely (!serialize (c, count, false))) return_trace (false);
/* TODO Umm. Just exhaust the iterator instead? Being extra
* cautious right now.. */
for (unsigned i = 0; i < count; i++, ++items)
@ -944,14 +952,9 @@ struct SortedArrayOf : ArrayOf<Type, LenType>
operator iter_t () const { return iter (); }
operator writer_t () { return writer (); }
hb_sorted_array_t<const Type> sub_array (unsigned int start_offset, unsigned int count) const
{ return as_array ().sub_array (start_offset, count); }
hb_sorted_array_t<const Type> sub_array (unsigned int start_offset, unsigned int *count = nullptr /* IN/OUT */) const
{ return as_array ().sub_array (start_offset, count); }
hb_sorted_array_t<Type> sub_array (unsigned int start_offset, unsigned int count)
{ return as_array ().sub_array (start_offset, count); }
hb_sorted_array_t<Type> sub_array (unsigned int start_offset, unsigned int *count = nullptr /* IN/OUT */)
{ return as_array ().sub_array (start_offset, count); }
/* Faster range-based for loop. */
const Type *begin () const { return this->arrayZ; }
const Type *end () const { return this->arrayZ + this->len; }
bool serialize (hb_serialize_context_t *c, unsigned int items_len)
{

View File

@ -66,95 +66,25 @@ struct CFFIndex
{
TRACE_SERIALIZE (this);
unsigned int size = get_size ();
CFFIndex *out = c->allocate_size<CFFIndex> (size);
CFFIndex *out = c->allocate_size<CFFIndex> (size, false);
if (likely (out))
memcpy (out, this, size);
hb_memcpy (out, this, size);
return_trace (out);
}
template <typename Iterable,
hb_requires (hb_is_iterable (Iterable))>
bool serialize (hb_serialize_context_t *c,
unsigned int offSize_,
const byte_str_array_t &byteArray)
const Iterable &iterable)
{
TRACE_SERIALIZE (this);
if (byteArray.length == 0)
{
COUNT *dest = c->allocate_min<COUNT> ();
if (unlikely (!dest)) return_trace (false);
*dest = 0;
return_trace (true);
}
/* serialize CFFIndex header */
if (unlikely (!c->extend_min (this))) return_trace (false);
this->count = byteArray.length;
this->offSize = offSize_;
if (unlikely (!c->allocate_size<HBUINT8> (offSize_ * (byteArray.length + 1))))
return_trace (false);
/* serialize indices */
unsigned int offset = 1;
unsigned int i = 0;
for (; i < byteArray.length; i++)
{
set_offset_at (i, offset);
offset += byteArray[i].get_size ();
}
set_offset_at (i, offset);
/* serialize data */
for (unsigned int i = 0; i < byteArray.length; i++)
{
const hb_ubytes_t &bs = byteArray[i];
unsigned char *dest = c->allocate_size<unsigned char> (bs.length);
if (unlikely (!dest)) return_trace (false);
memcpy (dest, &bs[0], bs.length);
}
return_trace (true);
}
bool serialize (hb_serialize_context_t *c,
unsigned int offSize_,
const str_buff_vec_t &buffArray)
{
byte_str_array_t byteArray;
byteArray.init ();
byteArray.resize (buffArray.length);
for (unsigned int i = 0; i < byteArray.length; i++)
byteArray[i] = hb_ubytes_t (buffArray[i].arrayZ, buffArray[i].length);
bool result = this->serialize (c, offSize_, byteArray);
byteArray.fini ();
return result;
}
template <typename Iterator,
hb_requires (hb_is_iterator (Iterator))>
bool serialize (hb_serialize_context_t *c,
Iterator it)
{
TRACE_SERIALIZE (this);
serialize_header(c, + it | hb_map ([] (const hb_ubytes_t &_) { return _.length; }));
auto it = hb_iter (iterable);
serialize_header(c, + it | hb_map (hb_iter) | hb_map (hb_len));
for (const auto &_ : +it)
_.copy (c);
hb_iter (_).copy (c);
return_trace (true);
}
bool serialize (hb_serialize_context_t *c,
const byte_str_array_t &byteArray)
{ return serialize (c, + hb_iter (byteArray)); }
bool serialize (hb_serialize_context_t *c,
const str_buff_vec_t &buffArray)
{
auto it =
+ hb_iter (buffArray)
| hb_map ([] (const str_buff_t &_) { return hb_ubytes_t (_.arrayZ, _.length); })
;
return serialize (c, it);
}
template <typename Iterator,
hb_requires (hb_is_iterator (Iterator))>
bool serialize_header (hb_serialize_context_t *c,
@ -171,7 +101,7 @@ struct CFFIndex
if (!this->count) return_trace (true);
if (unlikely (!c->extend (this->offSize))) return_trace (false);
this->offSize = off_size;
if (unlikely (!c->allocate_size<HBUINT8> (off_size * (this->count + 1))))
if (unlikely (!c->allocate_size<HBUINT8> (off_size * (this->count + 1), false)))
return_trace (false);
/* serialize indices */
@ -179,14 +109,27 @@ struct CFFIndex
unsigned int i = 0;
for (unsigned _ : +it)
{
CFFIndex<COUNT>::set_offset_at (i++, offset);
set_offset_at (i++, offset);
offset += _;
}
CFFIndex<COUNT>::set_offset_at (i, offset);
set_offset_at (i, offset);
return_trace (true);
}
template <typename Iterable,
hb_requires (hb_is_iterable (Iterable))>
static unsigned total_size (const Iterable &iterable)
{
auto it = + hb_iter (iterable) | hb_map (hb_iter) | hb_map (hb_len);
if (!it) return 0;
unsigned total = + it | hb_reduce (hb_add, 0);
unsigned off_size = (hb_bit_storage (total + 1) + 7) / 8;
return min_size + HBUINT8::static_size + (hb_len (it) + 1) * off_size + total;
}
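
total_size above picks the narrowest offset width that can represent the largest stored offset — CFF INDEX offsets are 1-based, so that is total + 1 — then sums the header, the offSize byte, n+1 offsets, and the payload. A standalone sketch of the arithmetic (header_size stands in for min_size and is an assumption here):

#include <cstddef>
#include <cstdint>

// Position of the highest set bit, like hb_bit_storage.
static unsigned bit_storage (uint32_t v)
{
  unsigned n = 0;
  while (v) { n++; v >>= 1; }
  return n;
}

static size_t index_total_size (size_t header_size, size_t n_items, size_t total)
{
  if (!n_items) return header_size;
  size_t off_size = (bit_storage ((uint32_t) (total + 1)) + 7) / 8;
  return header_size + 1 /* offSize byte */
       + (n_items + 1) * off_size        /* one offset per item, plus end */
       + total;                          /* concatenated payload */
}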
void set_offset_at (unsigned int index, unsigned int offset)
{
assert (index <= count);
@ -207,10 +150,14 @@ struct CFFIndex
unsigned int size = offSize;
const HBUINT8 *p = offsets + size * index;
unsigned int offset = 0;
for (; size; size--)
offset = (offset << 8) + *p++;
return offset;
switch (size)
{
case 1: return * (HBUINT8 *) p;
case 2: return * (HBUINT16 *) p;
case 3: return * (HBUINT24 *) p;
case 4: return * (HBUINT32 *) p;
default: return 0;
}
}
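
The byte-accumulation loop is replaced by a direct big-endian read of 1–4 bytes, dispatched on the offset width. A self-contained equivalent of the unrolled read:

#include <cstdint>

// Read a big-endian offset of `size` bytes (1..4); the unrolled form of
// the loop it replaces. Returns 0 for out-of-range widths, as above.
static uint32_t read_be_offset (const uint8_t *p, unsigned size)
{
  switch (size)
  {
    case 1: return p[0];
    case 2: return (uint32_t) p[0] << 8  | p[1];
    case 3: return (uint32_t) p[0] << 16 | (uint32_t) p[1] << 8 | p[2];
    case 4: return (uint32_t) p[0] << 24 | (uint32_t) p[1] << 16 |
                   (uint32_t) p[2] << 8  | p[3];
    default: return 0;
  }
}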
unsigned int length_at (unsigned int index) const
@ -229,6 +176,7 @@ struct CFFIndex
hb_ubytes_t operator [] (unsigned int index) const
{
if (unlikely (index >= count)) return hb_ubytes_t ();
_hb_compiler_memory_r_barrier ();
unsigned length = length_at (index);
if (unlikely (!length)) return hb_ubytes_t ();
return hb_ubytes_t (data_base () + offset_at (index) - 1, length);
@ -280,7 +228,7 @@ struct CFFIndexOf : CFFIndex<COUNT>
if (unlikely (!c->extend_min (this))) return_trace (false);
this->count = dataArrayLen;
this->offSize = offSize_;
if (unlikely (!c->allocate_size<HBUINT8> (offSize_ * (dataArrayLen + 1))))
if (unlikely (!c->allocate_size<HBUINT8> (offSize_ * (dataArrayLen + 1), false)))
return_trace (false);
/* serialize indices */
@ -288,10 +236,10 @@ struct CFFIndexOf : CFFIndex<COUNT>
unsigned int i = 0;
for (; i < dataArrayLen; i++)
{
CFFIndex<COUNT>::set_offset_at (i, offset);
this->set_offset_at (i, offset);
offset += dataSizeArray[i];
}
CFFIndex<COUNT>::set_offset_at (i, offset);
this->set_offset_at (i, offset);
/* serialize data */
for (unsigned int i = 0; i < dataArrayLen; i++)
@ -324,13 +272,12 @@ struct Dict : UnsizedByteStr
template <typename T, typename V>
static bool serialize_int_op (hb_serialize_context_t *c, op_code_t op, V value, op_code_t intOp)
{
// XXX: not sure why but LLVM fails to compile the following 'unlikely' macro invocation
if (/*unlikely*/ (!serialize_int<T, V> (c, intOp, value)))
if (unlikely ((!serialize_int<T, V> (c, intOp, value))))
return false;
TRACE_SERIALIZE (this);
/* serialize the opcode */
HBUINT8 *p = c->allocate_size<HBUINT8> (OpCode_Size (op));
HBUINT8 *p = c->allocate_size<HBUINT8> (OpCode_Size (op), false);
if (unlikely (!p)) return_trace (false);
if (Is_OpCode_ESC (op))
{
@ -415,9 +362,8 @@ struct FDSelect0 {
TRACE_SANITIZE (this);
if (unlikely (!(c->check_struct (this))))
return_trace (false);
for (unsigned int i = 0; i < c->get_num_glyphs (); i++)
if (unlikely (!fds[i].sanitize (c)))
return_trace (false);
if (unlikely (!c->check_array (fds, c->get_num_glyphs ())))
return_trace (false);
return_trace (true);
}
@ -471,14 +417,20 @@ struct FDSelect3_4
return_trace (true);
}
static int _cmp_range (const void *_key, const void *_item)
{
hb_codepoint_t glyph = * (hb_codepoint_t *) _key;
FDSelect3_4_Range<GID_TYPE, FD_TYPE> *range = (FDSelect3_4_Range<GID_TYPE, FD_TYPE> *) _item;
if (glyph < range[0].first) return -1;
if (glyph < range[1].first) return 0;
return +1;
}
hb_codepoint_t get_fd (hb_codepoint_t glyph) const
{
unsigned int i;
for (i = 1; i < nRanges (); i++)
if (glyph < ranges[i].first)
break;
return (hb_codepoint_t) ranges[i - 1].fd;
auto *range = hb_bsearch (glyph, &ranges[0], nRanges () - 1, sizeof (ranges[0]), _cmp_range);
return range ? range->fd : ranges[nRanges () - 1].fd;
}
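
get_fd now binary-searches the range array instead of scanning it: the comparator treats each record and its successor as the half-open interval [range[0].first, range[1].first), which is safe because the format stores a sentinel record at index nRanges() - 1 that is excluded from the search. A minimal sketch of the same lookup with standard bsearch (types simplified):

#include <cstdint>
#include <cstdlib>

struct range_t { uint32_t first; uint8_t fd; };

// Compare a glyph against the interval bounded by this record and the next.
static int cmp_range (const void *key, const void *item)
{
  uint32_t glyph = *(const uint32_t *) key;
  const range_t *range = (const range_t *) item;
  if (glyph <  range[0].first) return -1;
  if (glyph >= range[1].first) return +1;
  return 0;
}

static uint8_t lookup_fd (const range_t *ranges, size_t n_ranges, uint32_t glyph)
{
  // Search n_ranges - 1 records; the last one is the sentinel fallback.
  const range_t *r = (const range_t *)
    bsearch (&glyph, ranges, n_ranges - 1, sizeof (*ranges), cmp_range);
  return r ? r->fd : ranges[n_ranges - 1].fd;
}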
GID_TYPE &nRanges () { return ranges.len; }
@ -501,9 +453,9 @@ struct FDSelect
{
TRACE_SERIALIZE (this);
unsigned int size = src.get_size (num_glyphs);
FDSelect *dest = c->allocate_size<FDSelect> (size);
FDSelect *dest = c->allocate_size<FDSelect> (size, false);
if (unlikely (!dest)) return_trace (false);
memcpy (dest, &src, size);
hb_memcpy (dest, &src, size);
return_trace (true);
}

View File

@ -175,7 +175,7 @@ struct Encoding
unsigned int size = src.get_size ();
Encoding *dest = c->allocate_size<Encoding> (size);
if (unlikely (!dest)) return_trace (false);
memcpy (dest, &src, size);
hb_memcpy (dest, &src, size);
return_trace (true);
}
@ -471,7 +471,7 @@ struct Charset
unsigned int size = src.get_size (num_glyphs);
Charset *dest = c->allocate_size<Charset> (size);
if (unlikely (!dest)) return_trace (false);
memcpy (dest, &src, size);
hb_memcpy (dest, &src, size);
return_trace (true);
}
@ -617,7 +617,6 @@ struct CFF1StringIndex : CFF1Index
}
byte_str_array_t bytesArray;
bytesArray.init ();
if (!bytesArray.resize (sidmap.get_population ()))
return_trace (false);
for (unsigned int i = 0; i < strings.count; i++)
@ -628,7 +627,6 @@ struct CFF1StringIndex : CFF1Index
}
bool result = CFF1Index::serialize (c, bytesArray);
bytesArray.fini ();
return_trace (result);
}
};
@ -813,7 +811,7 @@ struct cff1_top_dict_opset_t : top_dict_opset_t<cff1_top_dict_val_t>
break;
default:
env.last_offset = env.str_ref.offset;
env.last_offset = env.str_ref.get_offset ();
top_dict_opset_t<cff1_top_dict_val_t>::process_op (op, env, dictval);
/* Record this operand below if stack is empty, otherwise done */
if (!env.argStack.is_empty ()) return;
@ -1295,10 +1293,10 @@ struct cff1
}
protected:
hb_blob_t *blob = nullptr;
hb_sanitize_context_t sc;
public:
hb_blob_t *blob = nullptr;
const Encoding *encoding = nullptr;
const Charset *charset = nullptr;
const CFF1NameIndex *nameIndex = nullptr;

View File

@ -56,7 +56,7 @@ struct CFF2FDSelect
unsigned int size = src.get_size (num_glyphs);
CFF2FDSelect *dest = c->allocate_size<CFF2FDSelect> (size);
if (unlikely (!dest)) return_trace (false);
memcpy (dest, &src, size);
hb_memcpy (dest, &src, size);
return_trace (true);
}
@ -124,7 +124,7 @@ struct CFF2VariationStore
unsigned int size_ = varStore->get_size ();
CFF2VariationStore *dest = c->allocate_size<CFF2VariationStore> (size_);
if (unlikely (!dest)) return_trace (false);
memcpy (dest, varStore, size_);
hb_memcpy (dest, varStore, size_);
return_trace (true);
}
@ -483,13 +483,18 @@ struct cff2
blob = nullptr;
}
hb_map_t *create_glyph_to_sid_map () const
{
return nullptr;
}
bool is_valid () const { return blob; }
protected:
hb_blob_t *blob = nullptr;
hb_sanitize_context_t sc;
public:
hb_blob_t *blob = nullptr;
cff2_top_dict_values_t topDict;
const CFF2Subrs *globalSubrs = nullptr;
const CFF2VariationStore *varStore = nullptr;

View File

@ -909,7 +909,7 @@ struct DefaultUVS : SortedArray32Of<UnicodeValueRange>
hb_codepoint_t first = arrayZ[i].startUnicodeValue;
hb_codepoint_t last = hb_min ((hb_codepoint_t) (first + arrayZ[i].additionalCount),
(hb_codepoint_t) HB_UNICODE_MAX);
out->add_range (first, hb_min (last, 0x10FFFFu));
out->add_range (first, last);
}
}
@ -925,37 +925,75 @@ struct DefaultUVS : SortedArray32Of<UnicodeValueRange>
if (unlikely (!c->copy<HBUINT32> (len))) return nullptr;
unsigned init_len = c->length ();
hb_codepoint_t lastCode = HB_MAP_VALUE_INVALID;
int count = -1;
for (const UnicodeValueRange& _ : as_array ())
if (this->len > unicodes->get_population () * hb_bit_storage ((unsigned) this->len))
{
for (const unsigned addcnt : hb_range ((unsigned) _.additionalCount + 1))
{
unsigned curEntry = (unsigned) _.startUnicodeValue + addcnt;
if (!unicodes->has (curEntry)) continue;
count += 1;
if (lastCode == HB_MAP_VALUE_INVALID)
lastCode = curEntry;
else if (lastCode + count != curEntry)
{
UnicodeValueRange rec;
rec.startUnicodeValue = lastCode;
rec.additionalCount = count - 1;
c->copy<UnicodeValueRange> (rec);
hb_codepoint_t start = HB_SET_VALUE_INVALID;
hb_codepoint_t end = HB_SET_VALUE_INVALID;
lastCode = curEntry;
count = 0;
for (hb_codepoint_t u = HB_SET_VALUE_INVALID;
unicodes->next (&u);)
{
if (!as_array ().bsearch (u))
continue;
if (start == HB_SET_VALUE_INVALID)
{
start = u;
end = start - 1;
}
if (end + 1 != u || end - start == 255)
{
UnicodeValueRange rec;
rec.startUnicodeValue = start;
rec.additionalCount = end - start;
c->copy<UnicodeValueRange> (rec);
start = u;
}
end = u;
}
if (start != HB_SET_VALUE_INVALID)
{
UnicodeValueRange rec;
rec.startUnicodeValue = start;
rec.additionalCount = end - start;
c->copy<UnicodeValueRange> (rec);
}
}
else
{
hb_codepoint_t lastCode = HB_SET_VALUE_INVALID;
int count = -1;
for (const UnicodeValueRange& _ : *this)
{
hb_codepoint_t curEntry = (hb_codepoint_t) (_.startUnicodeValue - 1);
hb_codepoint_t end = curEntry + _.additionalCount + 2;
for (; unicodes->next (&curEntry) && curEntry < end;)
{
count += 1;
if (lastCode == HB_SET_VALUE_INVALID)
lastCode = curEntry;
else if (lastCode + count != curEntry)
{
UnicodeValueRange rec;
rec.startUnicodeValue = lastCode;
rec.additionalCount = count - 1;
c->copy<UnicodeValueRange> (rec);
lastCode = curEntry;
count = 0;
}
}
}
}
if (lastCode != HB_MAP_VALUE_INVALID)
{
UnicodeValueRange rec;
rec.startUnicodeValue = lastCode;
rec.additionalCount = count;
c->copy<UnicodeValueRange> (rec);
if (lastCode != HB_MAP_VALUE_INVALID)
{
UnicodeValueRange rec;
rec.startUnicodeValue = lastCode;
rec.additionalCount = count;
c->copy<UnicodeValueRange> (rec);
}
}
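
Both branches above emit the same records; the guard len > population * bit_storage(len) only decides which side to iterate. Walking every range costs about len set probes, while walking the set costs about population binary searches of log2(len) steps each, so the smaller estimated cost wins. A sketch of that decision in isolation:

#include <cstdint>

static unsigned bit_width (uint32_t v)
{
  unsigned n = 0;
  while (v) { n++; v >>= 1; }
  return n;
}

// True when iterating the set (with a bsearch per element) is expected
// to be cheaper than scanning all ranges, mirroring the guard above.
static bool walk_set_side (unsigned n_ranges, unsigned set_population)
{ return n_ranges > set_population * bit_width (n_ranges); }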
if (c->length () - init_len == 0)
@ -1474,32 +1512,80 @@ struct EncodingRecord
DEFINE_SIZE_STATIC (8);
};
struct cmap;
struct SubtableUnicodesCache {
private:
const void* base;
hb_hashmap_t<intptr_t, hb::unique_ptr<hb_set_t>> cached_unicodes;
hb_blob_ptr_t<cmap> base_blob;
const char* base;
hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> cached_unicodes;
public:
SubtableUnicodesCache(const void* cmap_base)
: base(cmap_base), cached_unicodes() {}
hb_set_t* set_for (const EncodingRecord* record)
static SubtableUnicodesCache* create (hb_blob_ptr_t<cmap> source_table)
{
if (!cached_unicodes.has ((intptr_t) record))
SubtableUnicodesCache* cache =
(SubtableUnicodesCache*) hb_malloc (sizeof(SubtableUnicodesCache));
new (cache) SubtableUnicodesCache (source_table);
return cache;
}
static void destroy (void* value) {
if (!value) return;
SubtableUnicodesCache* cache = (SubtableUnicodesCache*) value;
cache->~SubtableUnicodesCache ();
hb_free (cache);
}
SubtableUnicodesCache(const void* cmap_base)
: base_blob(),
base ((const char*) cmap_base),
cached_unicodes ()
{}
SubtableUnicodesCache(hb_blob_ptr_t<cmap> base_blob_)
: base_blob(base_blob_),
base ((const char *) base_blob.get()),
cached_unicodes ()
{}
~SubtableUnicodesCache()
{
base_blob.destroy ();
}
bool same_base(const void* other) const
{
return other == (const void*) base;
}
const hb_set_t* set_for (const EncodingRecord* record,
SubtableUnicodesCache& mutable_cache) const
{
if (cached_unicodes.has ((unsigned) ((const char *) record - base)))
return cached_unicodes.get ((unsigned) ((const char *) record - base));
return mutable_cache.set_for (record);
}
const hb_set_t* set_for (const EncodingRecord* record)
{
if (!cached_unicodes.has ((unsigned) ((const char *) record - base)))
{
hb_set_t *s = hb_set_create ();
if (unlikely (s->in_error ()))
return hb_set_get_empty ();
(base+record->subtable).collect_unicodes (s);
if (unlikely (!cached_unicodes.set ((intptr_t) record, hb::unique_ptr<hb_set_t> {s})))
if (unlikely (!cached_unicodes.set ((unsigned) ((const char *) record - base), hb::unique_ptr<hb_set_t> {s})))
return hb_set_get_empty ();
return s;
}
return cached_unicodes.get ((intptr_t) record);
return cached_unicodes.get ((unsigned) ((const char *) record - base));
}
};
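
Keying the memo by byte offset (record - base) rather than by pointer makes the cache valid for any in-memory copy of the same table bytes, which is what allows a subset plan to share a pre-filled cache. A minimal sketch of the offset-keyed memo using the standard library (names and the collect callback are hypothetical):

#include <cstddef>
#include <cstdint>
#include <set>
#include <unordered_map>

struct unicodes_cache_t
{
  const char *base;                                       // table start
  std::unordered_map<size_t, std::set<uint32_t>> cached;  // offset -> set

  const std::set<uint32_t> &set_for (const void *record,
                                     std::set<uint32_t> (*collect) (const void *))
  {
    size_t key = (const char *) record - base;  // stable across copies
    auto it = cached.find (key);
    if (it == cached.end ())
      it = cached.emplace (key, collect (record)).first;
    return it->second;
  }
};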
@ -1523,13 +1609,30 @@ struct cmap
{
static constexpr hb_tag_t tableTag = HB_OT_TAG_cmap;
static SubtableUnicodesCache* create_filled_cache(hb_blob_ptr_t<cmap> source_table) {
const cmap* cmap = source_table.get();
auto it =
+ hb_iter (cmap->encodingRecord)
| hb_filter ([&](const EncodingRecord& _) {
return cmap::filter_encoding_records_for_subset (cmap, _);
})
;
SubtableUnicodesCache* cache = SubtableUnicodesCache::create(source_table);
for (const EncodingRecord& _ : it)
cache->set_for(&_); // populate the cache for this encoding record.
return cache;
}
template<typename Iterator, typename EncodingRecIter,
hb_requires (hb_is_iterator (EncodingRecIter))>
bool serialize (hb_serialize_context_t *c,
Iterator it,
EncodingRecIter encodingrec_iter,
const void *base,
const hb_subset_plan_t *plan,
hb_subset_plan_t *plan,
bool drop_format_4 = false)
{
if (unlikely (!c->extend_min ((*this)))) return false;
@ -1538,7 +1641,14 @@ struct cmap
unsigned format4objidx = 0, format12objidx = 0, format14objidx = 0;
auto snap = c->snapshot ();
SubtableUnicodesCache unicodes_cache (base);
SubtableUnicodesCache local_unicodes_cache (base);
const SubtableUnicodesCache* unicodes_cache = &local_unicodes_cache;
if (plan->accelerator &&
plan->accelerator->cmap_cache &&
plan->accelerator->cmap_cache->same_base (base))
unicodes_cache = plan->accelerator->cmap_cache;
for (const EncodingRecord& _ : encodingrec_iter)
{
if (c->in_error ())
@ -1547,7 +1657,7 @@ struct cmap
unsigned format = (base+_.subtable).u.format;
if (format != 4 && format != 12 && format != 14) continue;
hb_set_t* unicodes_set = unicodes_cache.set_for (&_);
const hb_set_t* unicodes_set = unicodes_cache->set_for (&_, local_unicodes_cache);
if (!drop_format_4 && format == 4)
{
@ -1566,7 +1676,13 @@ struct cmap
else if (format == 12)
{
if (_can_drop (_, *unicodes_set, base, unicodes_cache, + it | hb_map (hb_first), encodingrec_iter)) continue;
if (_can_drop (_,
*unicodes_set,
base,
*unicodes_cache,
local_unicodes_cache,
+ it | hb_map (hb_first), encodingrec_iter))
continue;
c->copy (_, + it | hb_filter (*unicodes_set, hb_first), 12u, base, plan, &format12objidx);
}
else if (format == 14) c->copy (_, it, 14u, base, plan, &format14objidx);
@ -1585,7 +1701,8 @@ struct cmap
bool _can_drop (const EncodingRecord& cmap12,
const hb_set_t& cmap12_unicodes,
const void* base,
SubtableUnicodesCache& unicodes_cache,
const SubtableUnicodesCache& unicodes_cache,
SubtableUnicodesCache& local_unicodes_cache,
Iterator subset_unicodes,
EncodingRecordIterator encoding_records)
{
@ -1616,7 +1733,7 @@ struct cmap
|| (base+_.subtable).get_language() != target_language)
continue;
hb_set_t* sibling_unicodes = unicodes_cache.set_for (&_);
const hb_set_t* sibling_unicodes = unicodes_cache.set_for (&_, local_unicodes_cache);
auto cmap12 = + subset_unicodes | hb_filter (cmap12_unicodes);
auto sibling = + subset_unicodes | hb_filter (*sibling_unicodes);
@ -1653,17 +1770,9 @@ struct cmap
auto encodingrec_iter =
+ hb_iter (encodingRecord)
| hb_filter ([&] (const EncodingRecord& _)
{
if ((_.platformID == 0 && _.encodingID == 3) ||
(_.platformID == 0 && _.encodingID == 4) ||
(_.platformID == 3 && _.encodingID == 1) ||
(_.platformID == 3 && _.encodingID == 10) ||
(this + _.subtable).u.format == 14)
return true;
return false;
})
| hb_filter ([&](const EncodingRecord& _) {
return cmap::filter_encoding_records_for_subset (this, _);
})
;
if (unlikely (!encodingrec_iter.len ())) return_trace (false);
@ -1692,7 +1801,11 @@ struct cmap
{ return (_.second != HB_MAP_VALUE_INVALID); })
;
return_trace (cmap_prime->serialize (c->serializer, it, encodingrec_iter, this, c->plan));
return_trace (cmap_prime->serialize (c->serializer,
it,
encodingrec_iter,
this,
c->plan));
}
const CmapSubtable *find_best_subtable (bool *symbol = nullptr) const
@ -1928,6 +2041,19 @@ struct cmap
encodingRecord.sanitize (c, this));
}
private:
static bool filter_encoding_records_for_subset(const cmap* cmap,
const EncodingRecord& _)
{
return
(_.platformID == 0 && _.encodingID == 3) ||
(_.platformID == 0 && _.encodingID == 4) ||
(_.platformID == 3 && _.encodingID == 1) ||
(_.platformID == 3 && _.encodingID == 10) ||
(cmap + _.subtable).u.format == 14;
}
protected:
HBUINT16 version; /* Table version number (0). */
SortedArray16Of<EncodingRecord>

View File

@ -67,7 +67,7 @@ _copy_data_to_cbdt (hb_vector_t<char> *cbdt_prime,
{
unsigned int new_len = cbdt_prime->length + length;
if (unlikely (!cbdt_prime->alloc (new_len))) return false;
memcpy (cbdt_prime->arrayZ + cbdt_prime->length, data, length);
hb_memcpy (cbdt_prime->arrayZ + cbdt_prime->length, data, length);
cbdt_prime->length = new_len;
return true;
}
@ -468,13 +468,13 @@ struct IndexSubtableRecord
if (unlikely (!c->serializer->check_success (records->resize (records->length + 1))))
return_trace (false);
(*records)[records->length - 1].firstGlyphIndex = 1;
(*records)[records->length - 1].lastGlyphIndex = 0;
records->tail ().firstGlyphIndex = 1;
records->tail ().lastGlyphIndex = 0;
bitmap_size_context->size += IndexSubtableRecord::min_size;
c->serializer->push ();
if (unlikely (!add_new_subtable (c, bitmap_size_context, &((*records)[records->length - 1]), lookup, base, start)))
if (unlikely (!add_new_subtable (c, bitmap_size_context, &(records->tail ()), lookup, base, start)))
{
c->serializer->pop_discard ();
c->serializer->revert (snap);

View File

@ -39,11 +39,7 @@
#define HB_OT_TAG_COLR HB_TAG('C','O','L','R')
#ifndef HB_COLRV1_MAX_NESTING_LEVEL
#define HB_COLRV1_MAX_NESTING_LEVEL 100
#endif
#ifndef COLRV1_ENABLE_SUBSETTING
#define COLRV1_ENABLE_SUBSETTING 1
#define HB_COLRV1_MAX_NESTING_LEVEL 16
#endif
namespace OT {
@ -188,6 +184,7 @@ struct Variable
protected:
T value;
public:
VarIdx varIdxBase;
public:
DEFINE_SIZE_STATIC (4 + T::static_size);
@ -196,6 +193,8 @@ struct Variable
template <typename T>
struct NoVariable
{
static constexpr uint32_t varIdxBase = VarIdx::NO_VARIATION;
NoVariable<T>* copy (hb_serialize_context_t *c) const
{
TRACE_SERIALIZE (this);
@ -888,6 +887,11 @@ struct PaintComposite
DEFINE_SIZE_STATIC (8);
};
struct ClipBoxData
{
int xMin, yMin, xMax, yMax;
};
struct ClipBoxFormat1
{
bool sanitize (hb_sanitize_context_t *c) const
@ -896,6 +900,14 @@ struct ClipBoxFormat1
return_trace (c->check_struct (this));
}
void get_clip_box (ClipBoxData &clip_box, const VarStoreInstancer &instancer HB_UNUSED) const
{
clip_box.xMin = xMin;
clip_box.yMin = yMin;
clip_box.xMax = xMax;
clip_box.yMax = yMax;
}
public:
HBUINT8 format; /* format = 1(noVar) or 2(Var)*/
FWORD xMin;
@ -906,7 +918,20 @@ struct ClipBoxFormat1
DEFINE_SIZE_STATIC (1 + 4 * FWORD::static_size);
};
struct ClipBoxFormat2 : Variable<ClipBoxFormat1> {};
struct ClipBoxFormat2 : Variable<ClipBoxFormat1>
{
void get_clip_box (ClipBoxData &clip_box, const VarStoreInstancer &instancer) const
{
value.get_clip_box(clip_box, instancer);
if (instancer)
{
clip_box.xMin += _hb_roundf (instancer (varIdxBase, 0));
clip_box.yMin += _hb_roundf (instancer (varIdxBase, 1));
clip_box.xMax += _hb_roundf (instancer (varIdxBase, 2));
clip_box.yMax += _hb_roundf (instancer (varIdxBase, 3));
}
}
};
struct ClipBox
{
@ -932,6 +957,28 @@ struct ClipBox
}
}
bool get_extents (hb_glyph_extents_t *extents,
const VarStoreInstancer &instancer) const
{
ClipBoxData clip_box;
switch (u.format) {
case 1:
u.format1.get_clip_box (clip_box, instancer);
break;
case 2:
u.format2.get_clip_box (clip_box, instancer);
break;
default:
return false;
}
extents->x_bearing = clip_box.xMin;
extents->y_bearing = clip_box.yMax;
extents->width = clip_box.xMax - clip_box.xMin;
extents->height = clip_box.yMin - clip_box.yMax;
return true;
}
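
The conversion above follows the glyph-extents convention: the bearing is the box's top-left corner, width is positive, and height is negative (yMin - yMax) because y grows upward in font space while extents run downward from the bearing. The same arithmetic in isolation:

#include <cstdint>

struct extents_t { int x_bearing, y_bearing, width, height; };

// Convert a min/max clip box into extents, as ClipBox::get_extents does.
static extents_t box_to_extents (int x_min, int y_min, int x_max, int y_max)
{
  return { x_min,            // left edge
           y_max,            // top edge
           x_max - x_min,    // positive width
           y_min - y_max };  // negative height (y-up coordinate space)
}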
protected:
union {
HBUINT8 format; /* Format identifier */
@ -942,6 +989,9 @@ struct ClipBox
struct ClipRecord
{
int cmp (hb_codepoint_t g) const
{ return g < startGlyphID ? -1 : g <= endGlyphID ? 0 : +1; }
ClipRecord* copy (hb_serialize_context_t *c, const void *base) const
{
TRACE_SERIALIZE (this);
@ -957,6 +1007,13 @@ struct ClipRecord
return_trace (c->check_struct (this) && clipBox.sanitize (c, base));
}
bool get_extents (hb_glyph_extents_t *extents,
const void *base,
const VarStoreInstancer &instancer) const
{
return (base+clipBox).get_extents (extents, instancer);
}
public:
HBUINT16 startGlyphID; // first gid clip applies to
HBUINT16 endGlyphID; // last gid clip applies to, inclusive
@ -964,6 +1021,7 @@ struct ClipRecord
public:
DEFINE_SIZE_STATIC (7);
};
DECLARE_NULL_NAMESPACE_BYTES (OT, ClipRecord);
struct ClipList
{
@ -1052,11 +1110,26 @@ struct ClipList
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
// TODO Make a formatted struct!
return_trace (c->check_struct (this) && clips.sanitize (c, this));
}
bool
get_extents (hb_codepoint_t gid,
hb_glyph_extents_t *extents,
const VarStoreInstancer &instancer) const
{
auto *rec = clips.as_array ().bsearch (gid);
if (rec)
{
rec->get_extents (extents, this, instancer);
return true;
}
return false;
}
HBUINT8 format; // Set to 1.
Array32Of<ClipRecord> clips; // Clip records, sorted by startGlyphID
SortedArray32Of<ClipRecord> clips; // Clip records, sorted by startGlyphID
public:
DEFINE_SIZE_ARRAY_SIZED (5, clips);
};
@ -1359,7 +1432,7 @@ struct COLR
(this+baseGlyphsZ).sanitize (c, numBaseGlyphs) &&
(this+layersZ).sanitize (c, numLayers) &&
(version == 0 ||
(COLRV1_ENABLE_SUBSETTING && version == 1 &&
(version == 1 &&
baseGlyphList.sanitize (c, this) &&
layerList.sanitize (c, this) &&
clipList.sanitize (c, this) &&
@ -1516,6 +1589,30 @@ struct COLR
return_trace (true);
}
bool
get_extents (hb_font_t *font, hb_codepoint_t glyph, hb_glyph_extents_t *extents) const
{
if (version != 1)
return false;
VarStoreInstancer instancer (this+varStore,
this+varIdxMap,
hb_array (font->coords, font->num_coords));
if ((this+clipList).get_extents (glyph,
extents,
instancer))
{
extents->x_bearing = font->em_scale_x (extents->x_bearing);
extents->y_bearing = font->em_scale_x (extents->y_bearing);
extents->width = font->em_scale_x (extents->width);
extents->height = font->em_scale_x (extents->height);
return true;
}
return false;
}
protected:
HBUINT16 version; /* Table version number (starts at 0). */
HBUINT16 numBaseGlyphs; /* Number of Base Glyph Records. */

View File

@ -56,9 +56,9 @@ HB_OT_CORE_TABLE (OT, maxp)
#if !defined(HB_NO_FACE_COLLECT_UNICODES) || !defined(HB_NO_OT_FONT)
HB_OT_ACCELERATOR (OT, cmap)
#endif
HB_OT_TABLE (OT, hhea)
HB_OT_CORE_TABLE (OT, hhea)
HB_OT_ACCELERATOR (OT, hmtx)
HB_OT_TABLE (OT, OS2)
HB_OT_CORE_TABLE (OT, OS2)
#if !defined(HB_NO_OT_FONT_GLYPH_NAMES) || !defined(HB_NO_METRICS) || !defined(HB_NO_STYLE)
HB_OT_ACCELERATOR (OT, post)
#endif
@ -66,7 +66,7 @@ HB_OT_ACCELERATOR (OT, post)
HB_OT_ACCELERATOR (OT, name)
#endif
#ifndef HB_NO_STYLE
HB_OT_TABLE (OT, STAT)
HB_OT_CORE_TABLE (OT, STAT)
#endif
#ifndef HB_NO_META
HB_OT_ACCELERATOR (OT, meta)
@ -74,9 +74,9 @@ HB_OT_ACCELERATOR (OT, meta)
/* Vertical layout. */
#ifndef HB_NO_VERTICAL
HB_OT_TABLE (OT, vhea)
HB_OT_CORE_TABLE (OT, vhea)
HB_OT_ACCELERATOR (OT, vmtx)
HB_OT_TABLE (OT, VORG)
HB_OT_CORE_TABLE (OT, VORG)
#endif
/* TrueType outlines. */
@ -91,15 +91,15 @@ HB_OT_ACCELERATOR (OT, cff2)
/* OpenType variations. */
#ifndef HB_NO_VAR
HB_OT_TABLE (OT, fvar)
HB_OT_TABLE (OT, avar)
HB_OT_CORE_TABLE (OT, fvar)
HB_OT_CORE_TABLE (OT, avar)
HB_OT_ACCELERATOR (OT, gvar)
HB_OT_TABLE (OT, MVAR)
HB_OT_CORE_TABLE (OT, MVAR)
#endif
/* Legacy kern. */
#ifndef HB_NO_OT_KERN
HB_OT_TABLE (OT, kern)
HB_OT_CORE_TABLE (OT, kern)
#endif
/* OpenType shaping. */
@ -107,12 +107,12 @@ HB_OT_TABLE (OT, kern)
HB_OT_ACCELERATOR (OT, GDEF)
HB_OT_ACCELERATOR (OT, GSUB)
HB_OT_ACCELERATOR (OT, GPOS)
//HB_OT_TABLE (OT, JSTF)
//HB_OT_CORE_TABLE (OT, JSTF)
#endif
/* OpenType baseline. */
#ifndef HB_NO_BASE
HB_OT_TABLE (OT, BASE)
HB_OT_CORE_TABLE (OT, BASE)
#endif
/* AAT shaping. */
@ -129,8 +129,8 @@ HB_OT_TABLE (AAT, feat)
/* OpenType color fonts. */
#ifndef HB_NO_COLOR
HB_OT_TABLE (OT, COLR)
HB_OT_TABLE (OT, CPAL)
HB_OT_CORE_TABLE (OT, COLR)
HB_OT_CORE_TABLE (OT, CPAL)
HB_OT_ACCELERATOR (OT, CBDT)
HB_OT_ACCELERATOR (OT, sbix)
HB_OT_ACCELERATOR (OT, SVG)
@ -138,7 +138,7 @@ HB_OT_ACCELERATOR (OT, SVG)
/* OpenType math. */
#ifndef HB_NO_MATH
HB_OT_TABLE (OT, MATH)
HB_OT_CORE_TABLE (OT, MATH)
#endif

View File

@ -45,6 +45,7 @@
#include "hb-ot-vorg-table.hh"
#include "hb-ot-color-cbdt-table.hh"
#include "hb-ot-color-sbix-table.hh"
#include "hb-ot-color-colr-table.hh"
/**
@ -349,6 +350,9 @@ hb_ot_get_glyph_extents (hb_font_t *font,
#if !defined(HB_NO_OT_FONT_BITMAP) && !defined(HB_NO_COLOR)
if (ot_face->sbix->get_extents (font, glyph, extents)) return true;
if (ot_face->CBDT->get_extents (font, glyph, extents)) return true;
#endif
#if !defined(HB_NO_COLOR)
if (ot_face->COLR->get_extents (font, glyph, extents)) return true;
#endif
if (ot_face->glyf->get_extents (font, glyph, extents)) return true;
#ifndef HB_NO_OT_FONT_CFF
@ -356,7 +360,6 @@ hb_ot_get_glyph_extents (hb_font_t *font,
if (ot_face->cff2->get_extents (font, glyph, extents)) return true;
#endif
// TODO Hook up side-bearings variations.
return false;
}

View File

@ -478,7 +478,7 @@ struct IndexArray : Array16Of<Index>
{
if (_count)
{
+ this->sub_array (start_offset, _count)
+ this->as_array ().sub_array (start_offset, _count)
| hb_sink (hb_array (_indexes, *_count))
;
}
@ -658,7 +658,7 @@ struct FeatureParamsCharacterVariants
{
if (char_count)
{
+ characters.sub_array (start_offset, char_count)
+ characters.as_array ().sub_array (start_offset, char_count)
| hb_sink (hb_array (chars, *char_count))
;
}
@ -932,7 +932,7 @@ struct RecordArrayOf : SortedArray16Of<Record<Type>>
{
if (record_count)
{
+ this->sub_array (start_offset, record_count)
+ this->as_array ().sub_array (start_offset, record_count)
| hb_map (&Record<Type>::tag)
| hb_sink (hb_array (record_tags, *record_count))
;
@ -980,18 +980,16 @@ struct RecordListOfFeature : RecordListOf<Feature>
auto *out = c->serializer->start_embed (*this);
if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
unsigned count = this->len;
+ hb_zip (*this, hb_range (count))
| hb_filter (l->feature_index_map, hb_second)
| hb_apply ([l, out, this] (const hb_pair_t<const Record<Feature>&, unsigned>& _)
+ hb_enumerate (*this)
| hb_filter (l->feature_index_map, hb_first)
| hb_apply ([l, out, this] (const hb_pair_t<unsigned, const Record<Feature>&>& _)
{
const Feature *f_sub = nullptr;
const Feature **f = nullptr;
if (l->feature_substitutes_map->has (_.second, &f))
if (l->feature_substitutes_map->has (_.first, &f))
f_sub = *f;
subset_record_array (l, out, this, f_sub) (_.first);
subset_record_array (l, out, this, f_sub) (_.second);
})
;
@ -1079,7 +1077,7 @@ struct LangSys
auto *out = c->serializer->start_embed (*this);
if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
const unsigned *v;
const uint32_t *v;
out->reqFeatureIndex = l->feature_index_map->has (reqFeatureIndex, &v) ? *v : 0xFFFFu;
if (!l->visitFeatureIndex (featureIndex.len))
@ -1147,7 +1145,6 @@ struct Script
return;
}
unsigned langsys_count = get_lang_sys_count ();
if (has_default_lang_sys ())
{
//only collect features from non-redundant langsys
@ -1156,24 +1153,24 @@ struct Script
d.collect_features (c);
}
for (auto _ : + hb_zip (langSys, hb_range (langsys_count)))
for (auto _ : + hb_enumerate (langSys))
{
const LangSys& l = this+_.first.offset;
const LangSys& l = this+_.second.offset;
if (!c->visitLangsys (l.get_feature_count ())) continue;
if (l.compare (d, c->duplicate_feature_map)) continue;
l.collect_features (c);
c->script_langsys_map->get (script_index)->add (_.second);
c->script_langsys_map->get (script_index)->add (_.first);
}
}
else
{
for (auto _ : + hb_zip (langSys, hb_range (langsys_count)))
for (auto _ : + hb_enumerate (langSys))
{
const LangSys& l = this+_.first.offset;
const LangSys& l = this+_.second.offset;
if (!c->visitLangsys (l.get_feature_count ())) continue;
l.collect_features (c);
c->script_langsys_map->get (script_index)->add (_.second);
c->script_langsys_map->get (script_index)->add (_.first);
}
}
}
@ -1211,10 +1208,9 @@ struct Script
const hb_set_t *active_langsys = l->script_langsys_map->get (l->cur_script_index);
if (active_langsys)
{
unsigned count = langSys.len;
+ hb_zip (langSys, hb_range (count))
| hb_filter (active_langsys, hb_second)
| hb_map (hb_first)
+ hb_enumerate (langSys)
| hb_filter (active_langsys, hb_first)
| hb_map (hb_second)
| hb_filter ([=] (const Record<LangSys>& record) {return l->visitLangSys (); })
| hb_apply (subset_record_array (l, &(out->langSys), this))
;
@ -1250,12 +1246,11 @@ struct RecordListOfScript : RecordListOf<Script>
auto *out = c->serializer->start_embed (*this);
if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
unsigned count = this->len;
for (auto _ : + hb_zip (*this, hb_range (count)))
for (auto _ : + hb_enumerate (*this))
{
auto snap = c->serializer->snapshot ();
l->cur_script_index = _.second;
bool ret = _.first.subset (l, this);
l->cur_script_index = _.first;
bool ret = _.second.subset (l, this);
if (!ret) c->serializer->revert (snap);
else out->len++;
}
@ -1388,7 +1383,13 @@ struct Lookup
outMarkFilteringSet = markFilteringSet;
}
return_trace (out->subTable.len);
// Always keep the lookup even if it's empty. The rest of layout subsetting depends on lookup
// indices being consistent with those computed during planning. So if an empty lookup is
// discarded during the subset phase it will invalidate all subsequent lookup indices.
// Generally we shouldn't end up with an empty lookup as we pre-prune them during the planning
// phase, but it can happen in rare cases, such as when a subtable is considered degenerate
// during closure (see: https://github.com/harfbuzz/harfbuzz/issues/3853).
return true;
}
template <typename TSubTable>
@ -1454,10 +1455,9 @@ struct LookupOffsetList : List16OfOffsetTo<TLookup, OffsetType>
auto *out = c->serializer->start_embed (this);
if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
unsigned count = this->len;
+ hb_zip (*this, hb_range (count))
| hb_filter (l->lookup_index_map, hb_second)
| hb_map (hb_first)
+ hb_enumerate (*this)
| hb_filter (l->lookup_index_map, hb_first)
| hb_map (hb_second)
| hb_apply (subset_offset_array (c, *out, this))
;
return_trace (true);
@ -1491,7 +1491,7 @@ static bool ClassDef_remap_and_serialize (hb_serialize_context_t *c,
klass_map->set (0, 0);
unsigned idx = klass_map->has (0) ? 1 : 0;
for (const unsigned k: klasses.iter ())
for (const unsigned k: klasses)
{
if (klass_map->has (k)) continue;
klass_map->set (k, idx);
@ -1524,6 +1524,11 @@ struct ClassDefFormat1_3
return classValue[(unsigned int) (glyph_id - startGlyph)];
}
unsigned get_population () const
{
return classValue.len;
}
template<typename Iterator,
hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
bool serialize (hb_serialize_context_t *c,
@ -1548,7 +1553,7 @@ struct ClassDefFormat1_3
startGlyph = glyph_min;
if (unlikely (!classValue.serialize (c, glyph_count))) return_trace (false);
for (const hb_pair_t<hb_codepoint_t, unsigned> gid_klass_pair : + it)
for (const hb_pair_t<hb_codepoint_t, uint32_t> gid_klass_pair : + it)
{
unsigned idx = gid_klass_pair.first - glyph_min;
classValue[idx] = gid_klass_pair.second;
@ -1639,11 +1644,10 @@ struct ClassDefFormat1_3
bool intersects (const hb_set_t *glyphs) const
{
/* TODO Speed up, using hb_set_next()? */
hb_codepoint_t start = startGlyph;
hb_codepoint_t end = startGlyph + classValue.len;
for (hb_codepoint_t iter = startGlyph - 1;
hb_set_next (glyphs, &iter) && iter < end;)
glyphs->next (&iter) && iter < end;)
if (classValue[iter - start]) return true;
return false;
}
@ -1654,10 +1658,10 @@ struct ClassDefFormat1_3
{
/* Match if there's any glyph that is not listed! */
hb_codepoint_t g = HB_SET_VALUE_INVALID;
if (!hb_set_next (glyphs, &g)) return false;
if (!glyphs->next (&g)) return false;
if (g < startGlyph) return true;
g = startGlyph + count - 1;
if (hb_set_next (glyphs, &g)) return true;
if (glyphs->next (&g)) return true;
/* Fall through. */
}
/* TODO Speed up, using set overlap first? */
@ -1675,12 +1679,12 @@ struct ClassDefFormat1_3
if (klass == 0)
{
unsigned start_glyph = startGlyph;
for (unsigned g = HB_SET_VALUE_INVALID;
hb_set_next (glyphs, &g) && g < start_glyph;)
for (uint32_t g = HB_SET_VALUE_INVALID;
glyphs->next (&g) && g < start_glyph;)
intersect_glyphs->add (g);
for (unsigned g = startGlyph + count - 1;
hb_set_next (glyphs, &g);)
for (uint32_t g = startGlyph + count - 1;
glyphs->next (&g);)
intersect_glyphs->add (g);
return;
@ -1696,7 +1700,7 @@ struct ClassDefFormat1_3
unsigned start_glyph = startGlyph;
unsigned end_glyph = start_glyph + count;
for (unsigned g = startGlyph - 1;
hb_set_next (glyphs, &g) && g < end_glyph;)
glyphs->next (&g) && g < end_glyph;)
if (classValue.arrayZ[g - start_glyph] == klass)
intersect_glyphs->add (g);
#endif
@ -1739,6 +1743,14 @@ struct ClassDefFormat2_4
return rangeRecord.bsearch (glyph_id).value;
}
unsigned get_population () const
{
typename Types::large_int ret = 0;
for (const auto &r : rangeRecord)
ret += r.get_population ();
return ret > UINT_MAX ? UINT_MAX : (unsigned) ret;
}
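
get_population accumulates per-range counts in a wider integer and clamps to UINT_MAX, so a malformed table with enormous ranges cannot wrap the unsigned return value. The same saturating-sum pattern standalone:

#include <climits>
#include <cstddef>
#include <cstdint>

// Accumulate in 64 bits, clamp to the 32-bit return type instead of wrapping.
static unsigned saturating_total (const unsigned *counts, size_t n)
{
  uint64_t total = 0;
  for (size_t i = 0; i < n; i++)
    total += counts[i];
  return total > UINT_MAX ? UINT_MAX : (unsigned) total;
}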
template<typename Iterator,
hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
bool serialize (hb_serialize_context_t *c,
@ -1802,28 +1814,45 @@ struct ClassDefFormat2_4
{
TRACE_SUBSET (this);
const hb_map_t &glyph_map = *c->plan->glyph_map_gsub;
const hb_set_t &glyph_set = *c->plan->glyphset_gsub ();
hb_sorted_vector_t<hb_pair_t<hb_codepoint_t, hb_codepoint_t>> glyph_and_klass;
hb_set_t orig_klasses;
unsigned num_source_glyphs = c->plan->source->get_num_glyphs ();
unsigned count = rangeRecord.len;
for (unsigned i = 0; i < count; i++)
if (glyph_set.get_population () * hb_bit_storage ((unsigned) rangeRecord.len) / 2
< get_population ())
{
unsigned klass = rangeRecord[i].value;
if (!klass) continue;
hb_codepoint_t start = rangeRecord[i].first;
hb_codepoint_t end = hb_min (rangeRecord[i].last + 1, num_source_glyphs);
for (hb_codepoint_t g = start; g < end; g++)
for (hb_codepoint_t g : glyph_set)
{
hb_codepoint_t new_gid = glyph_map[g];
unsigned klass = get_class (g);
if (!klass) continue;
hb_codepoint_t new_gid = glyph_map[g];
if (new_gid == HB_MAP_VALUE_INVALID) continue;
if (glyph_filter && !glyph_filter->has (g)) continue;
if (glyph_filter && !glyph_filter->has (g)) continue;
glyph_and_klass.push (hb_pair (new_gid, klass));
orig_klasses.add (klass);
}
}
else
{
unsigned num_source_glyphs = c->plan->source->get_num_glyphs ();
for (auto &range : rangeRecord)
{
unsigned klass = range.value;
if (!klass) continue;
hb_codepoint_t start = range.first;
hb_codepoint_t end = hb_min (range.last + 1, num_source_glyphs);
for (hb_codepoint_t g = start; g < end; g++)
{
hb_codepoint_t new_gid = glyph_map[g];
if (new_gid == HB_MAP_VALUE_INVALID) continue;
if (glyph_filter && !glyph_filter->has (g)) continue;
glyph_and_klass.push (hb_pair (new_gid, klass));
orig_klasses.add (klass);
}
}
}
const hb_set_t& glyphset = *c->plan->glyphset_gsub ();
unsigned glyph_count = glyph_filter
@ -1850,10 +1879,9 @@ struct ClassDefFormat2_4
template <typename set_t>
bool collect_coverage (set_t *glyphs) const
{
unsigned int count = rangeRecord.len;
for (unsigned int i = 0; i < count; i++)
if (rangeRecord[i].value)
if (unlikely (!rangeRecord[i].collect_coverage (glyphs)))
for (auto &range : rangeRecord)
if (range.value)
if (unlikely (!range.collect_coverage (glyphs)))
return false;
return true;
}
@ -1861,11 +1889,10 @@ struct ClassDefFormat2_4
template <typename set_t>
bool collect_class (set_t *glyphs, unsigned int klass) const
{
unsigned int count = rangeRecord.len;
for (unsigned int i = 0; i < count; i++)
for (auto &range : rangeRecord)
{
if (rangeRecord[i].value == klass)
if (unlikely (!rangeRecord[i].collect_coverage (glyphs)))
if (range.value == klass)
if (unlikely (!range.collect_coverage (glyphs)))
return false;
}
return true;
@ -1873,32 +1900,32 @@ struct ClassDefFormat2_4
bool intersects (const hb_set_t *glyphs) const
{
/* TODO Speed up, using hb_set_next() and bsearch()? */
unsigned int count = rangeRecord.len;
for (unsigned int i = 0; i < count; i++)
if (rangeRecord.len > glyphs->get_population () * hb_bit_storage ((unsigned) rangeRecord.len) / 2)
{
const auto& range = rangeRecord[i];
if (range.intersects (*glyphs) && range.value)
return true;
for (hb_codepoint_t g = HB_SET_VALUE_INVALID; glyphs->next (&g);)
if (get_class (g))
return true;
return false;
}
return false;
return hb_any (+ hb_iter (rangeRecord)
| hb_map ([glyphs] (const RangeRecord<Types> &range) { return range.intersects (*glyphs) && range.value; }));
}
bool intersects_class (const hb_set_t *glyphs, uint16_t klass) const
{
unsigned int count = rangeRecord.len;
if (klass == 0)
{
/* Match if there's any glyph that is not listed! */
hb_codepoint_t g = HB_SET_VALUE_INVALID;
for (unsigned int i = 0; i < count; i++)
for (auto &range : rangeRecord)
{
if (!hb_set_next (glyphs, &g))
if (!glyphs->next (&g))
break;
if (g < rangeRecord[i].first)
if (g < range.first)
return true;
g = rangeRecord[i].last;
g = range.last;
}
if (g != HB_SET_VALUE_INVALID && hb_set_next (glyphs, &g))
if (g != HB_SET_VALUE_INVALID && glyphs->next (&g))
return true;
/* Fall through. */
}
@ -1910,49 +1937,49 @@ struct ClassDefFormat2_4
void intersected_class_glyphs (const hb_set_t *glyphs, unsigned klass, hb_set_t *intersect_glyphs) const
{
unsigned count = rangeRecord.len;
if (klass == 0)
{
hb_codepoint_t g = HB_SET_VALUE_INVALID;
for (unsigned int i = 0; i < count; i++)
for (auto &range : rangeRecord)
{
if (!hb_set_next (glyphs, &g))
if (!glyphs->next (&g))
goto done;
while (g < rangeRecord[i].first)
while (g < range.first)
{
intersect_glyphs->add (g);
if (!hb_set_next (glyphs, &g))
if (!glyphs->next (&g))
goto done;
}
g = rangeRecord[i].last;
g = range.last;
}
while (hb_set_next (glyphs, &g))
while (glyphs->next (&g))
intersect_glyphs->add (g);
done:
return;
}
#if 0
/* The following implementation is faster asymptotically, but slower
* in practice. */
if ((count >> 3) > glyphs->get_population ())
unsigned count = rangeRecord.len;
if (count > glyphs->get_population () * hb_bit_storage (count) * 8)
{
for (hb_codepoint_t g = HB_SET_VALUE_INVALID;
hb_set_next (glyphs, &g);)
if (rangeRecord.as_array ().bfind (g))
glyphs->next (&g);)
{
unsigned i;
if (rangeRecord.as_array ().bfind (g, &i) &&
rangeRecord.arrayZ[i].value == klass)
intersect_glyphs->add (g);
}
return;
}
#endif
for (unsigned int i = 0; i < count; i++)
for (auto &range : rangeRecord)
{
if (rangeRecord[i].value != klass) continue;
if (range.value != klass) continue;
unsigned end = rangeRecord[i].last + 1;
for (hb_codepoint_t g = rangeRecord[i].first - 1;
hb_set_next (glyphs, &g) && g < end;)
unsigned end = range.last + 1;
for (hb_codepoint_t g = range.first - 1;
glyphs->next (&g) && g < end;)
intersect_glyphs->add (g);
}
}
@ -1961,25 +1988,24 @@ struct ClassDefFormat2_4
{
if (glyphs->is_empty ()) return;
unsigned count = rangeRecord.len;
hb_codepoint_t g = HB_SET_VALUE_INVALID;
for (unsigned int i = 0; i < count; i++)
for (auto &range : rangeRecord)
{
if (!hb_set_next (glyphs, &g))
if (!glyphs->next (&g))
break;
if (g < rangeRecord[i].first)
if (g < range.first)
{
intersect_classes->add (0);
break;
}
g = rangeRecord[i].last;
g = range.last;
}
if (g != HB_SET_VALUE_INVALID && hb_set_next (glyphs, &g))
if (g != HB_SET_VALUE_INVALID && glyphs->next (&g))
intersect_classes->add (0);
for (const auto& record : rangeRecord.iter ())
if (record.intersects (*glyphs))
intersect_classes->add (record.value);
for (const auto& range : rangeRecord)
if (range.intersects (*glyphs))
intersect_classes->add (range.value);
}
protected:
@ -1994,10 +2020,8 @@ struct ClassDefFormat2_4
struct ClassDef
{
/* Has interface. */
static constexpr unsigned SENTINEL = 0;
typedef unsigned int value_t;
value_t operator [] (hb_codepoint_t k) const { return get (k); }
bool has (hb_codepoint_t k) const { return (*this)[k] != SENTINEL; }
unsigned operator [] (hb_codepoint_t k) const { return get (k); }
bool has (hb_codepoint_t k) const { return (*this)[k]; }
/* Projection. */
hb_codepoint_t operator () (hb_codepoint_t k) const { return get (k); }
@ -2015,6 +2039,19 @@ struct ClassDef
}
}
unsigned get_population () const
{
switch (u.format) {
case 1: return u.format1.get_population ();
case 2: return u.format2.get_population ();
#ifndef HB_NO_BEYOND_64K
case 3: return u.format3.get_population ();
case 4: return u.format4.get_population ();
#endif
default:return NOT_COVERED;
}
}
template<typename Iterator,
hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
bool serialize (hb_serialize_context_t *c, Iterator it_with_class_zero)
@ -2332,7 +2369,7 @@ struct VarRegionList
{
unsigned int backward = region_map.backward (r);
if (backward >= region_count) return_trace (false);
memcpy (&axesZ[axisCount * r], &src->axesZ[axisCount * backward], VarRegionAxis::static_size * axisCount);
hb_memcpy (&axesZ[axisCount * r], &src->axesZ[axisCount * backward], VarRegionAxis::static_size * axisCount);
}
return_trace (true);
@ -2442,21 +2479,26 @@ struct VarData
unsigned ri_count = src->regionIndices.len;
enum delta_size_t { kZero=0, kNonWord, kWord };
hb_vector_t<delta_size_t> delta_sz;
hb_vector_t<unsigned int> ri_map; /* maps old index to new index */
hb_vector_t<unsigned int> ri_map; /* maps new index to old index */
delta_sz.resize (ri_count);
ri_map.resize (ri_count);
unsigned int new_word_count = 0;
unsigned int r;
const HBUINT8 *src_delta_bytes = src->get_delta_bytes ();
unsigned src_row_size = src->get_row_size ();
unsigned src_word_count = src->wordCount ();
bool src_long_words = src->longWords ();
bool has_long = false;
if (src->longWords ())
if (src_long_words)
{
for (r = 0; r < ri_count; r++)
for (r = 0; r < src_word_count; r++)
{
for (unsigned int i = 0; i < inner_map.get_next_value (); i++)
{
unsigned int old = inner_map.backward (i);
int32_t delta = src->get_item_delta (old, r);
int32_t delta = src->get_item_delta_fast (old, r, src_delta_bytes, src_row_size);
if (delta < -65536 || 65535 < delta)
{
has_long = true;
@ -2470,11 +2512,13 @@ struct VarData
signed max_threshold = has_long ? +65535 : +127;
for (r = 0; r < ri_count; r++)
{
bool short_circuit = src_long_words == has_long && src_word_count <= r;
delta_sz[r] = kZero;
for (unsigned int i = 0; i < inner_map.get_next_value (); i++)
{
unsigned int old = inner_map.backward (i);
int32_t delta = src->get_item_delta (old, r);
int32_t delta = src->get_item_delta_fast (old, r, src_delta_bytes, src_row_size);
if (delta < min_threshold || max_threshold < delta)
{
delta_sz[r] = kWord;
@ -2482,7 +2526,11 @@ struct VarData
break;
}
else if (delta != 0)
{
delta_sz[r] = kNonWord;
if (short_circuit)
break;
}
}
}
@ -2492,7 +2540,8 @@ struct VarData
for (r = 0; r < ri_count; r++)
if (delta_sz[r])
{
ri_map[r] = (delta_sz[r] == kWord)? word_index++ : non_word_index++;
unsigned new_r = (delta_sz[r] == kWord)? word_index++ : non_word_index++;
ri_map[new_r] = r;
new_ri_count++;
}
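
As the comment change above notes, ri_map is now the inverse permutation: indexed by new column position, storing the old index, with word-sized delta columns packed first. The serializer can then copy column new_r from column ri_map[new_r]. A sketch of building such a map, assuming n_wide equals the number of wide entries in cols:

#include <cstddef>
#include <vector>

enum class col_t { zero, narrow, wide };

// Build a new->old column map: wide columns first, all-zero columns dropped.
static std::vector<size_t> pack_columns (const std::vector<col_t> &cols,
                                         size_t n_wide)
{
  std::vector<size_t> new_to_old (cols.size ());
  size_t wide_index = 0, narrow_index = n_wide, kept = 0;
  for (size_t old = 0; old < cols.size (); old++)
  {
    if (cols[old] == col_t::zero) continue;  // column dropped entirely
    size_t new_pos = (cols[old] == col_t::wide) ? wide_index++ : narrow_index++;
    new_to_old[new_pos] = old;
    kept++;
  }
  new_to_old.resize (kept);
  return new_to_old;
}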
@ -2502,14 +2551,20 @@ struct VarData
if (unlikely (!c->extend (this))) return_trace (false);
for (r = 0; r < ri_count; r++)
if (delta_sz[r]) regionIndices[ri_map[r]] = region_map[src->regionIndices[r]];
for (r = 0; r < new_ri_count; r++)
regionIndices[r] = region_map[src->regionIndices[ri_map[r]]];
for (unsigned int i = 0; i < itemCount; i++)
HBUINT8 *delta_bytes = get_delta_bytes ();
unsigned row_size = get_row_size ();
unsigned count = itemCount;
for (unsigned int i = 0; i < count; i++)
{
unsigned int old = inner_map.backward (i);
for (unsigned int r = 0; r < ri_count; r++)
if (delta_sz[r]) set_item_delta (i, ri_map[r], src->get_item_delta (old, r));
unsigned int old = inner_map.backward (i);
for (unsigned int r = 0; r < new_ri_count; r++)
set_item_delta_fast (i, r,
src->get_item_delta_fast (old, ri_map[r],
src_delta_bytes, src_row_size),
delta_bytes, row_size);
}
return_trace (true);
@ -2517,12 +2572,15 @@ struct VarData
void collect_region_refs (hb_set_t &region_indices, const hb_inc_bimap_t &inner_map) const
{
const HBUINT8 *delta_bytes = get_delta_bytes ();
unsigned row_size = get_row_size ();
for (unsigned int r = 0; r < regionIndices.len; r++)
{
unsigned int region = regionIndices[r];
unsigned int region = regionIndices.arrayZ[r];
if (region_indices.has (region)) continue;
for (unsigned int i = 0; i < inner_map.get_next_value (); i++)
if (get_item_delta (inner_map.backward (i), r) != 0)
if (get_item_delta_fast (inner_map.backward (i), r, delta_bytes, row_size) != 0)
{
region_indices.add (region);
break;
@ -2537,10 +2595,12 @@ struct VarData
HBUINT8 *get_delta_bytes ()
{ return &StructAfter<HBUINT8> (regionIndices); }
int32_t get_item_delta (unsigned int item, unsigned int region) const
int32_t get_item_delta_fast (unsigned int item, unsigned int region,
const HBUINT8 *delta_bytes, unsigned row_size) const
{
if ( item >= itemCount || unlikely (region >= regionIndices.len)) return 0;
const HBINT8 *p = (const HBINT8 *) get_delta_bytes () + item * get_row_size ();
if (unlikely (item >= itemCount || region >= regionIndices.len)) return 0;
const HBINT8 *p = (const HBINT8 *) delta_bytes + item * row_size;
unsigned word_count = wordCount ();
bool is_long = longWords ();
if (is_long)
@ -2558,10 +2618,17 @@ struct VarData
return (p + HBINT16::static_size * word_count)[region - word_count];
}
}
void set_item_delta (unsigned int item, unsigned int region, int32_t delta)
int32_t get_item_delta (unsigned int item, unsigned int region) const
{
HBINT8 *p = (HBINT8 *)get_delta_bytes () + item * get_row_size ();
return get_item_delta_fast (item, region,
get_delta_bytes (),
get_row_size ());
}
void set_item_delta_fast (unsigned int item, unsigned int region, int32_t delta,
HBUINT8 *delta_bytes, unsigned row_size)
{
HBINT8 *p = (HBINT8 *) delta_bytes + item * row_size;
unsigned word_count = wordCount ();
bool is_long = longWords ();
if (is_long)
@ -2579,6 +2646,12 @@ struct VarData
(p + HBINT16::static_size * word_count)[region - word_count] = delta;
}
}
void set_item_delta (unsigned int item, unsigned int region, int32_t delta)
{
set_item_delta_fast (item, region, delta,
get_delta_bytes (),
get_row_size ());
}
bool longWords () const { return wordSizeCount & 0x8000u /* LONG_WORDS */; }
unsigned wordCount () const { return wordSizeCount & 0x7FFFu /* WORD_DELTA_COUNT_MASK */; }
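
Each VarData row stores wordCount() wide deltas followed by narrow ones, with both widths doubled when longWords() is set (32/16-bit instead of 16/8-bit). The new *_fast variants take the row base and row size as parameters precisely so callers can compute them once per table rather than once per access. A sketch of the addressing under that layout (big-endian, as OpenType stores integers):

#include <cstddef>
#include <cstdint>

static int32_t get_delta (const uint8_t *rows, unsigned row_size,
                          unsigned item, unsigned region,
                          unsigned word_count, bool long_words)
{
  const uint8_t *p = rows + (size_t) item * row_size;
  if (region < word_count)
  {
    const uint8_t *w = p + (long_words ? 4 : 2) * region;
    if (long_words)   // 32-bit wide deltas
      return (int32_t) ((uint32_t) w[0] << 24 | (uint32_t) w[1] << 16 |
                        (uint32_t) w[2] << 8  | (uint32_t) w[3]);
    return (int16_t) ((unsigned) w[0] << 8 | w[1]);
  }
  const uint8_t *n = p + (long_words ? 4 : 2) * word_count;
  unsigned r = region - word_count;
  if (long_words)     // "narrow" is 16-bit when words are 32-bit
    return (int16_t) ((unsigned) n[2*r] << 8 | n[2*r + 1]);
  return (int8_t) n[r];
}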
@ -2642,6 +2715,14 @@ struct VariationStore
unsigned int inner = index & 0xFFFF;
return get_delta (outer, inner, coords, coord_count, cache);
}
float get_delta (unsigned int index,
hb_array_t<int> coords,
VarRegionList::cache_t *cache = nullptr) const
{
return get_delta (index,
coords.arrayZ, coords.length,
cache);
}
bool sanitize (hb_sanitize_context_t *c) const
{
@ -2948,7 +3029,7 @@ struct ConditionSet
// all conditions met
if (num_kept_cond == 0) return DROP_COND_WITH_VAR;
//check if condition_set is unique with variations
if (c->conditionset_map->has (p))
//duplicate found, drop the entire record
@ -3420,17 +3501,16 @@ struct VariationDevice
{
TRACE_SERIALIZE (this);
if (!layout_variation_idx_delta_map) return_trace (nullptr);
auto snap = c->snapshot ();
hb_pair_t<unsigned, int> *v;
if (!layout_variation_idx_delta_map->has (varIdx, &v))
return_trace (nullptr);
c->start_zerocopy (this->static_size);
auto *out = c->embed (this);
if (unlikely (!out)) return_trace (nullptr);
/* TODO Just get() and bail if NO_VARIATION. Needs to setup the map to return that. */
if (!layout_variation_idx_delta_map->has (varIdx))
{
c->revert (snap);
return_trace (nullptr);
}
unsigned new_idx = hb_first (layout_variation_idx_delta_map->get (varIdx));
unsigned new_idx = hb_first (*v);
out->varIdx = new_idx;
return_trace (out);
}

View File

@ -73,7 +73,7 @@ struct AttachList
if (point_count)
{
+ points.sub_array (start_offset, point_count)
+ points.as_array ().sub_array (start_offset, point_count)
| hb_sink (hb_array (point_array, *point_count))
;
}
@ -322,7 +322,7 @@ struct LigGlyph
{
if (caret_count)
{
+ carets.sub_array (start_offset, caret_count)
+ carets.as_array ().sub_array (start_offset, caret_count)
| hb_map (hb_add (this))
| hb_map ([&] (const CaretValue &value) { return value.get_caret_value (font, direction, glyph_id, var_store); })
| hb_sink (hb_array (caret_array, *caret_count))

View File

@ -100,8 +100,8 @@ struct hb_closure_context_t :
bool is_lookup_done (unsigned int lookup_index)
{
if (done_lookups_glyph_count->in_error () ||
done_lookups_glyph_set->in_error ())
if (unlikely (done_lookups_glyph_count->in_error () ||
done_lookups_glyph_set->in_error ()))
return true;
/* Have we visited this lookup with the current set of glyphs? */
@ -535,7 +535,12 @@ struct hb_ot_apply_context_t :
bool next (unsigned *unsafe_to = nullptr)
{
assert (num_items > 0);
while (idx + num_items < end)
/* The alternate condition below is faster at string boundaries,
* but produces subpar "unsafe-to-concat" values. */
signed stop = (signed) end - (signed) num_items;
if (c->buffer->flags & HB_BUFFER_FLAG_PRODUCE_UNSAFE_TO_CONCAT)
stop = (signed) end - 1;
while ((signed) idx < stop)
{
idx++;
hb_glyph_info_t &info = c->buffer->info[idx];
@ -568,7 +573,12 @@ struct hb_ot_apply_context_t :
bool prev (unsigned *unsafe_from = nullptr)
{
assert (num_items > 0);
while (idx > num_items - 1)
/* The alternate condition below is faster at string boundaries,
* but produces subpar "unsafe-to-concat" values. */
unsigned stop = num_items - 1;
if (c->buffer->flags & HB_BUFFER_FLAG_PRODUCE_UNSAFE_TO_CONCAT)
stop = 1 - 1;
while (idx > stop)
{
idx--;
hb_glyph_info_t &info = c->buffer->out_info[idx];
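Both hunks above trade a little speed for better annotations: when HB_BUFFER_FLAG_PRODUCE_UNSAFE_TO_CONCAT is set, the skippy iterators scan all the way to the buffer edge instead of stopping as soon as too few items remain to match. A standalone sketch of the two stop bounds (toy helpers, not the HarfBuzz iterator):

// Forward scan: positions idx+1 .. stop are candidates.
static int forward_stop (unsigned end, unsigned num_items, bool full_unsafe_to_concat)
{
  // Tight bound: stop once fewer than num_items glyphs remain to match.
  int stop = (int) end - (int) num_items;
  if (full_unsafe_to_concat)
    stop = (int) end - 1;             // scan to the edge for precise annotations
  return stop;                        // loop: while ((int) idx < stop) idx++ ...
}

// Backward scan: positions idx-1 .. stop are candidates.
static unsigned backward_stop (unsigned num_items, bool full_unsafe_to_concat)
{
  unsigned stop = num_items - 1;      // tight bound
  if (full_unsafe_to_concat)
    stop = 0;                         // the source spells this `1 - 1`
  return stop;                        // loop: while (idx > stop) idx-- ...
}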
@ -672,6 +682,7 @@ struct hb_ot_apply_context_t :
const GDEF &gdef;
const VariationStore &var_store;
VariationStore::cache_t *var_store_cache;
hb_set_digest_t digest;
hb_direction_t direction;
hb_mask_t lookup_mask = 1;
@ -707,6 +718,7 @@ struct hb_ot_apply_context_t :
nullptr
#endif
),
digest (buffer_->digest ()),
direction (buffer_->props.direction),
has_glyph_classes (gdef.has_glyph_classes ())
{ init_iters (); }
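The new digest member seeds the apply context with a summary of every glyph currently in the buffer, and later hunks keep it current as glyphs are substituted. A set digest is a tiny Bloom-filter-like structure: adding is cheap, and membership tests may report false positives but never false negatives. A self-contained toy version (not the real hb_set_digest_t, which uses several shifted bit masks):

#include <cstdint>

struct toy_digest_t
{
  uint64_t mask = 0;

  // Two derived bits per element keep the false-positive rate down a little.
  static uint64_t bits (uint32_t g)
  { return (1ull << (g & 63)) | (1ull << ((g >> 6) & 63)); }

  void add (uint32_t g) { mask |= bits (g); }

  // May return true for absent elements; never false for present ones.
  bool may_have (uint32_t g) const { return (mask & bits (g)) == bits (g); }

  // Overlap test: if two glyph sets intersect, their digests necessarily
  // share bits, so a zero AND proves the sets are disjoint.
  bool may_have (const toy_digest_t &o) const { return mask & o.mask; }
};

This overlap form is what the shaping loop further down relies on: proxy.accels[lookup_index].digest.may_have (c.digest) cheaply proves that no glyph in the buffer can be covered by a lookup, letting the whole lookup be skipped.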
@ -781,8 +793,10 @@ struct hb_ot_apply_context_t :
void _set_glyph_class (hb_codepoint_t glyph_index,
unsigned int class_guess = 0,
bool ligature = false,
bool component = false) const
bool component = false)
{
digest.add (glyph_index);
if (new_syllables != (unsigned) -1)
buffer->cur().syllable() = new_syllables;
@ -815,24 +829,24 @@ struct hb_ot_apply_context_t :
_hb_glyph_info_set_glyph_props (&buffer->cur(), props);
}
void replace_glyph (hb_codepoint_t glyph_index) const
void replace_glyph (hb_codepoint_t glyph_index)
{
_set_glyph_class (glyph_index);
(void) buffer->replace_glyph (glyph_index);
}
void replace_glyph_inplace (hb_codepoint_t glyph_index) const
void replace_glyph_inplace (hb_codepoint_t glyph_index)
{
_set_glyph_class (glyph_index);
buffer->cur().codepoint = glyph_index;
}
void replace_glyph_with_ligature (hb_codepoint_t glyph_index,
unsigned int class_guess) const
unsigned int class_guess)
{
_set_glyph_class (glyph_index, class_guess, true);
(void) buffer->replace_glyph (glyph_index);
}
void output_glyph_for_component (hb_codepoint_t glyph_index,
unsigned int class_guess) const
unsigned int class_guess)
{
_set_glyph_class (glyph_index, class_guess, false, true);
(void) buffer->output_glyph (glyph_index);
@ -844,7 +858,7 @@ struct hb_accelerate_subtables_context_t :
hb_dispatch_context_t<hb_accelerate_subtables_context_t>
{
template <typename Type>
static inline bool apply_to (const void *obj, OT::hb_ot_apply_context_t *c)
static inline bool apply_to (const void *obj, hb_ot_apply_context_t *c)
{
const Type *typed_obj = (const Type *) obj;
return typed_obj->apply (c);
@ -852,30 +866,30 @@ struct hb_accelerate_subtables_context_t :
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
template <typename T>
static inline auto apply_cached_ (const T *obj, OT::hb_ot_apply_context_t *c, hb_priority<1>) HB_RETURN (bool, obj->apply (c, true) )
static inline auto apply_cached_ (const T *obj, hb_ot_apply_context_t *c, hb_priority<1>) HB_RETURN (bool, obj->apply (c, true) )
template <typename T>
static inline auto apply_cached_ (const T *obj, OT::hb_ot_apply_context_t *c, hb_priority<0>) HB_RETURN (bool, obj->apply (c) )
static inline auto apply_cached_ (const T *obj, hb_ot_apply_context_t *c, hb_priority<0>) HB_RETURN (bool, obj->apply (c) )
template <typename Type>
static inline bool apply_cached_to (const void *obj, OT::hb_ot_apply_context_t *c)
static inline bool apply_cached_to (const void *obj, hb_ot_apply_context_t *c)
{
const Type *typed_obj = (const Type *) obj;
return apply_cached_ (typed_obj, c, hb_prioritize);
}
template <typename T>
static inline auto cache_func_ (const T *obj, OT::hb_ot_apply_context_t *c, bool enter, hb_priority<1>) HB_RETURN (bool, obj->cache_func (c, enter) )
static inline auto cache_func_ (const T *obj, hb_ot_apply_context_t *c, bool enter, hb_priority<1>) HB_RETURN (bool, obj->cache_func (c, enter) )
template <typename T>
static inline bool cache_func_ (const T *obj, OT::hb_ot_apply_context_t *c, bool enter, hb_priority<0>) { return false; }
static inline bool cache_func_ (const T *obj, hb_ot_apply_context_t *c, bool enter, hb_priority<0>) { return false; }
template <typename Type>
static inline bool cache_func_to (const void *obj, OT::hb_ot_apply_context_t *c, bool enter)
static inline bool cache_func_to (const void *obj, hb_ot_apply_context_t *c, bool enter)
{
const Type *typed_obj = (const Type *) obj;
return cache_func_ (typed_obj, c, enter, hb_prioritize);
}
#endif
typedef bool (*hb_apply_func_t) (const void *obj, OT::hb_ot_apply_context_t *c);
typedef bool (*hb_cache_func_t) (const void *obj, OT::hb_ot_apply_context_t *c, bool enter);
typedef bool (*hb_apply_func_t) (const void *obj, hb_ot_apply_context_t *c);
typedef bool (*hb_cache_func_t) (const void *obj, hb_ot_apply_context_t *c, bool enter);
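The apply_cached_/cache_func_ pairs above use priority-tag overloading: the hb_priority<1> overload is viable only when the subtable actually defines the cached entry point (HB_RETURN expands to a decltype-based SFINAE return type), and overload resolution otherwise falls through to the hb_priority<0> version. A minimal standalone rendering of the idiom, with toy names and nothing beyond standard C++11:

#include <cstdio>

template <unsigned N> struct priority : priority<N - 1> {};
template <>           struct priority<0> {};

// Preferred overload: viable only if T has cache_func (bool).
template <typename T>
auto cache_func_ (const T *obj, bool enter, priority<1>)
    -> decltype (obj->cache_func (enter))
{ return obj->cache_func (enter); }

// Fallback: always viable; chosen when the above drops out of resolution.
template <typename T>
bool cache_func_ (const T *, bool, priority<0>) { return false; }

template <typename T>
bool cache_func (const T *obj, bool enter)
{ return cache_func_ (obj, enter, priority<1> {}); }

struct cached   { bool cache_func (bool enter) const { return enter; } };
struct uncached {};

int main ()
{
  cached a; uncached b;
  std::printf ("%d %d\n", cache_func (&a, true), cache_func (&b, true)); // 1 0
}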
struct hb_applicable_t
{
@ -901,20 +915,20 @@ struct hb_accelerate_subtables_context_t :
obj_.get_coverage ().collect_coverage (&digest);
}
bool apply (OT::hb_ot_apply_context_t *c) const
bool apply (hb_ot_apply_context_t *c) const
{
return digest.may_have (c->buffer->cur().codepoint) && apply_func (obj, c);
}
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
bool apply_cached (OT::hb_ot_apply_context_t *c) const
bool apply_cached (hb_ot_apply_context_t *c) const
{
return digest.may_have (c->buffer->cur().codepoint) && apply_cached_func (obj, c);
}
bool cache_enter (OT::hb_ot_apply_context_t *c) const
bool cache_enter (hb_ot_apply_context_t *c) const
{
return cache_func (obj, c, true);
}
void cache_leave (OT::hb_ot_apply_context_t *c) const
void cache_leave (hb_ot_apply_context_t *c) const
{
cache_func (obj, c, false);
}
@ -988,8 +1002,8 @@ struct hb_accelerate_subtables_context_t :
};
typedef bool (*intersects_func_t) (const hb_set_t *glyphs, unsigned value, const void *data);
typedef void (*intersected_glyphs_func_t) (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs);
typedef bool (*intersects_func_t) (const hb_set_t *glyphs, unsigned value, const void *data, void *cache);
typedef void (*intersected_glyphs_func_t) (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs, void *cache);
typedef void (*collect_glyphs_func_t) (hb_set_t *glyphs, unsigned value, const void *data);
typedef bool (*match_func_t) (hb_glyph_info_t &info, unsigned value, const void *data);
@ -1012,16 +1026,25 @@ struct ChainContextApplyFuncs
};
static inline bool intersects_glyph (const hb_set_t *glyphs, unsigned value, const void *data HB_UNUSED)
static inline bool intersects_glyph (const hb_set_t *glyphs, unsigned value, const void *data HB_UNUSED, void *cache HB_UNUSED)
{
return glyphs->has (value);
}
static inline bool intersects_class (const hb_set_t *glyphs, unsigned value, const void *data)
static inline bool intersects_class (const hb_set_t *glyphs, unsigned value, const void *data, void *cache)
{
const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
return class_def.intersects_class (glyphs, value);
hb_map_t *map = (hb_map_t *) cache;
hb_codepoint_t *cached_v;
if (map->has (value, &cached_v))
return *cached_v;
bool v = class_def.intersects_class (glyphs, value);
map->set (value, v);
return v;
}
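Closure can query intersects_class many times with the same class value, so the result is now memoized in a caller-owned hb_map_t. The same shape with the standard library, assuming any expensive predicate (sketch, illustrative names):

#include <unordered_map>

// Memoize expensive (value) -> bool queries in a caller-owned cache.
template <typename Pred>
static bool memoized (std::unordered_map<unsigned, bool> &cache,
                      unsigned value, Pred &&expensive)
{
  auto it = cache.find (value);
  if (it != cache.end ()) return it->second;   // hit: skip the expensive walk
  bool v = expensive (value);
  cache.emplace (value, v);
  return v;
}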
static inline bool intersects_coverage (const hb_set_t *glyphs, unsigned value, const void *data)
static inline bool intersects_coverage (const hb_set_t *glyphs, unsigned value, const void *data, void *cache HB_UNUSED)
{
Offset16To<Coverage> coverage;
coverage = value;
@ -1029,17 +1052,36 @@ static inline bool intersects_coverage (const hb_set_t *glyphs, unsigned value,
}
static inline void intersected_glyph (const hb_set_t *glyphs HB_UNUSED, const void *data, unsigned value, hb_set_t *intersected_glyphs)
static inline void intersected_glyph (const hb_set_t *glyphs HB_UNUSED, const void *data, unsigned value, hb_set_t *intersected_glyphs, HB_UNUSED void *cache)
{
unsigned g = reinterpret_cast<const HBUINT16 *>(data)[value];
intersected_glyphs->add (g);
}
static inline void intersected_class_glyphs (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs)
using intersected_class_cache_t = hb_hashmap_t<unsigned, hb_set_t>;
static inline void intersected_class_glyphs (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs, void *cache)
{
const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
class_def.intersected_class_glyphs (glyphs, value, intersected_glyphs);
intersected_class_cache_t *map = (intersected_class_cache_t *) cache;
hb_set_t *cached_v;
if (map->has (value, &cached_v))
{
intersected_glyphs->union_ (*cached_v);
return;
}
hb_set_t v;
class_def.intersected_class_glyphs (glyphs, value, &v);
intersected_glyphs->union_ (v);
map->set (value, std::move (v));
}
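intersected_class_glyphs goes one step further and caches a whole glyph set per class value; note the std::move into the map, which donates the freshly computed set to the cache instead of copying it. A sketch of the same shape, with std::set standing in for hb_set_t and a dummy class walk:

#include <set>
#include <unordered_map>
#include <utility>

using glyph_set_t = std::set<unsigned>;

// Stand-in for ClassDef::intersected_class_glyphs (the expensive walk).
static glyph_set_t compute_class_glyphs (unsigned value)
{ return glyph_set_t { value, value + 1 }; }   // dummy result

static void intersected_glyphs_cached (std::unordered_map<unsigned, glyph_set_t> &cache,
                                       unsigned value, glyph_set_t &out)
{
  auto it = cache.find (value);
  if (it != cache.end ())
  {
    out.insert (it->second.begin (), it->second.end ());  // union with cached set
    return;
  }
  glyph_set_t v = compute_class_glyphs (value);
  out.insert (v.begin (), v.end ());
  cache.emplace (value, std::move (v));   // donate the set; no copy
}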
static inline void intersected_coverage_glyphs (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs)
static inline void intersected_coverage_glyphs (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs, HB_UNUSED void *cache)
{
Offset16To<Coverage> coverage;
coverage = value;
@ -1052,10 +1094,11 @@ static inline bool array_is_subset_of (const hb_set_t *glyphs,
unsigned int count,
const HBUINT values[],
intersects_func_t intersects_func,
const void *intersects_data)
const void *intersects_data,
void *cache)
{
for (const auto &_ : + hb_iter (values, count))
if (!intersects_func (glyphs, _, intersects_data)) return false;
if (!intersects_func (glyphs, _, intersects_data, cache)) return false;
return true;
}
@ -1492,7 +1535,8 @@ static void context_closure_recurse_lookups (hb_closure_context_t *c,
unsigned value,
ContextFormat context_format,
const void *data,
intersected_glyphs_func_t intersected_glyphs_func)
intersected_glyphs_func_t intersected_glyphs_func,
void *cache)
{
hb_set_t *covered_seq_indicies = hb_set_create ();
for (unsigned int i = 0; i < lookupCount; i++)
@ -1513,7 +1557,7 @@ static void context_closure_recurse_lookups (hb_closure_context_t *c,
pos_glyphs.add (value);
break;
case ContextFormat::ClassBasedContext:
intersected_glyphs_func (&c->parent_active_glyphs (), data, value, &pos_glyphs);
intersected_glyphs_func (&c->parent_active_glyphs (), data, value, &pos_glyphs, cache);
break;
case ContextFormat::CoverageBasedContext:
pos_glyphs.set (c->parent_active_glyphs ());
@ -1530,7 +1574,7 @@ static void context_closure_recurse_lookups (hb_closure_context_t *c,
input_value = input[seqIndex - 1];
}
intersected_glyphs_func (c->glyphs, input_data, input_value, &pos_glyphs);
intersected_glyphs_func (c->glyphs, input_data, input_value, &pos_glyphs, cache);
}
}
@ -1710,6 +1754,8 @@ struct ContextClosureLookupContext
ContextClosureFuncs funcs;
ContextFormat context_format;
const void *intersects_data;
void *intersects_cache;
void *intersected_glyphs_cache;
};
struct ContextCollectGlyphsLookupContext
@ -1732,7 +1778,9 @@ static inline bool context_intersects (const hb_set_t *glyphs,
{
return array_is_subset_of (glyphs,
inputCount ? inputCount - 1 : 0, input,
lookup_context.funcs.intersects, lookup_context.intersects_data);
lookup_context.funcs.intersects,
lookup_context.intersects_data,
lookup_context.intersects_cache);
}
template <typename HBUINT>
@ -1753,7 +1801,8 @@ static inline void context_closure_lookup (hb_closure_context_t *c,
value,
lookup_context.context_format,
lookup_context.intersects_data,
lookup_context.funcs.intersected_glyphs);
lookup_context.funcs.intersected_glyphs,
lookup_context.intersected_glyphs_cache);
}
template <typename HBUINT>
@ -1777,7 +1826,7 @@ static inline bool context_would_apply_lookup (hb_would_apply_context_t *c,
const HBUINT input[], /* Array of input values--start with second glyph */
unsigned int lookupCount HB_UNUSED,
const LookupRecord lookupRecord[] HB_UNUSED,
ContextApplyLookupContext &lookup_context)
const ContextApplyLookupContext &lookup_context)
{
return would_match_input (c,
inputCount, input,
@ -1790,7 +1839,7 @@ static inline bool context_apply_lookup (hb_ot_apply_context_t *c,
const HBUINT input[], /* Array of input values--start with second glyph */
unsigned int lookupCount,
const LookupRecord lookupRecord[],
ContextApplyLookupContext &lookup_context)
const ContextApplyLookupContext &lookup_context)
{
unsigned match_end = 0;
unsigned match_positions[HB_MAX_CONTEXT_LENGTH];
@ -1858,7 +1907,7 @@ struct Rule
}
bool would_apply (hb_would_apply_context_t *c,
ContextApplyLookupContext &lookup_context) const
const ContextApplyLookupContext &lookup_context) const
{
const auto &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
(inputZ.as_array (inputCount ? inputCount - 1 : 0));
@ -1869,7 +1918,7 @@ struct Rule
}
bool apply (hb_ot_apply_context_t *c,
ContextApplyLookupContext &lookup_context) const
const ContextApplyLookupContext &lookup_context) const
{
TRACE_APPLY (this);
const auto &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
@ -1989,7 +2038,7 @@ struct RuleSet
}
bool would_apply (hb_would_apply_context_t *c,
ContextApplyLookupContext &lookup_context) const
const ContextApplyLookupContext &lookup_context) const
{
return
+ hb_iter (rule)
@ -2000,7 +2049,7 @@ struct RuleSet
}
bool apply (hb_ot_apply_context_t *c,
ContextApplyLookupContext &lookup_context) const
const ContextApplyLookupContext &lookup_context) const
{
TRACE_APPLY (this);
return_trace (
@ -2108,7 +2157,7 @@ struct ContextFormat1_4
void closure_lookups (hb_closure_lookups_context_t *c) const
{
struct ContextClosureLookupContext lookup_context = {
{intersects_glyph, intersected_glyph},
{intersects_glyph, nullptr},
ContextFormat::SimpleContext,
nullptr
};
@ -2220,10 +2269,12 @@ struct ContextFormat2_5
const ClassDef &class_def = this+classDef;
hb_map_t cache;
struct ContextClosureLookupContext lookup_context = {
{intersects_class, intersected_class_glyphs},
{intersects_class, nullptr},
ContextFormat::ClassBasedContext,
&class_def
&class_def,
&cache
};
hb_set_t retained_coverage_glyphs;
@ -2259,10 +2310,14 @@ struct ContextFormat2_5
const ClassDef &class_def = this+classDef;
hb_map_t cache;
intersected_class_cache_t intersected_cache;
struct ContextClosureLookupContext lookup_context = {
{intersects_class, intersected_class_glyphs},
ContextFormat::ClassBasedContext,
&class_def
&class_def,
&cache,
&intersected_cache
};
+ hb_enumerate (ruleSet)
@ -2286,10 +2341,12 @@ struct ContextFormat2_5
const ClassDef &class_def = this+classDef;
hb_map_t cache;
struct ContextClosureLookupContext lookup_context = {
{intersects_class, intersected_class_glyphs},
{intersects_class, nullptr},
ContextFormat::ClassBasedContext,
&class_def
&class_def,
&cache
};
+ hb_iter (ruleSet)
@ -2407,6 +2464,7 @@ struct ContextFormat2_5
const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
bool ret = true;
int non_zero_index = -1, index = 0;
auto snapshot = c->serializer->snapshot();
for (const auto& _ : + hb_enumerate (ruleSet)
| hb_filter (klass_map, hb_first))
{
@ -2418,8 +2476,10 @@ struct ContextFormat2_5
}
if (coverage_glyph_classes.has (_.first) &&
o->serialize_subset (c, _.second, this, lookup_map, &klass_map))
o->serialize_subset (c, _.second, this, lookup_map, &klass_map)) {
non_zero_index = index;
snapshot = c->serializer->snapshot();
}
index++;
}
@ -2433,6 +2493,7 @@ struct ContextFormat2_5
out->ruleSet.pop ();
index--;
}
c->serializer->revert (snapshot);
return_trace (bool (out->ruleSet));
}
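The subset hunk above keeps a serializer snapshot that is advanced only after each successfully written, non-empty rule set; reverting to it at the end trims whatever trailing empty offsets were speculatively emitted. The same transaction-style idea on a plain byte buffer (sketch, illustrative names):

#include <cstdint>
#include <vector>

struct toy_serializer_t
{
  std::vector<uint8_t> buf;
  size_t snapshot () const { return buf.size (); }
  void   revert (size_t snap) { buf.resize (snap); }  // drop everything after snap
};

// Emit records; keep the buffer only up to the last one that mattered.
template <typename It, typename Emit>
static void emit_trimmed (toy_serializer_t &s, It first, It last, Emit emit)
{
  size_t keep = s.snapshot ();
  for (; first != last; ++first)
    if (emit (s, *first))      // wrote something non-empty?
      keep = s.snapshot ();    // advance the high-water mark
  s.revert (keep);             // trim trailing empties in one go
}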
@ -2469,7 +2530,7 @@ struct ContextFormat3
return false;
struct ContextClosureLookupContext lookup_context = {
{intersects_coverage, intersected_coverage_glyphs},
{intersects_coverage, nullptr},
ContextFormat::CoverageBasedContext,
this
};
@ -2655,6 +2716,8 @@ struct ChainContextClosureLookupContext
ContextClosureFuncs funcs;
ContextFormat context_format;
const void *intersects_data[3];
void *intersects_cache[3];
void *intersected_glyphs_cache;
};
struct ChainContextCollectGlyphsLookupContext
@ -2681,13 +2744,19 @@ static inline bool chain_context_intersects (const hb_set_t *glyphs,
{
return array_is_subset_of (glyphs,
backtrackCount, backtrack,
lookup_context.funcs.intersects, lookup_context.intersects_data[0])
lookup_context.funcs.intersects,
lookup_context.intersects_data[0],
lookup_context.intersects_cache[0])
&& array_is_subset_of (glyphs,
inputCount ? inputCount - 1 : 0, input,
lookup_context.funcs.intersects, lookup_context.intersects_data[1])
lookup_context.funcs.intersects,
lookup_context.intersects_data[1],
lookup_context.intersects_cache[1])
&& array_is_subset_of (glyphs,
lookaheadCount, lookahead,
lookup_context.funcs.intersects, lookup_context.intersects_data[2]);
lookup_context.funcs.intersects,
lookup_context.intersects_data[2],
lookup_context.intersects_cache[2]);
}
template <typename HBUINT>
@ -2714,7 +2783,8 @@ static inline void chain_context_closure_lookup (hb_closure_context_t *c,
value,
lookup_context.context_format,
lookup_context.intersects_data[1],
lookup_context.funcs.intersected_glyphs);
lookup_context.funcs.intersected_glyphs,
lookup_context.intersected_glyphs_cache);
}
template <typename HBUINT>
@ -2752,7 +2822,7 @@ static inline bool chain_context_would_apply_lookup (hb_would_apply_context_t *c
const HBUINT lookahead[] HB_UNUSED,
unsigned int lookupCount HB_UNUSED,
const LookupRecord lookupRecord[] HB_UNUSED,
ChainContextApplyLookupContext &lookup_context)
const ChainContextApplyLookupContext &lookup_context)
{
return (c->zero_context ? !backtrackCount && !lookaheadCount : true)
&& would_match_input (c,
@ -2770,7 +2840,7 @@ static inline bool chain_context_apply_lookup (hb_ot_apply_context_t *c,
const HBUINT lookahead[],
unsigned int lookupCount,
const LookupRecord lookupRecord[],
ChainContextApplyLookupContext &lookup_context)
const ChainContextApplyLookupContext &lookup_context)
{
unsigned end_index = c->buffer->idx;
unsigned match_end = 0;
@ -2864,7 +2934,7 @@ struct ChainRule
}
bool would_apply (hb_would_apply_context_t *c,
ChainContextApplyLookupContext &lookup_context) const
const ChainContextApplyLookupContext &lookup_context) const
{
const auto &input = StructAfter<decltype (inputX)> (backtrack);
const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
@ -2876,7 +2946,8 @@ struct ChainRule
lookup.arrayZ, lookup_context);
}
bool apply (hb_ot_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
bool apply (hb_ot_apply_context_t *c,
const ChainContextApplyLookupContext &lookup_context) const
{
TRACE_APPLY (this);
const auto &input = StructAfter<decltype (inputX)> (backtrack);
@ -3042,7 +3113,8 @@ struct ChainRuleSet
;
}
bool would_apply (hb_would_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
bool would_apply (hb_would_apply_context_t *c,
const ChainContextApplyLookupContext &lookup_context) const
{
return
+ hb_iter (rule)
@ -3052,7 +3124,8 @@ struct ChainRuleSet
;
}
bool apply (hb_ot_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
bool apply (hb_ot_apply_context_t *c,
const ChainContextApplyLookupContext &lookup_context) const
{
TRACE_APPLY (this);
return_trace (
@ -3166,7 +3239,7 @@ struct ChainContextFormat1_4
void closure_lookups (hb_closure_lookups_context_t *c) const
{
struct ChainContextClosureLookupContext lookup_context = {
{intersects_glyph, intersected_glyph},
{intersects_glyph, nullptr},
ContextFormat::SimpleContext,
{nullptr, nullptr, nullptr}
};
@ -3278,12 +3351,14 @@ struct ChainContextFormat2_5
const ClassDef &input_class_def = this+inputClassDef;
const ClassDef &lookahead_class_def = this+lookaheadClassDef;
hb_map_t caches[3] = {};
struct ChainContextClosureLookupContext lookup_context = {
{intersects_class, intersected_class_glyphs},
{intersects_class, nullptr},
ContextFormat::ClassBasedContext,
{&backtrack_class_def,
&input_class_def,
&lookahead_class_def}
&lookahead_class_def},
{&caches[0], &caches[1], &caches[2]}
};
hb_set_t retained_coverage_glyphs;
@ -3321,12 +3396,16 @@ struct ChainContextFormat2_5
const ClassDef &input_class_def = this+inputClassDef;
const ClassDef &lookahead_class_def = this+lookaheadClassDef;
hb_map_t caches[3] = {};
intersected_class_cache_t intersected_cache;
struct ChainContextClosureLookupContext lookup_context = {
{intersects_class, intersected_class_glyphs},
ContextFormat::ClassBasedContext,
{&backtrack_class_def,
&input_class_def,
&lookahead_class_def}
&lookahead_class_def},
{&caches[0], &caches[1], &caches[2]},
&intersected_cache
};
+ hb_enumerate (ruleSet)
@ -3352,12 +3431,14 @@ struct ChainContextFormat2_5
const ClassDef &input_class_def = this+inputClassDef;
const ClassDef &lookahead_class_def = this+lookaheadClassDef;
hb_map_t caches[3] = {};
struct ChainContextClosureLookupContext lookup_context = {
{intersects_class, intersected_class_glyphs},
{intersects_class, nullptr},
ContextFormat::ClassBasedContext,
{&backtrack_class_def,
&input_class_def,
&lookahead_class_def}
&lookahead_class_def},
{&caches[0], &caches[1], &caches[2]}
};
+ hb_iter (ruleSet)
@ -3587,7 +3668,7 @@ struct ChainContextFormat3
const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
struct ChainContextClosureLookupContext lookup_context = {
{intersects_coverage, intersected_coverage_glyphs},
{intersects_coverage, nullptr},
ContextFormat::CoverageBasedContext,
{this, this, this}
};
@ -3938,13 +4019,14 @@ struct hb_ot_layout_lookup_accelerator_t
template <typename TLookup>
void init (const TLookup &lookup)
{
digest.init ();
lookup.collect_coverage (&digest);
subtables.init ();
OT::hb_accelerate_subtables_context_t c_accelerate_subtables (subtables);
hb_accelerate_subtables_context_t c_accelerate_subtables (subtables);
lookup.dispatch (&c_accelerate_subtables);
digest.init ();
for (auto& subtable : hb_iter (subtables))
digest.add (subtable.digest);
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
cache_user_idx = c_accelerate_subtables.cache_user_idx;
for (unsigned i = 0; i < subtables.length; i++)
@ -3962,21 +4044,25 @@ struct hb_ot_layout_lookup_accelerator_t
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
if (use_cache)
{
for (unsigned int i = 0; i < subtables.length; i++)
if (subtables[i].apply_cached (c))
return true;
return
+ hb_iter (subtables)
| hb_map ([&c] (const hb_accelerate_subtables_context_t::hb_applicable_t &_) { return _.apply_cached (c); })
| hb_any
;
}
else
#endif
{
for (unsigned int i = 0; i < subtables.length; i++)
if (subtables[i].apply (c))
return true;
return
+ hb_iter (subtables)
| hb_map ([&c] (const hb_accelerate_subtables_context_t::hb_applicable_t &_) { return _.apply (c); })
| hb_any
;
}
return false;
}
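The rewritten apply() expresses the subtable scan as an iterator pipeline; hb_any short-circuits on the first hit, exactly like std::any_of, so the two branches above are equivalent to this sketch (toy subtable type, not the accelerator):

#include <algorithm>
#include <vector>

struct subtable_t
{
  bool coverage_hit;                                  // stand-in for the digest test
  bool apply (int *) const        { return coverage_hit; }
  bool apply_cached (int *) const { return coverage_hit; }
};

// Stops at the first subtable that applies, like `| hb_any`.
static bool apply_all (const std::vector<subtable_t> &subtables, int *c, bool use_cache)
{
  auto pred = [&] (const subtable_t &s)
  { return use_cache ? s.apply_cached (c) : s.apply (c); };
  return std::any_of (subtables.begin (), subtables.end (), pred);
}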
bool cache_enter (OT::hb_ot_apply_context_t *c) const
bool cache_enter (hb_ot_apply_context_t *c) const
{
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
return cache_user_idx != (unsigned) -1 &&
@ -3985,7 +4071,7 @@ struct hb_ot_layout_lookup_accelerator_t
return false;
#endif
}
void cache_leave (OT::hb_ot_apply_context_t *c) const
void cache_leave (hb_ot_apply_context_t *c) const
{
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
subtables[cache_user_idx].cache_leave (c);
@ -3993,8 +4079,8 @@ struct hb_ot_layout_lookup_accelerator_t
}
private:
hb_set_digest_t digest;
private:
hb_accelerate_subtables_context_t::array_t subtables;
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
unsigned cache_user_idx = (unsigned) -1;
@ -4255,11 +4341,11 @@ struct GSUBGPOS
hb_set_t *lookup_indexes /* IN/OUT */) const
{
hb_set_t visited_lookups, inactive_lookups;
OT::hb_closure_lookups_context_t c (face, glyphs, &visited_lookups, &inactive_lookups);
hb_closure_lookups_context_t c (face, glyphs, &visited_lookups, &inactive_lookups);
c.set_recurse_func (TLookup::template dispatch_recurse_func<hb_closure_lookups_context_t>);
for (unsigned lookup_index : + hb_iter (lookup_indexes))
for (unsigned lookup_index : *lookup_indexes)
reinterpret_cast<const TLookup &> (get_lookup (lookup_index)).closure_lookups (&c, lookup_index);
hb_set_union (lookup_indexes, &visited_lookups);
@ -4301,7 +4387,7 @@ struct GSUBGPOS
}
#endif
for (unsigned i : feature_indices->iter())
for (unsigned i : hb_iter (feature_indices))
{
hb_tag_t tag = get_feature_tag (i);
if (tag == HB_TAG ('p', 'r', 'e', 'f'))


@ -1465,56 +1465,6 @@ hb_ot_layout_substitute_start (hb_font_t *font,
_hb_ot_layout_set_glyph_props (font, buffer);
}
void
hb_ot_layout_delete_glyphs_inplace (hb_buffer_t *buffer,
bool (*filter) (const hb_glyph_info_t *info))
{
/* Merge clusters and delete filtered glyphs.
* NOTE! We can't use out-buffer as we have positioning data. */
unsigned int j = 0;
unsigned int count = buffer->len;
hb_glyph_info_t *info = buffer->info;
hb_glyph_position_t *pos = buffer->pos;
for (unsigned int i = 0; i < count; i++)
{
if (filter (&info[i]))
{
/* Merge clusters.
* Same logic as buffer->delete_glyph(), but for in-place removal. */
unsigned int cluster = info[i].cluster;
if (i + 1 < count && cluster == info[i + 1].cluster)
continue; /* Cluster survives; do nothing. */
if (j)
{
/* Merge cluster backward. */
if (cluster < info[j - 1].cluster)
{
unsigned int mask = info[i].mask;
unsigned int old_cluster = info[j - 1].cluster;
for (unsigned k = j; k && info[k - 1].cluster == old_cluster; k--)
buffer->set_cluster (info[k - 1], cluster, mask);
}
continue;
}
if (i + 1 < count)
buffer->merge_clusters (i, i + 2); /* Merge cluster forward. */
continue;
}
if (j != i)
{
info[j] = info[i];
pos[j] = pos[i];
}
j++;
}
buffer->len = j;
}
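The function removed here lives on as hb_buffer_t::delete_glyphs_inplace; cluster merging aside, its core is a classic two-index compaction over the parallel info/pos arrays. A minimal sketch of that skeleton (toy types):

#include <cstddef>
#include <vector>

struct info_t { unsigned cluster; };
struct pos_t  { int x, y; };

// Remove entries where filter() is true, compacting both arrays in place.
template <typename Filter>
static void delete_inplace (std::vector<info_t> &info, std::vector<pos_t> &pos, Filter filter)
{
  size_t j = 0, count = info.size ();
  for (size_t i = 0; i < count; i++)
  {
    if (filter (info[i]))
      continue;                 // the real code first merges clusters here
    if (j != i)
    {
      info[j] = info[i];
      pos[j]  = pos[i];
    }
    j++;
  }
  info.resize (j);
  pos.resize (j);
}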
/**
* hb_ot_layout_lookup_substitute_closure:
* @face: #hb_face_t to work upon
@ -1867,7 +1817,7 @@ apply_forward (OT::hb_ot_apply_context_t *c,
while (buffer->idx < buffer->len && buffer->successful)
{
bool applied = false;
if (accel.may_have (buffer->cur().codepoint) &&
if (accel.digest.may_have (buffer->cur().codepoint) &&
(buffer->cur().mask & c->lookup_mask) &&
c->check_glyph_property (&buffer->cur(), c->lookup_props))
{
@ -1894,7 +1844,7 @@ apply_backward (OT::hb_ot_apply_context_t *c,
hb_buffer_t *buffer = c->buffer;
do
{
if (accel.may_have (buffer->cur().codepoint) &&
if (accel.digest.may_have (buffer->cur().codepoint) &&
(buffer->cur().mask & c->lookup_mask) &&
c->check_glyph_property (&buffer->cur(), c->lookup_props))
ret |= accel.apply (c, false);
@ -1908,15 +1858,16 @@ apply_backward (OT::hb_ot_apply_context_t *c,
}
template <typename Proxy>
static inline void
static inline bool
apply_string (OT::hb_ot_apply_context_t *c,
const typename Proxy::Lookup &lookup,
const OT::hb_ot_layout_lookup_accelerator_t &accel)
{
bool ret = false;
hb_buffer_t *buffer = c->buffer;
if (unlikely (!buffer->len || !c->lookup_mask))
return;
return ret;
c->set_lookup_props (lookup.get_props ());
@ -1927,7 +1878,7 @@ apply_string (OT::hb_ot_apply_context_t *c,
buffer->clear_output ();
buffer->idx = 0;
apply_forward (c, accel);
ret = apply_forward (c, accel);
if (!Proxy::always_inplace)
buffer->sync ();
@ -1937,8 +1888,10 @@ apply_string (OT::hb_ot_apply_context_t *c,
/* in-place backward substitution/positioning */
assert (!buffer->have_output);
buffer->idx = buffer->len - 1;
apply_backward (c, accel);
ret = apply_backward (c, accel);
}
return ret;
}
template <typename Proxy>
@ -1957,23 +1910,42 @@ inline void hb_ot_map_t::apply (const Proxy &proxy,
const stage_map_t *stage = &stages[table_index][stage_index];
for (; i < stage->last_lookup; i++)
{
unsigned int lookup_index = lookups[table_index][i].index;
if (!buffer->message (font, "start lookup %d", lookup_index)) continue;
c.set_lookup_index (lookup_index);
c.set_lookup_mask (lookups[table_index][i].mask);
c.set_auto_zwj (lookups[table_index][i].auto_zwj);
c.set_auto_zwnj (lookups[table_index][i].auto_zwnj);
c.set_random (lookups[table_index][i].random);
c.set_per_syllable (lookups[table_index][i].per_syllable);
auto &lookup = lookups[table_index][i];
apply_string<Proxy> (&c,
proxy.table.get_lookup (lookup_index),
proxy.accels[lookup_index]);
(void) buffer->message (font, "end lookup %d", lookup_index);
unsigned int lookup_index = lookup.index;
if (!buffer->message (font, "start lookup %d feature '%c%c%c%c'", lookup_index, HB_UNTAG (lookup.feature_tag))) continue;
/* c.digest is a digest of all the current glyphs in the buffer
* (plus some past glyphs).
*
* Only try applying the lookup if there is any overlap. */
if (proxy.accels[lookup_index].digest.may_have (c.digest))
{
c.set_lookup_index (lookup_index);
c.set_lookup_mask (lookup.mask);
c.set_auto_zwj (lookup.auto_zwj);
c.set_auto_zwnj (lookup.auto_zwnj);
c.set_random (lookup.random);
c.set_per_syllable (lookup.per_syllable);
apply_string<Proxy> (&c,
proxy.table.get_lookup (lookup_index),
proxy.accels[lookup_index]);
}
else
(void) buffer->message (font, "skipped lookup %d feature '%c%c%c%c' because no glyph matches", lookup_index, HB_UNTAG (lookup.feature_tag));
(void) buffer->message (font, "end lookup %d feature '%c%c%c%c'", lookup_index, HB_UNTAG (lookup.feature_tag));
}
if (stage->pause_func)
stage->pause_func (plan, font, buffer);
{
if (stage->pause_func (plan, font, buffer))
{
/* Refresh working buffer digest since buffer changed. */
c.digest = buffer->digest ();
}
}
}
}


@ -102,10 +102,6 @@ HB_INTERNAL void
hb_ot_layout_substitute_start (hb_font_t *font,
hb_buffer_t *buffer);
HB_INTERNAL void
hb_ot_layout_delete_glyphs_inplace (hb_buffer_t *buffer,
bool (*filter) (const hb_glyph_info_t *info));
namespace OT {
struct hb_ot_apply_context_t;
struct hb_ot_layout_lookup_accelerator_t;
@ -552,7 +548,7 @@ _hb_glyph_info_clear_substituted (hb_glyph_info_t *info)
info->glyph_props() &= ~(HB_OT_LAYOUT_GLYPH_PROPS_SUBSTITUTED);
}
static inline void
static inline bool
_hb_clear_substitution_flags (const hb_ot_shape_plan_t *plan HB_UNUSED,
hb_font_t *font HB_UNUSED,
hb_buffer_t *buffer)
@ -561,6 +557,7 @@ _hb_clear_substitution_flags (const hb_ot_shape_plan_t *plan HB_UNUSED,
unsigned int count = buffer->len;
for (unsigned int i = 0; i < count; i++)
_hb_glyph_info_clear_substituted (&info[i]);
return false;
}


@ -45,7 +45,7 @@ void hb_ot_map_t::collect_lookups (unsigned int table_index, hb_set_t *lookups_o
hb_ot_map_builder_t::hb_ot_map_builder_t (hb_face_t *face_,
const hb_segment_properties_t &props_)
{
memset (this, 0, sizeof (*this));
hb_memset (this, 0, sizeof (*this));
feature_infos.init ();
for (unsigned int table_index = 0; table_index < 2; table_index++)
@ -133,7 +133,8 @@ hb_ot_map_builder_t::add_lookups (hb_ot_map_t &m,
bool auto_zwnj,
bool auto_zwj,
bool random,
bool per_syllable)
bool per_syllable,
hb_tag_t feature_tag)
{
unsigned int lookup_indices[32];
unsigned int offset, len;
@ -162,6 +163,7 @@ hb_ot_map_builder_t::add_lookups (hb_ot_map_t &m,
lookup->auto_zwj = auto_zwj;
lookup->random = random;
lookup->per_syllable = per_syllable;
lookup->feature_tag = feature_tag;
}
offset += len;
@ -212,24 +214,26 @@ hb_ot_map_builder_t::compile (hb_ot_map_t &m,
if (feature_infos.length)
{
feature_infos.qsort ();
auto *f = feature_infos.arrayZ;
unsigned int j = 0;
for (unsigned int i = 1; i < feature_infos.length; i++)
if (feature_infos[i].tag != feature_infos[j].tag)
feature_infos[++j] = feature_infos[i];
unsigned count = feature_infos.length;
for (unsigned int i = 1; i < count; i++)
if (f[i].tag != f[j].tag)
f[++j] = f[i];
else {
if (feature_infos[i].flags & F_GLOBAL) {
feature_infos[j].flags |= F_GLOBAL;
feature_infos[j].max_value = feature_infos[i].max_value;
feature_infos[j].default_value = feature_infos[i].default_value;
if (f[i].flags & F_GLOBAL) {
f[j].flags |= F_GLOBAL;
f[j].max_value = f[i].max_value;
f[j].default_value = f[i].default_value;
} else {
if (feature_infos[j].flags & F_GLOBAL)
feature_infos[j].flags ^= F_GLOBAL;
feature_infos[j].max_value = hb_max (feature_infos[j].max_value, feature_infos[i].max_value);
if (f[j].flags & F_GLOBAL)
f[j].flags ^= F_GLOBAL;
f[j].max_value = hb_max (f[j].max_value, f[i].max_value);
/* Inherit default_value from j */
}
feature_infos[j].flags |= (feature_infos[i].flags & F_HAS_FALLBACK);
feature_infos[j].stage[0] = hb_min (feature_infos[j].stage[0], feature_infos[i].stage[0]);
feature_infos[j].stage[1] = hb_min (feature_infos[j].stage[1], feature_infos[i].stage[1]);
f[j].flags |= (f[i].flags & F_HAS_FALLBACK);
f[j].stage[0] = hb_min (f[j].stage[0], f[i].stage[0]);
f[j].stage[1] = hb_min (f[j].stage[1], f[i].stage[1]);
}
feature_infos.shrink (j + 1);
}
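The loop above deduplicates the sorted feature list in place, merging rather than discarding duplicates: a global entry overwrites the value fields, non-global entries take the maximum value (and clear the global flag), and fallback flags and stage bounds are combined. A compact sketch of the same sort-then-merge pass (illustrative struct, one stage field instead of two):

#include <algorithm>
#include <vector>

struct feat_t { unsigned tag; bool global; unsigned max_value; unsigned stage; };

static void merge_duplicates (std::vector<feat_t> &f)
{
  std::sort (f.begin (), f.end (),
             [] (const feat_t &a, const feat_t &b) { return a.tag < b.tag; });
  size_t j = 0;
  for (size_t i = 1; i < f.size (); i++)
  {
    if (f[i].tag != f[j].tag) { f[++j] = f[i]; continue; }
    if (f[i].global) { f[j].global = true;  f[j].max_value = f[i].max_value; }
    else             { f[j].global = false; f[j].max_value = std::max (f[j].max_value, f[i].max_value); }
    f[j].stage = std::min (f[j].stage, f[i].stage);   // earliest stage wins
  }
  if (!f.empty ()) f.resize (j + 1);
}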
@ -239,7 +243,8 @@ hb_ot_map_builder_t::compile (hb_ot_map_t &m,
static_assert ((!(HB_GLYPH_FLAG_DEFINED & (HB_GLYPH_FLAG_DEFINED + 1))), "");
unsigned int next_bit = hb_popcount (HB_GLYPH_FLAG_DEFINED) + 1;
for (unsigned int i = 0; i < feature_infos.length; i++)
unsigned count = feature_infos.length;
for (unsigned int i = 0; i < count; i++)
{
const feature_info_t *info = &feature_infos[i];
@ -308,7 +313,7 @@ hb_ot_map_builder_t::compile (hb_ot_map_t &m,
map->_1_mask = (1u << map->shift) & map->mask;
map->needs_fallback = !found;
}
feature_infos.shrink (0); /* Done with these */
//feature_infos.shrink (0); /* Done with these */
add_gsub_pause (nullptr);
@ -317,6 +322,7 @@ hb_ot_map_builder_t::compile (hb_ot_map_t &m,
for (unsigned int table_index = 0; table_index < 2; table_index++)
{
/* Collect lookup indices for features */
auto &lookups = m.lookups[table_index];
unsigned int stage_index = 0;
unsigned int last_num_lookups = 0;
@ -329,36 +335,39 @@ hb_ot_map_builder_t::compile (hb_ot_map_t &m,
key.variations_index[table_index],
global_bit_mask);
for (unsigned i = 0; i < m.features.length; i++)
if (m.features[i].stage[table_index] == stage)
add_lookups (m, table_index,
m.features[i].index[table_index],
key.variations_index[table_index],
m.features[i].mask,
m.features[i].auto_zwnj,
m.features[i].auto_zwj,
m.features[i].random,
m.features[i].per_syllable);
/* Sort lookups and merge duplicates */
if (last_num_lookups < m.lookups[table_index].length)
for (auto &feature : m.features)
{
m.lookups[table_index].qsort (last_num_lookups, m.lookups[table_index].length);
unsigned int j = last_num_lookups;
for (unsigned int i = j + 1; i < m.lookups[table_index].length; i++)
if (m.lookups[table_index][i].index != m.lookups[table_index][j].index)
m.lookups[table_index][++j] = m.lookups[table_index][i];
else
{
m.lookups[table_index][j].mask |= m.lookups[table_index][i].mask;
m.lookups[table_index][j].auto_zwnj &= m.lookups[table_index][i].auto_zwnj;
m.lookups[table_index][j].auto_zwj &= m.lookups[table_index][i].auto_zwj;
}
m.lookups[table_index].shrink (j + 1);
if (feature.stage[table_index] == stage)
add_lookups (m, table_index,
feature.index[table_index],
key.variations_index[table_index],
feature.mask,
feature.auto_zwnj,
feature.auto_zwj,
feature.random,
feature.per_syllable,
feature.tag);
}
last_num_lookups = m.lookups[table_index].length;
/* Sort lookups and merge duplicates */
if (last_num_lookups < lookups.length)
{
lookups.as_array ().sub_array (last_num_lookups, lookups.length - last_num_lookups).qsort ();
unsigned int j = last_num_lookups;
for (unsigned int i = j + 1; i < lookups.length; i++)
if (lookups.arrayZ[i].index != lookups.arrayZ[j].index)
lookups.arrayZ[++j] = lookups.arrayZ[i];
else
{
lookups.arrayZ[j].mask |= lookups.arrayZ[i].mask;
lookups.arrayZ[j].auto_zwnj &= lookups.arrayZ[i].auto_zwnj;
lookups.arrayZ[j].auto_zwj &= lookups.arrayZ[i].auto_zwj;
}
lookups.shrink (j + 1);
}
last_num_lookups = lookups.length;
if (stage_index < stages[table_index].length && stages[table_index][stage_index].index == stage) {
hb_ot_map_t::stage_map_t *stage_map = m.stages[table_index].push ();


@ -69,6 +69,7 @@ struct hb_ot_map_t
unsigned short random : 1;
unsigned short per_syllable : 1;
hb_mask_t mask;
hb_tag_t feature_tag;
HB_INTERNAL static int cmp (const void *pa, const void *pb)
{
@ -78,7 +79,9 @@ struct hb_ot_map_t
}
};
typedef void (*pause_func_t) (const struct hb_ot_shape_plan_t *plan, hb_font_t *font, hb_buffer_t *buffer);
/* Pause functions return true if new glyph indices might have been
* added to the buffer. This is used to update buffer digest. */
typedef bool (*pause_func_t) (const struct hb_ot_shape_plan_t *plan, hb_font_t *font, hb_buffer_t *buffer);
struct stage_map_t {
unsigned int last_lookup; /* Cumulative */
@ -87,13 +90,13 @@ struct hb_ot_map_t
void init ()
{
memset (this, 0, sizeof (*this));
hb_memset (this, 0, sizeof (*this));
features.init ();
features.init0 ();
for (unsigned int table_index = 0; table_index < 2; table_index++)
{
lookups[table_index].init ();
stages[table_index].init ();
lookups[table_index].init0 ();
stages[table_index].init0 ();
}
}
void fini ()
@ -239,7 +242,8 @@ struct hb_ot_map_builder_t
bool auto_zwnj = true,
bool auto_zwj = true,
bool random = false,
bool per_syllable = false);
bool per_syllable = false,
hb_tag_t feature_tag = HB_TAG(' ',' ',' ',' '));
struct feature_info_t {
hb_tag_t tag;


@ -77,11 +77,11 @@ struct MathConstants
HBINT16 *p = c->allocate_size<HBINT16> (HBINT16::static_size * 2);
if (unlikely (!p)) return_trace (nullptr);
memcpy (p, percentScaleDown, HBINT16::static_size * 2);
hb_memcpy (p, percentScaleDown, HBINT16::static_size * 2);
HBUINT16 *m = c->allocate_size<HBUINT16> (HBUINT16::static_size * 2);
if (unlikely (!m)) return_trace (nullptr);
memcpy (m, minHeight, HBUINT16::static_size * 2);
hb_memcpy (m, minHeight, HBUINT16::static_size * 2);
unsigned count = ARRAY_LENGTH (mathValueRecords);
for (unsigned i = 0; i < count; i++)
@ -786,7 +786,7 @@ struct MathGlyphAssembly
if (parts_count)
{
int64_t mult = font->dir_mult (direction);
for (auto _ : hb_zip (partRecords.sub_array (start_offset, parts_count),
for (auto _ : hb_zip (partRecords.as_array ().sub_array (start_offset, parts_count),
hb_array (parts, *parts_count)))
_.first.extract (_.second, mult, font);
}
@ -855,7 +855,7 @@ struct MathGlyphConstruction
if (variants_count)
{
int64_t mult = font->dir_mult (direction);
for (auto _ : hb_zip (mathGlyphVariantRecord.sub_array (start_offset, variants_count),
for (auto _ : hb_zip (mathGlyphVariantRecord.as_array ().sub_array (start_offset, variants_count),
hb_array (variants, *variants_count)))
_.second = {_.first.variantGlyph, font->em_mult (_.first.advanceMeasurement, mult)};
}


@ -84,7 +84,7 @@ struct meta
{
if (count)
{
+ table->dataMaps.sub_array (start_offset, count)
+ table->dataMaps.as_array ().sub_array (start_offset, count)
| hb_map (&DataMap::get_tag)
| hb_map ([](hb_tag_t tag) { return (hb_ot_meta_tag_t) tag; })
| hb_sink (hb_array (entries, *count))


@ -30,10 +30,55 @@
#include "hb-open-type.hh"
#include "hb-ot-name-language.hh"
#include "hb-aat-layout.hh"
#include "hb-utf.hh"
namespace OT {
template <typename in_utf_t, typename out_utf_t>
inline unsigned int
hb_ot_name_convert_utf (hb_bytes_t bytes,
unsigned int *text_size /* IN/OUT */,
typename out_utf_t::codepoint_t *text /* OUT */)
{
unsigned int src_len = bytes.length / sizeof (typename in_utf_t::codepoint_t);
const typename in_utf_t::codepoint_t *src = (const typename in_utf_t::codepoint_t *) bytes.arrayZ;
const typename in_utf_t::codepoint_t *src_end = src + src_len;
typename out_utf_t::codepoint_t *dst = text;
hb_codepoint_t unicode;
const hb_codepoint_t replacement = HB_BUFFER_REPLACEMENT_CODEPOINT_DEFAULT;
if (text_size && *text_size)
{
(*text_size)--; /* Save room for NUL-termination. */
const typename out_utf_t::codepoint_t *dst_end = text + *text_size;
while (src < src_end && dst < dst_end)
{
const typename in_utf_t::codepoint_t *src_next = in_utf_t::next (src, src_end, &unicode, replacement);
typename out_utf_t::codepoint_t *dst_next = out_utf_t::encode (dst, dst_end, unicode);
if (dst_next == dst)
break; /* Out-of-room. */
dst = dst_next;
src = src_next;
}
*text_size = dst - text;
*dst = 0; /* NUL-terminate. */
}
/* Accumulate length of rest. */
unsigned int dst_len = dst - text;
while (src < src_end)
{
src = in_utf_t::next (src, src_end, &unicode, replacement);
dst_len += out_utf_t::encode_len (unicode);
}
return dst_len;
}
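The converter supports the usual two-pass contract: call it with no output buffer to learn the required length, then allocate length + 1 code units (it NUL-terminates and reports the written size back through text_size). A hedged usage sketch mirroring what NameRecord::copy does further down for UTF-8 to UTF-16BE overrides; hb_calloc/hb_free and the utf types are the internal names used in this file:

// Pass 1: text == nullptr, text_size == nullptr -> full converted length, no writes.
unsigned text_size = OT::hb_ot_name_convert_utf<hb_utf8_t, hb_utf16_be_t> (bytes, nullptr, nullptr);
text_size++;  // room for the NUL terminator expected by pass 2

// Pass 2: convert for real; on return text_size holds the code units written.
hb_utf16_be_t::codepoint_t *buf =
  (hb_utf16_be_t::codepoint_t *) hb_calloc (text_size, sizeof (*buf));
if (buf)
{
  OT::hb_ot_name_convert_utf<hb_utf8_t, hb_utf16_be_t> (bytes, &text_size, buf);
  /* ... use buf[0 .. text_size) ... */
  hb_free (buf);
}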
#define entry_score var.u16[0]
#define entry_index var.u16[1]
@ -97,12 +142,68 @@ struct NameRecord
return UNSUPPORTED;
}
NameRecord* copy (hb_serialize_context_t *c, const void *base) const
NameRecord* copy (hb_serialize_context_t *c, const void *base
#ifdef HB_EXPERIMENTAL_API
, const hb_hashmap_t<hb_ot_name_record_ids_t, hb_bytes_t> *name_table_overrides
#endif
) const
{
TRACE_SERIALIZE (this);
HB_UNUSED auto snap = c->snapshot ();
auto *out = c->embed (this);
if (unlikely (!out)) return_trace (nullptr);
out->offset.serialize_copy (c, offset, base, 0, hb_serialize_context_t::Tail, length);
#ifdef HB_EXPERIMENTAL_API
hb_ot_name_record_ids_t record_ids (platformID, encodingID, languageID, nameID);
hb_bytes_t* name_bytes;
if (name_table_overrides->has (record_ids, &name_bytes)) {
hb_bytes_t encoded_bytes = *name_bytes;
char *name_str_utf16_be = nullptr;
if (platformID != 1)
{
unsigned text_size = hb_ot_name_convert_utf<hb_utf8_t, hb_utf16_be_t> (*name_bytes, nullptr, nullptr);
text_size++; // needs to consider NULL terminator for use in hb_ot_name_convert_utf()
unsigned byte_len = text_size * hb_utf16_be_t::codepoint_t::static_size;
name_str_utf16_be = (char *) hb_calloc (byte_len, 1);
if (!name_str_utf16_be)
{
c->revert (snap);
return_trace (nullptr);
}
hb_ot_name_convert_utf<hb_utf8_t, hb_utf16_be_t> (*name_bytes, &text_size,
(hb_utf16_be_t::codepoint_t *) name_str_utf16_be);
unsigned encoded_byte_len = text_size * hb_utf16_be_t::codepoint_t::static_size;
if (!encoded_byte_len || !c->check_assign (out->length, encoded_byte_len, HB_SERIALIZE_ERROR_INT_OVERFLOW)) {
c->revert (snap);
hb_free (name_str_utf16_be);
return_trace (nullptr);
}
encoded_bytes = hb_bytes_t (name_str_utf16_be, encoded_byte_len);
}
else
{
// mac platform, copy the UTF-8 string(all ascii characters) as is
if (!c->check_assign (out->length, encoded_bytes.length, HB_SERIALIZE_ERROR_INT_OVERFLOW)) {
c->revert (snap);
return_trace (nullptr);
}
}
out->offset = 0;
c->push ();
encoded_bytes.copy (c);
c->add_link (out->offset, c->pop_pack (), hb_serialize_context_t::Tail, 0);
hb_free (name_str_utf16_be);
}
else
#endif
{
out->offset.serialize_copy (c, offset, base, 0, hb_serialize_context_t::Tail, length);
}
return_trace (out);
}
@ -216,29 +317,61 @@ struct name
hb_requires (hb_is_source_of (Iterator, const NameRecord &))>
bool serialize (hb_serialize_context_t *c,
Iterator it,
const void *src_string_pool)
const void *src_string_pool
#ifdef HB_EXPERIMENTAL_API
, const hb_vector_t<hb_ot_name_record_ids_t>& insert_name_records
, const hb_hashmap_t<hb_ot_name_record_ids_t, hb_bytes_t> *name_table_overrides
#endif
)
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min ((*this)))) return_trace (false);
unsigned total_count = it.len ()
#ifdef HB_EXPERIMENTAL_API
+ insert_name_records.length
#endif
;
this->format = 0;
this->count = it.len ();
if (!c->check_assign (this->count, total_count, HB_SERIALIZE_ERROR_INT_OVERFLOW))
return false;
NameRecord *name_records = (NameRecord *) hb_calloc (it.len (), NameRecord::static_size);
NameRecord *name_records = (NameRecord *) hb_calloc (total_count, NameRecord::static_size);
if (unlikely (!name_records)) return_trace (false);
hb_array_t<NameRecord> records (name_records, it.len ());
hb_array_t<NameRecord> records (name_records, total_count);
for (const NameRecord& record : it)
{
memcpy (name_records, &record, NameRecord::static_size);
hb_memcpy (name_records, &record, NameRecord::static_size);
name_records++;
}
#ifdef HB_EXPERIMENTAL_API
for (unsigned i = 0; i < insert_name_records.length; i++)
{
const hb_ot_name_record_ids_t& ids = insert_name_records[i];
NameRecord record;
record.platformID = ids.platform_id;
record.encodingID = ids.encoding_id;
record.languageID = ids.language_id;
record.nameID = ids.name_id;
record.length = 0; // handled in NameRecord copy()
record.offset = 0;
memcpy (name_records, &record, NameRecord::static_size);
name_records++;
}
#endif
records.qsort ();
c->copy_all (records, src_string_pool);
c->copy_all (records,
src_string_pool
#ifdef HB_EXPERIMENTAL_API
, name_table_overrides
#endif
);
hb_free (records.arrayZ);
@ -256,6 +389,11 @@ struct name
name *name_prime = c->serializer->start_embed<name> ();
if (unlikely (!name_prime)) return_trace (false);
#ifdef HB_EXPERIMENTAL_API
const hb_hashmap_t<hb_ot_name_record_ids_t, hb_bytes_t> *name_table_overrides =
c->plan->name_table_overrides;
#endif
auto it =
+ nameRecordZ.as_array (count)
| hb_filter (c->plan->name_ids, &NameRecord::nameID)
@ -265,10 +403,48 @@ struct name
(c->plan->flags & HB_SUBSET_FLAGS_NAME_LEGACY)
|| namerecord.isUnicode ();
})
#ifdef HB_EXPERIMENTAL_API
| hb_filter ([&] (const NameRecord& namerecord) {
if (name_table_overrides->is_empty ())
return true;
hb_ot_name_record_ids_t rec_ids (namerecord.platformID,
namerecord.encodingID,
namerecord.languageID,
namerecord.nameID);
hb_bytes_t *p;
if (name_table_overrides->has (rec_ids, &p) &&
(*p).length == 0)
return false;
return true;
})
#endif
;
name_prime->serialize (c->serializer, it, std::addressof (this + stringOffset));
return_trace (name_prime->count);
#ifdef HB_EXPERIMENTAL_API
hb_vector_t<hb_ot_name_record_ids_t> insert_name_records;
if (!name_table_overrides->is_empty ())
{
if (unlikely (!insert_name_records.alloc (name_table_overrides->get_population ())))
return_trace (false);
for (const auto& record_ids : name_table_overrides->keys ())
{
if (name_table_overrides->get (record_ids).length == 0)
continue;
if (has_name_record_with_ids (record_ids))
continue;
insert_name_records.push (record_ids);
}
}
#endif
return (name_prime->serialize (c->serializer, it,
std::addressof (this + stringOffset)
#ifdef HB_EXPERIMENTAL_API
, insert_name_records
, name_table_overrides
#endif
));
}
bool sanitize_records (hb_sanitize_context_t *c) const
@ -378,6 +554,23 @@ struct name
hb_vector_t<hb_ot_name_entry_t> names;
};
private:
// sometimes NameRecords are not sorted in the font file, so use linear search
// here
bool has_name_record_with_ids (const hb_ot_name_record_ids_t& record_ids) const
{
for (const auto& record : nameRecordZ.as_array (count))
{
if (record.platformID == record_ids.platform_id &&
record.encodingID == record_ids.encoding_id &&
record.languageID == record_ids.language_id &&
record.nameID == record_ids.name_id)
return true;
}
return false;
}
public:
/* We only implement format 0 for now. */
HBUINT16 format; /* Format selector (=0/1). */
HBUINT16 count; /* Number of name records. */


@ -64,52 +64,6 @@ hb_ot_name_list_names (hb_face_t *face,
return (const hb_ot_name_entry_t *) name.names;
}
template <typename in_utf_t, typename out_utf_t>
static inline unsigned int
hb_ot_name_convert_utf (hb_bytes_t bytes,
unsigned int *text_size /* IN/OUT */,
typename out_utf_t::codepoint_t *text /* OUT */)
{
unsigned int src_len = bytes.length / sizeof (typename in_utf_t::codepoint_t);
const typename in_utf_t::codepoint_t *src = (const typename in_utf_t::codepoint_t *) bytes.arrayZ;
const typename in_utf_t::codepoint_t *src_end = src + src_len;
typename out_utf_t::codepoint_t *dst = text;
hb_codepoint_t unicode;
const hb_codepoint_t replacement = HB_BUFFER_REPLACEMENT_CODEPOINT_DEFAULT;
if (text_size && *text_size)
{
(*text_size)--; /* Same room for NUL-termination. */
const typename out_utf_t::codepoint_t *dst_end = text + *text_size;
while (src < src_end && dst < dst_end)
{
const typename in_utf_t::codepoint_t *src_next = in_utf_t::next (src, src_end, &unicode, replacement);
typename out_utf_t::codepoint_t *dst_next = out_utf_t::encode (dst, dst_end, unicode);
if (dst_next == dst)
break; /* Out-of-room. */
dst = dst_next;
src = src_next;
}
*text_size = dst - text;
*dst = 0; /* NUL-terminate. */
}
/* Accumulate length of rest. */
unsigned int dst_len = dst - text;
while (src < src_end)
{
src = in_utf_t::next (src, src_end, &unicode, replacement);
dst_len += out_utf_t::encode_len (unicode);
}
return dst_len;
}
template <typename utf_t>
static inline unsigned int
hb_ot_name_get_utf (hb_face_t *face,
@ -130,10 +84,10 @@ hb_ot_name_get_utf (hb_face_t *face,
hb_bytes_t bytes = name.get_name (idx);
if (width == 2) /* UTF16-BE */
return hb_ot_name_convert_utf<hb_utf16_be_t, utf_t> (bytes, text_size, text);
return OT::hb_ot_name_convert_utf<hb_utf16_be_t, utf_t> (bytes, text_size, text);
if (width == 1) /* ASCII */
return hb_ot_name_convert_utf<hb_ascii_t, utf_t> (bytes, text_size, text);
return OT::hb_ot_name_convert_utf<hb_ascii_t, utf_t> (bytes, text_size, text);
}
if (text_size)
@ -227,5 +181,4 @@ hb_ot_name_get_utf32 (hb_face_t *face,
return hb_ot_name_get_utf<hb_utf32_t> (face, name_id, language, text_size, text);
}
#endif


@ -212,17 +212,6 @@ struct OS2
TRACE_SUBSET (this);
OS2 *os2_prime = c->serializer->embed (this);
if (unlikely (!os2_prime)) return_trace (false);
if (c->plan->flags & HB_SUBSET_FLAGS_NO_PRUNE_UNICODE_RANGES)
return_trace (true);
/* when --gids option is not used, no need to do collect_mapping that is
* iterating all codepoints in each subtable, which is not efficient */
uint16_t min_cp, max_cp;
find_min_and_max_codepoint (c->plan->unicodes, &min_cp, &max_cp);
os2_prime->usFirstCharIndex = min_cp;
os2_prime->usLastCharIndex = max_cp;
_update_unicode_ranges (c->plan->unicodes, os2_prime->ulUnicodeRange);
if (c->plan->user_axes_location->has (HB_TAG ('w','g','h','t')) &&
!c->plan->pinned_at_default)
@ -244,6 +233,18 @@ struct OS2
return_trace (false);
}
if (c->plan->flags & HB_SUBSET_FLAGS_NO_PRUNE_UNICODE_RANGES)
return_trace (true);
/* when --gids option is not used, no need to do collect_mapping that is
* iterating all codepoints in each subtable, which is not efficient */
uint16_t min_cp, max_cp;
find_min_and_max_codepoint (c->plan->unicodes, &min_cp, &max_cp);
os2_prime->usFirstCharIndex = min_cp;
os2_prime->usLastCharIndex = max_cp;
_update_unicode_ranges (c->plan->unicodes, os2_prime->ulUnicodeRange);
return_trace (true);
}


@ -78,14 +78,14 @@ HB_INTERNAL bool postV2Tail::subset (hb_subset_context_t *c) const
post::accelerator_t _post (c->plan->source);
hb_hashmap_t<hb_bytes_t, unsigned, true> glyph_name_to_new_index;
hb_hashmap_t<hb_bytes_t, uint32_t, true> glyph_name_to_new_index;
for (hb_codepoint_t new_gid = 0; new_gid < num_glyphs; new_gid++)
{
hb_codepoint_t old_gid = reverse_glyph_map.get (new_gid);
unsigned old_index = glyphNameIndex[old_gid];
unsigned new_index;
const unsigned *new_index2;
const uint32_t *new_index2;
if (old_index <= 257) new_index = old_index;
else if (old_new_index_map.has (old_index, &new_index2))
{


@ -84,7 +84,7 @@ struct post
post *post_prime = c->allocate_min<post> ();
if (unlikely (!post_prime)) return_trace (false);
memcpy (post_prime, this, post::min_size);
hb_memcpy (post_prime, this, post::min_size);
if (!glyph_names)
return_trace (c->check_assign (post_prime->version.major, 3,
HB_SERIALIZE_ERROR_INT_OVERFLOW)); // Version 3 does not have any glyph names.


@ -341,7 +341,7 @@ _hb_ot_shape_normalize (const hb_ot_shape_plan_t *plan,
{
unsigned int end;
for (end = buffer->idx + 1; end < count; end++)
if (unlikely (_hb_glyph_info_is_unicode_mark (&buffer->info[end])))
if (_hb_glyph_info_is_unicode_mark (&buffer->info[end]))
break;
if (end < count)


@ -91,9 +91,11 @@ hb_ot_shape_planner_t::hb_ot_shape_planner_t (hb_face_t *fac
script_zero_marks = shaper->zero_width_marks != HB_OT_SHAPE_ZERO_WIDTH_MARKS_NONE;
script_fallback_mark_positioning = shaper->fallback_position;
#ifndef HB_NO_AAT_SHAPE
/* https://github.com/harfbuzz/harfbuzz/issues/1528 */
if (apply_morx && shaper != &_hb_ot_shaper_default)
shaper = &_hb_ot_shaper_dumber;
#endif
}
void
@ -864,7 +866,7 @@ hb_ot_hide_default_ignorables (hb_buffer_t *buffer,
}
}
else
hb_ot_layout_delete_glyphs_inplace (buffer, _hb_glyph_info_is_default_ignorable);
buffer->delete_glyphs_inplace (_hb_glyph_info_is_default_ignorable);
}

Some files were not shown because too many files have changed in this diff.